/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"
#include "df.h"
#include "diagnostic.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
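
/* Illustrative sketch (not part of the original file): on a target where
   the stack grows downward, STACK_PUSH_CODE is PRE_DEC, so a push of a
   word-sized value is represented roughly as the RTL

     (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI src))

   i.e. the stack pointer is decremented before the store.  On an
   upward-growing stack the same push uses PRE_INC instead.  */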
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};

static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
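
/* Illustrative sketch (not part of the original file): with the default
   definitions above, a copy is expanded inline piecewise only when the
   estimated insn count beats the target's ratio.  For example, assuming
   a hypothetical target where MOVE_MAX_PIECES is 8 and MOVE_RATIO is 5,
   a 16-byte copy with 8-byte alignment needs two DImode moves, so
   MOVE_BY_PIECES_P (16, 64) is true and move_by_pieces is used instead
   of a movmem pattern or a call to memcpy.  */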
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    SET_REGNO (reg, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
	      ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
	 we won't saturate the result.
	 Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
	expand_fixed_convert (to, from, 0, 0);
      else
	expand_fixed_convert (to, from, 0, 1);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
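
/* Illustrative sketch (not part of the original file): a hypothetical
   caller that zero-extends an SImode pseudo into a DImode pseudo.
   convert_move picks a direct extend insn, a word-by-word expansion,
   or a pair of shifts, depending on what the target supports.  */
#if 0
static void
example_zero_extend (void)
{
  rtx src = gen_reg_rtx (SImode);
  rtx dst = gen_reg_rtx (DImode);
  /* Nonzero UNSIGNEDP requests zero-extension rather than
     sign-extension.  */
  convert_move (dst, src, 1);
}
#endif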
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting an integer constant into MODE is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
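
/* Illustrative sketch (not part of the original file): narrowing a wider
   pseudo to QImode.  OLDMODE is taken from the operand itself when it
   has a nonvoid mode, so VOIDmode is a safe argument here; the result
   comes back via gen_lowpart when the target allows it, otherwise
   through a fresh temporary and convert_move.  */
#if 0
static rtx
example_narrow_to_qi (rtx wide_reg /* assumed SImode */)
{
  return convert_modes (QImode, VOIDmode, wide_reg, 0);
}
#endif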
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
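
/* Illustrative sketch (not part of the original file): assuming DST and
   SRC are BLKmode MEMs for 16-byte buffers, this copies the block and,
   because ENDP is 1, returns a QImode MEM just past the last byte
   written, matching mempcpy's return-value convention.  */
#if 0
static rtx
example_move_by_pieces (rtx dst, rtx src)
{
  return move_by_pieces (dst, src, 16, MEM_ALIGN (dst), 1);
}
#endif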
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
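
/* Illustrative sketch (not part of the original file): on a hypothetical
   target whose widest piece is 8 bytes, a fully aligned 15-byte copy is
   counted as 15/8 + 7/4 + 3/2 + 1/1 = 1 + 1 + 1 + 1 = 4 insns: one
   DImode, one SImode, one HImode and one QImode move, the remainder
   being consumed at each step.  */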
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
				       expected_align, expected_size))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
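
/* Illustrative sketch (not part of the original file): the typical entry
   point for expanding a struct assignment, assuming X and Y are BLKmode
   MEMs and the length is a compile-time constant.  The hints variant
   above is the same call with no extra alignment/size hints (0, -1).  */
#if 0
static void
example_block_copy (rtx x, rtx y, HOST_WIDE_INT nbytes)
{
  emit_block_move (x, y, GEN_INT (nbytes), BLOCK_OP_NORMAL);
}
#endif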
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  if (OUTGOING_REG_PARM_STACK_SPACE)
    {
      tree fn;
      fn = emit_block_move_libcall_fn (false);
      if (REG_PARM_STACK_SPACE (fn) != 0)
	return false;
    }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
			    unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  if (insn_data[(int) code].n_operands == 4)
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  else
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign,
					GEN_INT (expected_align),
					GEN_INT (expected_size));
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
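
/* Illustrative sketch (not part of the original file): the RTL emitted
   above corresponds roughly to this C loop, with the test at the bottom
   so that a SIZE of zero copies nothing:

       iter = 0;
       goto cmp;
     top:
       x[iter] = y[iter];   (one QImode move per iteration)
       iter++;
     cmp:
       if (iter < size)
	 goto top;
*/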
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
	{
	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

	  if (len == ssize)
	    tmps[i] = src;
	  else
	    {
	      rtx first, second;

	      gcc_assert (2 * len == ssize);
	      split_double (src, &first, &second);
	      if (i)
		tmps[i] = second;
	      else
		tmps[i] = first;
	    }
	}
      else if (REG_P (src) && GET_MODE (src) == mode)
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}
1835 /* Similar, but load SRC into new pseudos in a format that looks like
1836 PARALLEL. This can later be fed to emit_group_move to get things
1837 in the right place. */
1840 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1842 rtvec vec;
1843 int i;
1845 vec = rtvec_alloc (XVECLEN (parallel, 0));
1846 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1848 /* Convert the vector to look just like the original PARALLEL, except
1849 with the computed values. */
1850 for (i = 0; i < XVECLEN (parallel, 0); i++)
1852 rtx e = XVECEXP (parallel, 0, i);
1853 rtx d = XEXP (e, 0);
1855 if (d)
1857 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1858 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1860 RTVEC_ELT (vec, i) = e;
1863 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
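/* For example (a sketch), a caller that must delay touching the hard
   registers can split the work in two:

     rtx tmp = emit_group_load_into_temps (dst, src, type, ssize);
     ... other code that might clobber the hard regs ...
     emit_group_move (dst, tmp);  */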
1866 /* Emit code to move a block SRC to block DST, where SRC and DST are
1867 non-consecutive groups of registers, each represented by a PARALLEL. */
1869 void
1870 emit_group_move (rtx dst, rtx src)
1872 int i;
1874 gcc_assert (GET_CODE (src) == PARALLEL
1875 && GET_CODE (dst) == PARALLEL
1876 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1878 /* Skip first entry if NULL. */
1879 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1880 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1881 XEXP (XVECEXP (src, 0, i), 0));
1884 /* Move a group of registers represented by a PARALLEL into pseudos. */
1886 rtx
1887 emit_group_move_into_temps (rtx src)
1889 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1890 int i;
1892 for (i = 0; i < XVECLEN (src, 0); i++)
1894 rtx e = XVECEXP (src, 0, i);
1895 rtx d = XEXP (e, 0);
1897 if (d)
1898 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1899 RTVEC_ELT (vec, i) = e;
1902 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1905 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1906 where SRC is non-consecutive registers represented by a PARALLEL.
1907 SSIZE represents the total size of block ORIG_DST, or -1 if not
1908 known. */
1910 void
1911 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1913 rtx *tmps, dst;
1914 int start, finish, i;
1915 enum machine_mode m = GET_MODE (orig_dst);
1917 gcc_assert (GET_CODE (src) == PARALLEL);
1919 if (!SCALAR_INT_MODE_P (m)
1920 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1922 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1923 if (imode == BLKmode)
1924 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1925 else
1926 dst = gen_reg_rtx (imode);
1927 emit_group_store (dst, src, type, ssize);
1928 if (imode != BLKmode)
1929 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1930 emit_move_insn (orig_dst, dst);
1931 return;
1934 /* Check for a NULL entry, used to indicate that the parameter goes
1935 both on the stack and in registers. */
1936 if (XEXP (XVECEXP (src, 0, 0), 0))
1937 start = 0;
1938 else
1939 start = 1;
1940 finish = XVECLEN (src, 0);
1942 tmps = alloca (sizeof (rtx) * finish);
1944 /* Copy the (probable) hard regs into pseudos. */
1945 for (i = start; i < finish; i++)
1947 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1948 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1950 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1951 emit_move_insn (tmps[i], reg);
1953 else
1954 tmps[i] = reg;
1957 /* If we won't be storing directly into memory, protect the real destination
1958 from strange tricks we might play. */
1959 dst = orig_dst;
1960 if (GET_CODE (dst) == PARALLEL)
1962 rtx temp;
1964 /* We can get a PARALLEL dst if there is a conditional expression in
1965 a return statement. In that case, the dst and src are the same,
1966 so no action is necessary. */
1967 if (rtx_equal_p (dst, src))
1968 return;
1970 /* It is unclear if we can ever reach here, but we may as well handle
1971 it. Allocate a temporary, and split this into a store/load to/from
1972 the temporary. */
1974 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1975 emit_group_store (temp, src, type, ssize);
1976 emit_group_load (dst, temp, type, ssize);
1977 return;
1979 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1981 enum machine_mode outer = GET_MODE (dst);
1982 enum machine_mode inner;
1983 HOST_WIDE_INT bytepos;
1984 bool done = false;
1985 rtx temp;
1987 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1988 dst = gen_reg_rtx (outer);
1990 /* Make life a bit easier for combine. */
1991 /* If the first element of the vector is the low part
1992 of the destination mode, use a paradoxical subreg to
1993 initialize the destination. */
1994 if (start < finish)
1996 inner = GET_MODE (tmps[start]);
1997 bytepos = subreg_lowpart_offset (inner, outer);
1998 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
2000 temp = simplify_gen_subreg (outer, tmps[start],
2001 inner, 0);
2002 if (temp)
2004 emit_move_insn (dst, temp);
2005 done = true;
2006 start++;
2011 /* If the first element wasn't the low part, try the last. */
2012 if (!done
2013 && start < finish - 1)
2015 inner = GET_MODE (tmps[finish - 1]);
2016 bytepos = subreg_lowpart_offset (inner, outer);
2017 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2019 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2020 inner, 0);
2021 if (temp)
2023 emit_move_insn (dst, temp);
2024 done = true;
2025 finish--;
2030 /* Otherwise, simply initialize the result to zero. */
2031 if (!done)
2032 emit_move_insn (dst, CONST0_RTX (outer));
2035 /* Process the pieces. */
2036 for (i = start; i < finish; i++)
2038 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2039 enum machine_mode mode = GET_MODE (tmps[i]);
2040 unsigned int bytelen = GET_MODE_SIZE (mode);
2041 rtx dest = dst;
2043 /* Handle trailing fragments that run over the size of the struct. */
2044 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2046 /* store_bit_field always takes its value from the lsb.
2047 Move the fragment to the lsb if it's not already there. */
2048 if (
2049 #ifdef BLOCK_REG_PADDING
2050 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2051 == (BYTES_BIG_ENDIAN ? upward : downward)
2052 #else
2053 BYTES_BIG_ENDIAN
2054 #endif
2057 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2058 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2059 build_int_cst (NULL_TREE, shift),
2060 tmps[i], 0);
2062 bytelen = ssize - bytepos;
2065 if (GET_CODE (dst) == CONCAT)
2067 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2068 dest = XEXP (dst, 0);
2069 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2071 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2072 dest = XEXP (dst, 1);
2074 else
2076 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2077 dest = assign_stack_temp (GET_MODE (dest),
2078 GET_MODE_SIZE (GET_MODE (dest)), 0);
2079 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2080 tmps[i]);
2081 dst = dest;
2082 break;
2086 /* Optimize the access just a bit. */
2087 if (MEM_P (dest)
2088 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2089 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2090 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2091 && bytelen == GET_MODE_SIZE (mode))
2092 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2093 else
2094 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2095 mode, tmps[i]);
2098 /* Copy from the pseudo into the (probable) hard reg. */
2099 if (orig_dst != dst)
2100 emit_move_insn (orig_dst, dst);
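/* A usage sketch for the above (hypothetical variables): given the
   PARALLEL built for a multi-register value,

     emit_group_store (target_mem, parallel_src, type, ssize);

   scatters each register's piece back into TARGET_MEM at its
   recorded byte offset.  */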
2103 /* Generate code to copy a BLKmode object of TYPE out of a
2104 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2105 is null, a stack temporary is created. TGTBLK is returned.
2107 The purpose of this routine is to handle functions that return
2108 BLKmode structures in registers. Some machines (the PA for example)
2109 want to return all small structures in registers regardless of the
2110 structure's alignment. */
2112 rtx
2113 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2115 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2116 rtx src = NULL, dst = NULL;
2117 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2118 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2119 enum machine_mode copy_mode;
2121 if (tgtblk == 0)
2123 tgtblk = assign_temp (build_qualified_type (type,
2124 (TYPE_QUALS (type)
2125 | TYPE_QUAL_CONST)),
2126 0, 1, 1);
2127 preserve_temp_slots (tgtblk);
2130 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2131 into a new pseudo which is a full word. */
2133 if (GET_MODE (srcreg) != BLKmode
2134 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2135 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2137 /* If the structure doesn't take up a whole number of words, see whether
2138 SRCREG is padded on the left or on the right. If it's on the left,
2139 set PADDING_CORRECTION to the number of bits to skip.
2141 In most ABIs, the structure will be returned at the least significant end of
2142 the register, which translates to right padding on little-endian
2143 targets and left padding on big-endian targets. The opposite
2144 holds if the structure is returned at the most significant
2145 end of the register. */
2146 if (bytes % UNITS_PER_WORD != 0
2147 && (targetm.calls.return_in_msb (type)
2148 ? !BYTES_BIG_ENDIAN
2149 : BYTES_BIG_ENDIAN))
2150 padding_correction
2151 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
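/* Worked example (assuming 32-bit words and left padding): for a
   6-byte structure, bytes % UNITS_PER_WORD == 2, so
   PADDING_CORRECTION == 32 - 2 * 8 == 16 and the copy below skips
   the first 16 bits of SRCREG.  */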
2153 /* Copy the structure BITSIZE bits at a time. If the target lives in
2154 memory, take care of not reading/writing past its end by selecting
2155 a copy mode suited to BITSIZE. This should always be possible given
2156 how it is computed.
2158 We could probably emit more efficient code for machines which do not use
2159 strict alignment, but it doesn't seem worth the effort at the current
2160 time. */
2162 copy_mode = word_mode;
2163 if (MEM_P (tgtblk))
2165 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2166 if (mem_mode != BLKmode)
2167 copy_mode = mem_mode;
2170 for (bitpos = 0, xbitpos = padding_correction;
2171 bitpos < bytes * BITS_PER_UNIT;
2172 bitpos += bitsize, xbitpos += bitsize)
2174 /* We need a new source operand each time xbitpos is on a
2175 word boundary and when xbitpos == padding_correction
2176 (the first time through). */
2177 if (xbitpos % BITS_PER_WORD == 0
2178 || xbitpos == padding_correction)
2179 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2180 GET_MODE (srcreg));
2182 /* We need a new destination operand each time bitpos is on
2183 a word boundary. */
2184 if (bitpos % BITS_PER_WORD == 0)
2185 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2187 /* Use xbitpos for the source extraction (right justified) and
2188 bitpos for the destination store (left justified). */
2189 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2190 extract_bit_field (src, bitsize,
2191 xbitpos % BITS_PER_WORD, 1,
2192 NULL_RTX, copy_mode, copy_mode));
2195 return tgtblk;
2198 /* Add a USE expression for REG to the (possibly empty) list pointed
2199 to by CALL_FUSAGE. REG must denote a hard register. */
2201 void
2202 use_reg (rtx *call_fusage, rtx reg)
2204 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2206 *call_fusage
2207 = gen_rtx_EXPR_LIST (VOIDmode,
2208 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2211 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2212 starting at REGNO. All of these registers must be hard registers. */
2214 void
2215 use_regs (rtx *call_fusage, int regno, int nregs)
2217 int i;
2219 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2221 for (i = 0; i < nregs; i++)
2222 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2225 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2226 PARALLEL REGS. This is for calls that pass values in multiple
2227 non-contiguous locations. The Irix 6 ABI has examples of this. */
2229 void
2230 use_group_regs (rtx *call_fusage, rtx regs)
2232 int i;
2234 for (i = 0; i < XVECLEN (regs, 0); i++)
2236 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2238 /* A NULL entry means the parameter goes both on the stack and in
2239 registers. This can also be a MEM for targets that pass values
2240 partially on the stack and partially in registers. */
2241 if (reg != 0 && REG_P (reg))
2242 use_reg (call_fusage, reg);
2247 /* Determine whether the LEN bytes generated by CONSTFUN can be
2248 stored to memory using several move instructions. CONSTFUNDATA is
2249 a pointer which will be passed as argument in every CONSTFUN call.
2250 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2251 a memset operation and false if it's a copy of a constant string.
2252 Return nonzero if a call to store_by_pieces should succeed. */
2254 int
2255 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2256 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2257 void *constfundata, unsigned int align, bool memsetp)
2259 unsigned HOST_WIDE_INT l;
2260 unsigned int max_size;
2261 HOST_WIDE_INT offset = 0;
2262 enum machine_mode mode, tmode;
2263 enum insn_code icode;
2264 int reverse;
2265 rtx cst;
2267 if (len == 0)
2268 return 1;
2270 if (! (memsetp
2271 ? SET_BY_PIECES_P (len, align)
2272 : STORE_BY_PIECES_P (len, align)))
2273 return 0;
2275 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2276 if (align >= GET_MODE_ALIGNMENT (tmode))
2277 align = GET_MODE_ALIGNMENT (tmode);
2278 else
2280 enum machine_mode xmode;
2282 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2283 tmode != VOIDmode;
2284 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2285 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2286 || SLOW_UNALIGNED_ACCESS (tmode, align))
2287 break;
2289 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2292 /* We would first store what we can in the largest integer mode, then go to
2293 successively smaller modes. */
2295 for (reverse = 0;
2296 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2297 reverse++)
2299 l = len;
2300 mode = VOIDmode;
2301 max_size = STORE_MAX_PIECES + 1;
2302 while (max_size > 1)
2304 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2305 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2306 if (GET_MODE_SIZE (tmode) < max_size)
2307 mode = tmode;
2309 if (mode == VOIDmode)
2310 break;
2312 icode = optab_handler (mov_optab, mode)->insn_code;
2313 if (icode != CODE_FOR_nothing
2314 && align >= GET_MODE_ALIGNMENT (mode))
2316 unsigned int size = GET_MODE_SIZE (mode);
2318 while (l >= size)
2320 if (reverse)
2321 offset -= size;
2323 cst = (*constfun) (constfundata, offset, mode);
2324 if (!LEGITIMATE_CONSTANT_P (cst))
2325 return 0;
2327 if (!reverse)
2328 offset += size;
2330 l -= size;
2334 max_size = GET_MODE_SIZE (mode);
2337 /* The code above should have handled everything. */
2338 gcc_assert (!l);
2341 return 1;
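/* A typical caller checks this predicate before committing to
   store_by_pieces with the same CONSTFUN/CONSTFUNDATA pair, along
   the lines of (a sketch modeled on the memset expander; GEN_CST and
   DATA stand for the caller's constant generator and its closure):

     if (can_store_by_pieces (len, gen_cst, &data, align, true))
       store_by_pieces (to, len, gen_cst, &data, align, true, 0);  */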
2344 /* Generate several move instructions to store LEN bytes generated by
2345 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2346 pointer which will be passed as argument in every CONSTFUN call.
2347 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2348 a memset operation and false if it's a copy of a constant string.
2349 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2350 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2351 stpcpy.  */
2353 rtx
2354 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2355 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2356 void *constfundata, unsigned int align, bool memsetp, int endp)
2358 struct store_by_pieces data;
2360 if (len == 0)
2362 gcc_assert (endp != 2);
2363 return to;
2366 gcc_assert (memsetp
2367 ? SET_BY_PIECES_P (len, align)
2368 : STORE_BY_PIECES_P (len, align));
2369 data.constfun = constfun;
2370 data.constfundata = constfundata;
2371 data.len = len;
2372 data.to = to;
2373 store_by_pieces_1 (&data, align);
2374 if (endp)
2376 rtx to1;
2378 gcc_assert (!data.reverse);
2379 if (data.autinc_to)
2381 if (endp == 2)
2383 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2384 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2385 else
2386 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2387 -1));
2389 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2390 data.offset);
2392 else
2394 if (endp == 2)
2395 --data.offset;
2396 to1 = adjust_address (data.to, QImode, data.offset);
2398 return to1;
2400 else
2401 return data.to;
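/* As an example of ENDP: an stpcpy-style expander passes ENDP == 2
   so that the returned MEM addresses the final byte stored (the
   terminating NUL), matching stpcpy's return value.  */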
2404 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2405 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2407 static void
2408 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2410 struct store_by_pieces data;
2412 if (len == 0)
2413 return;
2415 data.constfun = clear_by_pieces_1;
2416 data.constfundata = NULL;
2417 data.len = len;
2418 data.to = to;
2419 store_by_pieces_1 (&data, align);
2422 /* Callback routine for clear_by_pieces.
2423 Return const0_rtx unconditionally. */
2425 static rtx
2426 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2427 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2428 enum machine_mode mode ATTRIBUTE_UNUSED)
2430 return const0_rtx;
2433 /* Subroutine of clear_by_pieces and store_by_pieces.
2434 Generate several move instructions to store LEN bytes of block TO. (A MEM
2435 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2437 static void
2438 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2439 unsigned int align ATTRIBUTE_UNUSED)
2441 rtx to_addr = XEXP (data->to, 0);
2442 unsigned int max_size = STORE_MAX_PIECES + 1;
2443 enum machine_mode mode = VOIDmode, tmode;
2444 enum insn_code icode;
2446 data->offset = 0;
2447 data->to_addr = to_addr;
2448 data->autinc_to
2449 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2450 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2452 data->explicit_inc_to = 0;
2453 data->reverse
2454 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2455 if (data->reverse)
2456 data->offset = data->len;
2458 /* If storing requires more than two move insns,
2459 copy addresses to registers (to make displacements shorter)
2460 and use post-increment if available. */
2461 if (!data->autinc_to
2462 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2464 /* Determine the main mode we'll be using. */
2465 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2466 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2467 if (GET_MODE_SIZE (tmode) < max_size)
2468 mode = tmode;
2470 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2472 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2473 data->autinc_to = 1;
2474 data->explicit_inc_to = -1;
2477 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2478 && ! data->autinc_to)
2480 data->to_addr = copy_addr_to_reg (to_addr);
2481 data->autinc_to = 1;
2482 data->explicit_inc_to = 1;
2485 if ( !data->autinc_to && CONSTANT_P (to_addr))
2486 data->to_addr = copy_addr_to_reg (to_addr);
2489 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2490 if (align >= GET_MODE_ALIGNMENT (tmode))
2491 align = GET_MODE_ALIGNMENT (tmode);
2492 else
2494 enum machine_mode xmode;
2496 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2497 tmode != VOIDmode;
2498 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2499 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2500 || SLOW_UNALIGNED_ACCESS (tmode, align))
2501 break;
2503 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2506 /* First store what we can in the largest integer mode, then go to
2507 successively smaller modes. */
2509 while (max_size > 1)
2511 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2512 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2513 if (GET_MODE_SIZE (tmode) < max_size)
2514 mode = tmode;
2516 if (mode == VOIDmode)
2517 break;
2519 icode = optab_handler (mov_optab, mode)->insn_code;
2520 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2521 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2523 max_size = GET_MODE_SIZE (mode);
2526 /* The code above should have handled everything. */
2527 gcc_assert (!data->len);
2530 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2531 with move instructions for mode MODE. GENFUN is the gen_... function
2532 to make a move insn for that mode. DATA has all the other info. */
2534 static void
2535 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2536 struct store_by_pieces *data)
2538 unsigned int size = GET_MODE_SIZE (mode);
2539 rtx to1, cst;
2541 while (data->len >= size)
2543 if (data->reverse)
2544 data->offset -= size;
2546 if (data->autinc_to)
2547 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2548 data->offset);
2549 else
2550 to1 = adjust_address (data->to, mode, data->offset);
2552 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2553 emit_insn (gen_add2_insn (data->to_addr,
2554 GEN_INT (-(HOST_WIDE_INT) size)));
2556 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2557 emit_insn ((*genfun) (to1, cst));
2559 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2560 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2562 if (! data->reverse)
2563 data->offset += size;
2565 data->len -= size;
2569 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2570 its length in bytes. */
2572 rtx
2573 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2574 unsigned int expected_align, HOST_WIDE_INT expected_size)
2576 enum machine_mode mode = GET_MODE (object);
2577 unsigned int align;
2579 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2581 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2582 just move a zero. Otherwise, do this a piece at a time. */
2583 if (mode != BLKmode
2584 && GET_CODE (size) == CONST_INT
2585 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2587 rtx zero = CONST0_RTX (mode);
2588 if (zero != NULL)
2590 emit_move_insn (object, zero);
2591 return NULL;
2594 if (COMPLEX_MODE_P (mode))
2596 zero = CONST0_RTX (GET_MODE_INNER (mode));
2597 if (zero != NULL)
2599 write_complex_part (object, zero, 0);
2600 write_complex_part (object, zero, 1);
2601 return NULL;
2606 if (size == const0_rtx)
2607 return NULL;
2609 align = MEM_ALIGN (object);
2611 if (GET_CODE (size) == CONST_INT
2612 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2613 clear_by_pieces (object, INTVAL (size), align);
2614 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2615 expected_align, expected_size))
2617 else
2618 return set_storage_via_libcall (object, size, const0_rtx,
2619 method == BLOCK_OP_TAILCALL);
2621 return NULL;
2624 rtx
2625 clear_storage (rtx object, rtx size, enum block_op_methods method)
2627 return clear_storage_hints (object, size, method, 0, -1);
2631 /* A subroutine of clear_storage. Expand a call to memset.
2632 Return the return value of memset, 0 otherwise. */
2634 rtx
2635 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2637 tree call_expr, fn, object_tree, size_tree, val_tree;
2638 enum machine_mode size_mode;
2639 rtx retval;
2641 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2642 place those pseudos into a VAR_DECL and use them later.  */
2644 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2646 size_mode = TYPE_MODE (sizetype);
2647 size = convert_to_mode (size_mode, size, 1);
2648 size = copy_to_mode_reg (size_mode, size);
2650 /* It is incorrect to use the libcall calling conventions to call
2651 memset in this context. This could be a user call to memset and
2652 the user may wish to examine the return value from memset. For
2653 targets where libcalls and normal calls have different conventions
2654 for returning pointers, we could end up generating incorrect code. */
2656 object_tree = make_tree (ptr_type_node, object);
2657 if (GET_CODE (val) != CONST_INT)
2658 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2659 size_tree = make_tree (sizetype, size);
2660 val_tree = make_tree (integer_type_node, val);
2662 fn = clear_storage_libcall_fn (true);
2663 call_expr = build_call_expr (fn, 3,
2664 object_tree, val_tree, size_tree);
2665 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2667 retval = expand_normal (call_expr);
2669 return retval;
2672 /* A subroutine of set_storage_via_libcall. Create the tree node
2673 for the function we use for block clears. The first time FOR_CALL
2674 is true, we call assemble_external. */
2676 static GTY(()) tree block_clear_fn;
2678 void
2679 init_block_clear_fn (const char *asmspec)
2681 if (!block_clear_fn)
2683 tree fn, args;
2685 fn = get_identifier ("memset");
2686 args = build_function_type_list (ptr_type_node, ptr_type_node,
2687 integer_type_node, sizetype,
2688 NULL_TREE);
2690 fn = build_decl (FUNCTION_DECL, fn, args);
2691 DECL_EXTERNAL (fn) = 1;
2692 TREE_PUBLIC (fn) = 1;
2693 DECL_ARTIFICIAL (fn) = 1;
2694 TREE_NOTHROW (fn) = 1;
2695 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2696 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2698 block_clear_fn = fn;
2701 if (asmspec)
2702 set_user_assembler_name (block_clear_fn, asmspec);
2705 static tree
2706 clear_storage_libcall_fn (int for_call)
2708 static bool emitted_extern;
2710 if (!block_clear_fn)
2711 init_block_clear_fn (NULL);
2713 if (for_call && !emitted_extern)
2715 emitted_extern = true;
2716 make_decl_rtl (block_clear_fn);
2717 assemble_external (block_clear_fn);
2720 return block_clear_fn;
2723 /* Expand a setmem pattern; return true if successful. */
2725 bool
2726 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2727 unsigned int expected_align, HOST_WIDE_INT expected_size)
2729 /* Try the most limited insn first, because there's no point
2730 including more than one in the machine description unless
2731 the more limited one has some advantage. */
2733 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2734 enum machine_mode mode;
2736 if (expected_align < align)
2737 expected_align = align;
2739 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2740 mode = GET_MODE_WIDER_MODE (mode))
2742 enum insn_code code = setmem_optab[(int) mode];
2743 insn_operand_predicate_fn pred;
2745 if (code != CODE_FOR_nothing
2746 /* We don't need MODE to be narrower than
2747 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2748 the mode mask, as it is returned by the macro, it will
2749 definitely be less than the actual mode mask. */
2750 && ((GET_CODE (size) == CONST_INT
2751 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2752 <= (GET_MODE_MASK (mode) >> 1)))
2753 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2754 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2755 || (*pred) (object, BLKmode))
2756 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2757 || (*pred) (opalign, VOIDmode)))
2759 rtx opsize, opchar;
2760 enum machine_mode char_mode;
2761 rtx last = get_last_insn ();
2762 rtx pat;
2764 opsize = convert_to_mode (mode, size, 1);
2765 pred = insn_data[(int) code].operand[1].predicate;
2766 if (pred != 0 && ! (*pred) (opsize, mode))
2767 opsize = copy_to_mode_reg (mode, opsize);
2769 opchar = val;
2770 char_mode = insn_data[(int) code].operand[2].mode;
2771 if (char_mode != VOIDmode)
2773 opchar = convert_to_mode (char_mode, opchar, 1);
2774 pred = insn_data[(int) code].operand[2].predicate;
2775 if (pred != 0 && ! (*pred) (opchar, char_mode))
2776 opchar = copy_to_mode_reg (char_mode, opchar);
2779 if (insn_data[(int) code].n_operands == 4)
2780 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2781 else
2782 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2783 GEN_INT (expected_align),
2784 GEN_INT (expected_size));
2785 if (pat)
2787 emit_insn (pat);
2788 return true;
2790 else
2791 delete_insns_since (last);
2795 return false;
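/* As used above, a target's setmem pattern receives the destination
   MEM in operand 0, the length in operand 1, the fill value in
   operand 2 and the known alignment in operand 3; patterns declared
   with six operands additionally receive the expected-alignment and
   expected-size hints.  */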
2799 /* Write to one of the components of the complex value CPLX. Write VAL to
2800 the real part if IMAG_P is false, and the imaginary part if it's true.  */
2802 static void
2803 write_complex_part (rtx cplx, rtx val, bool imag_p)
2805 enum machine_mode cmode;
2806 enum machine_mode imode;
2807 unsigned ibitsize;
2809 if (GET_CODE (cplx) == CONCAT)
2811 emit_move_insn (XEXP (cplx, imag_p), val);
2812 return;
2815 cmode = GET_MODE (cplx);
2816 imode = GET_MODE_INNER (cmode);
2817 ibitsize = GET_MODE_BITSIZE (imode);
2819 /* For MEMs simplify_gen_subreg may generate an invalid new address
2820 because, e.g., the original address is considered mode-dependent
2821 by the target, which restricts simplify_subreg from invoking
2822 adjust_address_nv. Instead of preparing fallback support for an
2823 invalid address, we call adjust_address_nv directly. */
2824 if (MEM_P (cplx))
2826 emit_move_insn (adjust_address_nv (cplx, imode,
2827 imag_p ? GET_MODE_SIZE (imode) : 0),
2828 val);
2829 return;
2832 /* If the sub-object is at least word sized, then we know that subregging
2833 will work. This special case is important, since store_bit_field
2834 wants to operate on integer modes, and there's rarely an OImode to
2835 correspond to TCmode. */
2836 if (ibitsize >= BITS_PER_WORD
2837 /* For hard regs we have exact predicates. Assume we can split
2838 the original object if it spans an even number of hard regs.
2839 This special case is important for SCmode on 64-bit platforms
2840 where the natural size of floating-point regs is 32-bit. */
2841 || (REG_P (cplx)
2842 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2843 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2845 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2846 imag_p ? GET_MODE_SIZE (imode) : 0);
2847 if (part)
2849 emit_move_insn (part, val);
2850 return;
2852 else
2853 /* simplify_gen_subreg may fail for sub-word MEMs. */
2854 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2857 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2860 /* Extract one of the components of the complex value CPLX. Extract the
2861 real part if IMAG_P is false, and the imaginary part if it's true. */
2863 static rtx
2864 read_complex_part (rtx cplx, bool imag_p)
2866 enum machine_mode cmode, imode;
2867 unsigned ibitsize;
2869 if (GET_CODE (cplx) == CONCAT)
2870 return XEXP (cplx, imag_p);
2872 cmode = GET_MODE (cplx);
2873 imode = GET_MODE_INNER (cmode);
2874 ibitsize = GET_MODE_BITSIZE (imode);
2876 /* Special case reads from complex constants that got spilled to memory. */
2877 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2879 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2880 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2882 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2883 if (CONSTANT_CLASS_P (part))
2884 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2888 /* For MEMs simplify_gen_subreg may generate an invalid new address
2889 because, e.g., the original address is considered mode-dependent
2890 by the target, which restricts simplify_subreg from invoking
2891 adjust_address_nv. Instead of preparing fallback support for an
2892 invalid address, we call adjust_address_nv directly. */
2893 if (MEM_P (cplx))
2894 return adjust_address_nv (cplx, imode,
2895 imag_p ? GET_MODE_SIZE (imode) : 0);
2897 /* If the sub-object is at least word sized, then we know that subregging
2898 will work. This special case is important, since extract_bit_field
2899 wants to operate on integer modes, and there's rarely an OImode to
2900 correspond to TCmode. */
2901 if (ibitsize >= BITS_PER_WORD
2902 /* For hard regs we have exact predicates. Assume we can split
2903 the original object if it spans an even number of hard regs.
2904 This special case is important for SCmode on 64-bit platforms
2905 where the natural size of floating-point regs is 32-bit. */
2906 || (REG_P (cplx)
2907 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2908 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2910 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2911 imag_p ? GET_MODE_SIZE (imode) : 0);
2912 if (ret)
2913 return ret;
2914 else
2915 /* simplify_gen_subreg may fail for sub-word MEMs. */
2916 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2919 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2920 true, NULL_RTX, imode, imode);
2923 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2924 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2925 represented in NEW_MODE. If FORCE is true, this will never happen, as
2926 we'll force-create a SUBREG if needed. */
2928 static rtx
2929 emit_move_change_mode (enum machine_mode new_mode,
2930 enum machine_mode old_mode, rtx x, bool force)
2932 rtx ret;
2934 if (push_operand (x, GET_MODE (x)))
2936 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2937 MEM_COPY_ATTRIBUTES (ret, x);
2939 else if (MEM_P (x))
2941 /* We don't have to worry about changing the address since the
2942 size in bytes is supposed to be the same. */
2943 if (reload_in_progress)
2945 /* Copy the MEM to change the mode and move any
2946 substitutions from the old MEM to the new one. */
2947 ret = adjust_address_nv (x, new_mode, 0);
2948 copy_replacements (x, ret);
2950 else
2951 ret = adjust_address (x, new_mode, 0);
2953 else
2955 /* Note that we do want simplify_subreg's behavior of validating
2956 that the new mode is ok for a hard register. If we were to use
2957 simplify_gen_subreg, we would create the subreg, but would
2958 probably run into the target not being able to implement it. */
2959 /* Except, of course, when FORCE is true, when this is exactly what
2960 we want. Which is needed for CCmodes on some targets. */
2961 if (force)
2962 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2963 else
2964 ret = simplify_subreg (new_mode, x, old_mode, 0);
2967 return ret;
2970 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2971 an integer mode of the same size as MODE. Returns the instruction
2972 emitted, or NULL if such a move could not be generated. */
2974 static rtx
2975 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2977 enum machine_mode imode;
2978 enum insn_code code;
2980 /* There must exist a mode of the exact size we require. */
2981 imode = int_mode_for_mode (mode);
2982 if (imode == BLKmode)
2983 return NULL_RTX;
2985 /* The target must support moves in this mode. */
2986 code = optab_handler (mov_optab, imode)->insn_code;
2987 if (code == CODE_FOR_nothing)
2988 return NULL_RTX;
2990 x = emit_move_change_mode (imode, mode, x, force);
2991 if (x == NULL_RTX)
2992 return NULL_RTX;
2993 y = emit_move_change_mode (imode, mode, y, force);
2994 if (y == NULL_RTX)
2995 return NULL_RTX;
2996 return emit_insn (GEN_FCN (code) (x, y));
2999 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3000 Return an equivalent MEM that does not use an auto-increment. */
3002 static rtx
3003 emit_move_resolve_push (enum machine_mode mode, rtx x)
3005 enum rtx_code code = GET_CODE (XEXP (x, 0));
3006 HOST_WIDE_INT adjust;
3007 rtx temp;
3009 adjust = GET_MODE_SIZE (mode);
3010 #ifdef PUSH_ROUNDING
3011 adjust = PUSH_ROUNDING (adjust);
3012 #endif
3013 if (code == PRE_DEC || code == POST_DEC)
3014 adjust = -adjust;
3015 else if (code == PRE_MODIFY || code == POST_MODIFY)
3017 rtx expr = XEXP (XEXP (x, 0), 1);
3018 HOST_WIDE_INT val;
3020 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3021 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
3022 val = INTVAL (XEXP (expr, 1));
3023 if (GET_CODE (expr) == MINUS)
3024 val = -val;
3025 gcc_assert (adjust == val || adjust == -val);
3026 adjust = val;
3029 /* Do not use anti_adjust_stack, since we don't want to update
3030 stack_pointer_delta. */
3031 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3032 GEN_INT (adjust), stack_pointer_rtx,
3033 0, OPTAB_LIB_WIDEN);
3034 if (temp != stack_pointer_rtx)
3035 emit_move_insn (stack_pointer_rtx, temp);
3037 switch (code)
3039 case PRE_INC:
3040 case PRE_DEC:
3041 case PRE_MODIFY:
3042 temp = stack_pointer_rtx;
3043 break;
3044 case POST_INC:
3045 case POST_DEC:
3046 case POST_MODIFY:
3047 temp = plus_constant (stack_pointer_rtx, -adjust);
3048 break;
3049 default:
3050 gcc_unreachable ();
3053 return replace_equiv_address (x, temp);
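/* For instance (assuming PUSH_ROUNDING leaves 8 unchanged), a DImode
   push (mem:DI (pre_dec (reg sp))) is resolved into an explicit
   sp = sp - 8 followed by a plain (mem:DI (reg sp)), which is the
   form the multi-word and complex movers below can handle.  */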
3056 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3057 X is known to satisfy push_operand, and MODE is known to be complex.
3058 Returns the last instruction emitted. */
3060 static rtx
3061 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3063 enum machine_mode submode = GET_MODE_INNER (mode);
3064 bool imag_first;
3066 #ifdef PUSH_ROUNDING
3067 unsigned int submodesize = GET_MODE_SIZE (submode);
3069 /* In case we output to the stack, but the size is smaller than the
3070 machine can push exactly, we need to use move instructions. */
3071 if (PUSH_ROUNDING (submodesize) != submodesize)
3073 x = emit_move_resolve_push (mode, x);
3074 return emit_move_insn (x, y);
3076 #endif
3078 /* Note that the real part always precedes the imag part in memory
3079 regardless of machine's endianness. */
3080 switch (GET_CODE (XEXP (x, 0)))
3082 case PRE_DEC:
3083 case POST_DEC:
3084 imag_first = true;
3085 break;
3086 case PRE_INC:
3087 case POST_INC:
3088 imag_first = false;
3089 break;
3090 default:
3091 gcc_unreachable ();
3094 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3095 read_complex_part (y, imag_first));
3096 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3097 read_complex_part (y, !imag_first));
3100 /* A subroutine of emit_move_complex. Perform the move from Y to X
3101 via two moves of the parts. Returns the last instruction emitted. */
3103 rtx
3104 emit_move_complex_parts (rtx x, rtx y)
3106 /* Show the output dies here. This is necessary for SUBREGs
3107 of pseudos since we cannot track their lifetimes correctly;
3108 hard regs shouldn't appear here except as return values. */
3109 if (!reload_completed && !reload_in_progress
3110 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3111 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3113 write_complex_part (x, read_complex_part (y, false), false);
3114 write_complex_part (x, read_complex_part (y, true), true);
3116 return get_last_insn ();
3119 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3120 MODE is known to be complex. Returns the last instruction emitted. */
3122 static rtx
3123 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3125 bool try_int;
3127 /* Need to take special care for pushes, to maintain proper ordering
3128 of the data, and possibly extra padding. */
3129 if (push_operand (x, mode))
3130 return emit_move_complex_push (mode, x, y);
3132 /* See if we can coerce the target into moving both values at once. */
3134 /* Move floating point as parts. */
3135 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3136 && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
3137 try_int = false;
3138 /* Not possible if the values are inherently not adjacent. */
3139 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3140 try_int = false;
3141 /* Is possible if both are registers (or subregs of registers). */
3142 else if (register_operand (x, mode) && register_operand (y, mode))
3143 try_int = true;
3144 /* If one of the operands is a memory, and alignment constraints
3145 are friendly enough, we may be able to do combined memory operations.
3146 We do not attempt this if Y is a constant because that combination is
3147 usually better with the by-parts thing below. */
3148 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3149 && (!STRICT_ALIGNMENT
3150 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3151 try_int = true;
3152 else
3153 try_int = false;
3155 if (try_int)
3157 rtx ret;
3159 /* For memory to memory moves, optimal behavior can be had with the
3160 existing block move logic. */
3161 if (MEM_P (x) && MEM_P (y))
3163 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3164 BLOCK_OP_NO_LIBCALL);
3165 return get_last_insn ();
3168 ret = emit_move_via_integer (mode, x, y, true);
3169 if (ret)
3170 return ret;
3173 return emit_move_complex_parts (x, y);
3176 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3177 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3179 static rtx
3180 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3182 rtx ret;
3184 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3185 if (mode != CCmode)
3187 enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3188 if (code != CODE_FOR_nothing)
3190 x = emit_move_change_mode (CCmode, mode, x, true);
3191 y = emit_move_change_mode (CCmode, mode, y, true);
3192 return emit_insn (GEN_FCN (code) (x, y));
3196 /* Otherwise, find the MODE_INT mode of the same width. */
3197 ret = emit_move_via_integer (mode, x, y, false);
3198 gcc_assert (ret != NULL);
3199 return ret;
3202 /* Return true if word I of OP lies entirely in the
3203 undefined bits of a paradoxical subreg. */
3205 static bool
3206 undefined_operand_subword_p (const_rtx op, int i)
3208 enum machine_mode innermode, innermostmode;
3209 int offset;
3210 if (GET_CODE (op) != SUBREG)
3211 return false;
3212 innermode = GET_MODE (op);
3213 innermostmode = GET_MODE (SUBREG_REG (op));
3214 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3215 /* The SUBREG_BYTE represents offset, as if the value were stored in
3216 memory, except for a paradoxical subreg where we define
3217 SUBREG_BYTE to be 0; undo this exception as in
3218 simplify_subreg. */
3219 if (SUBREG_BYTE (op) == 0
3220 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3222 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3223 if (WORDS_BIG_ENDIAN)
3224 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3225 if (BYTES_BIG_ENDIAN)
3226 offset += difference % UNITS_PER_WORD;
3228 if (offset >= GET_MODE_SIZE (innermostmode)
3229 || offset <= -GET_MODE_SIZE (word_mode))
3230 return true;
3231 return false;
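/* For example, in (subreg:TI (reg:DI x) 0) on a 64-bit little-endian
   target, word 1 lies wholly outside the DImode source, so its
   contents are undefined and emit_move_multi_word need not emit a
   move for it.  */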
3234 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3235 MODE is any multi-word or full-word mode that lacks a move_insn
3236 pattern. Note that you will get better code if you define such
3237 patterns, even if they must turn into multiple assembler instructions. */
3239 static rtx
3240 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3242 rtx last_insn = 0;
3243 rtx seq, inner;
3244 bool need_clobber;
3245 int i;
3247 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3249 /* If X is a push on the stack, do the push now and replace
3250 X with a reference to the stack pointer. */
3251 if (push_operand (x, mode))
3252 x = emit_move_resolve_push (mode, x);
3254 /* If we are in reload, see if either operand is a MEM whose address
3255 is scheduled for replacement. */
3256 if (reload_in_progress && MEM_P (x)
3257 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3258 x = replace_equiv_address_nv (x, inner);
3259 if (reload_in_progress && MEM_P (y)
3260 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3261 y = replace_equiv_address_nv (y, inner);
3263 start_sequence ();
3265 need_clobber = false;
3266 for (i = 0;
3267 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3268 i++)
3270 rtx xpart = operand_subword (x, i, 1, mode);
3271 rtx ypart;
3273 /* Do not generate code for a move if it would come entirely
3274 from the undefined bits of a paradoxical subreg. */
3275 if (undefined_operand_subword_p (y, i))
3276 continue;
3278 ypart = operand_subword (y, i, 1, mode);
3280 /* If we can't get a part of Y, put Y into memory if it is a
3281 constant. Otherwise, force it into a register. Then we must
3282 be able to get a part of Y. */
3283 if (ypart == 0 && CONSTANT_P (y))
3285 y = use_anchored_address (force_const_mem (mode, y));
3286 ypart = operand_subword (y, i, 1, mode);
3288 else if (ypart == 0)
3289 ypart = operand_subword_force (y, i, mode);
3291 gcc_assert (xpart && ypart);
3293 need_clobber |= (GET_CODE (xpart) == SUBREG);
3295 last_insn = emit_move_insn (xpart, ypart);
3298 seq = get_insns ();
3299 end_sequence ();
3301 /* Show the output dies here. This is necessary for SUBREGs
3302 of pseudos since we cannot track their lifetimes correctly;
3303 hard regs shouldn't appear here except as return values.
3304 We never want to emit such a clobber after reload. */
3305 if (x != y
3306 && ! (reload_in_progress || reload_completed)
3307 && need_clobber != 0)
3308 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3310 emit_insn (seq);
3312 return last_insn;
3315 /* Low level part of emit_move_insn.
3316 Called just like emit_move_insn, but assumes X and Y
3317 are basically valid. */
3319 rtx
3320 emit_move_insn_1 (rtx x, rtx y)
3322 enum machine_mode mode = GET_MODE (x);
3323 enum insn_code code;
3325 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3327 code = optab_handler (mov_optab, mode)->insn_code;
3328 if (code != CODE_FOR_nothing)
3329 return emit_insn (GEN_FCN (code) (x, y));
3331 /* Expand complex moves by moving real part and imag part. */
3332 if (COMPLEX_MODE_P (mode))
3333 return emit_move_complex (mode, x, y);
3335 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3336 || ALL_FIXED_POINT_MODE_P (mode))
3338 rtx result = emit_move_via_integer (mode, x, y, true);
3340 /* If we can't find an integer mode, use multi words. */
3341 if (result)
3342 return result;
3343 else
3344 return emit_move_multi_word (mode, x, y);
3347 if (GET_MODE_CLASS (mode) == MODE_CC)
3348 return emit_move_ccmode (mode, x, y);
3350 /* Try using a move pattern for the corresponding integer mode. This is
3351 only safe when simplify_subreg can convert MODE constants into integer
3352 constants. At present, it can only do this reliably if the value
3353 fits within a HOST_WIDE_INT. */
3354 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3356 rtx ret = emit_move_via_integer (mode, x, y, false);
3357 if (ret)
3358 return ret;
3361 return emit_move_multi_word (mode, x, y);
3364 /* Generate code to copy Y into X.
3365 Both Y and X must have the same mode, except that
3366 Y can be a constant with VOIDmode.
3367 This mode cannot be BLKmode; use emit_block_move for that.
3369 Return the last instruction emitted. */
3371 rtx
3372 emit_move_insn (rtx x, rtx y)
3374 enum machine_mode mode = GET_MODE (x);
3375 rtx y_cst = NULL_RTX;
3376 rtx last_insn, set;
3378 gcc_assert (mode != BLKmode
3379 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3381 if (CONSTANT_P (y))
3383 if (optimize
3384 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3385 && (last_insn = compress_float_constant (x, y)))
3386 return last_insn;
3388 y_cst = y;
3390 if (!LEGITIMATE_CONSTANT_P (y))
3392 y = force_const_mem (mode, y);
3394 /* If the target's cannot_force_const_mem prevented the spill,
3395 assume that the target's move expanders will also take care
3396 of the non-legitimate constant. */
3397 if (!y)
3398 y = y_cst;
3399 else
3400 y = use_anchored_address (y);
3404 /* If X or Y are memory references, verify that their addresses are valid
3405 for the machine. */
3406 if (MEM_P (x)
3407 && (! memory_address_p (GET_MODE (x), XEXP (x, 0))
3408 && ! push_operand (x, GET_MODE (x))))
3409 x = validize_mem (x);
3411 if (MEM_P (y)
3412 && ! memory_address_p (GET_MODE (y), XEXP (y, 0)))
3413 y = validize_mem (y);
3415 gcc_assert (mode != BLKmode);
3417 last_insn = emit_move_insn_1 (x, y);
3419 if (y_cst && REG_P (x)
3420 && (set = single_set (last_insn)) != NULL_RTX
3421 && SET_DEST (set) == x
3422 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3423 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3425 return last_insn;
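/* Typical use, e.g. loading a constant into a fresh pseudo (a
   sketch):

     rtx tmp = gen_reg_rtx (SImode);
     emit_move_insn (tmp, GEN_INT (42));

   If the constant has to be spilled to the constant pool, the
   REG_EQUAL note added above still records its value for later
   optimizers.  */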
3428 /* If Y is representable exactly in a narrower mode, and the target can
3429 perform the extension directly from constant or memory, then emit the
3430 move as an extension. */
3432 static rtx
3433 compress_float_constant (rtx x, rtx y)
3435 enum machine_mode dstmode = GET_MODE (x);
3436 enum machine_mode orig_srcmode = GET_MODE (y);
3437 enum machine_mode srcmode;
3438 REAL_VALUE_TYPE r;
3439 int oldcost, newcost;
3441 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3443 if (LEGITIMATE_CONSTANT_P (y))
3444 oldcost = rtx_cost (y, SET);
3445 else
3446 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3448 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3449 srcmode != orig_srcmode;
3450 srcmode = GET_MODE_WIDER_MODE (srcmode))
3452 enum insn_code ic;
3453 rtx trunc_y, last_insn;
3455 /* Skip if the target can't extend this way. */
3456 ic = can_extend_p (dstmode, srcmode, 0);
3457 if (ic == CODE_FOR_nothing)
3458 continue;
3460 /* Skip if the narrowed value isn't exact. */
3461 if (! exact_real_truncate (srcmode, &r))
3462 continue;
3464 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3466 if (LEGITIMATE_CONSTANT_P (trunc_y))
3468 /* Skip if the target needs extra instructions to perform
3469 the extension. */
3470 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3471 continue;
3472 /* This is valid, but may not be cheaper than the original. */
3473 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3474 if (oldcost < newcost)
3475 continue;
3477 else if (float_extend_from_mem[dstmode][srcmode])
3479 trunc_y = force_const_mem (srcmode, trunc_y);
3480 /* This is valid, but may not be cheaper than the original. */
3481 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3482 if (oldcost < newcost)
3483 continue;
3484 trunc_y = validize_mem (trunc_y);
3486 else
3487 continue;
3489 /* For CSE's benefit, force the compressed constant pool entry
3490 into a new pseudo. This constant may be used in different modes,
3491 and if not, combine will put things back together for us. */
3492 trunc_y = force_reg (srcmode, trunc_y);
3493 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3494 last_insn = get_last_insn ();
3496 if (REG_P (x))
3497 set_unique_reg_note (last_insn, REG_EQUAL, y);
3499 return last_insn;
3502 return NULL_RTX;
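/* Example: a DFmode move of the constant 1.0 can, in effect, be
   emitted as (float_extend:DF (mem:SF ...)) when 1.0 truncates to
   SFmode exactly and the target extends directly from memory, which
   is usually cheaper than materializing a full DFmode constant.  */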
3505 /* Pushing data onto the stack. */
3507 /* Push a block of length SIZE (perhaps variable)
3508 and return an rtx to address the beginning of the block.
3509 The value may be virtual_outgoing_args_rtx.
3511 EXTRA is the number of bytes of padding to push in addition to SIZE.
3512 BELOW nonzero means this padding comes at low addresses;
3513 otherwise, the padding comes at high addresses. */
3515 rtx
3516 push_block (rtx size, int extra, int below)
3518 rtx temp;
3520 size = convert_modes (Pmode, ptr_mode, size, 1);
3521 if (CONSTANT_P (size))
3522 anti_adjust_stack (plus_constant (size, extra));
3523 else if (REG_P (size) && extra == 0)
3524 anti_adjust_stack (size);
3525 else
3527 temp = copy_to_mode_reg (Pmode, size);
3528 if (extra != 0)
3529 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3530 temp, 0, OPTAB_LIB_WIDEN);
3531 anti_adjust_stack (temp);
3534 #ifndef STACK_GROWS_DOWNWARD
3535 if (0)
3536 #else
3537 if (1)
3538 #endif
3540 temp = virtual_outgoing_args_rtx;
3541 if (extra != 0 && below)
3542 temp = plus_constant (temp, extra);
3544 else
3546 if (GET_CODE (size) == CONST_INT)
3547 temp = plus_constant (virtual_outgoing_args_rtx,
3548 -INTVAL (size) - (below ? 0 : extra));
3549 else if (extra != 0 && !below)
3550 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3551 negate_rtx (Pmode, plus_constant (size, extra)));
3552 else
3553 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3554 negate_rtx (Pmode, size));
3557 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3560 #ifdef PUSH_ROUNDING
3562 /* Emit single push insn. */
3564 static void
3565 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3567 rtx dest_addr;
3568 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3569 rtx dest;
3570 enum insn_code icode;
3571 insn_operand_predicate_fn pred;
3573 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3574 /* If there is a push pattern, use it.  Otherwise try the old way of
3575 throwing a MEM representing the push operation at the move expander.  */
3576 icode = optab_handler (push_optab, mode)->insn_code;
3577 if (icode != CODE_FOR_nothing)
3579 if (((pred = insn_data[(int) icode].operand[0].predicate)
3580 && !((*pred) (x, mode))))
3581 x = force_reg (mode, x);
3582 emit_insn (GEN_FCN (icode) (x));
3583 return;
3585 if (GET_MODE_SIZE (mode) == rounded_size)
3586 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3587 /* If we are to pad downward, adjust the stack pointer first and
3588 then store X into the stack location using an offset. This is
3589 because emit_move_insn does not know how to pad; it does not have
3590 access to type. */
3591 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3593 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3594 HOST_WIDE_INT offset;
3596 emit_move_insn (stack_pointer_rtx,
3597 expand_binop (Pmode,
3598 #ifdef STACK_GROWS_DOWNWARD
3599 sub_optab,
3600 #else
3601 add_optab,
3602 #endif
3603 stack_pointer_rtx,
3604 GEN_INT (rounded_size),
3605 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3607 offset = (HOST_WIDE_INT) padding_size;
3608 #ifdef STACK_GROWS_DOWNWARD
3609 if (STACK_PUSH_CODE == POST_DEC)
3610 /* We have already decremented the stack pointer, so get the
3611 previous value. */
3612 offset += (HOST_WIDE_INT) rounded_size;
3613 #else
3614 if (STACK_PUSH_CODE == POST_INC)
3615 /* We have already incremented the stack pointer, so get the
3616 previous value. */
3617 offset -= (HOST_WIDE_INT) rounded_size;
3618 #endif
3619 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3621 else
3623 #ifdef STACK_GROWS_DOWNWARD
3624 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3625 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3626 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3627 #else
3628 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3629 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3630 GEN_INT (rounded_size));
3631 #endif
3632 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3635 dest = gen_rtx_MEM (mode, dest_addr);
3637 if (type != 0)
3639 set_mem_attributes (dest, type, 1);
3641 if (flag_optimize_sibling_calls)
3642 /* Function incoming arguments may overlap with sibling call
3643 outgoing arguments and we cannot allow reordering of reads
3644 from function arguments with stores to outgoing arguments
3645 of sibling calls. */
3646 set_mem_alias_set (dest, 0);
3648 emit_move_insn (dest, x);
3650 #endif
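/* Worked example for the padding logic above: pushing an HImode
   value when PUSH_ROUNDING rounds 2 bytes up to 4 gives
   PADDING_SIZE == 2, so with downward padding the stack pointer is
   adjusted first and the value is then stored at offset
   PADDING_SIZE from it.  */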
3652 /* Generate code to push X onto the stack, assuming it has mode MODE and
3653 type TYPE.
3654 MODE is redundant except when X is a CONST_INT (since they don't
3655 carry mode info).
3656 SIZE is an rtx for the size of data to be copied (in bytes),
3657 needed only if X is BLKmode.
3659 ALIGN (in bits) is maximum alignment we can assume.
3661 If PARTIAL and REG are both nonzero, then copy that many of the first
3662 bytes of X into registers starting with REG, and push the rest of X.
3663 The amount of space pushed is decreased by PARTIAL bytes.
3664 REG must be a hard register in this case.
3665 If REG is zero but PARTIAL is not, take all other actions for an
3666 argument partially in registers, but do not actually load any
3667 registers.
3669 EXTRA is the amount in bytes of extra space to leave next to this arg.
3670 This is ignored if an argument block has already been allocated.
3672 On a machine that lacks real push insns, ARGS_ADDR is the address of
3673 the bottom of the argument block for this call. We use indexing off there
3674 to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3675 argument block has not been preallocated.
3677 ARGS_SO_FAR is the size of args previously pushed for this call.
3679 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3680 for arguments passed in registers. If nonzero, it will be the number
3681 of bytes required. */
3683 void
3684 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3685 unsigned int align, int partial, rtx reg, int extra,
3686 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3687 rtx alignment_pad)
3689 rtx xinner;
3690 enum direction stack_direction
3691 #ifdef STACK_GROWS_DOWNWARD
3692 = downward;
3693 #else
3694 = upward;
3695 #endif
3697 /* Decide where to pad the argument: `downward' for below,
3698 `upward' for above, or `none' for don't pad it.
3699 Default is below for small data on big-endian machines; else above. */
3700 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3702 /* Invert direction if stack is post-decrement.
3703 FIXME: why? */
3704 if (STACK_PUSH_CODE == POST_DEC)
3705 if (where_pad != none)
3706 where_pad = (where_pad == downward ? upward : downward);
3708 xinner = x;
3710 if (mode == BLKmode
3711 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3713 /* Copy a block into the stack, entirely or partially. */
3715 rtx temp;
3716 int used;
3717 int offset;
3718 int skip;
3720 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3721 used = partial - offset;
3723 if (mode != BLKmode)
3725 /* A value is to be stored in an insufficiently aligned
3726 stack slot; copy via a suitably aligned slot if
3727 necessary. */
3728 size = GEN_INT (GET_MODE_SIZE (mode));
3729 if (!MEM_P (xinner))
3731 temp = assign_temp (type, 0, 1, 1);
3732 emit_move_insn (temp, xinner);
3733 xinner = temp;
3737 gcc_assert (size);
3739 /* USED is now the # of bytes we need not copy to the stack
3740 because registers will take care of them. */
3742 if (partial != 0)
3743 xinner = adjust_address (xinner, BLKmode, used);
3745 /* If the partial register-part of the arg counts in its stack size,
3746 skip the part of stack space corresponding to the registers.
3747 Otherwise, start copying to the beginning of the stack space,
3748 by setting SKIP to 0. */
3749 skip = (reg_parm_stack_space == 0) ? 0 : used;
3751 #ifdef PUSH_ROUNDING
3752 /* Do it with several push insns if that doesn't take lots of insns
3753 and if there is no difficulty with push insns that skip bytes
3754 on the stack for alignment purposes. */
3755 if (args_addr == 0
3756 && PUSH_ARGS
3757 && GET_CODE (size) == CONST_INT
3758 && skip == 0
3759 && MEM_ALIGN (xinner) >= align
3760 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3761 /* Here we avoid the case of a structure whose weak alignment
3762 forces many pushes of a small amount of data,
3763 and such small pushes do rounding that causes trouble. */
3764 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3765 || align >= BIGGEST_ALIGNMENT
3766 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3767 == (align / BITS_PER_UNIT)))
3768 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3770 /* Push padding now if padding above and stack grows down,
3771 or if padding below and stack grows up.
3772 But if space already allocated, this has already been done. */
3773 if (extra && args_addr == 0
3774 && where_pad != none && where_pad != stack_direction)
3775 anti_adjust_stack (GEN_INT (extra));
3777 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3779 else
3780 #endif /* PUSH_ROUNDING */
3782 rtx target;
3784 /* Otherwise make space on the stack and copy the data
3785 to the address of that space. */
3787 /* Deduct words put into registers from the size we must copy. */
3788 if (partial != 0)
3790 if (GET_CODE (size) == CONST_INT)
3791 size = GEN_INT (INTVAL (size) - used);
3792 else
3793 size = expand_binop (GET_MODE (size), sub_optab, size,
3794 GEN_INT (used), NULL_RTX, 0,
3795 OPTAB_LIB_WIDEN);
3798 /* Get the address of the stack space.
3799 In this case, we do not deal with EXTRA separately.
3800 A single stack adjust will do. */
3801 if (! args_addr)
3803 temp = push_block (size, extra, where_pad == downward);
3804 extra = 0;
3806 else if (GET_CODE (args_so_far) == CONST_INT)
3807 temp = memory_address (BLKmode,
3808 plus_constant (args_addr,
3809 skip + INTVAL (args_so_far)));
3810 else
3811 temp = memory_address (BLKmode,
3812 plus_constant (gen_rtx_PLUS (Pmode,
3813 args_addr,
3814 args_so_far),
3815 skip));
3817 if (!ACCUMULATE_OUTGOING_ARGS)
3819 /* If the source is referenced relative to the stack pointer,
3820 copy it to another register to stabilize it. We do not need
3821 to do this if we know that we won't be changing sp. */
3823 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3824 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3825 temp = copy_to_reg (temp);
3828 target = gen_rtx_MEM (BLKmode, temp);
3830 /* We do *not* set_mem_attributes here, because incoming arguments
3831 may overlap with sibling call outgoing arguments and we cannot
3832 allow reordering of reads from function arguments with stores
3833 to outgoing arguments of sibling calls. We do, however, want
3834 to record the alignment of the stack slot. */
3835 /* ALIGN may well be stricter than the alignment of TYPE, e.g. due to
3836 PARM_BOUNDARY. Assume the caller isn't lying. */
3837 set_mem_align (target, align);
3839 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3842 else if (partial > 0)
3844 /* Scalar partly in registers. */
3846 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3847 int i;
3848 int not_stack;
3849 /* # bytes of start of argument
3850 that we must make space for but need not store. */
3851 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3852 int args_offset = INTVAL (args_so_far);
3853 int skip;
3855 /* Push padding now if padding above and stack grows down,
3856 or if padding below and stack grows up.
3857 But if space already allocated, this has already been done. */
3858 if (extra && args_addr == 0
3859 && where_pad != none && where_pad != stack_direction)
3860 anti_adjust_stack (GEN_INT (extra));
3862 /* If we make space by pushing it, we might as well push
3863 the real data. Otherwise, we can leave OFFSET nonzero
3864 and leave the space uninitialized. */
3865 if (args_addr == 0)
3866 offset = 0;
3868 /* Now NOT_STACK gets the number of words that we don't need to
3869 allocate on the stack. Convert OFFSET to words too. */
3870 not_stack = (partial - offset) / UNITS_PER_WORD;
3871 offset /= UNITS_PER_WORD;
3873 /* If the partial register-part of the arg counts in its stack size,
3874 skip the part of stack space corresponding to the registers.
3875 Otherwise, start copying to the beginning of the stack space,
3876 by setting SKIP to 0. */
3877 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3879 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3880 x = validize_mem (force_const_mem (mode, x));
3882 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3883 SUBREGs of such registers are not allowed. */
3884 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3885 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3886 x = copy_to_reg (x);
3888 /* Loop over all the words allocated on the stack for this arg. */
3889 /* We can do it by words, because any scalar bigger than a word
3890 has a size that is a multiple of a word. */
3891 #ifndef PUSH_ARGS_REVERSED
3892 for (i = not_stack; i < size; i++)
3893 #else
3894 for (i = size - 1; i >= not_stack; i--)
3895 #endif
3896 if (i >= not_stack + offset)
3897 emit_push_insn (operand_subword_force (x, i, mode),
3898 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3899 0, args_addr,
3900 GEN_INT (args_offset + ((i - not_stack + skip)
3901 * UNITS_PER_WORD)),
3902 reg_parm_stack_space, alignment_pad);
3904 else
3906 rtx addr;
3907 rtx dest;
3909 /* Push padding now if padding above and stack grows down,
3910 or if padding below and stack grows up.
3911 But if space already allocated, this has already been done. */
3912 if (extra && args_addr == 0
3913 && where_pad != none && where_pad != stack_direction)
3914 anti_adjust_stack (GEN_INT (extra));
3916 #ifdef PUSH_ROUNDING
3917 if (args_addr == 0 && PUSH_ARGS)
3918 emit_single_push_insn (mode, x, type);
3919 else
3920 #endif
3922 if (GET_CODE (args_so_far) == CONST_INT)
3923 addr
3924 = memory_address (mode,
3925 plus_constant (args_addr,
3926 INTVAL (args_so_far)));
3927 else
3928 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3929 args_so_far));
3930 dest = gen_rtx_MEM (mode, addr);
3932 /* We do *not* set_mem_attributes here, because incoming arguments
3933 may overlap with sibling call outgoing arguments and we cannot
3934 allow reordering of reads from function arguments with stores
3935 to outgoing arguments of sibling calls. We do, however, want
3936 to record the alignment of the stack slot. */
3937 /* ALIGN may well be stricter than the alignment of TYPE, e.g. due to
3938 PARM_BOUNDARY. Assume the caller isn't lying. */
3939 set_mem_align (dest, align);
3941 emit_move_insn (dest, x);
3945 /* If part should go in registers, copy that part
3946 into the appropriate registers. Do this now, at the end,
3947 since mem-to-mem copies above may do function calls. */
3948 if (partial > 0 && reg != 0)
3950 /* Handle calls that pass values in multiple non-contiguous locations.
3951 The Irix 6 ABI has examples of this. */
3952 if (GET_CODE (reg) == PARALLEL)
3953 emit_group_load (reg, x, type, -1);
3954 else
3956 gcc_assert (partial % UNITS_PER_WORD == 0);
3957 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3961 if (extra && args_addr == 0 && where_pad == stack_direction)
3962 anti_adjust_stack (GEN_INT (extra));
3964 if (alignment_pad && args_addr == 0)
3965 anti_adjust_stack (alignment_pad);
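/* An illustrative sketch, not part of the compiler: pushing a plain
   SImode value with nothing passed in registers, no preallocated
   argument block and no padding might look like the call below; VAL
   is a hypothetical pseudo holding the value.  */
#if 0
  emit_push_insn (val, SImode, integer_type_node,
                  NULL_RTX,              /* SIZE: needed only for BLKmode.  */
                  GET_MODE_ALIGNMENT (SImode),
                  0, NULL_RTX,           /* PARTIAL, REG: nothing in regs.  */
                  0,                     /* EXTRA: no extra space.  */
                  NULL_RTX, const0_rtx,  /* ARGS_ADDR, ARGS_SO_FAR.  */
                  0, NULL_RTX);          /* REG_PARM_STACK_SPACE, pad.  */
#endif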
3968 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3969 operations. */
3971 static rtx
3972 get_subtarget (rtx x)
3974 return (optimize
3975 || x == 0
3976 /* Only registers can be subtargets. */
3977 || !REG_P (x)
3978 /* Don't use hard regs to avoid extending their life. */
3979 || REGNO (x) < FIRST_PSEUDO_REGISTER
3980 ? 0 : x);
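/* For instance, when expanding "a = b + c + d" without optimization,
   the pseudo that will eventually hold A may be reused as the target
   of the intermediate addition, whereas a hard register or a MEM (or
   any target when optimizing) yields 0 here, so that lifetimes are
   not extended. The example is illustrative only.  */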
3983 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3984 FIELD is a bitfield. Returns true if the optimization was successful,
3985 and there's nothing else to do. */
3987 static bool
3988 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3989 unsigned HOST_WIDE_INT bitpos,
3990 enum machine_mode mode1, rtx str_rtx,
3991 tree to, tree src)
3993 enum machine_mode str_mode = GET_MODE (str_rtx);
3994 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3995 tree op0, op1;
3996 rtx value, result;
3997 optab binop;
3999 if (mode1 != VOIDmode
4000 || bitsize >= BITS_PER_WORD
4001 || str_bitsize > BITS_PER_WORD
4002 || TREE_SIDE_EFFECTS (to)
4003 || TREE_THIS_VOLATILE (to))
4004 return false;
4006 STRIP_NOPS (src);
4007 if (!BINARY_CLASS_P (src)
4008 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4009 return false;
4011 op0 = TREE_OPERAND (src, 0);
4012 op1 = TREE_OPERAND (src, 1);
4013 STRIP_NOPS (op0);
4015 if (!operand_equal_p (to, op0, 0))
4016 return false;
4018 if (MEM_P (str_rtx))
4020 unsigned HOST_WIDE_INT offset1;
4022 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4023 str_mode = word_mode;
4024 str_mode = get_best_mode (bitsize, bitpos,
4025 MEM_ALIGN (str_rtx), str_mode, 0);
4026 if (str_mode == VOIDmode)
4027 return false;
4028 str_bitsize = GET_MODE_BITSIZE (str_mode);
4030 offset1 = bitpos;
4031 bitpos %= str_bitsize;
4032 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4033 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4035 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4036 return false;
4038 /* If the bit field covers the whole REG/MEM, store_field
4039 will likely generate better code. */
4040 if (bitsize >= str_bitsize)
4041 return false;
4043 /* We can't handle fields split across multiple entities. */
4044 if (bitpos + bitsize > str_bitsize)
4045 return false;
4047 if (BYTES_BIG_ENDIAN)
4048 bitpos = str_bitsize - bitpos - bitsize;
4050 switch (TREE_CODE (src))
4052 case PLUS_EXPR:
4053 case MINUS_EXPR:
4054 /* For now, just optimize the case of the topmost bitfield,
4055 where we don't need to do any masking, and of
4056 1-bit bitfields, where xor can be used.
4057 We might win by one instruction for the other bitfields
4058 too if insv/extv instructions aren't used, so that
4059 can be added later. */
4060 if (bitpos + bitsize != str_bitsize
4061 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4062 break;
4064 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4065 value = convert_modes (str_mode,
4066 TYPE_MODE (TREE_TYPE (op1)), value,
4067 TYPE_UNSIGNED (TREE_TYPE (op1)));
4069 /* We may be accessing data outside the field, which means
4070 we can alias adjacent data. */
4071 if (MEM_P (str_rtx))
4073 str_rtx = shallow_copy_rtx (str_rtx);
4074 set_mem_alias_set (str_rtx, 0);
4075 set_mem_expr (str_rtx, 0);
4078 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4079 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4081 value = expand_and (str_mode, value, const1_rtx, NULL);
4082 binop = xor_optab;
4084 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4085 build_int_cst (NULL_TREE, bitpos),
4086 NULL_RTX, 1);
4087 result = expand_binop (str_mode, binop, str_rtx,
4088 value, str_rtx, 1, OPTAB_WIDEN);
4089 if (result != str_rtx)
4090 emit_move_insn (str_rtx, result);
4091 return true;
4093 case BIT_IOR_EXPR:
4094 case BIT_XOR_EXPR:
4095 if (TREE_CODE (op1) != INTEGER_CST)
4096 break;
4097 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4098 value = convert_modes (GET_MODE (str_rtx),
4099 TYPE_MODE (TREE_TYPE (op1)), value,
4100 TYPE_UNSIGNED (TREE_TYPE (op1)));
4102 /* We may be accessing data outside the field, which means
4103 we can alias adjacent data. */
4104 if (MEM_P (str_rtx))
4106 str_rtx = shallow_copy_rtx (str_rtx);
4107 set_mem_alias_set (str_rtx, 0);
4108 set_mem_expr (str_rtx, 0);
4111 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4112 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4114 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4115 - 1);
4116 value = expand_and (GET_MODE (str_rtx), value, mask,
4117 NULL_RTX);
4119 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4120 build_int_cst (NULL_TREE, bitpos),
4121 NULL_RTX, 1);
4122 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4123 value, str_rtx, 1, OPTAB_WIDEN);
4124 if (result != str_rtx)
4125 emit_move_insn (str_rtx, result);
4126 return true;
4128 default:
4129 break;
4132 return false;
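/* An illustrative sketch, not part of the compiler, of what the
   PLUS_EXPR/MINUS_EXPR case above recognizes:  */
#if 0
  struct s { unsigned int go : 1; unsigned int rest : 31; } x;
  x.go += 1;    /* A 1-bit field: compiled as a single XOR of the
                   containing word with a constant mask.  */

  struct t { unsigned int pad : 24; unsigned int top : 8; } y;
  y.top += 5;   /* Topmost field (little-endian layout assumed): a
                   plain ADD of the shifted value is safe because
                   carries fall off the end of the word.  */
#endif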
4136 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4137 is true, try generating a nontemporal store. */
4139 void
4140 expand_assignment (tree to, tree from, bool nontemporal)
4142 rtx to_rtx = 0;
4143 rtx result;
4145 /* Don't crash if the lhs of the assignment was erroneous. */
4146 if (TREE_CODE (to) == ERROR_MARK)
4148 result = expand_normal (from);
4149 return;
4152 /* Optimize away no-op moves without side-effects. */
4153 if (operand_equal_p (to, from, 0))
4154 return;
4156 /* Assignment of a structure component needs special treatment
4157 if the structure component's rtx is not simply a MEM.
4158 Assignment of an array element at a constant index, and assignment of
4159 an array element in an unaligned packed structure field, has the same
4160 problem. */
4161 if (handled_component_p (to)
4162 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4164 enum machine_mode mode1;
4165 HOST_WIDE_INT bitsize, bitpos;
4166 tree offset;
4167 int unsignedp;
4168 int volatilep = 0;
4169 tree tem;
4171 push_temp_slots ();
4172 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4173 &unsignedp, &volatilep, true);
4175 /* If we are going to use store_bit_field and extract_bit_field,
4176 make sure to_rtx will be safe for multiple use. */
4178 to_rtx = expand_normal (tem);
4180 if (offset != 0)
4182 rtx offset_rtx;
4184 if (!MEM_P (to_rtx))
4186 /* We can get constant negative offsets into arrays with broken
4187 user code. Translate this to a trap instead of ICEing. */
4188 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4189 expand_builtin_trap ();
4190 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4193 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4194 #ifdef POINTERS_EXTEND_UNSIGNED
4195 if (GET_MODE (offset_rtx) != Pmode)
4196 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4197 #else
4198 if (GET_MODE (offset_rtx) != ptr_mode)
4199 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4200 #endif
4202 /* A constant address in TO_RTX can have VOIDmode; we must not try
4203 to call force_reg in that case, so avoid it. */
4204 if (MEM_P (to_rtx)
4205 && GET_MODE (to_rtx) == BLKmode
4206 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4207 && bitsize > 0
4208 && (bitpos % bitsize) == 0
4209 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4210 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4212 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4213 bitpos = 0;
4216 to_rtx = offset_address (to_rtx, offset_rtx,
4217 highest_pow2_factor_for_target (to,
4218 offset));
4221 /* Handle expand_expr of a complex value returning a CONCAT. */
4222 if (GET_CODE (to_rtx) == CONCAT)
4224 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4226 gcc_assert (bitpos == 0);
4227 result = store_expr (from, to_rtx, false, nontemporal);
4229 else
4231 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4232 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4233 nontemporal);
4236 else
4238 if (MEM_P (to_rtx))
4240 /* If the field is at offset zero, we could have been given the
4241 DECL_RTX of the parent struct. Don't munge it. */
4242 to_rtx = shallow_copy_rtx (to_rtx);
4244 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4246 /* Deal with volatile and readonly fields. The former is only
4247 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4248 if (volatilep)
4249 MEM_VOLATILE_P (to_rtx) = 1;
4250 if (component_uses_parent_alias_set (to))
4251 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4254 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4255 to_rtx, to, from))
4256 result = NULL;
4257 else
4258 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4259 TREE_TYPE (tem), get_alias_set (to),
4260 nontemporal);
4263 if (result)
4264 preserve_temp_slots (result);
4265 free_temp_slots ();
4266 pop_temp_slots ();
4267 return;
4270 /* If the rhs is a function call and its value is not an aggregate,
4271 call the function before we start to compute the lhs.
4272 This is needed for correct code for cases such as
4273 val = setjmp (buf) on machines where reference to val
4274 requires loading up part of an address in a separate insn.
4276 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4277 since it might be a promoted variable where the zero- or sign-extension
4278 needs to be done. Handling this in the normal way is safe because no
4279 computation is done before the call. */
4280 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4281 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4282 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4283 && REG_P (DECL_RTL (to))))
4285 rtx value;
4287 push_temp_slots ();
4288 value = expand_normal (from);
4289 if (to_rtx == 0)
4290 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4292 /* Handle calls that return values in multiple non-contiguous locations.
4293 The Irix 6 ABI has examples of this. */
4294 if (GET_CODE (to_rtx) == PARALLEL)
4295 emit_group_load (to_rtx, value, TREE_TYPE (from),
4296 int_size_in_bytes (TREE_TYPE (from)));
4297 else if (GET_MODE (to_rtx) == BLKmode)
4298 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4299 else
4301 if (POINTER_TYPE_P (TREE_TYPE (to)))
4302 value = convert_memory_address (GET_MODE (to_rtx), value);
4303 emit_move_insn (to_rtx, value);
4305 preserve_temp_slots (to_rtx);
4306 free_temp_slots ();
4307 pop_temp_slots ();
4308 return;
4311 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4312 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4314 if (to_rtx == 0)
4315 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4317 /* Don't move directly into a return register. */
4318 if (TREE_CODE (to) == RESULT_DECL
4319 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4321 rtx temp;
4323 push_temp_slots ();
4324 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4326 if (GET_CODE (to_rtx) == PARALLEL)
4327 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4328 int_size_in_bytes (TREE_TYPE (from)));
4329 else
4330 emit_move_insn (to_rtx, temp);
4332 preserve_temp_slots (to_rtx);
4333 free_temp_slots ();
4334 pop_temp_slots ();
4335 return;
4338 /* In case we are returning the contents of an object which overlaps
4339 the place the value is being stored, use a safe function when copying
4340 a value through a pointer into a structure value return block. */
4341 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4342 && current_function_returns_struct
4343 && !current_function_returns_pcc_struct)
4345 rtx from_rtx, size;
4347 push_temp_slots ();
4348 size = expr_size (from);
4349 from_rtx = expand_normal (from);
4351 emit_library_call (memmove_libfunc, LCT_NORMAL,
4352 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4353 XEXP (from_rtx, 0), Pmode,
4354 convert_to_mode (TYPE_MODE (sizetype),
4355 size, TYPE_UNSIGNED (sizetype)),
4356 TYPE_MODE (sizetype));
4358 preserve_temp_slots (to_rtx);
4359 free_temp_slots ();
4360 pop_temp_slots ();
4361 return;
4364 /* Compute FROM and store the value in the rtx we got. */
4366 push_temp_slots ();
4367 result = store_expr (from, to_rtx, 0, nontemporal);
4368 preserve_temp_slots (result);
4369 free_temp_slots ();
4370 pop_temp_slots ();
4371 return;
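/* An illustrative sketch, not part of the compiler: an assignment
   like

     s.bits.count = n;

   takes the handled_component_p branch above: get_inner_reference
   decomposes the left-hand side into a base object plus a bit offset,
   and the store is done by optimize_bitfield_assignment_op or
   store_field; a plain "x = n" falls through to the final store_expr
   call (S, N and X are hypothetical).  */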
4374 /* Emits a nontemporal store insn that moves FROM to TO. Returns true if this
4375 succeeded, false otherwise. */
4377 static bool
4378 emit_storent_insn (rtx to, rtx from)
4380 enum machine_mode mode = GET_MODE (to), imode;
4381 enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4382 rtx pattern;
4384 if (code == CODE_FOR_nothing)
4385 return false;
4387 imode = insn_data[code].operand[0].mode;
4388 if (!insn_data[code].operand[0].predicate (to, imode))
4389 return false;
4391 imode = insn_data[code].operand[1].mode;
4392 if (!insn_data[code].operand[1].predicate (from, imode))
4394 from = copy_to_mode_reg (imode, from);
4395 if (!insn_data[code].operand[1].predicate (from, imode))
4396 return false;
4399 pattern = GEN_FCN (code) (to, from);
4400 if (pattern == NULL_RTX)
4401 return false;
4403 emit_insn (pattern);
4404 return true;
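/* An illustrative sketch, not part of the compiler: a caller wanting
   a streaming store with a plain store as fallback could write the
   lines below, which is in essence what store_expr does when
   NONTEMPORAL is set; DEST and SRC are hypothetical rtxes of the same
   mode.  */
#if 0
  if (!emit_storent_insn (dest, src))
    emit_move_insn (dest, src);
#endif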
4407 /* Generate code for computing expression EXP,
4408 and storing the value into TARGET.
4410 If the mode is BLKmode then we may return TARGET itself.
4411 It turns out that in BLKmode it doesn't cause a problem,
4412 because C has no operators that could combine two different
4413 assignments into the same BLKmode object with different values
4414 with no sequence point. Will other languages need this to
4415 be more thorough?
4417 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4418 stack, and block moves may need to be treated specially.
4420 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4422 rtx
4423 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4425 rtx temp;
4426 rtx alt_rtl = NULL_RTX;
4427 int dont_return_target = 0;
4429 if (VOID_TYPE_P (TREE_TYPE (exp)))
4431 /* C++ can generate ?: expressions with a throw expression in one
4432 branch and an rvalue in the other. Here, we resolve attempts to
4433 store the throw expression's nonexistent result. */
4434 gcc_assert (!call_param_p);
4435 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4436 return NULL_RTX;
4438 if (TREE_CODE (exp) == COMPOUND_EXPR)
4440 /* Perform first part of compound expression, then assign from second
4441 part. */
4442 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4443 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4444 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4445 nontemporal);
4447 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4449 /* For conditional expression, get safe form of the target. Then
4450 test the condition, doing the appropriate assignment on either
4451 side. This avoids the creation of unnecessary temporaries.
4452 For non-BLKmode, it is more efficient not to do this. */
4454 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4456 do_pending_stack_adjust ();
4457 NO_DEFER_POP;
4458 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4459 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4460 nontemporal);
4461 emit_jump_insn (gen_jump (lab2));
4462 emit_barrier ();
4463 emit_label (lab1);
4464 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4465 nontemporal);
4466 emit_label (lab2);
4467 OK_DEFER_POP;
4469 return NULL_RTX;
4471 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4472 /* If this is a scalar in a register that is stored in a wider mode
4473 than the declared mode, compute the result into its declared mode
4474 and then convert to the wider mode. Our value is the computed
4475 expression. */
4477 rtx inner_target = 0;
4479 /* We can do the conversion inside EXP, which will often result
4480 in some optimizations. Do the conversion in two steps: first
4481 change the signedness, if needed, then the extend. But don't
4482 do this if the type of EXP is a subtype of something else
4483 since then the conversion might involve more than just
4484 converting modes. */
4485 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4486 && TREE_TYPE (TREE_TYPE (exp)) == 0
4487 && (!lang_hooks.reduce_bit_field_operations
4488 || (GET_MODE_PRECISION (GET_MODE (target))
4489 == TYPE_PRECISION (TREE_TYPE (exp)))))
4491 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4492 != SUBREG_PROMOTED_UNSIGNED_P (target))
4494 /* Some types, e.g. Fortran's logical*4, won't have a signed
4495 version, so use the mode instead. */
4496 tree ntype
4497 = (signed_or_unsigned_type_for
4498 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4499 if (ntype == NULL)
4500 ntype = lang_hooks.types.type_for_mode
4501 (TYPE_MODE (TREE_TYPE (exp)),
4502 SUBREG_PROMOTED_UNSIGNED_P (target));
4504 exp = fold_convert (ntype, exp);
4507 exp = fold_convert (lang_hooks.types.type_for_mode
4508 (GET_MODE (SUBREG_REG (target)),
4509 SUBREG_PROMOTED_UNSIGNED_P (target)),
4510 exp);
4512 inner_target = SUBREG_REG (target);
4515 temp = expand_expr (exp, inner_target, VOIDmode,
4516 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4518 /* If TEMP is a VOIDmode constant, use convert_modes to make
4519 sure that we properly convert it. */
4520 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4522 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4523 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4524 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4525 GET_MODE (target), temp,
4526 SUBREG_PROMOTED_UNSIGNED_P (target));
4529 convert_move (SUBREG_REG (target), temp,
4530 SUBREG_PROMOTED_UNSIGNED_P (target));
4532 return NULL_RTX;
4534 else if (TREE_CODE (exp) == STRING_CST
4535 && !nontemporal && !call_param_p
4536 && TREE_STRING_LENGTH (exp) > 0
4537 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4539 /* Optimize initialization of an array with a STRING_CST. */
4540 HOST_WIDE_INT exp_len, str_copy_len;
4541 rtx dest_mem;
4543 exp_len = int_expr_size (exp);
4544 if (exp_len <= 0)
4545 goto normal_expr;
4547 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4548 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4549 goto normal_expr;
4551 str_copy_len = TREE_STRING_LENGTH (exp);
4552 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4554 str_copy_len += STORE_MAX_PIECES - 1;
4555 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4557 str_copy_len = MIN (str_copy_len, exp_len);
4558 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4559 (void *) TREE_STRING_POINTER (exp),
4560 MEM_ALIGN (target), false))
4561 goto normal_expr;
4563 dest_mem = target;
4565 dest_mem = store_by_pieces (dest_mem,
4566 str_copy_len, builtin_strncpy_read_str,
4567 (void *) TREE_STRING_POINTER (exp),
4568 MEM_ALIGN (target), false,
4569 exp_len > str_copy_len ? 1 : 0);
4570 if (exp_len > str_copy_len)
4571 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4572 GEN_INT (exp_len - str_copy_len),
4573 BLOCK_OP_NORMAL);
4574 return NULL_RTX;
4576 else
4578 rtx tmp_target;
4580 normal_expr:
4581 /* If we want to use a nontemporal store, force the value into a
4582 register first. */
4583 tmp_target = nontemporal ? NULL_RTX : target;
4584 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4585 (call_param_p
4586 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4587 &alt_rtl);
4588 /* Return TARGET if it's a specified hardware register.
4589 If TARGET is a volatile mem ref, either return TARGET
4590 or return a reg copied *from* TARGET; ANSI requires this.
4592 Otherwise, if TEMP is not TARGET, return TEMP
4593 if it is constant (for efficiency),
4594 or if we really want the correct value. */
4595 if (!(target && REG_P (target)
4596 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4597 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4598 && ! rtx_equal_p (temp, target)
4599 && CONSTANT_P (temp))
4600 dont_return_target = 1;
4603 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4604 the same as that of TARGET, adjust the constant. This is needed, for
4605 example, in case it is a CONST_DOUBLE and we want only a word-sized
4606 value. */
4607 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4608 && TREE_CODE (exp) != ERROR_MARK
4609 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4610 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4611 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4613 /* If value was not generated in the target, store it there.
4614 Convert the value to TARGET's type first if necessary and emit the
4615 pending incrementations that have been queued when expanding EXP.
4616 Note that we cannot emit the whole queue blindly because this will
4617 effectively disable the POST_INC optimization later.
4619 If TEMP and TARGET compare equal according to rtx_equal_p, but
4620 one or both of them are volatile memory refs, we have to distinguish
4621 two cases:
4622 - expand_expr has used TARGET. In this case, we must not generate
4623 another copy. This can be detected by TARGET being equal according
4624 to == .
4625 - expand_expr has not used TARGET - that means that the source just
4626 happens to have the same RTX form. Since temp will have been created
4627 by expand_expr, it will compare unequal according to == .
4628 We must generate a copy in this case, to reach the correct number
4629 of volatile memory references. */
4631 if ((! rtx_equal_p (temp, target)
4632 || (temp != target && (side_effects_p (temp)
4633 || side_effects_p (target))))
4634 && TREE_CODE (exp) != ERROR_MARK
4635 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4636 but TARGET is not valid memory reference, TEMP will differ
4637 from TARGET although it is really the same location. */
4638 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4639 /* If there's nothing to copy, don't bother. Don't call
4640 expr_size unless necessary, because some front-ends (C++)
4641 expr_size-hook must not be given objects that are not
4642 supposed to be bit-copied or bit-initialized. */
4643 && expr_size (exp) != const0_rtx)
4645 if (GET_MODE (temp) != GET_MODE (target)
4646 && GET_MODE (temp) != VOIDmode)
4648 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4649 if (dont_return_target)
4651 /* In this case, we will return TEMP,
4652 so make sure it has the proper mode.
4653 But don't forget to store the value into TARGET. */
4654 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4655 emit_move_insn (target, temp);
4657 else if (GET_MODE (target) == BLKmode)
4658 emit_block_move (target, temp, expr_size (exp),
4659 (call_param_p
4660 ? BLOCK_OP_CALL_PARM
4661 : BLOCK_OP_NORMAL));
4662 else
4663 convert_move (target, temp, unsignedp);
4666 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4668 /* Handle copying a string constant into an array. The string
4669 constant may be shorter than the array. So copy just the string's
4670 actual length, and clear the rest. First get the size of the data
4671 type of the string, which is actually the size of the target. */
4672 rtx size = expr_size (exp);
4674 if (GET_CODE (size) == CONST_INT
4675 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4676 emit_block_move (target, temp, size,
4677 (call_param_p
4678 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4679 else
4681 /* Compute the size of the data to copy from the string. */
4682 tree copy_size
4683 = size_binop (MIN_EXPR,
4684 make_tree (sizetype, size),
4685 size_int (TREE_STRING_LENGTH (exp)));
4686 rtx copy_size_rtx
4687 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4688 (call_param_p
4689 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4690 rtx label = 0;
4692 /* Copy that much. */
4693 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4694 TYPE_UNSIGNED (sizetype));
4695 emit_block_move (target, temp, copy_size_rtx,
4696 (call_param_p
4697 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4699 /* Figure out how much is left in TARGET that we have to clear.
4700 Do all calculations in ptr_mode. */
4701 if (GET_CODE (copy_size_rtx) == CONST_INT)
4703 size = plus_constant (size, -INTVAL (copy_size_rtx));
4704 target = adjust_address (target, BLKmode,
4705 INTVAL (copy_size_rtx));
4707 else
4709 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4710 copy_size_rtx, NULL_RTX, 0,
4711 OPTAB_LIB_WIDEN);
4713 #ifdef POINTERS_EXTEND_UNSIGNED
4714 if (GET_MODE (copy_size_rtx) != Pmode)
4715 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4716 TYPE_UNSIGNED (sizetype));
4717 #endif
4719 target = offset_address (target, copy_size_rtx,
4720 highest_pow2_factor (copy_size));
4721 label = gen_label_rtx ();
4722 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4723 GET_MODE (size), 0, label);
4726 if (size != const0_rtx)
4727 clear_storage (target, size, BLOCK_OP_NORMAL);
4729 if (label)
4730 emit_label (label);
4733 /* Handle calls that return values in multiple non-contiguous locations.
4734 The Irix 6 ABI has examples of this. */
4735 else if (GET_CODE (target) == PARALLEL)
4736 emit_group_load (target, temp, TREE_TYPE (exp),
4737 int_size_in_bytes (TREE_TYPE (exp)));
4738 else if (GET_MODE (temp) == BLKmode)
4739 emit_block_move (target, temp, expr_size (exp),
4740 (call_param_p
4741 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4742 else if (nontemporal
4743 && emit_storent_insn (target, temp))
4744 /* If we managed to emit a nontemporal store, there is nothing else to
4745 do. */
4747 else
4749 temp = force_operand (temp, target);
4750 if (temp != target)
4751 emit_move_insn (target, temp);
4755 return NULL_RTX;
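/* An illustrative sketch, not part of the compiler, of the STRING_CST
   fast path above:  */
#if 0
  char buf[16] = "hi";  /* The constant bytes, including the
                           terminating NUL and rounded up for
                           store_by_pieces, are copied directly;
                           clear_storage then zeroes the tail of
                           BUF.  */
#endif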
4758 /* Helper for categorize_ctor_elements. Identical interface. */
4760 static bool
4761 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4762 HOST_WIDE_INT *p_elt_count,
4763 bool *p_must_clear)
4765 unsigned HOST_WIDE_INT idx;
4766 HOST_WIDE_INT nz_elts, elt_count;
4767 tree value, purpose;
4769 /* Whether CTOR is a valid constant initializer, in accordance with what
4770 initializer_constant_valid_p does. If inferred from the constructor
4771 elements, true until proven otherwise. */
4772 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4773 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4775 nz_elts = 0;
4776 elt_count = 0;
4778 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4780 HOST_WIDE_INT mult;
4782 mult = 1;
4783 if (TREE_CODE (purpose) == RANGE_EXPR)
4785 tree lo_index = TREE_OPERAND (purpose, 0);
4786 tree hi_index = TREE_OPERAND (purpose, 1);
4788 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4789 mult = (tree_low_cst (hi_index, 1)
4790 - tree_low_cst (lo_index, 1) + 1);
4793 switch (TREE_CODE (value))
4795 case CONSTRUCTOR:
4797 HOST_WIDE_INT nz = 0, ic = 0;
4799 bool const_elt_p
4800 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4802 nz_elts += mult * nz;
4803 elt_count += mult * ic;
4805 if (const_from_elts_p && const_p)
4806 const_p = const_elt_p;
4808 break;
4810 case INTEGER_CST:
4811 case REAL_CST:
4812 case FIXED_CST:
4813 if (!initializer_zerop (value))
4814 nz_elts += mult;
4815 elt_count += mult;
4816 break;
4818 case STRING_CST:
4819 nz_elts += mult * TREE_STRING_LENGTH (value);
4820 elt_count += mult * TREE_STRING_LENGTH (value);
4821 break;
4823 case COMPLEX_CST:
4824 if (!initializer_zerop (TREE_REALPART (value)))
4825 nz_elts += mult;
4826 if (!initializer_zerop (TREE_IMAGPART (value)))
4827 nz_elts += mult;
4828 elt_count += mult;
4829 break;
4831 case VECTOR_CST:
4833 tree v;
4834 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4836 if (!initializer_zerop (TREE_VALUE (v)))
4837 nz_elts += mult;
4838 elt_count += mult;
4841 break;
4843 default:
4844 nz_elts += mult;
4845 elt_count += mult;
4847 if (const_from_elts_p && const_p)
4848 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4849 != NULL_TREE;
4850 break;
4854 if (!*p_must_clear
4855 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4856 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4858 tree init_sub_type;
4859 bool clear_this = true;
4861 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4863 /* We don't expect more than one element of the union to be
4864 initialized. Not sure what we should do otherwise... */
4865 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4866 == 1);
4868 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4869 CONSTRUCTOR_ELTS (ctor),
4870 0)->value);
4872 /* ??? We could look at each element of the union, and find the
4873 largest element. Which would avoid comparing the size of the
4874 initialized element against any tail padding in the union.
4875 Doesn't seem worth the effort... */
4876 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4877 TYPE_SIZE (init_sub_type)) == 1)
4879 /* And now we have to find out if the element itself is fully
4880 constructed. E.g. for union { struct { int a, b; } s; } u
4881 = { .s = { .a = 1 } }. */
4882 if (elt_count == count_type_elements (init_sub_type, false))
4883 clear_this = false;
4887 *p_must_clear = clear_this;
4890 *p_nz_elts += nz_elts;
4891 *p_elt_count += elt_count;
4893 return const_p;
4896 /* Examine CTOR to discover:
4897 * how many scalar fields are set to nonzero values,
4898 and place it in *P_NZ_ELTS;
4899 * how many scalar fields in total are in CTOR,
4900 and place it in *P_ELT_COUNT.
4901 * if a type is a union, and the initializer from the constructor
4902 is not the largest element in the union, then set *p_must_clear.
4904 Return whether or not CTOR is a valid static constant initializer, the same
4905 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4907 bool
4908 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4909 HOST_WIDE_INT *p_elt_count,
4910 bool *p_must_clear)
4912 *p_nz_elts = 0;
4913 *p_elt_count = 0;
4914 *p_must_clear = false;
4916 return
4917 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
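/* An illustrative sketch, not part of the compiler, of the counts
   computed above:  */
#if 0
  struct p { int x, y, z; };
  struct p q = { 1, 0, 2 };     /* *P_NZ_ELTS == 2, *P_ELT_COUNT == 3;
                                   the function returns true, since
                                   every element is a valid constant
                                   initializer.  */
#endif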
4920 /* Count the number of scalars in TYPE. Return -1 on overflow or if
4921 TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a flexible
4922 array member at the end of the structure. */
4924 HOST_WIDE_INT
4925 count_type_elements (const_tree type, bool allow_flexarr)
4927 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4928 switch (TREE_CODE (type))
4930 case ARRAY_TYPE:
4932 tree telts = array_type_nelts (type);
4933 if (telts && host_integerp (telts, 1))
4935 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4936 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4937 if (n == 0)
4938 return 0;
4939 else if (max / n > m)
4940 return n * m;
4942 return -1;
4945 case RECORD_TYPE:
4947 HOST_WIDE_INT n = 0, t;
4948 tree f;
4950 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4951 if (TREE_CODE (f) == FIELD_DECL)
4953 t = count_type_elements (TREE_TYPE (f), false);
4954 if (t < 0)
4956 /* Check for structures with flexible array member. */
4957 tree tf = TREE_TYPE (f);
4958 if (allow_flexarr
4959 && TREE_CHAIN (f) == NULL
4960 && TREE_CODE (tf) == ARRAY_TYPE
4961 && TYPE_DOMAIN (tf)
4962 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4963 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4964 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4965 && int_size_in_bytes (type) >= 0)
4966 break;
4968 return -1;
4970 n += t;
4973 return n;
4976 case UNION_TYPE:
4977 case QUAL_UNION_TYPE:
4978 return -1;
4980 case COMPLEX_TYPE:
4981 return 2;
4983 case VECTOR_TYPE:
4984 return TYPE_VECTOR_SUBPARTS (type);
4986 case INTEGER_TYPE:
4987 case REAL_TYPE:
4988 case FIXED_POINT_TYPE:
4989 case ENUMERAL_TYPE:
4990 case BOOLEAN_TYPE:
4991 case POINTER_TYPE:
4992 case OFFSET_TYPE:
4993 case REFERENCE_TYPE:
4994 return 1;
4996 case VOID_TYPE:
4997 case METHOD_TYPE:
4998 case FUNCTION_TYPE:
4999 case LANG_TYPE:
5000 default:
5001 gcc_unreachable ();
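/* An illustrative sketch, not part of the compiler:  */
#if 0
  struct q { int a; double b[4]; _Complex float c; };
  /* Q counts as 1 + 4 + 2 == 7 scalars; a union, a variable-sized
     type, or an overflowing count gives -1 instead.  */
#endif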
5005 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
5007 static int
5008 mostly_zeros_p (const_tree exp)
5010 if (TREE_CODE (exp) == CONSTRUCTOR)
5013 HOST_WIDE_INT nz_elts, count, elts;
5014 bool must_clear;
5016 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5017 if (must_clear)
5018 return 1;
5020 elts = count_type_elements (TREE_TYPE (exp), false);
5022 return nz_elts < elts / 4;
5025 return initializer_zerop (exp);
5028 /* Return 1 if EXP contains all zeros. */
5030 static int
5031 all_zeros_p (const_tree exp)
5033 if (TREE_CODE (exp) == CONSTRUCTOR)
5036 HOST_WIDE_INT nz_elts, count;
5037 bool must_clear;
5039 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5040 return nz_elts == 0;
5043 return initializer_zerop (exp);
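/* An illustrative sketch, not part of the compiler, contrasting the
   two predicates above:  */
#if 0
  int v[8] = { [3] = 5 };       /* One nonzero of 8 elements: 1 < 8/4,
                                   so mostly_zeros_p returns 1 but
                                   all_zeros_p returns 0.  */
  int w[8] = { 0 };             /* all_zeros_p returns 1.  */
#endif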
5046 /* Helper function for store_constructor.
5047 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5048 TYPE is the type of the CONSTRUCTOR, not the element type.
5049 CLEARED is as for store_constructor.
5050 ALIAS_SET is the alias set to use for any stores.
5052 This provides a recursive shortcut back to store_constructor when it isn't
5053 necessary to go through store_field. This is so that we can pass through
5054 the cleared field to let store_constructor know that we may not have to
5055 clear a substructure if the outer structure has already been cleared. */
5057 static void
5058 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5059 HOST_WIDE_INT bitpos, enum machine_mode mode,
5060 tree exp, tree type, int cleared,
5061 alias_set_type alias_set)
5063 if (TREE_CODE (exp) == CONSTRUCTOR
5064 /* We can only call store_constructor recursively if the size and
5065 bit position are on a byte boundary. */
5066 && bitpos % BITS_PER_UNIT == 0
5067 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5068 /* If we have a nonzero bitpos for a register target, then we just
5069 let store_field do the bitfield handling. This is unlikely to
5070 generate unnecessary clear instructions anyways. */
5071 && (bitpos == 0 || MEM_P (target)))
5073 if (MEM_P (target))
5074 target
5075 = adjust_address (target,
5076 GET_MODE (target) == BLKmode
5077 || 0 != (bitpos
5078 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5079 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5082 /* Update the alias set, if required. */
5083 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5084 && MEM_ALIAS_SET (target) != 0)
5086 target = copy_rtx (target);
5087 set_mem_alias_set (target, alias_set);
5090 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5092 else
5093 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5096 /* Store the value of constructor EXP into the rtx TARGET.
5097 TARGET is either a REG or a MEM; we know it cannot conflict, since
5098 safe_from_p has been called.
5099 CLEARED is true if TARGET is known to have been zero'd.
5100 SIZE is the number of bytes of TARGET we are allowed to modify: this
5101 may not be the same as the size of EXP if we are assigning to a field
5102 which has been packed to exclude padding bits. */
5104 static void
5105 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5107 tree type = TREE_TYPE (exp);
5108 #ifdef WORD_REGISTER_OPERATIONS
5109 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5110 #endif
5112 switch (TREE_CODE (type))
5114 case RECORD_TYPE:
5115 case UNION_TYPE:
5116 case QUAL_UNION_TYPE:
5118 unsigned HOST_WIDE_INT idx;
5119 tree field, value;
5121 /* If size is zero or the target is already cleared, do nothing. */
5122 if (size == 0 || cleared)
5123 cleared = 1;
5124 /* We either clear the aggregate or indicate the value is dead. */
5125 else if ((TREE_CODE (type) == UNION_TYPE
5126 || TREE_CODE (type) == QUAL_UNION_TYPE)
5127 && ! CONSTRUCTOR_ELTS (exp))
5128 /* If the constructor is empty, clear the union. */
5130 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5131 cleared = 1;
5134 /* If we are building a static constructor into a register,
5135 set the initial value as zero so we can fold the value into
5136 a constant. But if more than one register is involved,
5137 this probably loses. */
5138 else if (REG_P (target) && TREE_STATIC (exp)
5139 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5141 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5142 cleared = 1;
5145 /* If the constructor has fewer fields than the structure or
5146 if we are initializing the structure to mostly zeros, clear
5147 the whole structure first. Don't do this if TARGET is a
5148 register whose mode size isn't equal to SIZE since
5149 clear_storage can't handle this case. */
5150 else if (size > 0
5151 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5152 != fields_length (type))
5153 || mostly_zeros_p (exp))
5154 && (!REG_P (target)
5155 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5156 == size)))
5158 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5159 cleared = 1;
5162 if (REG_P (target) && !cleared)
5163 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5165 /* Store each element of the constructor into the
5166 corresponding field of TARGET. */
5167 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5169 enum machine_mode mode;
5170 HOST_WIDE_INT bitsize;
5171 HOST_WIDE_INT bitpos = 0;
5172 tree offset;
5173 rtx to_rtx = target;
5175 /* Just ignore missing fields. We cleared the whole
5176 structure, above, if any fields are missing. */
5177 if (field == 0)
5178 continue;
5180 if (cleared && initializer_zerop (value))
5181 continue;
5183 if (host_integerp (DECL_SIZE (field), 1))
5184 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5185 else
5186 bitsize = -1;
5188 mode = DECL_MODE (field);
5189 if (DECL_BIT_FIELD (field))
5190 mode = VOIDmode;
5192 offset = DECL_FIELD_OFFSET (field);
5193 if (host_integerp (offset, 0)
5194 && host_integerp (bit_position (field), 0))
5196 bitpos = int_bit_position (field);
5197 offset = 0;
5199 else
5200 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5202 if (offset)
5204 rtx offset_rtx;
5206 offset
5207 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5208 make_tree (TREE_TYPE (exp),
5209 target));
5211 offset_rtx = expand_normal (offset);
5212 gcc_assert (MEM_P (to_rtx));
5214 #ifdef POINTERS_EXTEND_UNSIGNED
5215 if (GET_MODE (offset_rtx) != Pmode)
5216 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5217 #else
5218 if (GET_MODE (offset_rtx) != ptr_mode)
5219 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5220 #endif
5222 to_rtx = offset_address (to_rtx, offset_rtx,
5223 highest_pow2_factor (offset));
5226 #ifdef WORD_REGISTER_OPERATIONS
5227 /* If this initializes a field that is smaller than a
5228 word, at the start of a word, try to widen it to a full
5229 word. This special case allows us to output C++ member
5230 function initializations in a form that the optimizers
5231 can understand. */
5232 if (REG_P (target)
5233 && bitsize < BITS_PER_WORD
5234 && bitpos % BITS_PER_WORD == 0
5235 && GET_MODE_CLASS (mode) == MODE_INT
5236 && TREE_CODE (value) == INTEGER_CST
5237 && exp_size >= 0
5238 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5240 tree type = TREE_TYPE (value);
5242 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5244 type = lang_hooks.types.type_for_size
5245 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5246 value = fold_convert (type, value);
5249 if (BYTES_BIG_ENDIAN)
5250 value
5251 = fold_build2 (LSHIFT_EXPR, type, value,
5252 build_int_cst (type,
5253 BITS_PER_WORD - bitsize));
5254 bitsize = BITS_PER_WORD;
5255 mode = word_mode;
5257 #endif
5259 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5260 && DECL_NONADDRESSABLE_P (field))
5262 to_rtx = copy_rtx (to_rtx);
5263 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5266 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5267 value, type, cleared,
5268 get_alias_set (TREE_TYPE (field)));
5270 break;
5272 case ARRAY_TYPE:
5274 tree value, index;
5275 unsigned HOST_WIDE_INT i;
5276 int need_to_clear;
5277 tree domain;
5278 tree elttype = TREE_TYPE (type);
5279 int const_bounds_p;
5280 HOST_WIDE_INT minelt = 0;
5281 HOST_WIDE_INT maxelt = 0;
5283 domain = TYPE_DOMAIN (type);
5284 const_bounds_p = (TYPE_MIN_VALUE (domain)
5285 && TYPE_MAX_VALUE (domain)
5286 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5287 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5289 /* If we have constant bounds for the range of the type, get them. */
5290 if (const_bounds_p)
5292 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5293 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5296 /* If the constructor has fewer elements than the array, clear
5297 the whole array first. Similarly if this is a static
5298 constructor of a non-BLKmode object. */
5299 if (cleared)
5300 need_to_clear = 0;
5301 else if (REG_P (target) && TREE_STATIC (exp))
5302 need_to_clear = 1;
5303 else
5305 unsigned HOST_WIDE_INT idx;
5306 tree index, value;
5307 HOST_WIDE_INT count = 0, zero_count = 0;
5308 need_to_clear = ! const_bounds_p;
5310 /* This loop is a more accurate version of the loop in
5311 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5312 is also needed to check for missing elements. */
5313 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5315 HOST_WIDE_INT this_node_count;
5317 if (need_to_clear)
5318 break;
5320 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5322 tree lo_index = TREE_OPERAND (index, 0);
5323 tree hi_index = TREE_OPERAND (index, 1);
5325 if (! host_integerp (lo_index, 1)
5326 || ! host_integerp (hi_index, 1))
5328 need_to_clear = 1;
5329 break;
5332 this_node_count = (tree_low_cst (hi_index, 1)
5333 - tree_low_cst (lo_index, 1) + 1);
5335 else
5336 this_node_count = 1;
5338 count += this_node_count;
5339 if (mostly_zeros_p (value))
5340 zero_count += this_node_count;
5343 /* Clear the entire array first if there are any missing
5344 elements, or if the incidence of zero elements is >=
5345 75%. */
5346 if (! need_to_clear
5347 && (count < maxelt - minelt + 1
5348 || 4 * zero_count >= 3 * count))
5349 need_to_clear = 1;
5352 if (need_to_clear && size > 0)
5354 if (REG_P (target))
5355 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5356 else
5357 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5358 cleared = 1;
5361 if (!cleared && REG_P (target))
5362 /* Inform later passes that the old value is dead. */
5363 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5365 /* Store each element of the constructor into the
5366 corresponding element of TARGET, determined by counting the
5367 elements. */
5368 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5370 enum machine_mode mode;
5371 HOST_WIDE_INT bitsize;
5372 HOST_WIDE_INT bitpos;
5373 int unsignedp;
5374 rtx xtarget = target;
5376 if (cleared && initializer_zerop (value))
5377 continue;
5379 unsignedp = TYPE_UNSIGNED (elttype);
5380 mode = TYPE_MODE (elttype);
5381 if (mode == BLKmode)
5382 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5383 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5384 : -1);
5385 else
5386 bitsize = GET_MODE_BITSIZE (mode);
5388 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5390 tree lo_index = TREE_OPERAND (index, 0);
5391 tree hi_index = TREE_OPERAND (index, 1);
5392 rtx index_r, pos_rtx;
5393 HOST_WIDE_INT lo, hi, count;
5394 tree position;
5396 /* If the range is constant and "small", unroll the loop. */
5397 if (const_bounds_p
5398 && host_integerp (lo_index, 0)
5399 && host_integerp (hi_index, 0)
5400 && (lo = tree_low_cst (lo_index, 0),
5401 hi = tree_low_cst (hi_index, 0),
5402 count = hi - lo + 1,
5403 (!MEM_P (target)
5404 || count <= 2
5405 || (host_integerp (TYPE_SIZE (elttype), 1)
5406 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5407 <= 40 * 8)))))
5409 lo -= minelt; hi -= minelt;
5410 for (; lo <= hi; lo++)
5412 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5414 if (MEM_P (target)
5415 && !MEM_KEEP_ALIAS_SET_P (target)
5416 && TREE_CODE (type) == ARRAY_TYPE
5417 && TYPE_NONALIASED_COMPONENT (type))
5419 target = copy_rtx (target);
5420 MEM_KEEP_ALIAS_SET_P (target) = 1;
5423 store_constructor_field
5424 (target, bitsize, bitpos, mode, value, type, cleared,
5425 get_alias_set (elttype));
5428 else
5430 rtx loop_start = gen_label_rtx ();
5431 rtx loop_end = gen_label_rtx ();
5432 tree exit_cond;
5434 expand_normal (hi_index);
5435 unsignedp = TYPE_UNSIGNED (domain);
5437 index = build_decl (VAR_DECL, NULL_TREE, domain);
5439 index_r
5440 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5441 &unsignedp, 0));
5442 SET_DECL_RTL (index, index_r);
5443 store_expr (lo_index, index_r, 0, false);
5445 /* Build the head of the loop. */
5446 do_pending_stack_adjust ();
5447 emit_label (loop_start);
5449 /* Assign value to element index. */
5450 position =
5451 fold_convert (ssizetype,
5452 fold_build2 (MINUS_EXPR,
5453 TREE_TYPE (index),
5454 index,
5455 TYPE_MIN_VALUE (domain)));
5457 position =
5458 size_binop (MULT_EXPR, position,
5459 fold_convert (ssizetype,
5460 TYPE_SIZE_UNIT (elttype)));
5462 pos_rtx = expand_normal (position);
5463 xtarget = offset_address (target, pos_rtx,
5464 highest_pow2_factor (position));
5465 xtarget = adjust_address (xtarget, mode, 0);
5466 if (TREE_CODE (value) == CONSTRUCTOR)
5467 store_constructor (value, xtarget, cleared,
5468 bitsize / BITS_PER_UNIT);
5469 else
5470 store_expr (value, xtarget, 0, false);
5472 /* Generate a conditional jump to exit the loop. */
5473 exit_cond = build2 (LT_EXPR, integer_type_node,
5474 index, hi_index);
5475 jumpif (exit_cond, loop_end);
5477 /* Update the loop counter, and jump to the head of
5478 the loop. */
5479 expand_assignment (index,
5480 build2 (PLUS_EXPR, TREE_TYPE (index),
5481 index, integer_one_node),
5482 false);
5484 emit_jump (loop_start);
5486 /* Build the end of the loop. */
5487 emit_label (loop_end);
5490 else if ((index != 0 && ! host_integerp (index, 0))
5491 || ! host_integerp (TYPE_SIZE (elttype), 1))
5493 tree position;
5495 if (index == 0)
5496 index = ssize_int (1);
5498 if (minelt)
5499 index = fold_convert (ssizetype,
5500 fold_build2 (MINUS_EXPR,
5501 TREE_TYPE (index),
5502 index,
5503 TYPE_MIN_VALUE (domain)));
5505 position =
5506 size_binop (MULT_EXPR, index,
5507 fold_convert (ssizetype,
5508 TYPE_SIZE_UNIT (elttype)));
5509 xtarget = offset_address (target,
5510 expand_normal (position),
5511 highest_pow2_factor (position));
5512 xtarget = adjust_address (xtarget, mode, 0);
5513 store_expr (value, xtarget, 0, false);
5515 else
5517 if (index != 0)
5518 bitpos = ((tree_low_cst (index, 0) - minelt)
5519 * tree_low_cst (TYPE_SIZE (elttype), 1));
5520 else
5521 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5523 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5524 && TREE_CODE (type) == ARRAY_TYPE
5525 && TYPE_NONALIASED_COMPONENT (type))
5527 target = copy_rtx (target);
5528 MEM_KEEP_ALIAS_SET_P (target) = 1;
5530 store_constructor_field (target, bitsize, bitpos, mode, value,
5531 type, cleared, get_alias_set (elttype));
5534 break;
5537 case VECTOR_TYPE:
5539 unsigned HOST_WIDE_INT idx;
5540 constructor_elt *ce;
5541 int i;
5542 int need_to_clear;
5543 int icode = 0;
5544 tree elttype = TREE_TYPE (type);
5545 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5546 enum machine_mode eltmode = TYPE_MODE (elttype);
5547 HOST_WIDE_INT bitsize;
5548 HOST_WIDE_INT bitpos;
5549 rtvec vector = NULL;
5550 unsigned n_elts;
5552 gcc_assert (eltmode != BLKmode);
5554 n_elts = TYPE_VECTOR_SUBPARTS (type);
5555 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5557 enum machine_mode mode = GET_MODE (target);
5559 icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5560 if (icode != CODE_FOR_nothing)
5562 unsigned int i;
5564 vector = rtvec_alloc (n_elts);
5565 for (i = 0; i < n_elts; i++)
5566 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5570 /* If the constructor has fewer elements than the vector,
5571 clear the whole vector first. Similarly if this is a static
5572 constructor of a non-BLKmode object. */
5573 if (cleared)
5574 need_to_clear = 0;
5575 else if (REG_P (target) && TREE_STATIC (exp))
5576 need_to_clear = 1;
5577 else
5579 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5580 tree value;
5582 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5584 int n_elts_here = tree_low_cst
5585 (int_const_binop (TRUNC_DIV_EXPR,
5586 TYPE_SIZE (TREE_TYPE (value)),
5587 TYPE_SIZE (elttype), 0), 1);
5589 count += n_elts_here;
5590 if (mostly_zeros_p (value))
5591 zero_count += n_elts_here;
5594 /* Clear the entire vector first if there are any missing elements,
5595 or if the incidence of zero elements is >= 75%. */
5596 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5599 if (need_to_clear && size > 0 && !vector)
5601 if (REG_P (target))
5602 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5603 else
5604 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5605 cleared = 1;
5608 /* Inform later passes that the old value is dead. */
5609 if (!cleared && !vector && REG_P (target))
5610 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5612 /* Store each element of the constructor into the corresponding
5613 element of TARGET, determined by counting the elements. */
5614 for (idx = 0, i = 0;
5615 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5616 idx++, i += bitsize / elt_size)
5618 HOST_WIDE_INT eltpos;
5619 tree value = ce->value;
5621 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5622 if (cleared && initializer_zerop (value))
5623 continue;
5625 if (ce->index)
5626 eltpos = tree_low_cst (ce->index, 1);
5627 else
5628 eltpos = i;
5630 if (vector)
5632 /* Vector CONSTRUCTORs should only be built from smaller
5633 vectors in the case of BLKmode vectors. */
5634 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5635 RTVEC_ELT (vector, eltpos)
5636 = expand_normal (value);
5638 else
5640 enum machine_mode value_mode =
5641 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5642 ? TYPE_MODE (TREE_TYPE (value))
5643 : eltmode;
5644 bitpos = eltpos * elt_size;
5645 store_constructor_field (target, bitsize, bitpos,
5646 value_mode, value, type,
5647 cleared, get_alias_set (elttype));
5651 if (vector)
5652 emit_insn (GEN_FCN (icode)
5653 (target,
5654 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5655 break;
5658 default:
5659 gcc_unreachable ();
5663 /* Store the value of EXP (an expression tree)
5664 into a subfield of TARGET which has mode MODE and occupies
5665 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5666 If MODE is VOIDmode, it means that we are storing into a bit-field.
5668 Always return const0_rtx unless we have something particular to
5669 return.
5671 TYPE is the type of the underlying object,
5673 ALIAS_SET is the alias set for the destination. This value will
5674 (in general) be different from that for TARGET, since TARGET is a
5675 reference to the containing structure.
5677 If NONTEMPORAL is true, try generating a nontemporal store. */
5679 static rtx
5680 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5681 enum machine_mode mode, tree exp, tree type,
5682 alias_set_type alias_set, bool nontemporal)
5684 HOST_WIDE_INT width_mask = 0;
5686 if (TREE_CODE (exp) == ERROR_MARK)
5687 return const0_rtx;
5689 /* If we have nothing to store, do nothing unless the expression has
5690 side-effects. */
5691 if (bitsize == 0)
5692 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5693 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5694 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
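/* Illustrative note, not in the original source: for bitsize == 5,
   width_mask becomes ((HOST_WIDE_INT) 1 << 5) - 1 == 0x1f, the mask
   covering a 5-bit field.  */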
5696 /* If we are storing into an unaligned field of an aligned union that is
5697 in a register, we may have the mode of TARGET being an integer mode but
5698 MODE == BLKmode. In that case, get an aligned object whose size and
5699 alignment are the same as TARGET and store TARGET into it (we can avoid
5700 the store if the field being stored is the entire width of TARGET). Then
5701 call ourselves recursively to store the field into a BLKmode version of
5702 that object. Finally, load from the object into TARGET. This is not
5703 very efficient in general, but should only be slightly more expensive
5704 than the otherwise-required unaligned accesses. Perhaps this can be
5705 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5706 twice, once with emit_move_insn and once via store_field. */
5708 if (mode == BLKmode
5709 && (REG_P (target) || GET_CODE (target) == SUBREG))
5711 rtx object = assign_temp (type, 0, 1, 1);
5712 rtx blk_object = adjust_address (object, BLKmode, 0);
5714 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5715 emit_move_insn (object, target);
5717 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5718 nontemporal);
5720 emit_move_insn (target, object);
5722 /* We want to return the BLKmode version of the data. */
5723 return blk_object;
5726 if (GET_CODE (target) == CONCAT)
5728 /* We're storing into a struct containing a single __complex. */
5730 gcc_assert (!bitpos);
5731 return store_expr (exp, target, 0, nontemporal);
5734 /* If the structure is in a register or if the component
5735 is a bit field, we cannot use addressing to access it.
5736 Use bit-field techniques or SUBREG to store in it. */
5738 if (mode == VOIDmode
5739 || (mode != BLKmode && ! direct_store[(int) mode]
5740 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5741 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5742 || REG_P (target)
5743 || GET_CODE (target) == SUBREG
5744 /* If the field isn't aligned enough to store as an ordinary memref,
5745 store it as a bit field. */
5746 || (mode != BLKmode
5747 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5748 || bitpos % GET_MODE_ALIGNMENT (mode))
5749 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5750 || (bitpos % BITS_PER_UNIT != 0)))
5751 /* If the RHS and field are a constant size and the size of the
5752 RHS isn't the same size as the bitfield, we must use bitfield
5753 operations. */
5754 || (bitsize >= 0
5755 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5756 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5758 rtx temp;
5760 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5761 implies a mask operation. If the precision is the same size as
5762 the field we're storing into, that mask is redundant. This is
5763 particularly common with bit field assignments generated by the
5764 C front end. */
5765 if (TREE_CODE (exp) == NOP_EXPR)
5767 tree type = TREE_TYPE (exp);
5768 if (INTEGRAL_TYPE_P (type)
5769 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5770 && bitsize == TYPE_PRECISION (type))
5772 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5773 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5774 exp = TREE_OPERAND (exp, 0);
5778 temp = expand_normal (exp);
5780 /* If BITSIZE is narrower than the size of the type of EXP
5781 we will be narrowing TEMP. Normally, what's wanted are the
5782 low-order bits. However, if EXP's type is a record and this is a
5783 big-endian machine, we want the upper BITSIZE bits. */
5784 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5785 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5786 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5787 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5788 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5789 - bitsize),
5790 NULL_RTX, 1);
5792 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5793 MODE. */
5794 if (mode != VOIDmode && mode != BLKmode
5795 && mode != TYPE_MODE (TREE_TYPE (exp)))
5796 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5798 /* If the modes of TARGET and TEMP are both BLKmode, both
5799 must be in memory and BITPOS must be aligned on a byte
5800 boundary. If so, we simply do a block copy. */
5801 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5803 gcc_assert (MEM_P (target) && MEM_P (temp)
5804 && !(bitpos % BITS_PER_UNIT));
5806 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5807 emit_block_move (target, temp,
5808 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5809 / BITS_PER_UNIT),
5810 BLOCK_OP_NORMAL);
5812 return const0_rtx;
5815 /* Store the value in the bitfield. */
5816 store_bit_field (target, bitsize, bitpos, mode, temp);
5818 return const0_rtx;
5820 else
5822 /* Now build a reference to just the desired component. */
5823 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5825 if (to_rtx == target)
5826 to_rtx = copy_rtx (to_rtx);
5828 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5829 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5830 set_mem_alias_set (to_rtx, alias_set);
5832 return store_expr (exp, to_rtx, 0, nontemporal);
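/* Illustrative sketch, not part of the original source: the effect of
   the bit-field path above, restated as plain C mask-and-merge
   arithmetic on a single word.  store_bit_field emits target insns to
   do this; the helper below only models the bit manipulation, and
   assumes 0 < BITSIZE and BITPOS + BITSIZE <= the width of long.  */
static unsigned long
store_bits_sketch (unsigned long word, unsigned long value,
                   int bitpos, int bitsize)
{
  unsigned long mask = (((unsigned long) 1 << bitsize) - 1) << bitpos;
  /* Clear the field in WORD, then merge in the low BITSIZE bits of
     VALUE shifted into place.  */
  return (word & ~mask) | ((value << bitpos) & mask);
}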
5836 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5837 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5838 codes and find the ultimate containing object, which we return.
5840 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5841 bit position, and *PUNSIGNEDP to the signedness of the field.
5842 If the position of the field is variable, we store a tree
5843 giving the variable offset (in units) in *POFFSET.
5844 This offset is in addition to the bit position.
5845 If the position is not variable, we store 0 in *POFFSET.
5847 If any of the extraction expressions is volatile,
5848 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5850 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5851 is a mode that can be used to access the field. In that case, *PBITSIZE
5852 is redundant.
5854 If the field describes a variable-sized object, *PMODE is set to
5855 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5856 this case, but the address of the object can be found.
5858 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5859 look through nodes that serve as markers of a greater alignment than
5860 the one that can be deduced from the expression. These nodes make it
5861 possible for front-ends to prevent temporaries from being created by
5862 the middle-end on alignment considerations. For that purpose, the
5863 normal operating mode at the high level is to always pass FALSE so that
5864 the ultimate containing object is really returned; moreover, the
5865 associated predicate handled_component_p will always return TRUE
5866 on these nodes, thus indicating that they are essentially handled
5867 by get_inner_reference. TRUE should only be passed when the caller
5868 is scanning the expression in order to build another representation
5869 and specifically knows how to handle these nodes; as such, this is
5870 the normal operating mode in the RTL expanders. */
5872 tree
5873 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5874 HOST_WIDE_INT *pbitpos, tree *poffset,
5875 enum machine_mode *pmode, int *punsignedp,
5876 int *pvolatilep, bool keep_aligning)
5878 tree size_tree = 0;
5879 enum machine_mode mode = VOIDmode;
5880 tree offset = size_zero_node;
5881 tree bit_offset = bitsize_zero_node;
5883 /* First get the mode, signedness, and size. We do this from just the
5884 outermost expression. */
5885 if (TREE_CODE (exp) == COMPONENT_REF)
5887 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5888 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5889 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5891 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5893 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5895 size_tree = TREE_OPERAND (exp, 1);
5896 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5898 /* For vector types, with the correct size of access, use the mode of
5899 inner type. */
5900 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5901 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5902 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5903 mode = TYPE_MODE (TREE_TYPE (exp));
5905 else
5907 mode = TYPE_MODE (TREE_TYPE (exp));
5908 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5910 if (mode == BLKmode)
5911 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5912 else
5913 *pbitsize = GET_MODE_BITSIZE (mode);
5916 if (size_tree != 0)
5918 if (! host_integerp (size_tree, 1))
5919 mode = BLKmode, *pbitsize = -1;
5920 else
5921 *pbitsize = tree_low_cst (size_tree, 1);
5924 *pmode = mode;
5926 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5927 and find the ultimate containing object. */
5928 while (1)
5930 switch (TREE_CODE (exp))
5932 case BIT_FIELD_REF:
5933 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5934 TREE_OPERAND (exp, 2));
5935 break;
5937 case COMPONENT_REF:
5939 tree field = TREE_OPERAND (exp, 1);
5940 tree this_offset = component_ref_field_offset (exp);
5942 /* If this field hasn't been filled in yet, don't go past it.
5943 This should only happen when folding expressions made during
5944 type construction. */
5945 if (this_offset == 0)
5946 break;
5948 offset = size_binop (PLUS_EXPR, offset, this_offset);
5949 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5950 DECL_FIELD_BIT_OFFSET (field));
5952 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5954 break;
5956 case ARRAY_REF:
5957 case ARRAY_RANGE_REF:
5959 tree index = TREE_OPERAND (exp, 1);
5960 tree low_bound = array_ref_low_bound (exp);
5961 tree unit_size = array_ref_element_size (exp);
5963 /* We assume all arrays have sizes that are a multiple of a byte.
5964 First subtract the lower bound, if any, in the type of the
5965 index, then convert to sizetype and multiply by the size of
5966 the array element. */
5967 if (! integer_zerop (low_bound))
5968 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5969 index, low_bound);
5971 offset = size_binop (PLUS_EXPR, offset,
5972 size_binop (MULT_EXPR,
5973 fold_convert (sizetype, index),
5974 unit_size));
5976 break;
5978 case REALPART_EXPR:
5979 break;
5981 case IMAGPART_EXPR:
5982 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5983 bitsize_int (*pbitsize));
5984 break;
5986 case VIEW_CONVERT_EXPR:
5987 if (keep_aligning && STRICT_ALIGNMENT
5988 && (TYPE_ALIGN (TREE_TYPE (exp))
5989 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5990 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5991 < BIGGEST_ALIGNMENT)
5992 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5993 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5994 goto done;
5995 break;
5997 default:
5998 goto done;
6001 /* If any reference in the chain is volatile, the effect is volatile. */
6002 if (TREE_THIS_VOLATILE (exp))
6003 *pvolatilep = 1;
6005 exp = TREE_OPERAND (exp, 0);
6007 done:
6009 /* If OFFSET is constant, see if we can return the whole thing as a
6010 constant bit position. Make sure to handle overflow during
6011 this conversion. */
6012 if (host_integerp (offset, 0))
6014 double_int tem = double_int_mul (tree_to_double_int (offset),
6015 uhwi_to_double_int (BITS_PER_UNIT));
6016 tem = double_int_add (tem, tree_to_double_int (bit_offset));
6017 if (double_int_fits_in_shwi_p (tem))
6019 *pbitpos = double_int_to_shwi (tem);
6020 *poffset = NULL_TREE;
6021 return exp;
6025 /* Otherwise, split it up. */
6026 *pbitpos = tree_low_cst (bit_offset, 0);
6027 *poffset = offset;
6029 return exp;
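/* Illustrative example, not in the original source: for the C access
   p->a[i].b, get_inner_reference peels the COMPONENT_REF and
   ARRAY_REF down to the INDIRECT_REF *p and returns it; *poffset
   holds the variable byte offset computed from i and the element
   size, and *pbitpos the constant bit offset of field b.  */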
6032 /* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
6033 look for whether EXP or any nested component-refs within EXP is marked
6034 as PACKED. */
6036 bool
6037 contains_packed_reference (const_tree exp)
6039 bool packed_p = false;
6041 while (1)
6043 switch (TREE_CODE (exp))
6045 case COMPONENT_REF:
6047 tree field = TREE_OPERAND (exp, 1);
6048 packed_p = DECL_PACKED (field)
6049 || TYPE_PACKED (TREE_TYPE (field))
6050 || TYPE_PACKED (TREE_TYPE (exp));
6051 if (packed_p)
6052 goto done;
6054 break;
6056 case BIT_FIELD_REF:
6057 case ARRAY_REF:
6058 case ARRAY_RANGE_REF:
6059 case REALPART_EXPR:
6060 case IMAGPART_EXPR:
6061 case VIEW_CONVERT_EXPR:
6062 break;
6064 default:
6065 goto done;
6067 exp = TREE_OPERAND (exp, 0);
6069 done:
6070 return packed_p;
6073 /* Return a tree of sizetype representing the size, in bytes, of the element
6074 of EXP, an ARRAY_REF. */
6076 tree
6077 array_ref_element_size (tree exp)
6079 tree aligned_size = TREE_OPERAND (exp, 3);
6080 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6082 /* If a size was specified in the ARRAY_REF, it's the size measured
6083 in alignment units of the element type. So multiply by that value. */
6084 if (aligned_size)
6086 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6087 sizetype from another type of the same width and signedness. */
6088 if (TREE_TYPE (aligned_size) != sizetype)
6089 aligned_size = fold_convert (sizetype, aligned_size);
6090 return size_binop (MULT_EXPR, aligned_size,
6091 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6094 /* Otherwise, take the size from that of the element type. Substitute
6095 any PLACEHOLDER_EXPR that we have. */
6096 else
6097 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6100 /* Return a tree representing the lower bound of the array mentioned in
6101 EXP, an ARRAY_REF. */
6103 tree
6104 array_ref_low_bound (tree exp)
6106 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6108 /* If a lower bound is specified in EXP, use it. */
6109 if (TREE_OPERAND (exp, 2))
6110 return TREE_OPERAND (exp, 2);
6112 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6113 substituting for a PLACEHOLDER_EXPR as needed. */
6114 if (domain_type && TYPE_MIN_VALUE (domain_type))
6115 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6117 /* Otherwise, return a zero of the appropriate type. */
6118 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6121 /* Return a tree representing the upper bound of the array mentioned in
6122 EXP, an ARRAY_REF. */
6124 tree
6125 array_ref_up_bound (tree exp)
6127 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6129 /* If there is a domain type and it has an upper bound, use it, substituting
6130 for a PLACEHOLDER_EXPR as needed. */
6131 if (domain_type && TYPE_MAX_VALUE (domain_type))
6132 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6134 /* Otherwise fail. */
6135 return NULL_TREE;
6138 /* Return a tree representing the offset, in bytes, of the field referenced
6139 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6141 tree
6142 component_ref_field_offset (tree exp)
6144 tree aligned_offset = TREE_OPERAND (exp, 2);
6145 tree field = TREE_OPERAND (exp, 1);
6147 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6148 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6149 value. */
6150 if (aligned_offset)
6152 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6153 sizetype from another type of the same width and signedness. */
6154 if (TREE_TYPE (aligned_offset) != sizetype)
6155 aligned_offset = fold_convert (sizetype, aligned_offset);
6156 return size_binop (MULT_EXPR, aligned_offset,
6157 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
6160 /* Otherwise, take the offset from that of the field. Substitute
6161 any PLACEHOLDER_EXPR that we have. */
6162 else
6163 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
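/* Worked example (illustrative, not in the original source): with
   DECL_OFFSET_ALIGN (field) == 32 and BITS_PER_UNIT == 8, an
   aligned_offset of 3 denotes 3 * (32 / 8) == 12 bytes from the
   start of the containing structure.  */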
6166 /* Return 1 if T is an expression that get_inner_reference handles. */
6168 int
6169 handled_component_p (const_tree t)
6171 switch (TREE_CODE (t))
6173 case BIT_FIELD_REF:
6174 case COMPONENT_REF:
6175 case ARRAY_REF:
6176 case ARRAY_RANGE_REF:
6177 case VIEW_CONVERT_EXPR:
6178 case REALPART_EXPR:
6179 case IMAGPART_EXPR:
6180 return 1;
6182 default:
6183 return 0;
6187 /* Given an rtx VALUE that may contain additions and multiplications, return
6188 an equivalent value that just refers to a register, memory, or constant.
6189 This is done by generating instructions to perform the arithmetic and
6190 returning a pseudo-register containing the value.
6192 The returned value may be a REG, SUBREG, MEM or constant. */
6194 rtx
6195 force_operand (rtx value, rtx target)
6197 rtx op1, op2;
6198 /* Use subtarget as the target for operand 0 of a binary operation. */
6199 rtx subtarget = get_subtarget (target);
6200 enum rtx_code code = GET_CODE (value);
6202 /* Check for subreg applied to an expression produced by loop optimizer. */
6203 if (code == SUBREG
6204 && !REG_P (SUBREG_REG (value))
6205 && !MEM_P (SUBREG_REG (value)))
6207 value
6208 = simplify_gen_subreg (GET_MODE (value),
6209 force_reg (GET_MODE (SUBREG_REG (value)),
6210 force_operand (SUBREG_REG (value),
6211 NULL_RTX)),
6212 GET_MODE (SUBREG_REG (value)),
6213 SUBREG_BYTE (value));
6214 code = GET_CODE (value);
6217 /* Check for a PIC address load. */
6218 if ((code == PLUS || code == MINUS)
6219 && XEXP (value, 0) == pic_offset_table_rtx
6220 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6221 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6222 || GET_CODE (XEXP (value, 1)) == CONST))
6224 if (!subtarget)
6225 subtarget = gen_reg_rtx (GET_MODE (value));
6226 emit_move_insn (subtarget, value);
6227 return subtarget;
6230 if (ARITHMETIC_P (value))
6232 op2 = XEXP (value, 1);
6233 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6234 subtarget = 0;
6235 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6237 code = PLUS;
6238 op2 = negate_rtx (GET_MODE (value), op2);
6241 /* Check for an addition with OP2 a constant integer and our first
6242 operand a PLUS of a virtual register and something else. In that
6243 case, we want to emit the sum of the virtual register and the
6244 constant first and then add the other value. This allows virtual
6245 register instantiation to simply modify the constant rather than
6246 creating another one around this addition. */
6247 if (code == PLUS && GET_CODE (op2) == CONST_INT
6248 && GET_CODE (XEXP (value, 0)) == PLUS
6249 && REG_P (XEXP (XEXP (value, 0), 0))
6250 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6251 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6253 rtx temp = expand_simple_binop (GET_MODE (value), code,
6254 XEXP (XEXP (value, 0), 0), op2,
6255 subtarget, 0, OPTAB_LIB_WIDEN);
6256 return expand_simple_binop (GET_MODE (value), code, temp,
6257 force_operand (XEXP (XEXP (value,
6258 0), 1), 0),
6259 target, 0, OPTAB_LIB_WIDEN);
6262 op1 = force_operand (XEXP (value, 0), subtarget);
6263 op2 = force_operand (op2, NULL_RTX);
6264 switch (code)
6266 case MULT:
6267 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6268 case DIV:
6269 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6270 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6271 target, 1, OPTAB_LIB_WIDEN);
6272 else
6273 return expand_divmod (0,
6274 FLOAT_MODE_P (GET_MODE (value))
6275 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6276 GET_MODE (value), op1, op2, target, 0);
6277 case MOD:
6278 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6279 target, 0);
6280 case UDIV:
6281 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6282 target, 1);
6283 case UMOD:
6284 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6285 target, 1);
6286 case ASHIFTRT:
6287 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6288 target, 0, OPTAB_LIB_WIDEN);
6289 default:
6290 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6291 target, 1, OPTAB_LIB_WIDEN);
6294 if (UNARY_P (value))
6296 if (!target)
6297 target = gen_reg_rtx (GET_MODE (value));
6298 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6299 switch (code)
6301 case ZERO_EXTEND:
6302 case SIGN_EXTEND:
6303 case TRUNCATE:
6304 case FLOAT_EXTEND:
6305 case FLOAT_TRUNCATE:
6306 convert_move (target, op1, code == ZERO_EXTEND);
6307 return target;
6309 case FIX:
6310 case UNSIGNED_FIX:
6311 expand_fix (target, op1, code == UNSIGNED_FIX);
6312 return target;
6314 case FLOAT:
6315 case UNSIGNED_FLOAT:
6316 expand_float (target, op1, code == UNSIGNED_FLOAT);
6317 return target;
6319 default:
6320 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6324 #ifdef INSN_SCHEDULING
6325 /* On machines that have insn scheduling, we want all memory references to be
6326 explicit, so we need to deal with such paradoxical SUBREGs. */
6327 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6328 && (GET_MODE_SIZE (GET_MODE (value))
6329 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6330 value
6331 = simplify_gen_subreg (GET_MODE (value),
6332 force_reg (GET_MODE (SUBREG_REG (value)),
6333 force_operand (SUBREG_REG (value),
6334 NULL_RTX)),
6335 GET_MODE (SUBREG_REG (value)),
6336 SUBREG_BYTE (value));
6337 #endif
6339 return value;
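/* Illustrative note, not in the original source: given an rtx such
   as (plus (reg) (const_int 4)), force_operand emits the addition
   and hands back the pseudo register holding the sum, so callers
   always receive a REG, SUBREG, MEM or constant.  */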
6342 /* Subroutine of expand_expr: return nonzero iff there is no way that
6343 EXP can reference X, which is being modified. TOP_P is nonzero if this
6344 call is going to be used to determine whether we need a temporary
6345 for EXP, as opposed to a recursive call to this function.
6347 It is always safe for this routine to return zero since it merely
6348 searches for optimization opportunities. */
6350 static int
6351 safe_from_p (const_rtx x, tree exp, int top_p)
6353 rtx exp_rtl = 0;
6354 int i, nops;
6356 if (x == 0
6357 /* If EXP has varying size, we MUST use a target since we currently
6358 have no way of allocating temporaries of variable size
6359 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6360 So we assume here that something at a higher level has prevented a
6361 clash. This is somewhat bogus, but the best we can do. Only
6362 do this when X is BLKmode and when we are at the top level. */
6363 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6364 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6365 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6366 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6367 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6368 != INTEGER_CST)
6369 && GET_MODE (x) == BLKmode)
6370 /* If X is in the outgoing argument area, it is always safe. */
6371 || (MEM_P (x)
6372 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6373 || (GET_CODE (XEXP (x, 0)) == PLUS
6374 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6375 return 1;
6377 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6378 find the underlying pseudo. */
6379 if (GET_CODE (x) == SUBREG)
6381 x = SUBREG_REG (x);
6382 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6383 return 0;
6386 /* Now look at our tree code and possibly recurse. */
6387 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6389 case tcc_declaration:
6390 exp_rtl = DECL_RTL_IF_SET (exp);
6391 break;
6393 case tcc_constant:
6394 return 1;
6396 case tcc_exceptional:
6397 if (TREE_CODE (exp) == TREE_LIST)
6399 while (1)
6401 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6402 return 0;
6403 exp = TREE_CHAIN (exp);
6404 if (!exp)
6405 return 1;
6406 if (TREE_CODE (exp) != TREE_LIST)
6407 return safe_from_p (x, exp, 0);
6410 else if (TREE_CODE (exp) == CONSTRUCTOR)
6412 constructor_elt *ce;
6413 unsigned HOST_WIDE_INT idx;
6415 for (idx = 0;
6416 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6417 idx++)
6418 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6419 || !safe_from_p (x, ce->value, 0))
6420 return 0;
6421 return 1;
6423 else if (TREE_CODE (exp) == ERROR_MARK)
6424 return 1; /* An already-visited SAVE_EXPR? */
6425 else
6426 return 0;
6428 case tcc_statement:
6429 /* The only case we look at here is the DECL_INITIAL inside a
6430 DECL_EXPR. */
6431 return (TREE_CODE (exp) != DECL_EXPR
6432 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6433 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6434 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6436 case tcc_binary:
6437 case tcc_comparison:
6438 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6439 return 0;
6440 /* Fall through. */
6442 case tcc_unary:
6443 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6445 case tcc_expression:
6446 case tcc_reference:
6447 case tcc_vl_exp:
6448 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6449 the expression. If it is set, we conflict iff we are that rtx or
6450 both are in memory. Otherwise, we check all operands of the
6451 expression recursively. */
6453 switch (TREE_CODE (exp))
6455 case ADDR_EXPR:
6456 /* If the operand is static or we are static, we can't conflict.
6457 Likewise if we don't conflict with the operand at all. */
6458 if (staticp (TREE_OPERAND (exp, 0))
6459 || TREE_STATIC (exp)
6460 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6461 return 1;
6463 /* Otherwise, the only way this can conflict is if we are taking
6464 the address of a DECL whose address is part of X, which is
6465 very rare. */
6466 exp = TREE_OPERAND (exp, 0);
6467 if (DECL_P (exp))
6469 if (!DECL_RTL_SET_P (exp)
6470 || !MEM_P (DECL_RTL (exp)))
6471 return 0;
6472 else
6473 exp_rtl = XEXP (DECL_RTL (exp), 0);
6475 break;
6477 case MISALIGNED_INDIRECT_REF:
6478 case ALIGN_INDIRECT_REF:
6479 case INDIRECT_REF:
6480 if (MEM_P (x)
6481 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6482 get_alias_set (exp)))
6483 return 0;
6484 break;
6486 case CALL_EXPR:
6487 /* Assume that the call will clobber all hard registers and
6488 all of memory. */
6489 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6490 || MEM_P (x))
6491 return 0;
6492 break;
6494 case WITH_CLEANUP_EXPR:
6495 case CLEANUP_POINT_EXPR:
6496 /* Lowered by gimplify.c. */
6497 gcc_unreachable ();
6499 case SAVE_EXPR:
6500 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6502 default:
6503 break;
6506 /* If we have an rtx, we do not need to scan our operands. */
6507 if (exp_rtl)
6508 break;
6510 nops = TREE_OPERAND_LENGTH (exp);
6511 for (i = 0; i < nops; i++)
6512 if (TREE_OPERAND (exp, i) != 0
6513 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6514 return 0;
6516 break;
6518 case tcc_type:
6519 /* Should never get a type here. */
6520 gcc_unreachable ();
6522 case tcc_gimple_stmt:
6523 gcc_unreachable ();
6526 /* If we have an rtl, find any enclosed object. Then see if we conflict
6527 with it. */
6528 if (exp_rtl)
6530 if (GET_CODE (exp_rtl) == SUBREG)
6532 exp_rtl = SUBREG_REG (exp_rtl);
6533 if (REG_P (exp_rtl)
6534 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6535 return 0;
6538 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6539 are memory and they conflict. */
6540 return ! (rtx_equal_p (x, exp_rtl)
6541 || (MEM_P (x) && MEM_P (exp_rtl)
6542 && true_dependence (exp_rtl, VOIDmode, x,
6543 rtx_addr_varies_p)));
6546 /* If we reach here, it is safe. */
6547 return 1;
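/* Illustrative note, not in the original source: the answer errs
   toward 0.  For instance, a CALL_EXPR is assumed to clobber all
   hard registers and all of memory, so safe_from_p returns 0 for
   any MEM even if the callee could never touch it.  */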
6551 /* Return the highest power of two that EXP is known to be a multiple of.
6552 This is used in updating alignment of MEMs in array references. */
6554 unsigned HOST_WIDE_INT
6555 highest_pow2_factor (const_tree exp)
6557 unsigned HOST_WIDE_INT c0, c1;
6559 switch (TREE_CODE (exp))
6561 case INTEGER_CST:
6562 /* We can find the lowest bit that's a one. If the low
6563 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6564 We need to handle this case since we can find it in a COND_EXPR,
6565 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6566 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6567 later ICE. */
6568 if (TREE_OVERFLOW (exp))
6569 return BIGGEST_ALIGNMENT;
6570 else
6572 /* Note: tree_low_cst is intentionally not used here;
6573 we don't care about the upper bits. */
6574 c0 = TREE_INT_CST_LOW (exp);
6575 c0 &= -c0;
6576 return c0 ? c0 : BIGGEST_ALIGNMENT;
6578 break;
6580 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6581 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6582 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6583 return MIN (c0, c1);
6585 case MULT_EXPR:
6586 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6587 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6588 return c0 * c1;
6590 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6591 case CEIL_DIV_EXPR:
6592 if (integer_pow2p (TREE_OPERAND (exp, 1))
6593 && host_integerp (TREE_OPERAND (exp, 1), 1))
6595 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6596 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6597 return MAX (1, c0 / c1);
6599 break;
6601 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6602 case SAVE_EXPR:
6603 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6605 case COMPOUND_EXPR:
6606 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6608 case COND_EXPR:
6609 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6610 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6611 return MIN (c0, c1);
6613 default:
6614 break;
6617 return 1;
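/* Illustrative sketch, not part of the original source: the
   INTEGER_CST case above isolates the lowest set bit with the two's
   complement identity c0 & -c0; e.g. 24 (binary 11000) yields 8.
   A standalone equivalent for plain integers:  */
static unsigned long
lowest_pow2_factor_sketch (unsigned long c0)
{
  /* c0 & -c0 keeps only the lowest set bit; the caller above maps a
     result of 0 (all low bits zero) to BIGGEST_ALIGNMENT.  */
  return c0 & -c0;
}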
6620 /* Similar, except that the alignment requirements of TARGET are
6621 taken into account. Assume it is at least as aligned as its
6622 type, unless it is a COMPONENT_REF in which case the layout of
6623 the structure gives the alignment. */
6625 static unsigned HOST_WIDE_INT
6626 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6628 unsigned HOST_WIDE_INT target_align, factor;
6630 factor = highest_pow2_factor (exp);
6631 if (TREE_CODE (target) == COMPONENT_REF)
6632 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6633 else
6634 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6635 return MAX (factor, target_align);
6638 /* Return &VAR expression for emulated thread local VAR. */
6640 static tree
6641 emutls_var_address (tree var)
6643 tree emuvar = emutls_decl (var);
6644 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6645 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6646 tree arglist = build_tree_list (NULL_TREE, arg);
6647 tree call = build_function_call_expr (fn, arglist);
6648 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
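/* Illustrative example, not in the original source: for a variable
   declared __thread int t, this builds the equivalent of
   (int *) __emutls_get_address (&_emutls.t), matching the rewrite
   described at the VAR_DECL cases below.  */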
6651 /* Expands variable VAR. */
6653 void
6654 expand_var (tree var)
6656 if (DECL_EXTERNAL (var))
6657 return;
6659 if (TREE_STATIC (var))
6660 /* If this is an inlined copy of a static local variable,
6661 look up the original decl. */
6662 var = DECL_ORIGIN (var);
6664 if (TREE_STATIC (var)
6665 ? !TREE_ASM_WRITTEN (var)
6666 : !DECL_RTL_SET_P (var))
6668 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6669 /* Should be ignored. */;
6670 else if (lang_hooks.expand_decl (var))
6671 /* OK. */;
6672 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6673 expand_decl (var);
6674 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6675 rest_of_decl_compilation (var, 0, 0);
6676 else
6677 /* No expansion needed. */
6678 gcc_assert (TREE_CODE (var) == TYPE_DECL
6679 || TREE_CODE (var) == CONST_DECL
6680 || TREE_CODE (var) == FUNCTION_DECL
6681 || TREE_CODE (var) == LABEL_DECL);
6685 /* Subroutine of expand_expr. Expand the two operands of a binary
6686 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6687 The value may be stored in TARGET if TARGET is nonzero. The
6688 MODIFIER argument is as documented by expand_expr. */
6690 static void
6691 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6692 enum expand_modifier modifier)
6694 if (! safe_from_p (target, exp1, 1))
6695 target = 0;
6696 if (operand_equal_p (exp0, exp1, 0))
6698 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6699 *op1 = copy_rtx (*op0);
6701 else
6703 /* If we need to preserve evaluation order, copy exp0 into its own
6704 temporary variable so that it can't be clobbered by exp1. */
6705 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6706 exp0 = save_expr (exp0);
6707 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6708 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
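/* Illustrative note, not in the original source: for an expression
   like x * x, operand_equal_p matches above, so x is expanded only
   once and the second operand is simply a copy_rtx of the first.  */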
6713 /* Return a MEM that contains constant EXP. DEFER is as for
6714 output_constant_def and MODIFIER is as for expand_expr. */
6716 static rtx
6717 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6719 rtx mem;
6721 mem = output_constant_def (exp, defer);
6722 if (modifier != EXPAND_INITIALIZER)
6723 mem = use_anchored_address (mem);
6724 return mem;
6727 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6728 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6730 static rtx
6731 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6732 enum expand_modifier modifier)
6734 rtx result, subtarget;
6735 tree inner, offset;
6736 HOST_WIDE_INT bitsize, bitpos;
6737 int volatilep, unsignedp;
6738 enum machine_mode mode1;
6740 /* If we are taking the address of a constant and are at the top level,
6741 we have to use output_constant_def since we can't call force_const_mem
6742 at top level. */
6743 /* ??? This should be considered a front-end bug. We should not be
6744 generating ADDR_EXPR of something that isn't an LVALUE. The only
6745 exception here is STRING_CST. */
6746 if (CONSTANT_CLASS_P (exp))
6747 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6749 /* Everything must be something allowed by is_gimple_addressable. */
6750 switch (TREE_CODE (exp))
6752 case INDIRECT_REF:
6753 /* This case will happen via recursion for &a->b. */
6754 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6756 case CONST_DECL:
6757 /* Recurse and make the output_constant_def clause above handle this. */
6758 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6759 tmode, modifier);
6761 case REALPART_EXPR:
6762 /* The real part of the complex number is always first, therefore
6763 the address is the same as the address of the parent object. */
6764 offset = 0;
6765 bitpos = 0;
6766 inner = TREE_OPERAND (exp, 0);
6767 break;
6769 case IMAGPART_EXPR:
6770 /* The imaginary part of the complex number is always second.
6771 The expression is therefore always offset by the size of the
6772 scalar type. */
6773 offset = 0;
6774 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6775 inner = TREE_OPERAND (exp, 0);
6776 break;
6778 case VAR_DECL:
6779 /* TLS emulation hook - replace __thread VAR's &VAR with
6780 __emutls_get_address (&_emutls.VAR). */
6781 if (! targetm.have_tls
6782 && TREE_CODE (exp) == VAR_DECL
6783 && DECL_THREAD_LOCAL_P (exp))
6785 exp = emutls_var_address (exp);
6786 return expand_expr (exp, target, tmode, modifier);
6788 /* Fall through. */
6790 default:
6791 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6792 expand_expr, as that can have various side effects; LABEL_DECLs for
6793 example, may not have their DECL_RTL set yet. Expand the rtl of
6794 CONSTRUCTORs too, which should yield a memory reference for the
6795 constructor's contents. Assume language specific tree nodes can
6796 be expanded in some interesting way. */
6797 if (DECL_P (exp)
6798 || TREE_CODE (exp) == CONSTRUCTOR
6799 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6801 result = expand_expr (exp, target, tmode,
6802 modifier == EXPAND_INITIALIZER
6803 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6805 /* If the DECL isn't in memory, then the DECL wasn't properly
6806 marked TREE_ADDRESSABLE, which will be either a front-end
6807 or a tree optimizer bug. */
6808 gcc_assert (MEM_P (result));
6809 result = XEXP (result, 0);
6811 /* ??? Is this needed anymore? */
6812 if (DECL_P (exp) && TREE_USED (exp) == 0)
6814 assemble_external (exp);
6815 TREE_USED (exp) = 1;
6818 if (modifier != EXPAND_INITIALIZER
6819 && modifier != EXPAND_CONST_ADDRESS)
6820 result = force_operand (result, target);
6821 return result;
6824 /* Pass FALSE as the last argument to get_inner_reference although
6825 we are expanding to RTL. The rationale is that we know how to
6826 handle "aligning nodes" here: we can just bypass them because
6827 they won't change the final object whose address will be returned
6828 (they actually exist only for that purpose). */
6829 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6830 &mode1, &unsignedp, &volatilep, false);
6831 break;
6834 /* We must have made progress. */
6835 gcc_assert (inner != exp);
6837 subtarget = offset || bitpos ? NULL_RTX : target;
6838 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6840 if (offset)
6842 rtx tmp;
6844 if (modifier != EXPAND_NORMAL)
6845 result = force_operand (result, NULL);
6846 tmp = expand_expr (offset, NULL_RTX, tmode,
6847 modifier == EXPAND_INITIALIZER
6848 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6850 result = convert_memory_address (tmode, result);
6851 tmp = convert_memory_address (tmode, tmp);
6853 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6854 result = gen_rtx_PLUS (tmode, result, tmp);
6855 else
6857 subtarget = bitpos ? NULL_RTX : target;
6858 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6859 1, OPTAB_LIB_WIDEN);
6863 if (bitpos)
6865 /* Someone beforehand should have rejected taking the address
6866 of such an object. */
6867 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6869 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6870 if (modifier < EXPAND_SUM)
6871 result = force_operand (result, target);
6874 return result;
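/* Illustrative example, not in the original source: for &s.f where
   field f lives at a constant 32-bit offset and BITS_PER_UNIT == 8,
   get_inner_reference yields bitpos == 32 with no variable offset,
   and the code above folds the address to
   plus_constant (address_of_s, 4).  */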
6877 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6878 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6880 static rtx
6881 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6882 enum expand_modifier modifier)
6884 enum machine_mode rmode;
6885 rtx result;
6887 /* Target mode of VOIDmode says "whatever's natural". */
6888 if (tmode == VOIDmode)
6889 tmode = TYPE_MODE (TREE_TYPE (exp));
6891 /* We can get called with some Weird Things if the user does silliness
6892 like "(short) &a". In that case, convert_memory_address won't do
6893 the right thing, so ignore the given target mode. */
6894 if (tmode != Pmode && tmode != ptr_mode)
6895 tmode = Pmode;
6897 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6898 tmode, modifier);
6900 /* Despite expand_expr's claim that TMODE may be ignored when not
6901 strictly convenient, things break if we don't honor it. Note
6902 that combined with the above, we only do this for pointer modes. */
6903 rmode = GET_MODE (result);
6904 if (rmode == VOIDmode)
6905 rmode = tmode;
6906 if (rmode != tmode)
6907 result = convert_memory_address (tmode, result);
6909 return result;
6912 /* Generate code for computing CONSTRUCTOR EXP.
6913 An rtx for the computed value is returned. If AVOID_TEMP_MEM
6914 is TRUE, instead of creating a temporary variable in memory
6915 NULL is returned and the caller needs to handle it differently. */
6917 static rtx
6918 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
6919 bool avoid_temp_mem)
6921 tree type = TREE_TYPE (exp);
6922 enum machine_mode mode = TYPE_MODE (type);
6924 /* Try to avoid creating a temporary at all. This is possible
6925 if all of the initializer is zero.
6926 FIXME: try to handle all [0..255] initializers we can handle
6927 with memset. */
6928 if (TREE_STATIC (exp)
6929 && !TREE_ADDRESSABLE (exp)
6930 && target != 0 && mode == BLKmode
6931 && all_zeros_p (exp))
6933 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6934 return target;
6937 /* All elts simple constants => refer to a constant in memory. But
6938 if this is a non-BLKmode mode, let it store a field at a time
6939 since that should make a CONST_INT or CONST_DOUBLE when we
6940 fold. Likewise, if we have a target we can use, it is best to
6941 store directly into the target unless the type is large enough
6942 that memcpy will be used. If we are making an initializer and
6943 all operands are constant, put it in memory as well.
6945 FIXME: Avoid trying to fill vector constructors piecemeal.
6946 Output them with output_constant_def below unless we're sure
6947 they're zeros. This should go away when vector initializers
6948 are treated like VECTOR_CST instead of arrays. */
6949 if ((TREE_STATIC (exp)
6950 && ((mode == BLKmode
6951 && ! (target != 0 && safe_from_p (target, exp, 1)))
6952 || TREE_ADDRESSABLE (exp)
6953 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6954 && (! MOVE_BY_PIECES_P
6955 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6956 TYPE_ALIGN (type)))
6957 && ! mostly_zeros_p (exp))))
6958 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
6959 && TREE_CONSTANT (exp)))
6961 rtx constructor;
6963 if (avoid_temp_mem)
6964 return NULL_RTX;
6966 constructor = expand_expr_constant (exp, 1, modifier);
6968 if (modifier != EXPAND_CONST_ADDRESS
6969 && modifier != EXPAND_INITIALIZER
6970 && modifier != EXPAND_SUM)
6971 constructor = validize_mem (constructor);
6973 return constructor;
6976 /* Handle calls that pass values in multiple non-contiguous
6977 locations. The Irix 6 ABI has examples of this. */
6978 if (target == 0 || ! safe_from_p (target, exp, 1)
6979 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
6981 if (avoid_temp_mem)
6982 return NULL_RTX;
6984 target
6985 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
6986 | (TREE_READONLY (exp)
6987 * TYPE_QUAL_CONST))),
6988 0, TREE_ADDRESSABLE (exp), 1);
6991 store_constructor (exp, target, 0, int_expr_size (exp));
6992 return target;
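/* Illustrative note, not in the original source: for a BLKmode
   initializer that is entirely zeros, the all_zeros_p fast path
   above emits a single clear_storage call (a block clear) instead
   of building the constructor in a temporary.  */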
6996 /* expand_expr: generate code for computing expression EXP.
6997 An rtx for the computed value is returned. The value is never null.
6998 In the case of a void EXP, const0_rtx is returned.
7000 The value may be stored in TARGET if TARGET is nonzero.
7001 TARGET is just a suggestion; callers must assume that
7002 the rtx returned may not be the same as TARGET.
7004 If TARGET is CONST0_RTX, it means that the value will be ignored.
7006 If TMODE is not VOIDmode, it suggests generating the
7007 result in mode TMODE. But this is done only when convenient.
7008 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7009 TMODE is just a suggestion; callers must assume that
7010 the rtx returned may not have mode TMODE.
7012 Note that TARGET may have neither TMODE nor MODE. In that case, it
7013 probably will not be used.
7015 If MODIFIER is EXPAND_SUM then when EXP is an addition
7016 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7017 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7018 products as above, or REG or MEM, or constant.
7019 Ordinarily in such cases we would output mul or add instructions
7020 and then return a pseudo reg containing the sum.
7022 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7023 it also marks a label as absolutely required (it can't be dead).
7024 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7025 This is used for outputting expressions used in initializers.
7027 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7028 with a constant address even if that address is not normally legitimate.
7029 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7031 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7032 a call parameter. Such targets require special care as we haven't yet
7033 marked TARGET so that it's safe from being trashed by libcalls. We
7034 don't want to use TARGET for anything but the final result;
7035 intermediate values must go elsewhere. Additionally, calls to
7036 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7038 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7039 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7040 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7041 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7042 recursively. */
7044 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
7045 enum expand_modifier, rtx *);
7047 rtx
7048 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7049 enum expand_modifier modifier, rtx *alt_rtl)
7051 int rn = -1;
7052 rtx ret, last = NULL;
7054 /* Handle ERROR_MARK before anybody tries to access its type. */
7055 if (TREE_CODE (exp) == ERROR_MARK
7056 || (!GIMPLE_TUPLE_P (exp) && TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7058 ret = CONST0_RTX (tmode);
7059 return ret ? ret : const0_rtx;
7062 if (flag_non_call_exceptions)
7064 rn = lookup_stmt_eh_region (exp);
7065 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
7066 if (rn >= 0)
7067 last = get_last_insn ();
7070 /* If this is an expression of some kind and it has an associated line
7071 number, then emit the line number before expanding the expression.
7073 We need to save and restore the file and line information so that
7074 errors discovered during expansion are emitted with the right
7075 information. It would be better if the diagnostic routines
7076 used the file/line information embedded in the tree nodes rather
7077 than globals. */
7078 if (cfun && EXPR_HAS_LOCATION (exp))
7080 location_t saved_location = input_location;
7081 input_location = EXPR_LOCATION (exp);
7082 set_curr_insn_source_location (input_location);
7084 /* Record where the insns produced belong. */
7085 set_curr_insn_block (TREE_BLOCK (exp));
7087 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7089 input_location = saved_location;
7091 else
7093 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7096 /* If using non-call exceptions, mark all insns that may trap.
7097 expand_call() will mark CALL_INSNs before we get to this code,
7098 but it doesn't handle libcalls, and these may trap. */
7099 if (rn >= 0)
7101 rtx insn;
7102 for (insn = next_real_insn (last); insn;
7103 insn = next_real_insn (insn))
7105 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
7106 /* If we want exceptions for non-call insns, any
7107 may_trap_p instruction may throw. */
7108 && GET_CODE (PATTERN (insn)) != CLOBBER
7109 && GET_CODE (PATTERN (insn)) != USE
7110 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
7112 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
7113 REG_NOTES (insn));
7118 return ret;
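/* Illustrative note, not in the original source: with
   flag_non_call_exceptions, every insn emitted after LAST that can
   trap (per may_trap_p) is annotated above with a REG_EH_REGION note
   carrying the region number RN, so later passes know which handler
   covers it.  */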
7121 static rtx
7122 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
7123 enum expand_modifier modifier, rtx *alt_rtl)
7125 rtx op0, op1, op2, temp, decl_rtl;
7126 tree type;
7127 int unsignedp;
7128 enum machine_mode mode;
7129 enum tree_code code = TREE_CODE (exp);
7130 optab this_optab;
7131 rtx subtarget, original_target;
7132 int ignore;
7133 tree context, subexp0, subexp1;
7134 bool reduce_bit_field = false;
7135 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
7136 ? reduce_to_bit_field_precision ((expr), \
7137 target, \
7138 type) \
7139 : (expr))
7141 if (GIMPLE_STMT_P (exp))
7143 type = void_type_node;
7144 mode = VOIDmode;
7145 unsignedp = 0;
7147 else
7149 type = TREE_TYPE (exp);
7150 mode = TYPE_MODE (type);
7151 unsignedp = TYPE_UNSIGNED (type);
7153 if (lang_hooks.reduce_bit_field_operations
7154 && TREE_CODE (type) == INTEGER_TYPE
7155 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
7157 /* An operation in what may be a bit-field type needs the
7158 result to be reduced to the precision of the bit-field type,
7159 which is narrower than that of the type's mode. */
7160 reduce_bit_field = true;
7161 if (modifier == EXPAND_STACK_PARM)
7162 target = 0;
7165 /* Use subtarget as the target for operand 0 of a binary operation. */
7166 subtarget = get_subtarget (target);
7167 original_target = target;
7168 ignore = (target == const0_rtx
7169 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
7170 || code == CONVERT_EXPR || code == COND_EXPR
7171 || code == VIEW_CONVERT_EXPR)
7172 && TREE_CODE (type) == VOID_TYPE));
7174 /* If we are going to ignore this result, we need only do something
7175 if there is a side-effect somewhere in the expression. If there
7176 is, short-circuit the most common cases here. Note that we must
7177 not call expand_expr with anything but const0_rtx in case this
7178 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
7180 if (ignore)
7182 if (! TREE_SIDE_EFFECTS (exp))
7183 return const0_rtx;
7185 /* Ensure we reference a volatile object even if value is ignored, but
7186 don't do this if all we are doing is taking its address. */
7187 if (TREE_THIS_VOLATILE (exp)
7188 && TREE_CODE (exp) != FUNCTION_DECL
7189 && mode != VOIDmode && mode != BLKmode
7190 && modifier != EXPAND_CONST_ADDRESS)
7192 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
7193 if (MEM_P (temp))
7194 temp = copy_to_reg (temp);
7195 return const0_rtx;
7198 if (TREE_CODE_CLASS (code) == tcc_unary
7199 || code == COMPONENT_REF || code == INDIRECT_REF)
7200 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7201 modifier);
7203 else if (TREE_CODE_CLASS (code) == tcc_binary
7204 || TREE_CODE_CLASS (code) == tcc_comparison
7205 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
7207 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7208 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7209 return const0_rtx;
7211 else if (code == BIT_FIELD_REF)
7213 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7214 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7215 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
7216 return const0_rtx;
7219 target = 0;
7223 switch (code)
7225 case LABEL_DECL:
7227 tree function = decl_function_context (exp);
7229 temp = label_rtx (exp);
7230 temp = gen_rtx_LABEL_REF (Pmode, temp);
7232 if (function != current_function_decl
7233 && function != 0)
7234 LABEL_REF_NONLOCAL_P (temp) = 1;
7236 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
7237 return temp;
7240 case SSA_NAME:
7241 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
7242 NULL);
7244 case PARM_DECL:
7245 case VAR_DECL:
7246 /* If a static var's type was incomplete when the decl was written,
7247 but the type is complete now, lay out the decl now. */
7248 if (DECL_SIZE (exp) == 0
7249 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
7250 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
7251 layout_decl (exp, 0);
7253 /* TLS emulation hook - replace __thread vars with
7254 *__emutls_get_address (&_emutls.var). */
7255 if (! targetm.have_tls
7256 && TREE_CODE (exp) == VAR_DECL
7257 && DECL_THREAD_LOCAL_P (exp))
7259 exp = build_fold_indirect_ref (emutls_var_address (exp));
7260 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
7263 /* ... fall through ... */
7265 case FUNCTION_DECL:
7266 case RESULT_DECL:
7267 decl_rtl = DECL_RTL (exp);
7268 gcc_assert (decl_rtl);
7269 decl_rtl = copy_rtx (decl_rtl);
7271 /* Ensure the variable is marked as used even if it doesn't go through
7272 a parser. If it hasn't been used yet, write out an external
7273 definition. */
7274 if (! TREE_USED (exp))
7276 assemble_external (exp);
7277 TREE_USED (exp) = 1;
7280 /* Show we haven't gotten RTL for this yet. */
7281 temp = 0;
7283 /* Variables inherited from containing functions should have
7284 been lowered by this point. */
7285 context = decl_function_context (exp);
7286 gcc_assert (!context
7287 || context == current_function_decl
7288 || TREE_STATIC (exp)
7289 /* ??? C++ creates functions that are not TREE_STATIC. */
7290 || TREE_CODE (exp) == FUNCTION_DECL);
7292 /* This is the case of an array whose size is to be determined
7293 from its initializer, while the initializer is still being parsed.
7294 See expand_decl. */
7296 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
7297 temp = validize_mem (decl_rtl);
7299 /* If DECL_RTL is memory, we are in the normal case and the
7300 address is not valid, get the address into a register. */
7302 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
7304 if (alt_rtl)
7305 *alt_rtl = decl_rtl;
7306 decl_rtl = use_anchored_address (decl_rtl);
7307 if (modifier != EXPAND_CONST_ADDRESS
7308 && modifier != EXPAND_SUM
7309 && !memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0)))
7310 temp = replace_equiv_address (decl_rtl,
7311 copy_rtx (XEXP (decl_rtl, 0)));
7314 /* If we got something, return it. But first, set the alignment
7315 if the address is a register. */
7316 if (temp != 0)
7318 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
7319 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
7321 return temp;
7324 /* If the mode of DECL_RTL does not match that of the decl, it
7325 must be a promoted value. We return a SUBREG of the wanted mode,
7326 but mark it so that we know that it was already extended. */
7328 if (REG_P (decl_rtl)
7329 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7331 enum machine_mode pmode;
7333 /* Get the signedness used for this variable. Ensure we get the
7334 same mode we got when the variable was declared. */
7335 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7336 (TREE_CODE (exp) == RESULT_DECL
7337 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7338 gcc_assert (GET_MODE (decl_rtl) == pmode);
7340 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7341 SUBREG_PROMOTED_VAR_P (temp) = 1;
7342 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7343 return temp;
7346 return decl_rtl;
7348 case INTEGER_CST:
7349 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7350 TREE_INT_CST_HIGH (exp), mode);
7352 return temp;
7354 case VECTOR_CST:
7356 tree tmp = NULL_TREE;
7357 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7358 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
7359 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
7360 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
7361 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
7362 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
7363 return const_vector_from_tree (exp);
7364 if (GET_MODE_CLASS (mode) == MODE_INT)
7366 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7367 if (type_for_mode)
7368 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7370 if (!tmp)
7371 tmp = build_constructor_from_list (type,
7372 TREE_VECTOR_CST_ELTS (exp));
7373 return expand_expr (tmp, ignore ? const0_rtx : target,
7374 tmode, modifier);
7377 case CONST_DECL:
7378 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7380 case REAL_CST:
7381 /* If optimized, generate immediate CONST_DOUBLE
7382 which will be turned into memory by reload if necessary.
7384 We used to force a register so that loop.c could see it. But
7385 this does not allow gen_* patterns to perform optimizations with
7386 the constants. It also produces two insns in cases like "x = 1.0;".
7387 On most machines, floating-point constants are not permitted in
7388 many insns, so we'd end up copying it to a register in any case.
7390 Now, we do the copying in expand_binop, if appropriate. */
7391 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7392 TYPE_MODE (TREE_TYPE (exp)));
7394 case FIXED_CST:
7395 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
7396 TYPE_MODE (TREE_TYPE (exp)));
7398 case COMPLEX_CST:
7399 /* Handle evaluating a complex constant in a CONCAT target. */
7400 if (original_target && GET_CODE (original_target) == CONCAT)
7402 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7403 rtx rtarg, itarg;
7405 rtarg = XEXP (original_target, 0);
7406 itarg = XEXP (original_target, 1);
7408 /* Move the real and imaginary parts separately. */
7409 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
7410 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
7412 if (op0 != rtarg)
7413 emit_move_insn (rtarg, op0);
7414 if (op1 != itarg)
7415 emit_move_insn (itarg, op1);
7417 return original_target;
7420 /* ... fall through ... */
7422 case STRING_CST:
7423 temp = expand_expr_constant (exp, 1, modifier);
7425 /* temp contains a constant address.
7426 On RISC machines where a constant address isn't valid,
7427 make some insns to get that address into a register. */
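/* E.g. on RISC targets the symbolic address may be materialized
with a high/lo_sum pair before it is usable as a memory address. */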
7428 if (modifier != EXPAND_CONST_ADDRESS
7429 && modifier != EXPAND_INITIALIZER
7430 && modifier != EXPAND_SUM
7431 && ! memory_address_p (mode, XEXP (temp, 0)))
7432 return replace_equiv_address (temp,
7433 copy_rtx (XEXP (temp, 0)));
7434 return temp;
7436 case SAVE_EXPR:
7438 tree val = TREE_OPERAND (exp, 0);
7439 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7441 if (!SAVE_EXPR_RESOLVED_P (exp))
7443 /* We can indeed still hit this case, typically via builtin
7444 expanders calling save_expr immediately before expanding
7445 something. Assume this means that we only have to deal
7446 with non-BLKmode values. */
7447 gcc_assert (GET_MODE (ret) != BLKmode);
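/* Resolve the SAVE_EXPR by hanging the computed value off an
artificial VAR_DECL, so that re-expanding this SAVE_EXPR later
just re-reads that decl's DECL_RTL. */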
7449 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7450 DECL_ARTIFICIAL (val) = 1;
7451 DECL_IGNORED_P (val) = 1;
7452 TREE_OPERAND (exp, 0) = val;
7453 SAVE_EXPR_RESOLVED_P (exp) = 1;
7455 if (!CONSTANT_P (ret))
7456 ret = copy_to_reg (ret);
7457 SET_DECL_RTL (val, ret);
7460 return ret;
7463 case GOTO_EXPR:
7464 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7465 expand_goto (TREE_OPERAND (exp, 0));
7466 else
7467 expand_computed_goto (TREE_OPERAND (exp, 0));
7468 return const0_rtx;
7470 case CONSTRUCTOR:
7471 /* If we don't need the result, just ensure we evaluate any
7472 subexpressions. */
7473 if (ignore)
7475 unsigned HOST_WIDE_INT idx;
7476 tree value;
7478 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7479 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
7481 return const0_rtx;
7484 return expand_constructor (exp, target, modifier, false);
7486 case MISALIGNED_INDIRECT_REF:
7487 case ALIGN_INDIRECT_REF:
7488 case INDIRECT_REF:
7490 tree exp1 = TREE_OPERAND (exp, 0);
7492 if (modifier != EXPAND_WRITE)
7494 tree t;
7496 t = fold_read_from_constant_string (exp);
7497 if (t)
7498 return expand_expr (t, target, tmode, modifier);
7501 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7502 op0 = memory_address (mode, op0);
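/* For ALIGN_INDIRECT_REF, force the natural alignment of the type
by masking off the low-order address bits, e.g. (addr & -16)
for a 16-byte alignment. */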
7504 if (code == ALIGN_INDIRECT_REF)
7506 int align = TYPE_ALIGN_UNIT (type);
7507 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7508 op0 = memory_address (mode, op0);
7511 temp = gen_rtx_MEM (mode, op0);
7513 set_mem_attributes (temp, exp, 0);
7515 /* Resolve the misalignment now, so that we don't have to remember
7516 to resolve it later. Of course, this only works for reads. */
7517 /* ??? When we get around to supporting writes, we'll have to handle
7518 this in store_expr directly. The vectorizer isn't generating
7519 those yet, however. */
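/* E.g. on SSE targets the movmisalign pattern typically expands
to an unaligned vector move such as movups/movdqu. */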
7520 if (code == MISALIGNED_INDIRECT_REF)
7522 int icode;
7523 rtx reg, insn;
7525 gcc_assert (modifier == EXPAND_NORMAL
7526 || modifier == EXPAND_STACK_PARM);
7528 /* The vectorizer should have already checked the mode. */
7529 icode = optab_handler (movmisalign_optab, mode)->insn_code;
7530 gcc_assert (icode != CODE_FOR_nothing);
7532 /* We've already validated the memory, and we're creating a
7533 new pseudo destination. The predicates really can't fail. */
7534 reg = gen_reg_rtx (mode);
7536 /* Nor can the insn generator. */
7537 insn = GEN_FCN (icode) (reg, temp);
7538 emit_insn (insn);
7540 return reg;
7543 return temp;
7546 case TARGET_MEM_REF:
7548 struct mem_address addr;
7550 get_address_description (exp, &addr);
7551 op0 = addr_for_mem_ref (&addr, true);
7552 op0 = memory_address (mode, op0);
7553 temp = gen_rtx_MEM (mode, op0);
7554 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7556 return temp;
7558 case ARRAY_REF:
7561 tree array = TREE_OPERAND (exp, 0);
7562 tree index = TREE_OPERAND (exp, 1);
7564 /* Fold an expression like: "foo"[2].
7565 This is not done in fold so it won't happen inside &.
7566 Don't fold if this is for wide characters since it's too
7567 difficult to do correctly and this is a very rare case. */
7569 if (modifier != EXPAND_CONST_ADDRESS
7570 && modifier != EXPAND_INITIALIZER
7571 && modifier != EXPAND_MEMORY)
7573 tree t = fold_read_from_constant_string (exp);
7575 if (t)
7576 return expand_expr (t, target, tmode, modifier);
7579 /* If this is a constant index into a constant array,
7580 just get the value from the array. Handle both the cases when
7581 we have an explicit constructor and when our operand is a variable
7582 that was declared const. */
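/* E.g. given "static const int tbl[] = { 2, 3, 5 };", the
reference tbl[1] expands directly to the constant 3. */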
7584 if (modifier != EXPAND_CONST_ADDRESS
7585 && modifier != EXPAND_INITIALIZER
7586 && modifier != EXPAND_MEMORY
7587 && TREE_CODE (array) == CONSTRUCTOR
7588 && ! TREE_SIDE_EFFECTS (array)
7589 && TREE_CODE (index) == INTEGER_CST)
7591 unsigned HOST_WIDE_INT ix;
7592 tree field, value;
7594 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7595 field, value)
7596 if (tree_int_cst_equal (field, index))
7598 if (!TREE_SIDE_EFFECTS (value))
7599 return expand_expr (fold (value), target, tmode, modifier);
7600 break;
7604 else if (optimize >= 1
7605 && modifier != EXPAND_CONST_ADDRESS
7606 && modifier != EXPAND_INITIALIZER
7607 && modifier != EXPAND_MEMORY
7608 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7609 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7610 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7611 && targetm.binds_local_p (array))
7613 if (TREE_CODE (index) == INTEGER_CST)
7615 tree init = DECL_INITIAL (array);
7617 if (TREE_CODE (init) == CONSTRUCTOR)
7619 unsigned HOST_WIDE_INT ix;
7620 tree field, value;
7622 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7623 field, value)
7624 if (tree_int_cst_equal (field, index))
7626 if (TREE_SIDE_EFFECTS (value))
7627 break;
7629 if (TREE_CODE (value) == CONSTRUCTOR)
7631 /* If VALUE is a CONSTRUCTOR, this
7632 optimization is only useful if
7633 this doesn't store the CONSTRUCTOR
7634 into memory. If it does, it is more
7635 efficient to just load the data from
7636 the array directly. */
7637 rtx ret = expand_constructor (value, target,
7638 modifier, true);
7639 if (ret == NULL_RTX)
7640 break;
7643 return expand_expr (fold (value), target, tmode,
7644 modifier);
7647 else if (TREE_CODE (init) == STRING_CST)
7649 tree index1 = index;
7650 tree low_bound = array_ref_low_bound (exp);
7651 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7653 /* Optimize the special case of a zero lower bound.
7655 We convert the low_bound to sizetype to avoid some problems
7656 with constant folding. (E.g. suppose the lower bound is 1,
7657 and its mode is QI. Without the conversion, (ARRAY
7658 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7659 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7661 if (! integer_zerop (low_bound))
7662 index1 = size_diffop (index1, fold_convert (sizetype,
7663 low_bound));
7665 if (0 > compare_tree_int (index1,
7666 TREE_STRING_LENGTH (init)))
7668 tree type = TREE_TYPE (TREE_TYPE (init));
7669 enum machine_mode mode = TYPE_MODE (type);
7671 if (GET_MODE_CLASS (mode) == MODE_INT
7672 && GET_MODE_SIZE (mode) == 1)
7673 return gen_int_mode (TREE_STRING_POINTER (init)
7674 [TREE_INT_CST_LOW (index1)],
7675 mode);
7681 goto normal_inner_ref;
7683 case COMPONENT_REF:
7684 /* If the operand is a CONSTRUCTOR, we can just extract the
7685 appropriate field if it is present. */
7686 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7688 unsigned HOST_WIDE_INT idx;
7689 tree field, value;
7691 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7692 idx, field, value)
7693 if (field == TREE_OPERAND (exp, 1)
7694 /* We can normally use the value of the field in the
7695 CONSTRUCTOR. However, if this is a bitfield in
7696 an integral mode that we can fit in a HOST_WIDE_INT,
7697 we must mask only the number of bits in the bitfield,
7698 since this is done implicitly by the constructor. If
7699 the bitfield does not meet either of those conditions,
7700 we can't do this optimization. */
7701 && (! DECL_BIT_FIELD (field)
7702 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7703 && (GET_MODE_BITSIZE (DECL_MODE (field))
7704 <= HOST_BITS_PER_WIDE_INT))))
7706 if (DECL_BIT_FIELD (field)
7707 && modifier == EXPAND_STACK_PARM)
7708 target = 0;
7709 op0 = expand_expr (value, target, tmode, modifier);
7710 if (DECL_BIT_FIELD (field))
7712 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7713 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7715 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7717 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7718 op0 = expand_and (imode, op0, op1, target);
7720 else
7722 tree count
7723 = build_int_cst (NULL_TREE,
7724 GET_MODE_BITSIZE (imode) - bitsize);
7726 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7727 target, 0);
7728 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7729 target, 0);
7733 return op0;
7736 goto normal_inner_ref;
7738 case BIT_FIELD_REF:
7739 case ARRAY_RANGE_REF:
7740 normal_inner_ref:
7742 enum machine_mode mode1;
7743 HOST_WIDE_INT bitsize, bitpos;
7744 tree offset;
7745 int volatilep = 0;
7746 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7747 &mode1, &unsignedp, &volatilep, true);
7748 rtx orig_op0;
7750 /* If we got back the original object, something is wrong. Perhaps
7751 we are evaluating an expression too early. In any event, don't
7752 infinitely recurse. */
7753 gcc_assert (tem != exp);
7755 /* If TEM's type is a union of variable size, pass TARGET to the inner
7756 computation, since it will need a temporary and TARGET is known
7757 to be adequate. This occurs in unchecked conversion in Ada. */
7759 orig_op0 = op0
7760 = expand_expr (tem,
7761 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7762 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7763 != INTEGER_CST)
7764 && modifier != EXPAND_STACK_PARM
7765 ? target : NULL_RTX),
7766 VOIDmode,
7767 (modifier == EXPAND_INITIALIZER
7768 || modifier == EXPAND_CONST_ADDRESS
7769 || modifier == EXPAND_STACK_PARM)
7770 ? modifier : EXPAND_NORMAL);
7772 /* If this is a constant, put it into a register if it is a legitimate
7773 constant, OFFSET is 0, and we won't try to extract outside the
7774 register (in case we were passed a partially uninitialized object
7775 or a view_conversion to a larger size). Force the constant to
7776 memory otherwise. */
7777 if (CONSTANT_P (op0))
7779 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7780 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7781 && offset == 0
7782 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7783 op0 = force_reg (mode, op0);
7784 else
7785 op0 = validize_mem (force_const_mem (mode, op0));
7788 /* Otherwise, if this object is not in memory and we either have an
7789 offset, a BLKmode result, or a reference outside the object, put it
7790 there. Such cases can occur in Ada if we have unchecked conversion
7791 of an expression from a scalar type to an array or record type or
7792 for an ARRAY_RANGE_REF whose type is BLKmode. */
7793 else if (!MEM_P (op0)
7794 && (offset != 0
7795 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7796 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7798 tree nt = build_qualified_type (TREE_TYPE (tem),
7799 (TYPE_QUALS (TREE_TYPE (tem))
7800 | TYPE_QUAL_CONST));
7801 rtx memloc = assign_temp (nt, 1, 1, 1);
7803 emit_move_insn (memloc, op0);
7804 op0 = memloc;
7807 if (offset != 0)
7809 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7810 EXPAND_SUM);
7812 gcc_assert (MEM_P (op0));
7814 #ifdef POINTERS_EXTEND_UNSIGNED
7815 if (GET_MODE (offset_rtx) != Pmode)
7816 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7817 #else
7818 if (GET_MODE (offset_rtx) != ptr_mode)
7819 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7820 #endif
7822 if (GET_MODE (op0) == BLKmode
7823 /* A constant address in OP0 can have VOIDmode; we must
7824 not try to call force_reg in that case. */
7825 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7826 && bitsize != 0
7827 && (bitpos % bitsize) == 0
7828 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7829 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7831 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7832 bitpos = 0;
7835 op0 = offset_address (op0, offset_rtx,
7836 highest_pow2_factor (offset));
7839 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7840 record its alignment as BIGGEST_ALIGNMENT. */
7841 if (MEM_P (op0) && bitpos == 0 && offset != 0
7842 && is_aligning_offset (offset, tem))
7843 set_mem_align (op0, BIGGEST_ALIGNMENT);
7845 /* Don't forget about volatility even if this is a bitfield. */
7846 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7848 if (op0 == orig_op0)
7849 op0 = copy_rtx (op0);
7851 MEM_VOLATILE_P (op0) = 1;
7854 /* The following code doesn't handle CONCAT.
7855 Assume only bitpos == 0 can be used for CONCAT, due to
7856 one-element arrays having the same mode as their element. */
7857 if (GET_CODE (op0) == CONCAT)
7859 gcc_assert (bitpos == 0
7860 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7861 return op0;
7864 /* In cases where an aligned union has an unaligned object
7865 as a field, we might be extracting a BLKmode value from
7866 an integer-mode (e.g., SImode) object. Handle this case
7867 by doing the extract into an object as wide as the field
7868 (which we know to be the width of a basic mode), then
7869 storing into memory, and changing the mode to BLKmode. */
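/* Roughly: extract the field into an integer register of the
field's width, spill that register to a stack temporary, and
hand back the temporary with its mode changed to BLKmode. */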
7870 if (mode1 == VOIDmode
7871 || REG_P (op0) || GET_CODE (op0) == SUBREG
7872 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7873 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7874 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7875 && modifier != EXPAND_CONST_ADDRESS
7876 && modifier != EXPAND_INITIALIZER)
7877 /* If the field isn't aligned enough to fetch as a memref,
7878 fetch it as a bit field. */
7879 || (mode1 != BLKmode
7880 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7881 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7882 || (MEM_P (op0)
7883 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7884 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7885 && ((modifier == EXPAND_CONST_ADDRESS
7886 || modifier == EXPAND_INITIALIZER)
7887 ? STRICT_ALIGNMENT
7888 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7889 || (bitpos % BITS_PER_UNIT != 0)))
7890 /* If the type and the field are a constant size and the
7891 size of the type isn't the same size as the bitfield,
7892 we must use bitfield operations. */
7893 || (bitsize >= 0
7894 && TYPE_SIZE (TREE_TYPE (exp))
7895 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7896 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7897 bitsize)))
7899 enum machine_mode ext_mode = mode;
7901 if (ext_mode == BLKmode
7902 && ! (target != 0 && MEM_P (op0)
7903 && MEM_P (target)
7904 && bitpos % BITS_PER_UNIT == 0))
7905 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7907 if (ext_mode == BLKmode)
7909 if (target == 0)
7910 target = assign_temp (type, 0, 1, 1);
7912 if (bitsize == 0)
7913 return target;
7915 /* In this case, BITPOS must start at a byte boundary and
7916 TARGET, if specified, must be a MEM. */
7917 gcc_assert (MEM_P (op0)
7918 && (!target || MEM_P (target))
7919 && !(bitpos % BITS_PER_UNIT));
7921 emit_block_move (target,
7922 adjust_address (op0, VOIDmode,
7923 bitpos / BITS_PER_UNIT),
7924 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7925 / BITS_PER_UNIT),
7926 (modifier == EXPAND_STACK_PARM
7927 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7929 return target;
7932 op0 = validize_mem (op0);
7934 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7935 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7937 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7938 (modifier == EXPAND_STACK_PARM
7939 ? NULL_RTX : target),
7940 ext_mode, ext_mode);
7942 /* If the result is a record type and BITSIZE is narrower than
7943 the mode of OP0, an integral mode, and this is a big endian
7944 machine, we must put the field into the high-order bits. */
7945 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7946 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7947 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7948 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7949 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7950 - bitsize),
7951 op0, 1);
7953 /* If the result type is BLKmode, store the data into a temporary
7954 of the appropriate type, but with the mode corresponding to the
7955 mode for the data we have (op0's mode). It's tempting to make
7956 this a constant type, since we know it's only being stored once,
7957 but that can cause problems if we are taking the address of this
7958 COMPONENT_REF because the MEM of any reference via that address
7959 will have flags corresponding to the type, which will not
7960 necessarily be constant. */
7961 if (mode == BLKmode)
7963 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
7964 rtx new;
7966 /* If the reference doesn't use the alias set of its type,
7967 we cannot create the temporary using that type. */
7968 if (component_uses_parent_alias_set (exp))
7970 new = assign_stack_local (ext_mode, size, 0);
7971 set_mem_alias_set (new, get_alias_set (exp));
7973 else
7974 new = assign_stack_temp_for_type (ext_mode, size, 0, type);
7976 emit_move_insn (new, op0);
7977 op0 = copy_rtx (new);
7978 PUT_MODE (op0, BLKmode);
7979 set_mem_attributes (op0, exp, 1);
7982 return op0;
7985 /* If the result is BLKmode, use that to access the object
7986 now as well. */
7987 if (mode == BLKmode)
7988 mode1 = BLKmode;
7990 /* Get a reference to just this component. */
7991 if (modifier == EXPAND_CONST_ADDRESS
7992 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7993 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7994 else
7995 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7997 if (op0 == orig_op0)
7998 op0 = copy_rtx (op0);
8000 set_mem_attributes (op0, exp, 0);
8001 if (REG_P (XEXP (op0, 0)))
8002 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
8004 MEM_VOLATILE_P (op0) |= volatilep;
8005 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
8006 || modifier == EXPAND_CONST_ADDRESS
8007 || modifier == EXPAND_INITIALIZER)
8008 return op0;
8009 else if (target == 0)
8010 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8012 convert_move (target, op0, unsignedp);
8013 return target;
8016 case OBJ_TYPE_REF:
8017 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
8019 case CALL_EXPR:
8020 /* All valid uses of __builtin_va_arg_pack () are removed during
8021 inlining. */
8022 if (CALL_EXPR_VA_ARG_PACK (exp))
8023 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
8025 tree fndecl = get_callee_fndecl (exp), attr;
8027 if (fndecl
8028 && (attr = lookup_attribute ("error",
8029 DECL_ATTRIBUTES (fndecl))) != NULL)
8030 error ("%Kcall to %qs declared with attribute error: %s",
8031 exp, lang_hooks.decl_printable_name (fndecl, 1),
8032 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
8033 if (fndecl
8034 && (attr = lookup_attribute ("warning",
8035 DECL_ATTRIBUTES (fndecl))) != NULL)
8036 warning (0, "%Kcall to %qs declared with attribute warning: %s",
8037 exp, lang_hooks.decl_printable_name (fndecl, 1),
8038 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
8040 /* Check for a built-in function. */
8041 if (fndecl && DECL_BUILT_IN (fndecl))
8043 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_FRONTEND)
8044 return lang_hooks.expand_expr (exp, original_target,
8045 tmode, modifier, alt_rtl);
8046 else
8047 return expand_builtin (exp, target, subtarget, tmode, ignore);
8050 return expand_call (exp, target, ignore);
8052 case NON_LVALUE_EXPR:
8053 case NOP_EXPR:
8054 case CONVERT_EXPR:
8055 if (TREE_OPERAND (exp, 0) == error_mark_node)
8056 return const0_rtx;
8058 if (TREE_CODE (type) == UNION_TYPE)
8060 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
8062 /* If both input and output are BLKmode, this conversion isn't doing
8063 anything except possibly changing memory attributes. */
8064 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8066 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
8067 modifier);
8069 result = copy_rtx (result);
8070 set_mem_attributes (result, exp, 0);
8071 return result;
8074 if (target == 0)
8076 if (TYPE_MODE (type) != BLKmode)
8077 target = gen_reg_rtx (TYPE_MODE (type));
8078 else
8079 target = assign_temp (type, 0, 1, 1);
8082 if (MEM_P (target))
8083 /* Store data into beginning of memory target. */
8084 store_expr (TREE_OPERAND (exp, 0),
8085 adjust_address (target, TYPE_MODE (valtype), 0),
8086 modifier == EXPAND_STACK_PARM,
8087 false);
8089 else
8091 gcc_assert (REG_P (target));
8093 /* Store this field into a union of the proper type. */
8094 store_field (target,
8095 MIN ((int_size_in_bytes (TREE_TYPE
8096 (TREE_OPERAND (exp, 0)))
8097 * BITS_PER_UNIT),
8098 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8099 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
8100 type, 0, false);
8103 /* Return the entire union. */
8104 return target;
8107 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8109 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
8110 modifier);
8112 /* If the signedness of the conversion differs and OP0 is
8113 a promoted SUBREG, clear that indication since we now
8114 have to do the proper extension. */
8115 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
8116 && GET_CODE (op0) == SUBREG)
8117 SUBREG_PROMOTED_VAR_P (op0) = 0;
8119 return REDUCE_BIT_FIELD (op0);
8122 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
8123 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8124 if (GET_MODE (op0) == mode)
8127 /* If OP0 is a constant, just convert it into the proper mode. */
8128 else if (CONSTANT_P (op0))
8130 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8131 enum machine_mode inner_mode = TYPE_MODE (inner_type);
8133 if (modifier == EXPAND_INITIALIZER)
8134 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8135 subreg_lowpart_offset (mode,
8136 inner_mode));
8137 else
8138 op0 = convert_modes (mode, inner_mode, op0,
8139 TYPE_UNSIGNED (inner_type));
8142 else if (modifier == EXPAND_INITIALIZER)
8143 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8145 else if (target == 0)
8146 op0 = convert_to_mode (mode, op0,
8147 TYPE_UNSIGNED (TREE_TYPE
8148 (TREE_OPERAND (exp, 0))));
8149 else
8151 convert_move (target, op0,
8152 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8153 op0 = target;
8156 return REDUCE_BIT_FIELD (op0);
8158 case VIEW_CONVERT_EXPR:
8159 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
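/* A VIEW_CONVERT_EXPR reinterprets the operand's bits in a new type,
e.g. viewing a 32-bit float as a 32-bit integer; no value
conversion happens below, only mode and placement adjustments. */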
8161 /* If the input and output modes are both the same, we are done. */
8162 if (TYPE_MODE (type) == GET_MODE (op0))
8164 /* If neither mode is BLKmode, and both modes are the same size
8165 then we can use gen_lowpart. */
8166 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
8167 && GET_MODE_SIZE (TYPE_MODE (type))
8168 == GET_MODE_SIZE (GET_MODE (op0)))
8170 if (GET_CODE (op0) == SUBREG)
8171 op0 = force_reg (GET_MODE (op0), op0);
8172 op0 = gen_lowpart (TYPE_MODE (type), op0);
8174 /* If both modes are integral, then we can convert from one to the
8175 other. */
8176 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
8177 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
8178 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
8179 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8180 /* As a last resort, spill op0 to memory, and reload it in a
8181 different mode. */
8182 else if (!MEM_P (op0))
8184 /* If the operand is not a MEM, force it into memory. Since we
8185 are going to be changing the mode of the MEM, don't call
8186 force_const_mem for constants because we don't allow pool
8187 constants to change mode. */
8188 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8190 gcc_assert (!TREE_ADDRESSABLE (exp));
8192 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
8193 target
8194 = assign_stack_temp_for_type
8195 (TYPE_MODE (inner_type),
8196 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
8198 emit_move_insn (target, op0);
8199 op0 = target;
8202 /* At this point, OP0 is in the correct mode. If the output type is such
8203 that the operand is known to be aligned, indicate that it is.
8204 Otherwise, we need only be concerned about alignment for non-BLKmode
8205 results. */
8206 if (MEM_P (op0))
8208 op0 = copy_rtx (op0);
8210 if (TYPE_ALIGN_OK (type))
8211 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8212 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8213 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8215 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8216 HOST_WIDE_INT temp_size
8217 = MAX (int_size_in_bytes (inner_type),
8218 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8219 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8220 temp_size, 0, type);
8221 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8223 gcc_assert (!TREE_ADDRESSABLE (exp));
8225 if (GET_MODE (op0) == BLKmode)
8226 emit_block_move (new_with_op0_mode, op0,
8227 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8228 (modifier == EXPAND_STACK_PARM
8229 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8230 else
8231 emit_move_insn (new_with_op0_mode, op0);
8233 op0 = new;
8236 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8239 return op0;
8241 case POINTER_PLUS_EXPR:
8242 /* Even though the sizetype mode and the pointer's mode can be different,
8243 expand is able to handle this correctly and get the correct result out
8244 of the PLUS_EXPR code. */
8245 case PLUS_EXPR:
8247 /* Check if this is a case for multiplication and addition. */
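/* I.e. a PLUS whose first operand multiplies two values widened
from a narrower type, e.g. "(int) s1 * (int) s2 + i0" with short
s1/s2, which can map to a widening multiply-accumulate pattern
([us]madd_widen_optab) when the target provides one. */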
8248 if ((TREE_CODE (type) == INTEGER_TYPE
8249 || TREE_CODE (type) == FIXED_POINT_TYPE)
8250 && TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR)
8252 tree subsubexp0, subsubexp1;
8253 enum tree_code code0, code1, this_code;
8255 subexp0 = TREE_OPERAND (exp, 0);
8256 subsubexp0 = TREE_OPERAND (subexp0, 0);
8257 subsubexp1 = TREE_OPERAND (subexp0, 1);
8258 code0 = TREE_CODE (subsubexp0);
8259 code1 = TREE_CODE (subsubexp1);
8260 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8261 : FIXED_CONVERT_EXPR;
8262 if (code0 == this_code && code1 == this_code
8263 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8264 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8265 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8266 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8267 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8268 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8270 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8271 enum machine_mode innermode = TYPE_MODE (op0type);
8272 bool zextend_p = TYPE_UNSIGNED (op0type);
8273 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
8274 if (sat_p == 0)
8275 this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
8276 else
8277 this_optab = zextend_p ? usmadd_widen_optab
8278 : ssmadd_widen_optab;
8279 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8280 && (optab_handler (this_optab, mode)->insn_code
8281 != CODE_FOR_nothing))
8283 expand_operands (TREE_OPERAND (subsubexp0, 0),
8284 TREE_OPERAND (subsubexp1, 0),
8285 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8286 op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget,
8287 VOIDmode, EXPAND_NORMAL);
8288 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8289 target, unsignedp);
8290 gcc_assert (temp);
8291 return REDUCE_BIT_FIELD (temp);
8296 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8297 something else, make sure we add the register to the constant and
8298 then to the other thing. This case can occur during strength
8299 reduction and doing it this way will produce better code if the
8300 frame pointer or argument pointer is eliminated.
8302 fold-const.c will ensure that the constant is always in the inner
8303 PLUS_EXPR, so the only case we need to do anything about is if
8304 sp, ap, or fp is our second argument, in which case we must swap
8305 the innermost first argument and our second argument. */
8307 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8308 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8309 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
8310 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8311 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8312 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8314 tree t = TREE_OPERAND (exp, 1);
8316 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8317 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8320 /* If the result is to be ptr_mode and we are adding an integer to
8321 something, we might be forming a constant. So try to use
8322 plus_constant. If it produces a sum and we can't accept it,
8323 use force_operand. This allows P = &ARR[const] to generate
8324 efficient code on machines where a SYMBOL_REF is not a valid
8325 address.
8327 If this is an EXPAND_SUM call, always return the sum. */
8328 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8329 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8331 if (modifier == EXPAND_STACK_PARM)
8332 target = 0;
8333 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8334 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8335 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8337 rtx constant_part;
8339 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8340 EXPAND_SUM);
8341 /* Use immed_double_const to ensure that the constant is
8342 truncated according to the mode of OP1, then sign extended
8343 to a HOST_WIDE_INT. Using the constant directly can result
8344 in non-canonical RTL in a 64x32 cross compile. */
8345 constant_part
8346 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8347 (HOST_WIDE_INT) 0,
8348 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8349 op1 = plus_constant (op1, INTVAL (constant_part));
8350 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8351 op1 = force_operand (op1, target);
8352 return REDUCE_BIT_FIELD (op1);
8355 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8356 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8357 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8359 rtx constant_part;
8361 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8362 (modifier == EXPAND_INITIALIZER
8363 ? EXPAND_INITIALIZER : EXPAND_SUM));
8364 if (! CONSTANT_P (op0))
8366 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8367 VOIDmode, modifier);
8368 /* Return a PLUS if modifier says it's OK. */
8369 if (modifier == EXPAND_SUM
8370 || modifier == EXPAND_INITIALIZER)
8371 return simplify_gen_binary (PLUS, mode, op0, op1);
8372 goto binop2;
8374 /* Use immed_double_const to ensure that the constant is
8375 truncated according to the mode of OP1, then sign extended
8376 to a HOST_WIDE_INT. Using the constant directly can result
8377 in non-canonical RTL in a 64x32 cross compile. */
8378 constant_part
8379 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8380 (HOST_WIDE_INT) 0,
8381 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8382 op0 = plus_constant (op0, INTVAL (constant_part));
8383 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8384 op0 = force_operand (op0, target);
8385 return REDUCE_BIT_FIELD (op0);
8389 /* No sense saving up arithmetic to be done
8390 if it's all in the wrong mode to form part of an address.
8391 And force_operand won't know whether to sign-extend or
8392 zero-extend. */
8393 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8394 || mode != ptr_mode)
8396 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8397 subtarget, &op0, &op1, 0);
8398 if (op0 == const0_rtx)
8399 return op1;
8400 if (op1 == const0_rtx)
8401 return op0;
8402 goto binop2;
8405 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8406 subtarget, &op0, &op1, modifier);
8407 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8409 case MINUS_EXPR:
8410 /* Check if this is a case for multiplication and subtraction. */
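/* Analogous to the PLUS_EXPR case above: a widening multiply that
is subtracted can map to a widening multiply-subtract pattern
([us]msub_widen_optab) when the target provides one. */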
8411 if ((TREE_CODE (type) == INTEGER_TYPE
8412 || TREE_CODE (type) == FIXED_POINT_TYPE)
8413 && TREE_CODE (TREE_OPERAND (exp, 1)) == MULT_EXPR)
8415 tree subsubexp0, subsubexp1;
8416 enum tree_code code0, code1, this_code;
8418 subexp1 = TREE_OPERAND (exp, 1);
8419 subsubexp0 = TREE_OPERAND (subexp1, 0);
8420 subsubexp1 = TREE_OPERAND (subexp1, 1);
8421 code0 = TREE_CODE (subsubexp0);
8422 code1 = TREE_CODE (subsubexp1);
8423 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8424 : FIXED_CONVERT_EXPR;
8425 if (code0 == this_code && code1 == this_code
8426 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8427 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8428 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8429 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8430 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8431 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8433 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8434 enum machine_mode innermode = TYPE_MODE (op0type);
8435 bool zextend_p = TYPE_UNSIGNED (op0type);
8436 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
8437 if (sat_p == 0)
8438 this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
8439 else
8440 this_optab = zextend_p ? usmsub_widen_optab
8441 : ssmsub_widen_optab;
8442 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8443 && (optab_handler (this_optab, mode)->insn_code
8444 != CODE_FOR_nothing))
8446 expand_operands (TREE_OPERAND (subsubexp0, 0),
8447 TREE_OPERAND (subsubexp1, 0),
8448 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8449 op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8450 VOIDmode, EXPAND_NORMAL);
8451 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8452 target, unsignedp);
8453 gcc_assert (temp);
8454 return REDUCE_BIT_FIELD (temp);
8459 /* For initializers, we are allowed to return a MINUS of two
8460 symbolic constants. Here we handle all cases when both operands
8461 are constant. */
8464 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8465 && really_constant_p (TREE_OPERAND (exp, 0))
8466 && really_constant_p (TREE_OPERAND (exp, 1)))
8468 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8469 NULL_RTX, &op0, &op1, modifier);
8471 /* If the last operand is a CONST_INT, use plus_constant of
8472 the negated constant. Else make the MINUS. */
8473 if (GET_CODE (op1) == CONST_INT)
8474 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8475 else
8476 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8479 /* No sense saving up arithmetic to be done
8480 if it's all in the wrong mode to form part of an address.
8481 And force_operand won't know whether to sign-extend or
8482 zero-extend. */
8483 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8484 || mode != ptr_mode)
8485 goto binop;
8487 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8488 subtarget, &op0, &op1, modifier);
8490 /* Convert A - const to A + (-const). */
8491 if (GET_CODE (op1) == CONST_INT)
8493 op1 = negate_rtx (mode, op1);
8494 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8497 goto binop2;
8499 case MULT_EXPR:
8500 /* If this is a fixed-point operation, then we cannot use the code
8501 below because "expand_mult" doesn't support sat/no-sat fixed-point
8502 multiplications. */
8503 if (ALL_FIXED_POINT_MODE_P (mode))
8504 goto binop;
8506 /* If the first operand is constant, swap them.
8507 Thus the following special case checks need only
8508 check the second operand. */
8509 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8511 tree t1 = TREE_OPERAND (exp, 0);
8512 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8513 TREE_OPERAND (exp, 1) = t1;
8516 /* Attempt to return something suitable for generating an
8517 indexed address, for machines that support that. */
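/* E.g. under EXPAND_SUM we may return (mult (reg) (const_int 4)),
which the caller can fold into an address like base + index*4 on
targets with scaled addressing modes. */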
8519 if (modifier == EXPAND_SUM && mode == ptr_mode
8520 && host_integerp (TREE_OPERAND (exp, 1), 0))
8522 tree exp1 = TREE_OPERAND (exp, 1);
8524 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8525 EXPAND_SUM);
8527 if (!REG_P (op0))
8528 op0 = force_operand (op0, NULL_RTX);
8529 if (!REG_P (op0))
8530 op0 = copy_to_mode_reg (mode, op0);
8532 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8533 gen_int_mode (tree_low_cst (exp1, 0),
8534 TYPE_MODE (TREE_TYPE (exp1)))));
8537 if (modifier == EXPAND_STACK_PARM)
8538 target = 0;
8540 /* Check for multiplying things that have been extended
8541 from a narrower type. If this machine supports multiplying
8542 in that narrower type with a result in the desired type,
8543 do it that way, and avoid the explicit type-conversion. */
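/* E.g. "(int) (short) a * (int) (short) b" can use a widening
HImode -> SImode multiply (a mulhisi3-style pattern) instead of
extending both operands and doing a full SImode multiply. */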
8545 subexp0 = TREE_OPERAND (exp, 0);
8546 subexp1 = TREE_OPERAND (exp, 1);
8547 /* First, check if we have a multiplication of one signed and one
8548 unsigned operand. */
8549 if (TREE_CODE (subexp0) == NOP_EXPR
8550 && TREE_CODE (subexp1) == NOP_EXPR
8551 && TREE_CODE (type) == INTEGER_TYPE
8552 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8553 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8554 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8555 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8556 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8557 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8559 enum machine_mode innermode
8560 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8561 this_optab = usmul_widen_optab;
8562 if (mode == GET_MODE_WIDER_MODE (innermode))
8564 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8566 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8567 expand_operands (TREE_OPERAND (subexp0, 0),
8568 TREE_OPERAND (subexp1, 0),
8569 NULL_RTX, &op0, &op1, 0);
8570 else
8571 expand_operands (TREE_OPERAND (subexp0, 0),
8572 TREE_OPERAND (subexp1, 0),
8573 NULL_RTX, &op1, &op0, 0);
8575 goto binop3;
8579 /* Check for a multiplication with matching signedness. */
8580 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8581 && TREE_CODE (type) == INTEGER_TYPE
8582 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8583 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8584 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8585 && int_fits_type_p (TREE_OPERAND (exp, 1),
8586 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8587 /* Don't use a widening multiply if a shift will do. */
8588 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8589 > HOST_BITS_PER_WIDE_INT)
8590 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8592 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8593 && (TYPE_PRECISION (TREE_TYPE
8594 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8595 == TYPE_PRECISION (TREE_TYPE
8596 (TREE_OPERAND
8597 (TREE_OPERAND (exp, 0), 0))))
8598 /* If both operands are extended, they must either both
8599 be zero-extended or both be sign-extended. */
8600 && (TYPE_UNSIGNED (TREE_TYPE
8601 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8602 == TYPE_UNSIGNED (TREE_TYPE
8603 (TREE_OPERAND
8604 (TREE_OPERAND (exp, 0), 0)))))))
8606 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8607 enum machine_mode innermode = TYPE_MODE (op0type);
8608 bool zextend_p = TYPE_UNSIGNED (op0type);
8609 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8610 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8612 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8614 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8616 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8617 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8618 TREE_OPERAND (exp, 1),
8619 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8620 else
8621 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8622 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8623 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8624 goto binop3;
8626 else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
8627 && innermode == word_mode)
8629 rtx htem, hipart;
8630 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8631 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8632 op1 = convert_modes (innermode, mode,
8633 expand_normal (TREE_OPERAND (exp, 1)),
8634 unsignedp);
8635 else
8636 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8637 temp = expand_binop (mode, other_optab, op0, op1, target,
8638 unsignedp, OPTAB_LIB_WIDEN);
8639 hipart = gen_highpart (innermode, temp);
8640 htem = expand_mult_highpart_adjust (innermode, hipart,
8641 op0, op1, hipart,
8642 zextend_p);
8643 if (htem != hipart)
8644 emit_move_insn (hipart, htem);
8645 return REDUCE_BIT_FIELD (temp);
8649 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8650 subtarget, &op0, &op1, 0);
8651 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8653 case TRUNC_DIV_EXPR:
8654 case FLOOR_DIV_EXPR:
8655 case CEIL_DIV_EXPR:
8656 case ROUND_DIV_EXPR:
8657 case EXACT_DIV_EXPR:
8658 /* If this is a fixed-point operation, then we cannot use the code
8659 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8660 divisions. */
8661 if (ALL_FIXED_POINT_MODE_P (mode))
8662 goto binop;
8664 if (modifier == EXPAND_STACK_PARM)
8665 target = 0;
8666 /* Possible optimization: compute the dividend with EXPAND_SUM;
8667 then, if the divisor is constant, we can optimize the case
8668 where some terms of the dividend have coefficients divisible by it. */
8669 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8670 subtarget, &op0, &op1, 0);
8671 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8673 case RDIV_EXPR:
8674 goto binop;
8676 case TRUNC_MOD_EXPR:
8677 case FLOOR_MOD_EXPR:
8678 case CEIL_MOD_EXPR:
8679 case ROUND_MOD_EXPR:
8680 if (modifier == EXPAND_STACK_PARM)
8681 target = 0;
8682 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8683 subtarget, &op0, &op1, 0);
8684 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8686 case FIXED_CONVERT_EXPR:
8687 op0 = expand_normal (TREE_OPERAND (exp, 0));
8688 if (target == 0 || modifier == EXPAND_STACK_PARM)
8689 target = gen_reg_rtx (mode);
8691 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == INTEGER_TYPE
8692 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
8693 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8694 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8695 else
8696 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8697 return target;
8699 case FIX_TRUNC_EXPR:
8700 op0 = expand_normal (TREE_OPERAND (exp, 0));
8701 if (target == 0 || modifier == EXPAND_STACK_PARM)
8702 target = gen_reg_rtx (mode);
8703 expand_fix (target, op0, unsignedp);
8704 return target;
8706 case FLOAT_EXPR:
8707 op0 = expand_normal (TREE_OPERAND (exp, 0));
8708 if (target == 0 || modifier == EXPAND_STACK_PARM)
8709 target = gen_reg_rtx (mode);
8710 /* expand_float can't figure out what to do if FROM has VOIDmode.
8711 So give it the correct mode. With -O, cse will optimize this. */
8712 if (GET_MODE (op0) == VOIDmode)
8713 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8714 op0);
8715 expand_float (target, op0,
8716 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8717 return target;
8719 case NEGATE_EXPR:
8720 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8721 VOIDmode, EXPAND_NORMAL);
8722 if (modifier == EXPAND_STACK_PARM)
8723 target = 0;
8724 temp = expand_unop (mode,
8725 optab_for_tree_code (NEGATE_EXPR, type),
8726 op0, target, 0);
8727 gcc_assert (temp);
8728 return REDUCE_BIT_FIELD (temp);
8730 case ABS_EXPR:
8731 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8732 VOIDmode, EXPAND_NORMAL);
8733 if (modifier == EXPAND_STACK_PARM)
8734 target = 0;
8736 /* ABS_EXPR is not valid for complex arguments. */
8737 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8738 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8740 /* Unsigned abs is simply the operand. Testing here means we don't
8741 risk generating incorrect code below. */
8742 if (TYPE_UNSIGNED (type))
8743 return op0;
8745 return expand_abs (mode, op0, target, unsignedp,
8746 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8748 case MAX_EXPR:
8749 case MIN_EXPR:
8750 target = original_target;
8751 if (target == 0
8752 || modifier == EXPAND_STACK_PARM
8753 || (MEM_P (target) && MEM_VOLATILE_P (target))
8754 || GET_MODE (target) != mode
8755 || (REG_P (target)
8756 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8757 target = gen_reg_rtx (mode);
8758 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8759 target, &op0, &op1, 0);
8761 /* First try to do it with a special MIN or MAX instruction.
8762 If that does not win, use a conditional jump to select the proper
8763 value. */
8764 this_optab = optab_for_tree_code (code, type);
8765 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8766 OPTAB_WIDEN);
8767 if (temp != 0)
8768 return temp;
8770 /* At this point, a MEM target is no longer useful; we will get better
8771 code without it. */
8773 if (! REG_P (target))
8774 target = gen_reg_rtx (mode);
8776 /* If op1 was placed in target, swap op0 and op1. */
8777 if (target != op0 && target == op1)
8779 temp = op0;
8780 op0 = op1;
8781 op1 = temp;
8784 /* We generate better code and avoid problems with op1 mentioning
8785 target by forcing op1 into a pseudo if it isn't a constant. */
8786 if (! CONSTANT_P (op1))
8787 op1 = force_reg (mode, op1);
8790 enum rtx_code comparison_code;
8791 rtx cmpop1 = op1;
8793 if (code == MAX_EXPR)
8794 comparison_code = unsignedp ? GEU : GE;
8795 else
8796 comparison_code = unsignedp ? LEU : LE;
8798 /* Canonicalize to comparisons against 0. */
8799 if (op1 == const1_rtx)
8801 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8802 or (a != 0 ? a : 1) for unsigned.
8803 For MIN we are safe converting (a <= 1 ? a : 1)
8804 into (a <= 0 ? a : 1) */
8805 cmpop1 = const0_rtx;
8806 if (code == MAX_EXPR)
8807 comparison_code = unsignedp ? NE : GT;
8809 if (op1 == constm1_rtx && !unsignedp)
8811 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8812 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8813 cmpop1 = const0_rtx;
8814 if (code == MIN_EXPR)
8815 comparison_code = LT;
8817 #ifdef HAVE_conditional_move
8818 /* Use a conditional move if possible. */
8819 if (can_conditionally_move_p (mode))
8821 rtx insn;
8823 /* ??? Same problem as in expmed.c: emit_conditional_move
8824 forces a stack adjustment via compare_from_rtx, and we
8825 lose the stack adjustment if the sequence we are about
8826 to create is discarded. */
8827 do_pending_stack_adjust ();
8829 start_sequence ();
8831 /* Try to emit the conditional move. */
8832 insn = emit_conditional_move (target, comparison_code,
8833 op0, cmpop1, mode,
8834 op0, op1, mode,
8835 unsignedp);
8837 /* If we could do the conditional move, emit the sequence,
8838 and return. */
8839 if (insn)
8841 rtx seq = get_insns ();
8842 end_sequence ();
8843 emit_insn (seq);
8844 return target;
8847 /* Otherwise discard the sequence and fall back to code with
8848 branches. */
8849 end_sequence ();
8851 #endif
8852 if (target != op0)
8853 emit_move_insn (target, op0);
8855 temp = gen_label_rtx ();
8856 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8857 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8859 emit_move_insn (target, op1);
8860 emit_label (temp);
8861 return target;
8863 case BIT_NOT_EXPR:
8864 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8865 VOIDmode, EXPAND_NORMAL);
8866 if (modifier == EXPAND_STACK_PARM)
8867 target = 0;
8868 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8869 gcc_assert (temp);
8870 return temp;
8872 /* ??? Can optimize bitwise operations with one arg constant.
8873 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8874 and (a bitwise1 b) bitwise2 b (etc)
8875 but that is probably not worthwhile. */
8877 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8878 boolean values when we want in all cases to compute both of them. In
8879 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8880 as actual zero-or-1 values and then bitwise anding. In cases where
8881 there cannot be any side effects, better code would be made by
8882 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8883 how to recognize those cases. */
8885 case TRUTH_AND_EXPR:
8886 code = BIT_AND_EXPR;
8887 case BIT_AND_EXPR:
8888 goto binop;
8890 case TRUTH_OR_EXPR:
8891 code = BIT_IOR_EXPR;
8892 case BIT_IOR_EXPR:
8893 goto binop;
8895 case TRUTH_XOR_EXPR:
8896 code = BIT_XOR_EXPR;
8897 case BIT_XOR_EXPR:
8898 goto binop;
8900 case LSHIFT_EXPR:
8901 case RSHIFT_EXPR:
8902 case LROTATE_EXPR:
8903 case RROTATE_EXPR:
8904 /* If this is a fixed-point operation, then we cannot use the code
8905 below because "expand_shift" doesn't support sat/no-sat fixed-point
8906 shifts. */
8907 if (ALL_FIXED_POINT_MODE_P (mode))
8908 goto binop;
8910 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8911 subtarget = 0;
8912 if (modifier == EXPAND_STACK_PARM)
8913 target = 0;
8914 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8915 VOIDmode, EXPAND_NORMAL);
8916 temp = expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8917 unsignedp);
8918 if (code == LSHIFT_EXPR)
8919 temp = REDUCE_BIT_FIELD (temp);
8920 return temp;
8922 /* Could determine the answer when only additive constants differ. Also,
8923 the addition of one can be handled by changing the condition. */
8924 case LT_EXPR:
8925 case LE_EXPR:
8926 case GT_EXPR:
8927 case GE_EXPR:
8928 case EQ_EXPR:
8929 case NE_EXPR:
8930 case UNORDERED_EXPR:
8931 case ORDERED_EXPR:
8932 case UNLT_EXPR:
8933 case UNLE_EXPR:
8934 case UNGT_EXPR:
8935 case UNGE_EXPR:
8936 case UNEQ_EXPR:
8937 case LTGT_EXPR:
8938 temp = do_store_flag (exp,
8939 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8940 tmode != VOIDmode ? tmode : mode, 0);
8941 if (temp != 0)
8942 return temp;
8944 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8945 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8946 && original_target
8947 && REG_P (original_target)
8948 && (GET_MODE (original_target)
8949 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8951 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8952 VOIDmode, EXPAND_NORMAL);
8954 /* If temp is constant, we can just compute the result. */
8955 if (GET_CODE (temp) == CONST_INT)
8957 if (INTVAL (temp) != 0)
8958 emit_move_insn (target, const1_rtx);
8959 else
8960 emit_move_insn (target, const0_rtx);
8962 return target;
8965 if (temp != original_target)
8967 enum machine_mode mode1 = GET_MODE (temp);
8968 if (mode1 == VOIDmode)
8969 mode1 = tmode != VOIDmode ? tmode : mode;
8971 temp = copy_to_mode_reg (mode1, temp);
8974 op1 = gen_label_rtx ();
8975 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8976 GET_MODE (temp), unsignedp, op1);
8977 emit_move_insn (temp, const1_rtx);
8978 emit_label (op1);
8979 return temp;
8982 /* If no set-flag instruction, must generate a conditional store
8983 into a temporary variable. Drop through and handle this
8984 like && and ||. */
8986 if (! ignore
8987 && (target == 0
8988 || modifier == EXPAND_STACK_PARM
8989 || ! safe_from_p (target, exp, 1)
8990 /* Make sure we don't have a hard reg (such as function's return
8991 value) live across basic blocks, if not optimizing. */
8992 || (!optimize && REG_P (target)
8993 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8994 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8996 if (target)
8997 emit_move_insn (target, const0_rtx);
8999 op1 = gen_label_rtx ();
9000 jumpifnot (exp, op1);
9002 if (target)
9003 emit_move_insn (target, const1_rtx);
9005 emit_label (op1);
9006 return ignore ? const0_rtx : target;
9008 case TRUTH_NOT_EXPR:
9009 if (modifier == EXPAND_STACK_PARM)
9010 target = 0;
9011 op0 = expand_expr (TREE_OPERAND (exp, 0), target,
9012 VOIDmode, EXPAND_NORMAL);
9013 /* The parser is careful to generate TRUTH_NOT_EXPR
9014 only with operands that are always zero or one. */
9015 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
9016 target, 1, OPTAB_LIB_WIDEN);
9017 gcc_assert (temp);
9018 return temp;
9020 case STATEMENT_LIST:
9022 tree_stmt_iterator iter;
9024 gcc_assert (ignore);
9026 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9027 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9029 return const0_rtx;
9031 case COND_EXPR:
9032 /* A COND_EXPR with its type being VOID_TYPE represents a
9033 conditional jump and is handled in
9034 expand_gimple_cond_expr. */
9035 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
9037 /* Note that COND_EXPRs whose type is a structure or union
9038 are required to be constructed to contain assignments of
9039 a temporary variable, so that we can evaluate them here
9040 for side effect only. If type is void, we must do likewise. */
9042 gcc_assert (!TREE_ADDRESSABLE (type)
9043 && !ignore
9044 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
9045 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
9047 /* If we are not to produce a result, we have no target. Otherwise,
9048 if a target was specified use it; it will not be used as an
9049 intermediate target unless it is safe. If no target, use a
9050 temporary. */
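/* The expansion below is essentially:
if (!cond) goto lab0; temp = op1; goto lab1;
lab0: temp = op2; lab1:
with lab0/lab1 being the two labels generated just below. */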
9052 if (modifier != EXPAND_STACK_PARM
9053 && original_target
9054 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
9055 && GET_MODE (original_target) == mode
9056 #ifdef HAVE_conditional_move
9057 && (! can_conditionally_move_p (mode)
9058 || REG_P (original_target))
9059 #endif
9060 && !MEM_P (original_target))
9061 temp = original_target;
9062 else
9063 temp = assign_temp (type, 0, 0, 1);
9065 do_pending_stack_adjust ();
9066 NO_DEFER_POP;
9067 op0 = gen_label_rtx ();
9068 op1 = gen_label_rtx ();
9069 jumpifnot (TREE_OPERAND (exp, 0), op0);
9070 store_expr (TREE_OPERAND (exp, 1), temp,
9071 modifier == EXPAND_STACK_PARM,
9072 false);
9074 emit_jump_insn (gen_jump (op1));
9075 emit_barrier ();
9076 emit_label (op0);
9077 store_expr (TREE_OPERAND (exp, 2), temp,
9078 modifier == EXPAND_STACK_PARM,
9079 false);
9081 emit_label (op1);
9082 OK_DEFER_POP;
9083 return temp;
9085 case VEC_COND_EXPR:
9086 target = expand_vec_cond_expr (exp, target);
9087 return target;
9089 case MODIFY_EXPR:
9091 tree lhs = TREE_OPERAND (exp, 0);
9092 tree rhs = TREE_OPERAND (exp, 1);
9093 gcc_assert (ignore);
9094 expand_assignment (lhs, rhs, false);
9095 return const0_rtx;
9098 case GIMPLE_MODIFY_STMT:
9100 tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
9101 tree rhs = GIMPLE_STMT_OPERAND (exp, 1);
9103 gcc_assert (ignore);
9105 /* Check for |= or &= of a bitfield of size 1 into another bitfield
9106 of size 1. In this case, (unless we need the result of the
9107 assignment) we can do this more efficiently with a
9108 test followed by an assignment, if necessary.
9110 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9111 things change so we do, this code should be enhanced to
9112 support it. */
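/* Illustrative source pattern for this special case, using a
   hypothetical struct:
       struct { unsigned a : 1, b : 1; } s;
   Then "s.a |= s.b" expands to roughly "if (s.b) s.a = 1;" and
   "s.a &= s.b" to roughly "if (!s.b) s.a = 0;", avoiding a
   read-modify-write of the destination bit.  */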
9113 if (TREE_CODE (lhs) == COMPONENT_REF
9114 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9115 || TREE_CODE (rhs) == BIT_AND_EXPR)
9116 && TREE_OPERAND (rhs, 0) == lhs
9117 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9118 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9119 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9121 rtx label = gen_label_rtx ();
9122 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9123 do_jump (TREE_OPERAND (rhs, 1),
9124 value ? label : 0,
9125 value ? 0 : label);
9126 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9127 MOVE_NONTEMPORAL (exp));
9128 do_pending_stack_adjust ();
9129 emit_label (label);
9130 return const0_rtx;
9133 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9134 return const0_rtx;
9137 case RETURN_EXPR:
9138 if (!TREE_OPERAND (exp, 0))
9139 expand_null_return ();
9140 else
9141 expand_return (TREE_OPERAND (exp, 0));
9142 return const0_rtx;
9144 case ADDR_EXPR:
9145 return expand_expr_addr_expr (exp, target, tmode, modifier);
9147 case COMPLEX_EXPR:
9148 /* Get the rtx for the operands. */
9149 op0 = expand_normal (TREE_OPERAND (exp, 0));
9150 op1 = expand_normal (TREE_OPERAND (exp, 1));
9152 if (!target)
9153 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9155 /* Move the real (op0) and imaginary (op1) parts to their location. */
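/* E.g. for z = COMPLEX_EXPR <a, b>, op0 (a) supplies the real part
   and op1 (b) the imaginary part of the value built in TARGET.  */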
9156 write_complex_part (target, op0, false);
9157 write_complex_part (target, op1, true);
9159 return target;
9161 case REALPART_EXPR:
9162 op0 = expand_normal (TREE_OPERAND (exp, 0));
9163 return read_complex_part (op0, false);
9165 case IMAGPART_EXPR:
9166 op0 = expand_normal (TREE_OPERAND (exp, 0));
9167 return read_complex_part (op0, true);
9169 case RESX_EXPR:
9170 expand_resx_expr (exp);
9171 return const0_rtx;
9173 case TRY_CATCH_EXPR:
9174 case CATCH_EXPR:
9175 case EH_FILTER_EXPR:
9176 case TRY_FINALLY_EXPR:
9177 /* Lowered by tree-eh.c. */
9178 gcc_unreachable ();
9180 case WITH_CLEANUP_EXPR:
9181 case CLEANUP_POINT_EXPR:
9182 case TARGET_EXPR:
9183 case CASE_LABEL_EXPR:
9184 case VA_ARG_EXPR:
9185 case BIND_EXPR:
9186 case INIT_EXPR:
9187 case CONJ_EXPR:
9188 case COMPOUND_EXPR:
9189 case PREINCREMENT_EXPR:
9190 case PREDECREMENT_EXPR:
9191 case POSTINCREMENT_EXPR:
9192 case POSTDECREMENT_EXPR:
9193 case LOOP_EXPR:
9194 case EXIT_EXPR:
9195 case TRUTH_ANDIF_EXPR:
9196 case TRUTH_ORIF_EXPR:
9197 /* Lowered by gimplify.c. */
9198 gcc_unreachable ();
9200 case CHANGE_DYNAMIC_TYPE_EXPR:
9201 /* This is ignored at the RTL level. The tree level sets the
9202 DECL_POINTER_ALIAS_SET of any affected variable to 0, which is
9203 overkill for the RTL layer but is all that we can
9204 represent. */
9205 return const0_rtx;
9207 case EXC_PTR_EXPR:
9208 return get_exception_pointer (cfun);
9210 case FILTER_EXPR:
9211 return get_exception_filter (cfun);
9213 case FDESC_EXPR:
9214 /* Function descriptors are not valid except as initialization
9215 constants, and should not be expanded. */
9216 gcc_unreachable ();
9218 case SWITCH_EXPR:
9219 expand_case (exp);
9220 return const0_rtx;
9222 case LABEL_EXPR:
9223 expand_label (TREE_OPERAND (exp, 0));
9224 return const0_rtx;
9226 case ASM_EXPR:
9227 expand_asm_expr (exp);
9228 return const0_rtx;
9230 case WITH_SIZE_EXPR:
9231 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9232 have pulled out the size to use in whatever context it needed. */
9233 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
9234 modifier, alt_rtl);
9236 case REALIGN_LOAD_EXPR:
9238 tree oprnd0 = TREE_OPERAND (exp, 0);
9239 tree oprnd1 = TREE_OPERAND (exp, 1);
9240 tree oprnd2 = TREE_OPERAND (exp, 2);
9241 rtx op2;
9243 this_optab = optab_for_tree_code (code, type);
9244 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9245 op2 = expand_normal (oprnd2);
9246 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9247 target, unsignedp);
9248 gcc_assert (temp);
9249 return temp;
9252 case DOT_PROD_EXPR:
9254 tree oprnd0 = TREE_OPERAND (exp, 0);
9255 tree oprnd1 = TREE_OPERAND (exp, 1);
9256 tree oprnd2 = TREE_OPERAND (exp, 2);
9257 rtx op2;
9259 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9260 op2 = expand_normal (oprnd2);
9261 target = expand_widen_pattern_expr (exp, op0, op1, op2,
9262 target, unsignedp);
9263 return target;
9266 case WIDEN_SUM_EXPR:
9268 tree oprnd0 = TREE_OPERAND (exp, 0);
9269 tree oprnd1 = TREE_OPERAND (exp, 1);
9271 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9272 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
9273 target, unsignedp);
9274 return target;
9277 case REDUC_MAX_EXPR:
9278 case REDUC_MIN_EXPR:
9279 case REDUC_PLUS_EXPR:
9281 op0 = expand_normal (TREE_OPERAND (exp, 0));
9282 this_optab = optab_for_tree_code (code, type);
9283 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9284 gcc_assert (temp);
9285 return temp;
9288 case VEC_EXTRACT_EVEN_EXPR:
9289 case VEC_EXTRACT_ODD_EXPR:
9291 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9292 NULL_RTX, &op0, &op1, 0);
9293 this_optab = optab_for_tree_code (code, type);
9294 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9295 OPTAB_WIDEN);
9296 gcc_assert (temp);
9297 return temp;
9300 case VEC_INTERLEAVE_HIGH_EXPR:
9301 case VEC_INTERLEAVE_LOW_EXPR:
9303 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9304 NULL_RTX, &op0, &op1, 0);
9305 this_optab = optab_for_tree_code (code, type);
9306 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9307 OPTAB_WIDEN);
9308 gcc_assert (temp);
9309 return temp;
9312 case VEC_LSHIFT_EXPR:
9313 case VEC_RSHIFT_EXPR:
9315 target = expand_vec_shift_expr (exp, target);
9316 return target;
9319 case VEC_UNPACK_HI_EXPR:
9320 case VEC_UNPACK_LO_EXPR:
9322 op0 = expand_normal (TREE_OPERAND (exp, 0));
9323 this_optab = optab_for_tree_code (code, type);
9324 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
9325 target, unsignedp);
9326 gcc_assert (temp);
9327 return temp;
9330 case VEC_UNPACK_FLOAT_HI_EXPR:
9331 case VEC_UNPACK_FLOAT_LO_EXPR:
9333 op0 = expand_normal (TREE_OPERAND (exp, 0));
9334 /* The signedness is determined from the input operand. */
9335 this_optab = optab_for_tree_code (code,
9336 TREE_TYPE (TREE_OPERAND (exp, 0)));
9337 temp = expand_widen_pattern_expr
9338 (exp, op0, NULL_RTX, NULL_RTX,
9339 target, TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
9341 gcc_assert (temp);
9342 return temp;
9345 case VEC_WIDEN_MULT_HI_EXPR:
9346 case VEC_WIDEN_MULT_LO_EXPR:
9348 tree oprnd0 = TREE_OPERAND (exp, 0);
9349 tree oprnd1 = TREE_OPERAND (exp, 1);
9351 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9352 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
9353 target, unsignedp);
9354 gcc_assert (target);
9355 return target;
9358 case VEC_PACK_TRUNC_EXPR:
9359 case VEC_PACK_SAT_EXPR:
9360 case VEC_PACK_FIX_TRUNC_EXPR:
9362 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9363 goto binop;
9366 case OMP_ATOMIC_LOAD:
9367 case OMP_ATOMIC_STORE:
9368 /* OMP expansion is not run when there were errors, so these codes
9369 can get here. */
9370 gcc_assert (errorcount != 0);
9371 return NULL_RTX;
9373 default:
9374 return lang_hooks.expand_expr (exp, original_target, tmode,
9375 modifier, alt_rtl);
9378 /* Here to do an ordinary binary operator. */
9379 binop:
9380 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9381 subtarget, &op0, &op1, 0);
9382 binop2:
9383 this_optab = optab_for_tree_code (code, type);
9384 binop3:
9385 if (modifier == EXPAND_STACK_PARM)
9386 target = 0;
9387 temp = expand_binop (mode, this_optab, op0, op1, target,
9388 unsignedp, OPTAB_LIB_WIDEN);
9389 gcc_assert (temp);
9390 return REDUCE_BIT_FIELD (temp);
9392 #undef REDUCE_BIT_FIELD
9394 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9395 signedness of TYPE), possibly returning the result in TARGET. */
9396 static rtx
9397 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9399 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9400 if (target && GET_MODE (target) != GET_MODE (exp))
9401 target = 0;
9402 /* For constant values, reduce using build_int_cst_type. */
9403 if (GET_CODE (exp) == CONST_INT)
9405 HOST_WIDE_INT value = INTVAL (exp);
9406 tree t = build_int_cst_type (type, value);
9407 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9409 else if (TYPE_UNSIGNED (type))
9411 rtx mask;
9412 if (prec < HOST_BITS_PER_WIDE_INT)
9413 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
9414 GET_MODE (exp));
9415 else
9416 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
9417 ((unsigned HOST_WIDE_INT) 1
9418 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
9419 GET_MODE (exp));
9420 return expand_and (GET_MODE (exp), exp, mask, target);
9422 else
9424 tree count = build_int_cst (NULL_TREE,
9425 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9426 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9427 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
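/* Worked example: reducing to a 5-bit field held in a 32-bit mode.
   For an unsigned TYPE the mask is (1 << 5) - 1 == 0x1f; for a
   signed TYPE the value is shifted left by 32 - 5 == 27 and then
   arithmetically right by 27, sign-extending from bit 4.  */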
9431 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9432 when applied to the address of EXP produces an address known to be
9433 aligned to more than BIGGEST_ALIGNMENT. */
9435 static int
9436 is_aligning_offset (const_tree offset, const_tree exp)
9438 /* Strip off any conversions. */
9439 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9440 || TREE_CODE (offset) == NOP_EXPR
9441 || TREE_CODE (offset) == CONVERT_EXPR)
9442 offset = TREE_OPERAND (offset, 0);
9444 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9445 a power of 2 and which is larger than BIGGEST_ALIGNMENT in bytes. */
9446 if (TREE_CODE (offset) != BIT_AND_EXPR
9447 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9448 || compare_tree_int (TREE_OPERAND (offset, 1),
9449 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9450 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9451 return 0;
9453 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9454 It must be NEGATE_EXPR. Then strip any more conversions. */
9455 offset = TREE_OPERAND (offset, 0);
9456 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9457 || TREE_CODE (offset) == NOP_EXPR
9458 || TREE_CODE (offset) == CONVERT_EXPR)
9459 offset = TREE_OPERAND (offset, 0);
9461 if (TREE_CODE (offset) != NEGATE_EXPR)
9462 return 0;
9464 offset = TREE_OPERAND (offset, 0);
9465 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9466 || TREE_CODE (offset) == NOP_EXPR
9467 || TREE_CODE (offset) == CONVERT_EXPR)
9468 offset = TREE_OPERAND (offset, 0);
9470 /* This must now be the address of EXP. */
9471 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
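/* Illustrative pattern recognized here (hypothetical source), with
   ALIGN a power of 2 larger than BIGGEST_ALIGNMENT in bytes:
       buf[(-(intptr_t) &buf) & (ALIGN - 1)]
   i.e. an offset that rounds the address of EXP up to the next
   ALIGN boundary.  */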
9474 /* Return the tree node if ARG corresponds to a string constant, or zero
9475 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9476 in bytes within the string that ARG is accessing. The type of the
9477 offset will be `sizetype'. */
9479 tree
9480 string_constant (tree arg, tree *ptr_offset)
9482 tree array, offset, lower_bound;
9483 STRIP_NOPS (arg);
9485 if (TREE_CODE (arg) == ADDR_EXPR)
9487 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9489 *ptr_offset = size_zero_node;
9490 return TREE_OPERAND (arg, 0);
9492 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9494 array = TREE_OPERAND (arg, 0);
9495 offset = size_zero_node;
9497 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9499 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9500 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9501 if (TREE_CODE (array) != STRING_CST
9502 && TREE_CODE (array) != VAR_DECL)
9503 return 0;
9505 /* Check if the array has a nonzero lower bound. */
9506 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9507 if (!integer_zerop (lower_bound))
9509 /* If the lower bound and the offset aren't both constants, return 0. */
9510 if (TREE_CODE (lower_bound) != INTEGER_CST)
9511 return 0;
9512 if (TREE_CODE (offset) != INTEGER_CST)
9513 return 0;
9514 /* Adjust offset by the lower bound. */
9515 offset = size_diffop (fold_convert (sizetype, offset),
9516 fold_convert (sizetype, lower_bound));
9519 else
9520 return 0;
9522 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9524 tree arg0 = TREE_OPERAND (arg, 0);
9525 tree arg1 = TREE_OPERAND (arg, 1);
9527 STRIP_NOPS (arg0);
9528 STRIP_NOPS (arg1);
9530 if (TREE_CODE (arg0) == ADDR_EXPR
9531 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9532 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9534 array = TREE_OPERAND (arg0, 0);
9535 offset = arg1;
9537 else if (TREE_CODE (arg1) == ADDR_EXPR
9538 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9539 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9541 array = TREE_OPERAND (arg1, 0);
9542 offset = arg0;
9544 else
9545 return 0;
9547 else
9548 return 0;
9550 if (TREE_CODE (array) == STRING_CST)
9552 *ptr_offset = fold_convert (sizetype, offset);
9553 return array;
9555 else if (TREE_CODE (array) == VAR_DECL)
9557 int length;
9559 /* Variables initialized to string literals can be handled too. */
9560 if (DECL_INITIAL (array) == NULL_TREE
9561 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9562 return 0;
9564 /* They must also be read-only, non-volatile, and bind locally. */
9565 if (! TREE_READONLY (array)
9566 || TREE_SIDE_EFFECTS (array)
9567 || ! targetm.binds_local_p (array))
9568 return 0;
9570 /* Avoid const char foo[4] = "abcde"; */
9571 if (DECL_SIZE_UNIT (array) == NULL_TREE
9572 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9573 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9574 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9575 return 0;
9577 /* If the variable is bigger than the string literal, OFFSET must be
9578 constant and within the bounds of the string literal. */
9579 offset = fold_convert (sizetype, offset);
9580 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9581 && (! host_integerp (offset, 1)
9582 || compare_tree_int (offset, length) >= 0))
9583 return 0;
9585 *ptr_offset = offset;
9586 return DECL_INITIAL (array);
9589 return 0;
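/* Illustrative calls (hypothetical trees):
     string_constant (&"hello"[2], &off) returns the STRING_CST
       "hello" with *off == 2;
     given "static const char msg[] = "hi";",
     string_constant (&msg[1], &off) returns DECL_INITIAL (msg)
       with *off == 1, provided msg is read-only and binds locally.  */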
9592 /* Generate code to calculate EXP using a store-flag instruction
9593 and return an rtx for the result. EXP is either a comparison
9594 or a TRUTH_NOT_EXPR whose operand is a comparison.
9596 If TARGET is nonzero, store the result there if convenient.
9598 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9599 cheap.
9601 Return zero if there is no suitable set-flag instruction
9602 available on this machine.
9604 Once expand_expr has been called on the arguments of the comparison,
9605 we are committed to doing the store flag, since it is not safe to
9606 re-evaluate the expression. We emit the store-flag insn by calling
9607 emit_store_flag, but only expand the arguments if we have a reason
9608 to believe that emit_store_flag will be successful. If we think that
9609 it will, but it isn't, we have to simulate the store-flag with a
9610 set/jump/set sequence. */
9612 static rtx
9613 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9615 enum rtx_code code;
9616 tree arg0, arg1, type;
9617 tree tem;
9618 enum machine_mode operand_mode;
9619 int invert = 0;
9620 int unsignedp;
9621 rtx op0, op1;
9622 enum insn_code icode;
9623 rtx subtarget = target;
9624 rtx result, label;
9626 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9627 result at the end. We can't simply invert the test since it would
9628 have already been inverted if it were valid. This case occurs for
9629 some floating-point comparisons. */
9631 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9632 invert = 1, exp = TREE_OPERAND (exp, 0);
9634 arg0 = TREE_OPERAND (exp, 0);
9635 arg1 = TREE_OPERAND (exp, 1);
9637 /* Don't crash if the comparison was erroneous. */
9638 if (arg0 == error_mark_node || arg1 == error_mark_node)
9639 return const0_rtx;
9641 type = TREE_TYPE (arg0);
9642 operand_mode = TYPE_MODE (type);
9643 unsignedp = TYPE_UNSIGNED (type);
9645 /* We won't bother with BLKmode store-flag operations because it would mean
9646 passing a lot of information to emit_store_flag. */
9647 if (operand_mode == BLKmode)
9648 return 0;
9650 /* We won't bother with store-flag operations involving function pointers
9651 when function pointers must be canonicalized before comparisons. */
9652 #ifdef HAVE_canonicalize_funcptr_for_compare
9653 if (HAVE_canonicalize_funcptr_for_compare
9654 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9655 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9656 == FUNCTION_TYPE))
9657 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9658 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9659 == FUNCTION_TYPE))))
9660 return 0;
9661 #endif
9663 STRIP_NOPS (arg0);
9664 STRIP_NOPS (arg1);
9666 /* Get the rtx comparison code to use. We know that EXP is a comparison
9667 operation of some type. Some comparisons against 1 and -1 can be
9668 converted to comparisons with zero. Do so here so that the tests
9669 below will be aware that we have a comparison with zero. These
9670 tests will not catch constants in the first operand, but constants
9671 are rarely passed as the first operand. */
9673 switch (TREE_CODE (exp))
9675 case EQ_EXPR:
9676 code = EQ;
9677 break;
9678 case NE_EXPR:
9679 code = NE;
9680 break;
9681 case LT_EXPR:
9682 if (integer_onep (arg1))
9683 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9684 else
9685 code = unsignedp ? LTU : LT;
9686 break;
9687 case LE_EXPR:
9688 if (! unsignedp && integer_all_onesp (arg1))
9689 arg1 = integer_zero_node, code = LT;
9690 else
9691 code = unsignedp ? LEU : LE;
9692 break;
9693 case GT_EXPR:
9694 if (! unsignedp && integer_all_onesp (arg1))
9695 arg1 = integer_zero_node, code = GE;
9696 else
9697 code = unsignedp ? GTU : GT;
9698 break;
9699 case GE_EXPR:
9700 if (integer_onep (arg1))
9701 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9702 else
9703 code = unsignedp ? GEU : GE;
9704 break;
9706 case UNORDERED_EXPR:
9707 code = UNORDERED;
9708 break;
9709 case ORDERED_EXPR:
9710 code = ORDERED;
9711 break;
9712 case UNLT_EXPR:
9713 code = UNLT;
9714 break;
9715 case UNLE_EXPR:
9716 code = UNLE;
9717 break;
9718 case UNGT_EXPR:
9719 code = UNGT;
9720 break;
9721 case UNGE_EXPR:
9722 code = UNGE;
9723 break;
9724 case UNEQ_EXPR:
9725 code = UNEQ;
9726 break;
9727 case LTGT_EXPR:
9728 code = LTGT;
9729 break;
9731 default:
9732 gcc_unreachable ();
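/* Examples of the conversions performed above:
     x < 1    becomes  x <= 0      x >= 1  becomes  x > 0
     x <= -1  becomes  x < 0       x > -1  becomes  x >= 0
   where the -1 forms apply only to signed comparisons.  */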
9735 /* Put a constant second. */
9736 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
9737 || TREE_CODE (arg0) == FIXED_CST)
9739 tem = arg0; arg0 = arg1; arg1 = tem;
9740 code = swap_condition (code);
9743 /* If this is an equality or inequality test of a single bit, we can
9744 do this by shifting the bit being tested to the low-order bit and
9745 masking the result with the constant 1. If the condition was EQ,
9746 we xor it with 1. This does not require an scc insn and is faster
9747 than an scc insn even if we have it.
9749 The code to make this transformation was moved into fold_single_bit_test,
9750 so we just call into the folder and expand its result. */
9752 if ((code == NE || code == EQ)
9753 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9754 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9756 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9757 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9758 arg0, arg1, type),
9759 target, VOIDmode, EXPAND_NORMAL);
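/* E.g. "(x & 8) != 0" folds to roughly "(x >> 3) & 1", and the EQ
   form "(x & 8) == 0" to "((x >> 3) & 1) ^ 1".  */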
9762 /* Now see if we are likely to be able to do this. Return if not. */
9763 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9764 return 0;
9766 icode = setcc_gen_code[(int) code];
9768 if (icode == CODE_FOR_nothing)
9770 enum machine_mode wmode;
9772 for (wmode = operand_mode;
9773 icode == CODE_FOR_nothing && wmode != VOIDmode;
9774 wmode = GET_MODE_WIDER_MODE (wmode))
9775 icode = optab_handler (cstore_optab, wmode)->insn_code;
9778 if (icode == CODE_FOR_nothing
9779 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9781 /* We can only do this if it is one of the special cases that
9782 can be handled without an scc insn. */
9783 if ((code == LT && integer_zerop (arg1))
9784 || (! only_cheap && code == GE && integer_zerop (arg1)))
9786 else if (! only_cheap && (code == NE || code == EQ)
9787 && TREE_CODE (type) != REAL_TYPE
9788 && ((optab_handler (abs_optab, operand_mode)->insn_code
9789 != CODE_FOR_nothing)
9790 || (optab_handler (ffs_optab, operand_mode)->insn_code
9791 != CODE_FOR_nothing)))
9793 else
9794 return 0;
9797 if (! get_subtarget (target)
9798 || GET_MODE (subtarget) != operand_mode)
9799 subtarget = 0;
9801 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9803 if (target == 0)
9804 target = gen_reg_rtx (mode);
9806 result = emit_store_flag (target, code, op0, op1,
9807 operand_mode, unsignedp, 1);
9809 if (result)
9811 if (invert)
9812 result = expand_binop (mode, xor_optab, result, const1_rtx,
9813 result, 0, OPTAB_LIB_WIDEN);
9814 return result;
9817 /* If this failed, we have to do this with set/compare/jump/set code. */
9818 if (!REG_P (target)
9819 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9820 target = gen_reg_rtx (GET_MODE (target));
9822 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9823 label = gen_label_rtx ();
9824 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9825 NULL_RTX, label);
9827 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9828 emit_label (label);
9830 return target;
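/* Shape of the set/compare/jump/set fallback above, for the
   non-inverted case (a sketch of the control flow, not literal RTL):
       target <- 1
       if (op0 <code> op1) goto label;
       target <- 0
     label:
   with the two constants swapped when INVERT is set.  */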
9834 /* Stubs in case we haven't got a casesi insn. */
9835 #ifndef HAVE_casesi
9836 # define HAVE_casesi 0
9837 # define gen_casesi(a, b, c, d, e) (0)
9838 # define CODE_FOR_casesi CODE_FOR_nothing
9839 #endif
9841 /* If the machine does not have a case insn that compares the bounds,
9842 this means extra overhead for dispatch tables, which raises the
9843 threshold for using them. */
9844 #ifndef CASE_VALUES_THRESHOLD
9845 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9846 #endif /* CASE_VALUES_THRESHOLD */
9848 unsigned int
9849 case_values_threshold (void)
9851 return CASE_VALUES_THRESHOLD;
9854 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9855 0 otherwise (i.e. if there is no casesi instruction). */
9856 int
9857 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9858 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9860 enum machine_mode index_mode = SImode;
9861 int index_bits = GET_MODE_BITSIZE (index_mode);
9862 rtx op1, op2, index;
9863 enum machine_mode op_mode;
9865 if (! HAVE_casesi)
9866 return 0;
9868 /* Convert the index to SImode. */
9869 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9871 enum machine_mode omode = TYPE_MODE (index_type);
9872 rtx rangertx = expand_normal (range);
9874 /* We must handle the endpoints in the original mode. */
9875 index_expr = build2 (MINUS_EXPR, index_type,
9876 index_expr, minval);
9877 minval = integer_zero_node;
9878 index = expand_normal (index_expr);
9879 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9880 omode, 1, default_label);
9881 /* Now we can safely truncate. */
9882 index = convert_to_mode (index_mode, index, 0);
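/* Illustrative case, assuming a 64-bit index type and an SImode
   casesi: for "switch (x)" with cases 1000..1003, the code above
   computes index = x - 1000 in the original DImode, branches to
   DEFAULT_LABEL when (unsigned) index > 3, and only then truncates
   index to SImode.  */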
9884 else
9886 if (TYPE_MODE (index_type) != index_mode)
9888 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9889 index_expr = fold_convert (index_type, index_expr);
9892 index = expand_normal (index_expr);
9895 do_pending_stack_adjust ();
9897 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9898 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9899 (index, op_mode))
9900 index = copy_to_mode_reg (op_mode, index);
9902 op1 = expand_normal (minval);
9904 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9905 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9906 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9907 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9908 (op1, op_mode))
9909 op1 = copy_to_mode_reg (op_mode, op1);
9911 op2 = expand_normal (range);
9913 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9914 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9915 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9916 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9917 (op2, op_mode))
9918 op2 = copy_to_mode_reg (op_mode, op2);
9920 emit_jump_insn (gen_casesi (index, op1, op2,
9921 table_label, default_label));
9922 return 1;
9925 /* Attempt to generate a tablejump instruction; same concept. */
9926 #ifndef HAVE_tablejump
9927 #define HAVE_tablejump 0
9928 #define gen_tablejump(x, y) (0)
9929 #endif
9931 /* Subroutine of the next function.
9933 INDEX is the value being switched on, with the lowest value
9934 in the table already subtracted.
9935 MODE is its expected mode (needed if INDEX is constant).
9936 RANGE is the length of the jump table.
9937 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9939 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9940 index value is out of range. */
9942 static void
9943 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9944 rtx default_label)
9946 rtx temp, vector;
9948 if (INTVAL (range) > cfun->max_jumptable_ents)
9949 cfun->max_jumptable_ents = INTVAL (range);
9951 /* Do an unsigned comparison (in the proper mode) between the index
9952 expression and the value which represents the length of the range.
9953 Since we just finished subtracting the lower bound of the range
9954 from the index expression, this comparison allows us to simultaneously
9955 check that the original index expression value is both greater than
9956 or equal to the minimum value of the range and less than or equal to
9957 the maximum value of the range. */
9959 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9960 default_label);
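/* Worked example: for case values 5..10, RANGE is 5 and INDEX is
   the original value minus 5.  An original value of 3 yields
   (unsigned) -2, a huge number, so this single GTU test also
   rejects values below the minimum.  */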
9962 /* If index is in range, it must fit in Pmode.
9963 Convert to Pmode so we can index with it. */
9964 if (mode != Pmode)
9965 index = convert_to_mode (Pmode, index, 1);
9967 /* Don't let a MEM slip through, because then INDEX that comes
9968 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9969 and break_out_memory_refs will go to work on it and mess it up. */
9970 #ifdef PIC_CASE_VECTOR_ADDRESS
9971 if (flag_pic && !REG_P (index))
9972 index = copy_to_mode_reg (Pmode, index);
9973 #endif
9975 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9976 GET_MODE_SIZE, because this indicates how large insns are. The other
9977 uses should all be Pmode, because they are addresses. This code
9978 could fail if addresses and insns are not the same size. */
9979 index = gen_rtx_PLUS (Pmode,
9980 gen_rtx_MULT (Pmode, index,
9981 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9982 gen_rtx_LABEL_REF (Pmode, table_label));
9983 #ifdef PIC_CASE_VECTOR_ADDRESS
9984 if (flag_pic)
9985 index = PIC_CASE_VECTOR_ADDRESS (index);
9986 else
9987 #endif
9988 index = memory_address (CASE_VECTOR_MODE, index);
9989 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9990 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9991 convert_move (temp, vector, 0);
9993 emit_jump_insn (gen_tablejump (temp, table_label));
9995 /* If we are generating PIC code or if the table is PC-relative, the
9996 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9997 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9998 emit_barrier ();
10001 int
10002 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10003 rtx table_label, rtx default_label)
10005 rtx index;
10007 if (! HAVE_tablejump)
10008 return 0;
10010 index_expr = fold_build2 (MINUS_EXPR, index_type,
10011 fold_convert (index_type, index_expr),
10012 fold_convert (index_type, minval));
10013 index = expand_normal (index_expr);
10014 do_pending_stack_adjust ();
10016 do_tablejump (index, TYPE_MODE (index_type),
10017 convert_modes (TYPE_MODE (index_type),
10018 TYPE_MODE (TREE_TYPE (range)),
10019 expand_normal (range),
10020 TYPE_UNSIGNED (TREE_TYPE (range))),
10021 table_label, default_label);
10022 return 1;
10025 /* Nonzero if the mode is a valid vector mode for this architecture.
10026 This returns nonzero even if there is no hardware support for the
10027 vector mode, but we can emulate with narrower modes. */
10029 int
10030 vector_mode_valid_p (enum machine_mode mode)
10032 enum mode_class class = GET_MODE_CLASS (mode);
10033 enum machine_mode innermode;
10035 /* Reject anything that is not a vector mode class. */
10036 if (class != MODE_VECTOR_INT
10037 && class != MODE_VECTOR_FLOAT
10038 && class != MODE_VECTOR_FRACT
10039 && class != MODE_VECTOR_UFRACT
10040 && class != MODE_VECTOR_ACCUM
10041 && class != MODE_VECTOR_UACCUM)
10042 return 0;
10044 /* If the target supports the mode in hardware, we are done. */
10045 if (targetm.vector_mode_supported_p (mode))
10046 return 1;
10048 innermode = GET_MODE_INNER (mode);
10050 /* We should probably return 1 if requesting V4DI and we have no DI,
10051 but do have V2DI; that case is probably very unlikely, though. */
10053 /* If we have support for the inner mode, we can safely emulate it.
10054 We may not have V2DI, but we can emulate with a pair of DIs. */
10055 return targetm.scalar_mode_supported_p (innermode);
10058 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10059 static rtx
10060 const_vector_from_tree (tree exp)
10062 rtvec v;
10063 int units, i;
10064 tree link, elt;
10065 enum machine_mode inner, mode;
10067 mode = TYPE_MODE (TREE_TYPE (exp));
10069 if (initializer_zerop (exp))
10070 return CONST0_RTX (mode);
10072 units = GET_MODE_NUNITS (mode);
10073 inner = GET_MODE_INNER (mode);
10075 v = rtvec_alloc (units);
10077 link = TREE_VECTOR_CST_ELTS (exp);
10078 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10080 elt = TREE_VALUE (link);
10082 if (TREE_CODE (elt) == REAL_CST)
10083 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10084 inner);
10085 else if (TREE_CODE (elt) == FIXED_CST)
10086 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10087 inner);
10088 else
10089 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10090 TREE_INT_CST_HIGH (elt),
10091 inner);
10094 /* Initialize remaining elements to 0. */
10095 for (; i < units; ++i)
10096 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10098 return gen_rtx_CONST_VECTOR (mode, v);
10100 #include "gt-expr.h"