re PR middle-end/33617 (ICE for nonconstant callee-copied constructor arguments)
gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
55 #include "df.h"
57 /* Decide whether a function's arguments should be processed
58 from first to last or from last to first.
60 They should if the stack and args grow in opposite directions, but
61 only if we have push insns. */
63 #ifdef PUSH_ROUNDING
65 #ifndef PUSH_ARGS_REVERSED
66 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
67 #define PUSH_ARGS_REVERSED /* If it's last to first. */
68 #endif
69 #endif
71 #endif
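/* Illustrative note (not part of the original comments): on a target with
   push insns (PUSH_ROUNDING defined) where STACK_GROWS_DOWNWARD is defined
   but ARGS_GROW_DOWNWARD is not, the two defined() tests above differ, so
   PUSH_ARGS_REVERSED is defined and arguments are processed last to first;
   if the stack and the arguments grow the same way, they are processed
   first to last.  */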
73 #ifndef STACK_PUSH_CODE
74 #ifdef STACK_GROWS_DOWNWARD
75 #define STACK_PUSH_CODE PRE_DEC
76 #else
77 #define STACK_PUSH_CODE PRE_INC
78 #endif
79 #endif
82 /* If this is nonzero, we do not bother generating VOLATILE
83 around volatile memory references, and we are willing to
84 output indirect addresses. If cse is to follow, we reject
85 indirect addresses so a useful potential cse is generated;
86 if it is used only once, instruction combination will produce
87 the same indirect address eventually. */
88 int cse_not_expected;
90 /* This structure is used by move_by_pieces to describe the move to
91 be performed. */
92 struct move_by_pieces
94 rtx to;
95 rtx to_addr;
96 int autinc_to;
97 int explicit_inc_to;
98 rtx from;
99 rtx from_addr;
100 int autinc_from;
101 int explicit_inc_from;
102 unsigned HOST_WIDE_INT len;
103 HOST_WIDE_INT offset;
104 int reverse;
107 /* This structure is used by store_by_pieces to describe the clear to
108 be performed. */
110 struct store_by_pieces
112 rtx to;
113 rtx to_addr;
114 int autinc_to;
115 int explicit_inc_to;
116 unsigned HOST_WIDE_INT len;
117 HOST_WIDE_INT offset;
118 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
119 void *constfundata;
120 int reverse;
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124 unsigned int,
125 unsigned int);
126 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *);
128 static bool block_move_libcall_safe_for_call_parm (void);
129 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static tree clear_storage_libcall_fn (int);
138 static rtx compress_float_constant (rtx, rtx);
139 static rtx get_subtarget (rtx);
140 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
141 HOST_WIDE_INT, enum machine_mode,
142 tree, tree, int, alias_set_type);
143 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
144 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
145 tree, tree, alias_set_type, bool);
147 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
149 static int is_aligning_offset (const_tree, const_tree);
150 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
151 enum expand_modifier);
152 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
153 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
154 #ifdef PUSH_ROUNDING
155 static void emit_single_push_insn (enum machine_mode, rtx, tree);
156 #endif
157 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
158 static rtx const_vector_from_tree (tree);
159 static void write_complex_part (rtx, rtx, bool);
161 /* Record for each mode whether we can move a register directly to or
162 from an object of that mode in memory. If we can't, we won't try
163 to use that mode directly when accessing a field of that mode. */
165 static char direct_load[NUM_MACHINE_MODES];
166 static char direct_store[NUM_MACHINE_MODES];
168 /* Record for each mode whether we can float-extend from memory. */
170 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
172 /* This macro is used to determine whether move_by_pieces should be called
173 to perform a structure copy. */
174 #ifndef MOVE_BY_PIECES_P
175 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
176 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
177 < (unsigned int) MOVE_RATIO)
178 #endif
180 /* This macro is used to determine whether clear_by_pieces should be
181 called to clear storage. */
182 #ifndef CLEAR_BY_PIECES_P
183 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
185 < (unsigned int) CLEAR_RATIO)
186 #endif
188 /* This macro is used to determine whether store_by_pieces should be
189 called to "memset" storage with byte values other than zero. */
190 #ifndef SET_BY_PIECES_P
191 #define SET_BY_PIECES_P(SIZE, ALIGN) \
192 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
193 < (unsigned int) SET_RATIO)
194 #endif
196 /* This macro is used to determine whether store_by_pieces should be
197 called to "memcpy" storage when the source is a constant string. */
198 #ifndef STORE_BY_PIECES_P
199 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
200 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
201 < (unsigned int) MOVE_RATIO)
202 #endif
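/* A worked example of the heuristic above (purely illustrative; the actual
   values are target-dependent): if MOVE_RATIO is 4 and a 16-byte,
   well-aligned copy decomposes into two 8-byte moves on a 64-bit target,
   move_by_pieces_ninsns returns 2, which is less than 4, so
   MOVE_BY_PIECES_P is true and the copy is expanded inline instead of
   going through a movmem pattern or a memcpy libcall.  */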
204 /* This array records the insn_code of insns to perform block moves. */
205 enum insn_code movmem_optab[NUM_MACHINE_MODES];
207 /* This array records the insn_code of insns to perform block sets. */
208 enum insn_code setmem_optab[NUM_MACHINE_MODES];
210 /* These arrays record the insn_code of three different kinds of insns
211 to perform block compares. */
212 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
213 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
214 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
216 /* Synchronization primitives. */
217 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
229 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
230 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
231 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
232 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
233 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
234 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
235 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
236 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
237 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
238 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
240 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
242 #ifndef SLOW_UNALIGNED_ACCESS
243 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
244 #endif
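/* Note (not in the original comment): with this default, unaligned accesses
   are treated as slow exactly when STRICT_ALIGNMENT is nonzero.  The
   by-pieces routines below use this to cap the alignment they assume,
   which in turn restricts the widest mode used for piecewise copies and
   stores.  */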
246 /* This is run to set up which modes can be used
247 directly in memory and to initialize the block move optab. It is run
248 at the beginning of compilation and when the target is reinitialized. */
250 void
251 init_expr_target (void)
253 rtx insn, pat;
254 enum machine_mode mode;
255 int num_clobbers;
256 rtx mem, mem1;
257 rtx reg;
259 /* Try indexing by frame ptr and try by stack ptr.
260 It is known that on the Convex the stack ptr isn't a valid index.
261 With luck, one or the other is valid on any machine. */
262 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
263 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
265 /* A scratch register we can modify in-place below to avoid
266 useless RTL allocations. */
267 reg = gen_rtx_REG (VOIDmode, -1);
269 insn = rtx_alloc (INSN);
270 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
271 PATTERN (insn) = pat;
273 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
274 mode = (enum machine_mode) ((int) mode + 1))
276 int regno;
278 direct_load[(int) mode] = direct_store[(int) mode] = 0;
279 PUT_MODE (mem, mode);
280 PUT_MODE (mem1, mode);
281 PUT_MODE (reg, mode);
283 /* See if there is some register that can be used in this mode and
284 directly loaded or stored from memory. */
286 if (mode != VOIDmode && mode != BLKmode)
287 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
288 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
289 regno++)
291 if (! HARD_REGNO_MODE_OK (regno, mode))
292 continue;
294 SET_REGNO (reg, regno);
296 SET_SRC (pat) = mem;
297 SET_DEST (pat) = reg;
298 if (recog (pat, insn, &num_clobbers) >= 0)
299 direct_load[(int) mode] = 1;
301 SET_SRC (pat) = mem1;
302 SET_DEST (pat) = reg;
303 if (recog (pat, insn, &num_clobbers) >= 0)
304 direct_load[(int) mode] = 1;
306 SET_SRC (pat) = reg;
307 SET_DEST (pat) = mem;
308 if (recog (pat, insn, &num_clobbers) >= 0)
309 direct_store[(int) mode] = 1;
311 SET_SRC (pat) = reg;
312 SET_DEST (pat) = mem1;
313 if (recog (pat, insn, &num_clobbers) >= 0)
314 direct_store[(int) mode] = 1;
318 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
320 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
321 mode = GET_MODE_WIDER_MODE (mode))
323 enum machine_mode srcmode;
324 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
325 srcmode = GET_MODE_WIDER_MODE (srcmode))
327 enum insn_code ic;
329 ic = can_extend_p (mode, srcmode, 0);
330 if (ic == CODE_FOR_nothing)
331 continue;
333 PUT_MODE (mem, srcmode);
335 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
336 float_extend_from_mem[mode][srcmode] = true;
341 /* This is run at the start of compiling a function. */
343 void
344 init_expr (void)
346 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
349 /* Copy data from FROM to TO, where the machine modes are not the same.
350 Both modes may be integer, or both may be floating, or both may be
351 fixed-point.
352 UNSIGNEDP should be nonzero if FROM is an unsigned type.
353 This causes zero-extension instead of sign-extension. */
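/* Illustrative example (not from the original comment): converting a QImode
   register into an SImode register with UNSIGNEDP nonzero typically emits a
   zero_extend, while UNSIGNEDP == 0 emits a sign_extend (or an equivalent
   sequence when no direct extension insn exists); if TO and FROM already
   have the same mode the value is simply moved.  */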
355 void
356 convert_move (rtx to, rtx from, int unsignedp)
358 enum machine_mode to_mode = GET_MODE (to);
359 enum machine_mode from_mode = GET_MODE (from);
360 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
361 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
362 enum insn_code code;
363 rtx libcall;
365 /* rtx code for making an equivalent value. */
366 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
367 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
370 gcc_assert (to_real == from_real);
371 gcc_assert (to_mode != BLKmode);
372 gcc_assert (from_mode != BLKmode);
374 /* If the source and destination are already the same, then there's
375 nothing to do. */
376 if (to == from)
377 return;
379 /* If FROM is a SUBREG that indicates that we have already done at least
380 the required extension, strip it. We don't handle such SUBREGs as
381 TO here. */
383 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
384 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
385 >= GET_MODE_SIZE (to_mode))
386 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
387 from = gen_lowpart (to_mode, from), from_mode = to_mode;
389 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
391 if (to_mode == from_mode
392 || (from_mode == VOIDmode && CONSTANT_P (from)))
394 emit_move_insn (to, from);
395 return;
398 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
400 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
402 if (VECTOR_MODE_P (to_mode))
403 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
404 else
405 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
407 emit_move_insn (to, from);
408 return;
411 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
413 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
414 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
415 return;
418 if (to_real)
420 rtx value, insns;
421 convert_optab tab;
423 gcc_assert ((GET_MODE_PRECISION (from_mode)
424 != GET_MODE_PRECISION (to_mode))
425 || (DECIMAL_FLOAT_MODE_P (from_mode)
426 != DECIMAL_FLOAT_MODE_P (to_mode)));
428 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
429 /* Conversion between decimal float and binary float, same size. */
430 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
431 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
432 tab = sext_optab;
433 else
434 tab = trunc_optab;
436 /* Try converting directly if the insn is supported. */
438 code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
439 if (code != CODE_FOR_nothing)
441 emit_unop_insn (code, to, from,
442 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
443 return;
446 /* Otherwise use a libcall. */
447 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
449 /* Is this conversion implemented yet? */
450 gcc_assert (libcall);
452 start_sequence ();
453 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
454 1, from, from_mode);
455 insns = get_insns ();
456 end_sequence ();
457 emit_libcall_block (insns, to, value,
458 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
459 from)
460 : gen_rtx_FLOAT_EXTEND (to_mode, from));
461 return;
464 /* Handle pointer conversion. */ /* SPEE 900220. */
465 /* Targets are expected to provide conversion insns between PxImode and
466 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
467 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
469 enum machine_mode full_mode
470 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
472 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
473 != CODE_FOR_nothing);
475 if (full_mode != from_mode)
476 from = convert_to_mode (full_mode, from, unsignedp);
477 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
478 to, from, UNKNOWN);
479 return;
481 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
483 rtx new_from;
484 enum machine_mode full_mode
485 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
487 gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
488 != CODE_FOR_nothing);
490 if (to_mode == full_mode)
492 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
493 to, from, UNKNOWN);
494 return;
497 new_from = gen_reg_rtx (full_mode);
498 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
499 new_from, from, UNKNOWN);
501 /* else proceed to integer conversions below. */
502 from_mode = full_mode;
503 from = new_from;
506 /* Make sure both are fixed-point modes or both are not. */
507 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
508 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
509 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
511 /* If we widen from_mode to to_mode and they are in the same class,
512 we won't saturate the result.
513 Otherwise, always saturate the result to play safe. */
514 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
515 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
516 expand_fixed_convert (to, from, 0, 0);
517 else
518 expand_fixed_convert (to, from, 0, 1);
519 return;
522 /* Now both modes are integers. */
524 /* Handle expanding beyond a word. */
525 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
526 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
528 rtx insns;
529 rtx lowpart;
530 rtx fill_value;
531 rtx lowfrom;
532 int i;
533 enum machine_mode lowpart_mode;
534 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
536 /* Try converting directly if the insn is supported. */
537 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
538 != CODE_FOR_nothing)
540 /* If FROM is a SUBREG, put it into a register. Do this
541 so that we always generate the same set of insns for
542 better cse'ing; if an intermediate assignment occurred,
543 we won't be doing the operation directly on the SUBREG. */
544 if (optimize > 0 && GET_CODE (from) == SUBREG)
545 from = force_reg (from_mode, from);
546 emit_unop_insn (code, to, from, equiv_code);
547 return;
549 /* Next, try converting via full word. */
550 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
551 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
552 != CODE_FOR_nothing))
554 if (REG_P (to))
556 if (reg_overlap_mentioned_p (to, from))
557 from = force_reg (from_mode, from);
558 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
560 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
561 emit_unop_insn (code, to,
562 gen_lowpart (word_mode, to), equiv_code);
563 return;
566 /* No special multiword conversion insn; do it by hand. */
567 start_sequence ();
569 /* Since we will turn this into a no conflict block, we must ensure
570 that the source does not overlap the target. */
572 if (reg_overlap_mentioned_p (to, from))
573 from = force_reg (from_mode, from);
575 /* Get a copy of FROM widened to a word, if necessary. */
576 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
577 lowpart_mode = word_mode;
578 else
579 lowpart_mode = from_mode;
581 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
583 lowpart = gen_lowpart (lowpart_mode, to);
584 emit_move_insn (lowpart, lowfrom);
586 /* Compute the value to put in each remaining word. */
587 if (unsignedp)
588 fill_value = const0_rtx;
589 else
591 #ifdef HAVE_slt
592 if (HAVE_slt
593 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
594 && STORE_FLAG_VALUE == -1)
596 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
597 lowpart_mode, 0);
598 fill_value = gen_reg_rtx (word_mode);
599 emit_insn (gen_slt (fill_value));
601 else
602 #endif
604 fill_value
605 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
606 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
607 NULL_RTX, 0);
608 fill_value = convert_to_mode (word_mode, fill_value, 1);
612 /* Fill the remaining words. */
613 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
615 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
616 rtx subword = operand_subword (to, index, 1, to_mode);
618 gcc_assert (subword);
620 if (fill_value != subword)
621 emit_move_insn (subword, fill_value);
624 insns = get_insns ();
625 end_sequence ();
627 emit_no_conflict_block (insns, to, from, NULL_RTX,
628 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
629 return;
632 /* Truncating multi-word to a word or less. */
633 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
634 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
636 if (!((MEM_P (from)
637 && ! MEM_VOLATILE_P (from)
638 && direct_load[(int) to_mode]
639 && ! mode_dependent_address_p (XEXP (from, 0)))
640 || REG_P (from)
641 || GET_CODE (from) == SUBREG))
642 from = force_reg (from_mode, from);
643 convert_move (to, gen_lowpart (word_mode, from), 0);
644 return;
647 /* Now follow all the conversions between integers
648 no more than a word long. */
650 /* For truncation, usually we can just refer to FROM in a narrower mode. */
651 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
652 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
653 GET_MODE_BITSIZE (from_mode)))
655 if (!((MEM_P (from)
656 && ! MEM_VOLATILE_P (from)
657 && direct_load[(int) to_mode]
658 && ! mode_dependent_address_p (XEXP (from, 0)))
659 || REG_P (from)
660 || GET_CODE (from) == SUBREG))
661 from = force_reg (from_mode, from);
662 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
663 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
664 from = copy_to_reg (from);
665 emit_move_insn (to, gen_lowpart (to_mode, from));
666 return;
669 /* Handle extension. */
670 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
672 /* Convert directly if that works. */
673 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
674 != CODE_FOR_nothing)
676 emit_unop_insn (code, to, from, equiv_code);
677 return;
679 else
681 enum machine_mode intermediate;
682 rtx tmp;
683 tree shift_amount;
685 /* Search for a mode to convert via. */
686 for (intermediate = from_mode; intermediate != VOIDmode;
687 intermediate = GET_MODE_WIDER_MODE (intermediate))
688 if (((can_extend_p (to_mode, intermediate, unsignedp)
689 != CODE_FOR_nothing)
690 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
691 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
692 GET_MODE_BITSIZE (intermediate))))
693 && (can_extend_p (intermediate, from_mode, unsignedp)
694 != CODE_FOR_nothing))
696 convert_move (to, convert_to_mode (intermediate, from,
697 unsignedp), unsignedp);
698 return;
701 /* No suitable intermediate mode.
702 Generate what we need with shifts. */
703 shift_amount = build_int_cst (NULL_TREE,
704 GET_MODE_BITSIZE (to_mode)
705 - GET_MODE_BITSIZE (from_mode));
706 from = gen_lowpart (to_mode, force_reg (from_mode, from));
707 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
708 to, unsignedp);
709 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
710 to, unsignedp);
711 if (tmp != to)
712 emit_move_insn (to, tmp);
713 return;
717 /* Support special truncate insns for certain modes. */
718 if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
720 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
721 to, from, UNKNOWN);
722 return;
725 /* Handle truncation of volatile memrefs, and so on;
726 the things that couldn't be truncated directly,
727 and for which there was no special instruction.
729 ??? Code above formerly short-circuited this, for most integer
730 mode pairs, with a force_reg in from_mode followed by a recursive
731 call to this routine. Appears always to have been wrong. */
732 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
734 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
735 emit_move_insn (to, temp);
736 return;
739 /* Mode combination is not recognized. */
740 gcc_unreachable ();
743 /* Return an rtx for a value that would result
744 from converting X to mode MODE.
745 Both X and MODE may be floating, or both integer.
746 UNSIGNEDP is nonzero if X is an unsigned value.
747 This can be done by referring to a part of X in place
748 or by copying to a new temporary with conversion. */
751 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
753 return convert_modes (mode, VOIDmode, x, unsignedp);
756 /* Return an rtx for a value that would result
757 from converting X from mode OLDMODE to mode MODE.
758 Both modes may be floating, or both integer.
759 UNSIGNEDP is nonzero if X is an unsigned value.
761 This can be done by referring to a part of X in place
762 or by copying to a new temporary with conversion.
764 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
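/* A minimal usage sketch (hypothetical operands, for illustration):

     rtx wide = convert_modes (SImode, QImode, x, 1);

   returns an SImode rtx holding X zero-extended from QImode; when X is a
   CONST_INT, OLDMODE tells the routine how wide the constant really is.  */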
767 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
769 rtx temp;
771 /* If FROM is a SUBREG that indicates that we have already done at least
772 the required extension, strip it. */
774 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
775 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
776 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
777 x = gen_lowpart (mode, x);
779 if (GET_MODE (x) != VOIDmode)
780 oldmode = GET_MODE (x);
782 if (mode == oldmode)
783 return x;
785 /* There is one case that we must handle specially: If we are converting
786 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
787 we are to interpret the constant as unsigned, gen_lowpart will do
 788 the wrong thing if the constant appears negative. What we want to do is
789 make the high-order word of the constant zero, not all ones. */
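/* Worked instance of the case described above (hypothetical sizes, for
   illustration): with a 32-bit HOST_WIDE_INT, converting (const_int -1)
   taken as an unsigned SImode value into a 64-bit mode must produce
   0x00000000ffffffff, i.e. a zero high-order word and an all-ones low
   word, rather than the all-ones value gen_lowpart would give.  */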
791 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
792 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
793 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
795 HOST_WIDE_INT val = INTVAL (x);
797 if (oldmode != VOIDmode
798 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
800 int width = GET_MODE_BITSIZE (oldmode);
802 /* We need to zero extend VAL. */
803 val &= ((HOST_WIDE_INT) 1 << width) - 1;
806 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
809 /* We can do this with a gen_lowpart if both desired and current modes
810 are integer, and this is either a constant integer, a register, or a
811 non-volatile MEM. Except for the constant case where MODE is no
812 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
814 if ((GET_CODE (x) == CONST_INT
815 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
816 || (GET_MODE_CLASS (mode) == MODE_INT
817 && GET_MODE_CLASS (oldmode) == MODE_INT
818 && (GET_CODE (x) == CONST_DOUBLE
819 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
820 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
821 && direct_load[(int) mode])
822 || (REG_P (x)
823 && (! HARD_REGISTER_P (x)
824 || HARD_REGNO_MODE_OK (REGNO (x), mode))
825 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
826 GET_MODE_BITSIZE (GET_MODE (x)))))))))
 828 /* ??? If we don't know OLDMODE, we have to assume here that
829 X does not need sign- or zero-extension. This may not be
830 the case, but it's the best we can do. */
831 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
832 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
834 HOST_WIDE_INT val = INTVAL (x);
835 int width = GET_MODE_BITSIZE (oldmode);
837 /* We must sign or zero-extend in this case. Start by
838 zero-extending, then sign extend if we need to. */
839 val &= ((HOST_WIDE_INT) 1 << width) - 1;
840 if (! unsignedp
841 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
842 val |= (HOST_WIDE_INT) (-1) << width;
844 return gen_int_mode (val, mode);
847 return gen_lowpart (mode, x);
 850 /* Converting an integer constant into MODE is always equivalent to a
851 subreg operation. */
852 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
854 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
855 return simplify_gen_subreg (mode, x, oldmode, 0);
858 temp = gen_reg_rtx (mode);
859 convert_move (temp, x, unsignedp);
860 return temp;
863 /* STORE_MAX_PIECES is the number of bytes at a time that we can
864 store efficiently. Due to internal GCC limitations, this is
865 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
866 for an immediate constant. */
868 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
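/* For example (host-dependent, not part of the original comment): with a
   64-bit HOST_WIDE_INT, 2 * sizeof (HOST_WIDE_INT) is 16, so
   STORE_MAX_PIECES is MOVE_MAX_PIECES capped at 16 bytes per piece.  */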
870 /* Determine whether the LEN bytes can be moved by using several move
871 instructions. Return nonzero if a call to move_by_pieces should
872 succeed. */
875 can_move_by_pieces (unsigned HOST_WIDE_INT len,
876 unsigned int align ATTRIBUTE_UNUSED)
878 return MOVE_BY_PIECES_P (len, align);
881 /* Generate several move instructions to copy LEN bytes from block FROM to
882 block TO. (These are MEM rtx's with BLKmode).
884 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
885 used to push FROM to the stack.
887 ALIGN is maximum stack alignment we can assume.
889 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
890 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
891 stpcpy. */
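/* Illustrative example of the ENDP convention (not in the original
   comment): for a 16-byte copy into TO, ENDP == 0 returns the TO block
   itself, ENDP == 1 returns a QImode reference just past the last byte
   copied (mempcpy style), and ENDP == 2 returns a reference to the last
   byte copied (stpcpy style).  */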
894 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
895 unsigned int align, int endp)
897 struct move_by_pieces data;
898 rtx to_addr, from_addr = XEXP (from, 0);
899 unsigned int max_size = MOVE_MAX_PIECES + 1;
900 enum machine_mode mode = VOIDmode, tmode;
901 enum insn_code icode;
903 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
905 data.offset = 0;
906 data.from_addr = from_addr;
907 if (to)
909 to_addr = XEXP (to, 0);
910 data.to = to;
911 data.autinc_to
912 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
913 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
914 data.reverse
915 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
917 else
919 to_addr = NULL_RTX;
920 data.to = NULL_RTX;
921 data.autinc_to = 1;
922 #ifdef STACK_GROWS_DOWNWARD
923 data.reverse = 1;
924 #else
925 data.reverse = 0;
926 #endif
928 data.to_addr = to_addr;
929 data.from = from;
930 data.autinc_from
931 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
932 || GET_CODE (from_addr) == POST_INC
933 || GET_CODE (from_addr) == POST_DEC);
935 data.explicit_inc_from = 0;
936 data.explicit_inc_to = 0;
937 if (data.reverse) data.offset = len;
938 data.len = len;
940 /* If copying requires more than two move insns,
941 copy addresses to registers (to make displacements shorter)
942 and use post-increment if available. */
943 if (!(data.autinc_from && data.autinc_to)
944 && move_by_pieces_ninsns (len, align, max_size) > 2)
946 /* Find the mode of the largest move... */
947 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
948 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
949 if (GET_MODE_SIZE (tmode) < max_size)
950 mode = tmode;
952 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
954 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
955 data.autinc_from = 1;
956 data.explicit_inc_from = -1;
958 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
960 data.from_addr = copy_addr_to_reg (from_addr);
961 data.autinc_from = 1;
962 data.explicit_inc_from = 1;
964 if (!data.autinc_from && CONSTANT_P (from_addr))
965 data.from_addr = copy_addr_to_reg (from_addr);
966 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
968 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
969 data.autinc_to = 1;
970 data.explicit_inc_to = -1;
972 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
974 data.to_addr = copy_addr_to_reg (to_addr);
975 data.autinc_to = 1;
976 data.explicit_inc_to = 1;
978 if (!data.autinc_to && CONSTANT_P (to_addr))
979 data.to_addr = copy_addr_to_reg (to_addr);
982 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
983 if (align >= GET_MODE_ALIGNMENT (tmode))
984 align = GET_MODE_ALIGNMENT (tmode);
985 else
987 enum machine_mode xmode;
989 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
990 tmode != VOIDmode;
991 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
992 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
993 || SLOW_UNALIGNED_ACCESS (tmode, align))
994 break;
996 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
999 /* First move what we can in the largest integer mode, then go to
1000 successively smaller modes. */
1002 while (max_size > 1)
1004 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1005 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1006 if (GET_MODE_SIZE (tmode) < max_size)
1007 mode = tmode;
1009 if (mode == VOIDmode)
1010 break;
1012 icode = optab_handler (mov_optab, mode)->insn_code;
1013 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1014 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1016 max_size = GET_MODE_SIZE (mode);
1019 /* The code above should have handled everything. */
1020 gcc_assert (!data.len);
1022 if (endp)
1024 rtx to1;
1026 gcc_assert (!data.reverse);
1027 if (data.autinc_to)
1029 if (endp == 2)
1031 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1032 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1033 else
1034 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1035 -1));
1037 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1038 data.offset);
1040 else
1042 if (endp == 2)
1043 --data.offset;
1044 to1 = adjust_address (data.to, QImode, data.offset);
1046 return to1;
1048 else
1049 return data.to;
1052 /* Return number of insns required to move L bytes by pieces.
1053 ALIGN (in bits) is maximum alignment we can assume. */
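/* Worked example (assuming the target provides aligned QI/HI/SI moves;
   purely illustrative): for L == 7 with sufficient alignment, the greedy
   loop below counts one SImode move, one HImode move and one QImode move,
   so the function returns 3.  */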
1055 static unsigned HOST_WIDE_INT
1056 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1057 unsigned int max_size)
1059 unsigned HOST_WIDE_INT n_insns = 0;
1060 enum machine_mode tmode;
1062 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1063 if (align >= GET_MODE_ALIGNMENT (tmode))
1064 align = GET_MODE_ALIGNMENT (tmode);
1065 else
1067 enum machine_mode tmode, xmode;
1069 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1070 tmode != VOIDmode;
1071 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1072 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1073 || SLOW_UNALIGNED_ACCESS (tmode, align))
1074 break;
1076 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1079 while (max_size > 1)
1081 enum machine_mode mode = VOIDmode;
1082 enum insn_code icode;
1084 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1085 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1086 if (GET_MODE_SIZE (tmode) < max_size)
1087 mode = tmode;
1089 if (mode == VOIDmode)
1090 break;
1092 icode = optab_handler (mov_optab, mode)->insn_code;
1093 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1094 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1096 max_size = GET_MODE_SIZE (mode);
1099 gcc_assert (!l);
1100 return n_insns;
1103 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1104 with move instructions for mode MODE. GENFUN is the gen_... function
1105 to make a move insn for that mode. DATA has all the other info. */
1107 static void
1108 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1109 struct move_by_pieces *data)
1111 unsigned int size = GET_MODE_SIZE (mode);
1112 rtx to1 = NULL_RTX, from1;
1114 while (data->len >= size)
1116 if (data->reverse)
1117 data->offset -= size;
1119 if (data->to)
1121 if (data->autinc_to)
1122 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1123 data->offset);
1124 else
1125 to1 = adjust_address (data->to, mode, data->offset);
1128 if (data->autinc_from)
1129 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1130 data->offset);
1131 else
1132 from1 = adjust_address (data->from, mode, data->offset);
1134 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1135 emit_insn (gen_add2_insn (data->to_addr,
1136 GEN_INT (-(HOST_WIDE_INT)size)));
1137 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1138 emit_insn (gen_add2_insn (data->from_addr,
1139 GEN_INT (-(HOST_WIDE_INT)size)));
1141 if (data->to)
1142 emit_insn ((*genfun) (to1, from1));
1143 else
1145 #ifdef PUSH_ROUNDING
1146 emit_single_push_insn (mode, from1, NULL);
1147 #else
1148 gcc_unreachable ();
1149 #endif
1152 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1153 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1154 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1155 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1157 if (! data->reverse)
1158 data->offset += size;
1160 data->len -= size;
1164 /* Emit code to move a block Y to a block X. This may be done with
1165 string-move instructions, with multiple scalar move instructions,
1166 or with a library call.
1168 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1169 SIZE is an rtx that says how long they are.
1170 ALIGN is the maximum alignment we can assume they have.
1171 METHOD describes what kind of copy this is, and what mechanisms may be used.
1173 Return the address of the new block, if memcpy is called and returns it,
1174 0 otherwise. */
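/* Sketch of how METHOD is interpreted below (restating the switch that
   follows): BLOCK_OP_NORMAL and BLOCK_OP_TAILCALL may fall back to a
   memcpy libcall, BLOCK_OP_CALL_PARM only does so when the libcall cannot
   clobber outgoing argument slots, and BLOCK_OP_NO_LIBCALL forces an
   inline expansion (by-pieces, a movmem pattern, or the byte-copy loop).  */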
1177 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1178 unsigned int expected_align, HOST_WIDE_INT expected_size)
1180 bool may_use_call;
1181 rtx retval = 0;
1182 unsigned int align;
1184 switch (method)
1186 case BLOCK_OP_NORMAL:
1187 case BLOCK_OP_TAILCALL:
1188 may_use_call = true;
1189 break;
1191 case BLOCK_OP_CALL_PARM:
1192 may_use_call = block_move_libcall_safe_for_call_parm ();
1194 /* Make inhibit_defer_pop nonzero around the library call
1195 to force it to pop the arguments right away. */
1196 NO_DEFER_POP;
1197 break;
1199 case BLOCK_OP_NO_LIBCALL:
1200 may_use_call = false;
1201 break;
1203 default:
1204 gcc_unreachable ();
1207 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1209 gcc_assert (MEM_P (x));
1210 gcc_assert (MEM_P (y));
1211 gcc_assert (size);
1213 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1214 block copy is more efficient for other large modes, e.g. DCmode. */
1215 x = adjust_address (x, BLKmode, 0);
1216 y = adjust_address (y, BLKmode, 0);
1218 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1219 can be incorrect is coming from __builtin_memcpy. */
1220 if (GET_CODE (size) == CONST_INT)
1222 if (INTVAL (size) == 0)
1223 return 0;
1225 x = shallow_copy_rtx (x);
1226 y = shallow_copy_rtx (y);
1227 set_mem_size (x, size);
1228 set_mem_size (y, size);
1231 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1232 move_by_pieces (x, y, INTVAL (size), align, 0);
1233 else if (emit_block_move_via_movmem (x, y, size, align,
1234 expected_align, expected_size))
1236 else if (may_use_call)
1237 retval = emit_block_move_via_libcall (x, y, size,
1238 method == BLOCK_OP_TAILCALL);
1239 else
1240 emit_block_move_via_loop (x, y, size, align);
1242 if (method == BLOCK_OP_CALL_PARM)
1243 OK_DEFER_POP;
1245 return retval;
1249 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1251 return emit_block_move_hints (x, y, size, method, 0, -1);
1254 /* A subroutine of emit_block_move. Returns true if calling the
1255 block move libcall will not clobber any parameters which may have
1256 already been placed on the stack. */
1258 static bool
1259 block_move_libcall_safe_for_call_parm (void)
1261 /* If arguments are pushed on the stack, then they're safe. */
1262 if (PUSH_ARGS)
1263 return true;
1265 /* If registers go on the stack anyway, any argument is sure to clobber
1266 an outgoing argument. */
1267 #if defined (REG_PARM_STACK_SPACE)
1268 if (OUTGOING_REG_PARM_STACK_SPACE)
1270 tree fn;
1271 fn = emit_block_move_libcall_fn (false);
1272 if (REG_PARM_STACK_SPACE (fn) != 0)
1273 return false;
1275 #endif
1277 /* If any argument goes in memory, then it might clobber an outgoing
1278 argument. */
1280 CUMULATIVE_ARGS args_so_far;
1281 tree fn, arg;
1283 fn = emit_block_move_libcall_fn (false);
1284 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1286 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1287 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1289 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1290 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1291 if (!tmp || !REG_P (tmp))
1292 return false;
1293 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1294 return false;
1295 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1298 return true;
1301 /* A subroutine of emit_block_move. Expand a movmem pattern;
1302 return true if successful. */
1304 static bool
1305 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1306 unsigned int expected_align, HOST_WIDE_INT expected_size)
1308 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1309 int save_volatile_ok = volatile_ok;
1310 enum machine_mode mode;
1312 if (expected_align < align)
1313 expected_align = align;
1315 /* Since this is a move insn, we don't care about volatility. */
1316 volatile_ok = 1;
1318 /* Try the most limited insn first, because there's no point
1319 including more than one in the machine description unless
1320 the more limited one has some advantage. */
1322 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1323 mode = GET_MODE_WIDER_MODE (mode))
1325 enum insn_code code = movmem_optab[(int) mode];
1326 insn_operand_predicate_fn pred;
1328 if (code != CODE_FOR_nothing
1329 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1330 here because if SIZE is less than the mode mask, as it is
1331 returned by the macro, it will definitely be less than the
1332 actual mode mask. */
1333 && ((GET_CODE (size) == CONST_INT
1334 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1335 <= (GET_MODE_MASK (mode) >> 1)))
1336 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1337 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1338 || (*pred) (x, BLKmode))
1339 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1340 || (*pred) (y, BLKmode))
1341 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1342 || (*pred) (opalign, VOIDmode)))
1344 rtx op2;
1345 rtx last = get_last_insn ();
1346 rtx pat;
1348 op2 = convert_to_mode (mode, size, 1);
1349 pred = insn_data[(int) code].operand[2].predicate;
1350 if (pred != 0 && ! (*pred) (op2, mode))
1351 op2 = copy_to_mode_reg (mode, op2);
1353 /* ??? When called via emit_block_move_for_call, it'd be
1354 nice if there were some way to inform the backend, so
1355 that it doesn't fail the expansion because it thinks
1356 emitting the libcall would be more efficient. */
1358 if (insn_data[(int) code].n_operands == 4)
1359 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1360 else
1361 pat = GEN_FCN ((int) code) (x, y, op2, opalign,
1362 GEN_INT (expected_align),
1363 GEN_INT (expected_size));
1364 if (pat)
1366 emit_insn (pat);
1367 volatile_ok = save_volatile_ok;
1368 return true;
1370 else
1371 delete_insns_since (last);
1375 volatile_ok = save_volatile_ok;
1376 return false;
1379 /* A subroutine of emit_block_move. Expand a call to memcpy.
1380 Return the return value from memcpy, 0 otherwise. */
1383 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1385 rtx dst_addr, src_addr;
1386 tree call_expr, fn, src_tree, dst_tree, size_tree;
1387 enum machine_mode size_mode;
1388 rtx retval;
1390 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1391 pseudos. We can then place those new pseudos into a VAR_DECL and
1392 use them later. */
1394 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1395 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1397 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1398 src_addr = convert_memory_address (ptr_mode, src_addr);
1400 dst_tree = make_tree (ptr_type_node, dst_addr);
1401 src_tree = make_tree (ptr_type_node, src_addr);
1403 size_mode = TYPE_MODE (sizetype);
1405 size = convert_to_mode (size_mode, size, 1);
1406 size = copy_to_mode_reg (size_mode, size);
1408 /* It is incorrect to use the libcall calling conventions to call
1409 memcpy in this context. This could be a user call to memcpy and
1410 the user may wish to examine the return value from memcpy. For
1411 targets where libcalls and normal calls have different conventions
1412 for returning pointers, we could end up generating incorrect code. */
1414 size_tree = make_tree (sizetype, size);
1416 fn = emit_block_move_libcall_fn (true);
1417 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1418 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1420 retval = expand_normal (call_expr);
1422 return retval;
1425 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1426 for the function we use for block copies. The first time FOR_CALL
1427 is true, we call assemble_external. */
1429 static GTY(()) tree block_move_fn;
1431 void
1432 init_block_move_fn (const char *asmspec)
1434 if (!block_move_fn)
1436 tree args, fn;
1438 fn = get_identifier ("memcpy");
1439 args = build_function_type_list (ptr_type_node, ptr_type_node,
1440 const_ptr_type_node, sizetype,
1441 NULL_TREE);
1443 fn = build_decl (FUNCTION_DECL, fn, args);
1444 DECL_EXTERNAL (fn) = 1;
1445 TREE_PUBLIC (fn) = 1;
1446 DECL_ARTIFICIAL (fn) = 1;
1447 TREE_NOTHROW (fn) = 1;
1448 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1449 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1451 block_move_fn = fn;
1454 if (asmspec)
1455 set_user_assembler_name (block_move_fn, asmspec);
1458 static tree
1459 emit_block_move_libcall_fn (int for_call)
1461 static bool emitted_extern;
1463 if (!block_move_fn)
1464 init_block_move_fn (NULL);
1466 if (for_call && !emitted_extern)
1468 emitted_extern = true;
1469 make_decl_rtl (block_move_fn);
1470 assemble_external (block_move_fn);
1473 return block_move_fn;
1476 /* A subroutine of emit_block_move. Copy the data via an explicit
1477 loop. This is used only when libcalls are forbidden. */
1478 /* ??? It'd be nice to copy in hunks larger than QImode. */
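/* The generated loop has roughly this shape (paraphrasing the RTL emitted
   below, shown as C-like pseudocode for clarity):

     iter = 0;
     goto cmp;
   top:
     x[iter] = y[iter];        (a QImode copy)
     iter = iter + 1;
   cmp:
     if (iter < size) goto top;
 */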
1480 static void
1481 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1482 unsigned int align ATTRIBUTE_UNUSED)
1484 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1485 enum machine_mode iter_mode;
1487 iter_mode = GET_MODE (size);
1488 if (iter_mode == VOIDmode)
1489 iter_mode = word_mode;
1491 top_label = gen_label_rtx ();
1492 cmp_label = gen_label_rtx ();
1493 iter = gen_reg_rtx (iter_mode);
1495 emit_move_insn (iter, const0_rtx);
1497 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1498 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1499 do_pending_stack_adjust ();
1501 emit_jump (cmp_label);
1502 emit_label (top_label);
1504 tmp = convert_modes (Pmode, iter_mode, iter, true);
1505 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1506 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1507 x = change_address (x, QImode, x_addr);
1508 y = change_address (y, QImode, y_addr);
1510 emit_move_insn (x, y);
1512 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1513 true, OPTAB_LIB_WIDEN);
1514 if (tmp != iter)
1515 emit_move_insn (iter, tmp);
1517 emit_label (cmp_label);
1519 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1520 true, top_label);
1523 /* Copy all or part of a value X into registers starting at REGNO.
1524 The number of registers to be filled is NREGS. */
1526 void
1527 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1529 int i;
1530 #ifdef HAVE_load_multiple
1531 rtx pat;
1532 rtx last;
1533 #endif
1535 if (nregs == 0)
1536 return;
1538 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1539 x = validize_mem (force_const_mem (mode, x));
1541 /* See if the machine can do this with a load multiple insn. */
1542 #ifdef HAVE_load_multiple
1543 if (HAVE_load_multiple)
1545 last = get_last_insn ();
1546 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1547 GEN_INT (nregs));
1548 if (pat)
1550 emit_insn (pat);
1551 return;
1553 else
1554 delete_insns_since (last);
1556 #endif
1558 for (i = 0; i < nregs; i++)
1559 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1560 operand_subword_force (x, i, mode));
1563 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1564 The number of registers to be filled is NREGS. */
1566 void
1567 move_block_from_reg (int regno, rtx x, int nregs)
1569 int i;
1571 if (nregs == 0)
1572 return;
1574 /* See if the machine can do this with a store multiple insn. */
1575 #ifdef HAVE_store_multiple
1576 if (HAVE_store_multiple)
1578 rtx last = get_last_insn ();
1579 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1580 GEN_INT (nregs));
1581 if (pat)
1583 emit_insn (pat);
1584 return;
1586 else
1587 delete_insns_since (last);
1589 #endif
1591 for (i = 0; i < nregs; i++)
1593 rtx tem = operand_subword (x, i, 1, BLKmode);
1595 gcc_assert (tem);
1597 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1601 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1602 ORIG, where ORIG is a non-consecutive group of registers represented by
1603 a PARALLEL. The clone is identical to the original except in that the
1604 original set of registers is replaced by a new set of pseudo registers.
1605 The new set has the same modes as the original set. */
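/* Illustrative shape of such a PARALLEL (register numbers and modes are
   hypothetical): each element is an EXPR_LIST pairing a register with its
   byte offset within the block, e.g.

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   and gen_group_rtx replaces each register with a fresh pseudo of the same
   mode while keeping the offsets.  */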
1608 gen_group_rtx (rtx orig)
1610 int i, length;
1611 rtx *tmps;
1613 gcc_assert (GET_CODE (orig) == PARALLEL);
1615 length = XVECLEN (orig, 0);
1616 tmps = alloca (sizeof (rtx) * length);
1618 /* Skip a NULL entry in first slot. */
1619 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1621 if (i)
1622 tmps[0] = 0;
1624 for (; i < length; i++)
1626 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1627 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1629 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1632 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1635 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1636 except that values are placed in TMPS[i], and must later be moved
1637 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1639 static void
1640 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1642 rtx src;
1643 int start, i;
1644 enum machine_mode m = GET_MODE (orig_src);
1646 gcc_assert (GET_CODE (dst) == PARALLEL);
1648 if (m != VOIDmode
1649 && !SCALAR_INT_MODE_P (m)
1650 && !MEM_P (orig_src)
1651 && GET_CODE (orig_src) != CONCAT)
1653 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1654 if (imode == BLKmode)
1655 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1656 else
1657 src = gen_reg_rtx (imode);
1658 if (imode != BLKmode)
1659 src = gen_lowpart (GET_MODE (orig_src), src);
1660 emit_move_insn (src, orig_src);
1661 /* ...and back again. */
1662 if (imode != BLKmode)
1663 src = gen_lowpart (imode, src);
1664 emit_group_load_1 (tmps, dst, src, type, ssize);
1665 return;
1668 /* Check for a NULL entry, used to indicate that the parameter goes
1669 both on the stack and in registers. */
1670 if (XEXP (XVECEXP (dst, 0, 0), 0))
1671 start = 0;
1672 else
1673 start = 1;
1675 /* Process the pieces. */
1676 for (i = start; i < XVECLEN (dst, 0); i++)
1678 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1679 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1680 unsigned int bytelen = GET_MODE_SIZE (mode);
1681 int shift = 0;
1683 /* Handle trailing fragments that run over the size of the struct. */
1684 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1686 /* Arrange to shift the fragment to where it belongs.
1687 extract_bit_field loads to the lsb of the reg. */
1688 if (
1689 #ifdef BLOCK_REG_PADDING
1690 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1691 == (BYTES_BIG_ENDIAN ? upward : downward)
1692 #else
1693 BYTES_BIG_ENDIAN
1694 #endif
1696 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1697 bytelen = ssize - bytepos;
1698 gcc_assert (bytelen > 0);
1701 /* If we won't be loading directly from memory, protect the real source
1702 from strange tricks we might play; but make sure that the source can
1703 be loaded directly into the destination. */
1704 src = orig_src;
1705 if (!MEM_P (orig_src)
1706 && (!CONSTANT_P (orig_src)
1707 || (GET_MODE (orig_src) != mode
1708 && GET_MODE (orig_src) != VOIDmode)))
1710 if (GET_MODE (orig_src) == VOIDmode)
1711 src = gen_reg_rtx (mode);
1712 else
1713 src = gen_reg_rtx (GET_MODE (orig_src));
1715 emit_move_insn (src, orig_src);
1718 /* Optimize the access just a bit. */
1719 if (MEM_P (src)
1720 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1721 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1722 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1723 && bytelen == GET_MODE_SIZE (mode))
1725 tmps[i] = gen_reg_rtx (mode);
1726 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1728 else if (COMPLEX_MODE_P (mode)
1729 && GET_MODE (src) == mode
1730 && bytelen == GET_MODE_SIZE (mode))
1731 /* Let emit_move_complex do the bulk of the work. */
1732 tmps[i] = src;
1733 else if (GET_CODE (src) == CONCAT)
1735 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1736 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1738 if ((bytepos == 0 && bytelen == slen0)
1739 || (bytepos != 0 && bytepos + bytelen <= slen))
1741 /* The following assumes that the concatenated objects all
1742 have the same size. In this case, a simple calculation
1743 can be used to determine the object and the bit field
1744 to be extracted. */
1745 tmps[i] = XEXP (src, bytepos / slen0);
1746 if (! CONSTANT_P (tmps[i])
1747 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1748 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1749 (bytepos % slen0) * BITS_PER_UNIT,
1750 1, NULL_RTX, mode, mode);
1752 else
1754 rtx mem;
1756 gcc_assert (!bytepos);
1757 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1758 emit_move_insn (mem, src);
1759 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1760 0, 1, NULL_RTX, mode, mode);
1763 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1764 SIMD register, which is currently broken. While we get GCC
1765 to emit proper RTL for these cases, let's dump to memory. */
1766 else if (VECTOR_MODE_P (GET_MODE (dst))
1767 && REG_P (src))
1769 int slen = GET_MODE_SIZE (GET_MODE (src));
1770 rtx mem;
1772 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1773 emit_move_insn (mem, src);
1774 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1776 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1777 && XVECLEN (dst, 0) > 1)
1778 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1779 else if (CONSTANT_P (src))
1781 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1783 if (len == ssize)
1784 tmps[i] = src;
1785 else
1787 rtx first, second;
1789 gcc_assert (2 * len == ssize);
1790 split_double (src, &first, &second);
1791 if (i)
1792 tmps[i] = second;
1793 else
1794 tmps[i] = first;
1797 else if (REG_P (src) && GET_MODE (src) == mode)
1798 tmps[i] = src;
1799 else
1800 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1801 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1802 mode, mode);
1804 if (shift)
1805 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1806 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1810 /* Emit code to move a block SRC of type TYPE to a block DST,
1811 where DST is non-consecutive registers represented by a PARALLEL.
1812 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1813 if not known. */
1815 void
1816 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1818 rtx *tmps;
1819 int i;
1821 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1822 emit_group_load_1 (tmps, dst, src, type, ssize);
1824 /* Copy the extracted pieces into the proper (probable) hard regs. */
1825 for (i = 0; i < XVECLEN (dst, 0); i++)
1827 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1828 if (d == NULL)
1829 continue;
1830 emit_move_insn (d, tmps[i]);
1834 /* Similar, but load SRC into new pseudos in a format that looks like
1835 PARALLEL. This can later be fed to emit_group_move to get things
1836 in the right place. */
1839 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1841 rtvec vec;
1842 int i;
1844 vec = rtvec_alloc (XVECLEN (parallel, 0));
1845 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1847 /* Convert the vector to look just like the original PARALLEL, except
1848 with the computed values. */
1849 for (i = 0; i < XVECLEN (parallel, 0); i++)
1851 rtx e = XVECEXP (parallel, 0, i);
1852 rtx d = XEXP (e, 0);
1854 if (d)
1856 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1857 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1859 RTVEC_ELT (vec, i) = e;
1862 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1865 /* Emit code to move a block SRC to block DST, where SRC and DST are
1866 non-consecutive groups of registers, each represented by a PARALLEL. */
1868 void
1869 emit_group_move (rtx dst, rtx src)
1871 int i;
1873 gcc_assert (GET_CODE (src) == PARALLEL
1874 && GET_CODE (dst) == PARALLEL
1875 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1877 /* Skip first entry if NULL. */
1878 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1879 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1880 XEXP (XVECEXP (src, 0, i), 0));
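/* Editorial sketch, not part of the original expr.c: combining
   emit_group_move_into_temps with emit_group_move.  Names are hypothetical;
   SRC_PARALLEL and DST_PARALLEL are assumed to be PARALLELs of equal
   length.  */
#if 0
static void
example_group_move (rtx dst_parallel, rtx src_parallel)
{
  /* Copy the source group into pseudos first, so the final group move does
     not read hard registers that may have been clobbered in between.  */
  rtx tmp = emit_group_move_into_temps (src_parallel);
  emit_group_move (dst_parallel, tmp);
}
#endif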
1883 /* Move a group of registers represented by a PARALLEL into pseudos. */
1886 emit_group_move_into_temps (rtx src)
1888 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1889 int i;
1891 for (i = 0; i < XVECLEN (src, 0); i++)
1893 rtx e = XVECEXP (src, 0, i);
1894 rtx d = XEXP (e, 0);
1896 if (d)
1897 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1898 RTVEC_ELT (vec, i) = e;
1901 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1904 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1905 where SRC is non-consecutive registers represented by a PARALLEL.
1906 SSIZE represents the total size of block ORIG_DST, or -1 if not
1907 known. */
1909 void
1910 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1912 rtx *tmps, dst;
1913 int start, finish, i;
1914 enum machine_mode m = GET_MODE (orig_dst);
1916 gcc_assert (GET_CODE (src) == PARALLEL);
1918 if (!SCALAR_INT_MODE_P (m)
1919 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1921 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1922 if (imode == BLKmode)
1923 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1924 else
1925 dst = gen_reg_rtx (imode);
1926 emit_group_store (dst, src, type, ssize);
1927 if (imode != BLKmode)
1928 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1929 emit_move_insn (orig_dst, dst);
1930 return;
1933 /* Check for a NULL entry, used to indicate that the parameter goes
1934 both on the stack and in registers. */
1935 if (XEXP (XVECEXP (src, 0, 0), 0))
1936 start = 0;
1937 else
1938 start = 1;
1939 finish = XVECLEN (src, 0);
1941 tmps = alloca (sizeof (rtx) * finish);
1943 /* Copy the (probable) hard regs into pseudos. */
1944 for (i = start; i < finish; i++)
1946 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1947 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1949 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1950 emit_move_insn (tmps[i], reg);
1952 else
1953 tmps[i] = reg;
1956 /* If we won't be storing directly into memory, protect the real destination
1957 from strange tricks we might play. */
1958 dst = orig_dst;
1959 if (GET_CODE (dst) == PARALLEL)
1961 rtx temp;
1963 /* We can get a PARALLEL dst if there is a conditional expression in
1964 a return statement. In that case, the dst and src are the same,
1965 so no action is necessary. */
1966 if (rtx_equal_p (dst, src))
1967 return;
1969 /* It is unclear if we can ever reach here, but we may as well handle
1970 it. Allocate a temporary, and split this into a store/load to/from
1971 the temporary. */
1973 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1974 emit_group_store (temp, src, type, ssize);
1975 emit_group_load (dst, temp, type, ssize);
1976 return;
1978 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1980 enum machine_mode outer = GET_MODE (dst);
1981 enum machine_mode inner;
1982 HOST_WIDE_INT bytepos;
1983 bool done = false;
1984 rtx temp;
1986 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1987 dst = gen_reg_rtx (outer);
1989 /* Make life a bit easier for combine. */
1990 /* If the first element of the vector is the low part
1991 of the destination mode, use a paradoxical subreg to
1992 initialize the destination. */
1993 if (start < finish)
1995 inner = GET_MODE (tmps[start]);
1996 bytepos = subreg_lowpart_offset (inner, outer);
1997 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1999 temp = simplify_gen_subreg (outer, tmps[start],
2000 inner, 0);
2001 if (temp)
2003 emit_move_insn (dst, temp);
2004 done = true;
2005 start++;
2010 /* If the first element wasn't the low part, try the last. */
2011 if (!done
2012 && start < finish - 1)
2014 inner = GET_MODE (tmps[finish - 1]);
2015 bytepos = subreg_lowpart_offset (inner, outer);
2016 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2018 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2019 inner, 0);
2020 if (temp)
2022 emit_move_insn (dst, temp);
2023 done = true;
2024 finish--;
2029 /* Otherwise, simply initialize the result to zero. */
2030 if (!done)
2031 emit_move_insn (dst, CONST0_RTX (outer));
2034 /* Process the pieces. */
2035 for (i = start; i < finish; i++)
2037 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2038 enum machine_mode mode = GET_MODE (tmps[i]);
2039 unsigned int bytelen = GET_MODE_SIZE (mode);
2040 rtx dest = dst;
2042 /* Handle trailing fragments that run over the size of the struct. */
2043 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2045 /* store_bit_field always takes its value from the lsb.
2046 Move the fragment to the lsb if it's not already there. */
2047 if (
2048 #ifdef BLOCK_REG_PADDING
2049 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2050 == (BYTES_BIG_ENDIAN ? upward : downward)
2051 #else
2052 BYTES_BIG_ENDIAN
2053 #endif
2056 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2057 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2058 build_int_cst (NULL_TREE, shift),
2059 tmps[i], 0);
2061 bytelen = ssize - bytepos;
2064 if (GET_CODE (dst) == CONCAT)
2066 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2067 dest = XEXP (dst, 0);
2068 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2070 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2071 dest = XEXP (dst, 1);
2073 else
2075 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2076 dest = assign_stack_temp (GET_MODE (dest),
2077 GET_MODE_SIZE (GET_MODE (dest)), 0);
2078 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2079 tmps[i]);
2080 dst = dest;
2081 break;
2085 /* Optimize the access just a bit. */
2086 if (MEM_P (dest)
2087 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2088 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2089 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2090 && bytelen == GET_MODE_SIZE (mode))
2091 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2092 else
2093 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2094 mode, tmps[i]);
2097 /* Copy from the pseudo into the (probable) hard reg. */
2098 if (orig_dst != dst)
2099 emit_move_insn (orig_dst, dst);
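/* Editorial sketch, not part of the original expr.c: spilling a register
   group to memory with emit_group_store.  The helper name is hypothetical;
   SRC_PARALLEL is assumed to describe the pieces of a value of TYPE.  */
#if 0
static rtx
example_group_store (rtx src_parallel, tree type)
{
  HOST_WIDE_INT size = int_size_in_bytes (type);
  rtx mem = assign_stack_temp (BLKmode, size, 0);

  emit_group_store (mem, src_parallel, type, size);
  return mem;
}
#endif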
2102 /* Generate code to copy a BLKmode object of TYPE out of a
2103 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2104 is null, a stack temporary is created. TGTBLK is returned.
2106 The purpose of this routine is to handle functions that return
2107 BLKmode structures in registers. Some machines (the PA for example)
2108 want to return all small structures in registers regardless of the
2109 structure's alignment. */
2112 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2114 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2115 rtx src = NULL, dst = NULL;
2116 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2117 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2119 if (tgtblk == 0)
2121 tgtblk = assign_temp (build_qualified_type (type,
2122 (TYPE_QUALS (type)
2123 | TYPE_QUAL_CONST)),
2124 0, 1, 1);
2125 preserve_temp_slots (tgtblk);
2128 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2129 into a new pseudo which is a full word. */
2131 if (GET_MODE (srcreg) != BLKmode
2132 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2133 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2135 /* If the structure doesn't take up a whole number of words, see whether
2136 SRCREG is padded on the left or on the right. If it's on the left,
2137 set PADDING_CORRECTION to the number of bits to skip.
2139 In most ABIs, the structure will be returned at the least significant end of
2140 the register, which translates to right padding on little-endian
2141 targets and left padding on big-endian targets. The opposite
2142 holds if the structure is returned at the most significant
2143 end of the register. */
2144 if (bytes % UNITS_PER_WORD != 0
2145 && (targetm.calls.return_in_msb (type)
2146 ? !BYTES_BIG_ENDIAN
2147 : BYTES_BIG_ENDIAN))
2148 padding_correction
2149 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2151 /* Copy the structure BITSIZE bits at a time.
2153 We could probably emit more efficient code for machines which do not use
2154 strict alignment, but it doesn't seem worth the effort at the current
2155 time. */
2156 for (bitpos = 0, xbitpos = padding_correction;
2157 bitpos < bytes * BITS_PER_UNIT;
2158 bitpos += bitsize, xbitpos += bitsize)
2160 /* We need a new source operand each time xbitpos is on a
2161 word boundary and when xbitpos == padding_correction
2162 (the first time through). */
2163 if (xbitpos % BITS_PER_WORD == 0
2164 || xbitpos == padding_correction)
2165 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2166 GET_MODE (srcreg));
2168 /* We need a new destination operand each time bitpos is on
2169 a word boundary. */
2170 if (bitpos % BITS_PER_WORD == 0)
2171 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2173 /* Use xbitpos for the source extraction (right justified) and
2174 bitpos for the destination store (left justified). */
2175 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2176 extract_bit_field (src, bitsize,
2177 xbitpos % BITS_PER_WORD, 1,
2178 NULL_RTX, word_mode, word_mode));
2181 return tgtblk;
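/* Editorial sketch, not part of the original expr.c: copying a BLKmode
   return value out of the return register.  The helper name is
   hypothetical; passing a null TGTBLK lets copy_blkmode_from_reg create
   the stack temporary itself.  */
#if 0
static rtx
example_copy_blkmode_return (rtx srcreg, tree type)
{
  return copy_blkmode_from_reg (NULL_RTX, srcreg, type);
}
#endif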
2184 /* Add a USE expression for REG to the (possibly empty) list pointed
2185 to by CALL_FUSAGE. REG must denote a hard register. */
2187 void
2188 use_reg (rtx *call_fusage, rtx reg)
2190 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2192 *call_fusage
2193 = gen_rtx_EXPR_LIST (VOIDmode,
2194 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2197 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2198 starting at REGNO. All of these registers must be hard registers. */
2200 void
2201 use_regs (rtx *call_fusage, int regno, int nregs)
2203 int i;
2205 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2207 for (i = 0; i < nregs; i++)
2208 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2211 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2212 PARALLEL REGS. This is for calls that pass values in multiple
2213 non-contiguous locations. The Irix 6 ABI has examples of this. */
2215 void
2216 use_group_regs (rtx *call_fusage, rtx regs)
2218 int i;
2220 for (i = 0; i < XVECLEN (regs, 0); i++)
2222 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2224 /* A NULL entry means the parameter goes both on the stack and in
2225 registers. This can also be a MEM for targets that pass values
2226 partially on the stack and partially in registers. */
2227 if (reg != 0 && REG_P (reg))
2228 use_reg (call_fusage, reg);
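/* Editorial sketch, not part of the original expr.c: recording register
   uses for a call.  The helper name and the register numbers are
   hypothetical; the resulting list would normally be attached to the call
   insn as CALL_INSN_FUNCTION_USAGE.  */
#if 0
static rtx
example_call_fusage (void)
{
  rtx call_fusage = NULL_RTX;

  /* Note that the call reads two consecutive hard registers starting at
     hard register number 0.  */
  use_regs (&call_fusage, 0, 2);
  return call_fusage;
}
#endif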
2233 /* Determine whether the LEN bytes generated by CONSTFUN can be
2234 stored to memory using several move instructions. CONSTFUNDATA is
2235 a pointer which will be passed as argument in every CONSTFUN call.
2236 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2237 a memset operation and false if it's a copy of a constant string.
2238 Return nonzero if a call to store_by_pieces should succeed. */
2241 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2242 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2243 void *constfundata, unsigned int align, bool memsetp)
2245 unsigned HOST_WIDE_INT l;
2246 unsigned int max_size;
2247 HOST_WIDE_INT offset = 0;
2248 enum machine_mode mode, tmode;
2249 enum insn_code icode;
2250 int reverse;
2251 rtx cst;
2253 if (len == 0)
2254 return 1;
2256 if (! (memsetp
2257 ? SET_BY_PIECES_P (len, align)
2258 : STORE_BY_PIECES_P (len, align)))
2259 return 0;
2261 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2262 if (align >= GET_MODE_ALIGNMENT (tmode))
2263 align = GET_MODE_ALIGNMENT (tmode);
2264 else
2266 enum machine_mode xmode;
2268 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2269 tmode != VOIDmode;
2270 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2271 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2272 || SLOW_UNALIGNED_ACCESS (tmode, align))
2273 break;
2275 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2278 /* We would first store what we can in the largest integer mode, then go to
2279 successively smaller modes. */
2281 for (reverse = 0;
2282 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2283 reverse++)
2285 l = len;
2286 mode = VOIDmode;
2287 max_size = STORE_MAX_PIECES + 1;
2288 while (max_size > 1)
2290 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2291 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2292 if (GET_MODE_SIZE (tmode) < max_size)
2293 mode = tmode;
2295 if (mode == VOIDmode)
2296 break;
2298 icode = optab_handler (mov_optab, mode)->insn_code;
2299 if (icode != CODE_FOR_nothing
2300 && align >= GET_MODE_ALIGNMENT (mode))
2302 unsigned int size = GET_MODE_SIZE (mode);
2304 while (l >= size)
2306 if (reverse)
2307 offset -= size;
2309 cst = (*constfun) (constfundata, offset, mode);
2310 if (!LEGITIMATE_CONSTANT_P (cst))
2311 return 0;
2313 if (!reverse)
2314 offset += size;
2316 l -= size;
2320 max_size = GET_MODE_SIZE (mode);
2323 /* The code above should have handled everything. */
2324 gcc_assert (!l);
2327 return 1;
2330 /* Generate several move instructions to store LEN bytes generated by
2331 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2332 pointer which will be passed as argument in every CONSTFUN call.
2333 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2334 a memset operation and false if it's a copy of a constant string.
2335 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2336 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2337 stpcpy. */
2340 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2341 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2342 void *constfundata, unsigned int align, bool memsetp, int endp)
2344 struct store_by_pieces data;
2346 if (len == 0)
2348 gcc_assert (endp != 2);
2349 return to;
2352 gcc_assert (memsetp
2353 ? SET_BY_PIECES_P (len, align)
2354 : STORE_BY_PIECES_P (len, align));
2355 data.constfun = constfun;
2356 data.constfundata = constfundata;
2357 data.len = len;
2358 data.to = to;
2359 store_by_pieces_1 (&data, align);
2360 if (endp)
2362 rtx to1;
2364 gcc_assert (!data.reverse);
2365 if (data.autinc_to)
2367 if (endp == 2)
2369 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2370 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2371 else
2372 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2373 -1));
2375 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2376 data.offset);
2378 else
2380 if (endp == 2)
2381 --data.offset;
2382 to1 = adjust_address (data.to, QImode, data.offset);
2384 return to1;
2386 else
2387 return data.to;
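/* Editorial sketch, not part of the original expr.c: a constant-generator
   callback in the style of clear_by_pieces_1, used to drive
   can_store_by_pieces and store_by_pieces.  All names and the 16-byte
   length are hypothetical.  */
#if 0
static rtx
example_zero_cst (void *data ATTRIBUTE_UNUSED,
		  HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		  enum machine_mode mode)
{
  return CONST0_RTX (mode);
}

static void
example_store_zeros (rtx blk_mem)
{
  unsigned int align = MEM_ALIGN (blk_mem);

  /* Only fall through to store_by_pieces when the target agrees that a
     16-byte memset-style store is worth expanding inline.  */
  if (can_store_by_pieces (16, example_zero_cst, NULL, align, true))
    store_by_pieces (blk_mem, 16, example_zero_cst, NULL, align, true, 0);
}
#endif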
2390 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2391 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2393 static void
2394 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2396 struct store_by_pieces data;
2398 if (len == 0)
2399 return;
2401 data.constfun = clear_by_pieces_1;
2402 data.constfundata = NULL;
2403 data.len = len;
2404 data.to = to;
2405 store_by_pieces_1 (&data, align);
2408 /* Callback routine for clear_by_pieces.
2409 Return const0_rtx unconditionally. */
2411 static rtx
2412 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2413 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2414 enum machine_mode mode ATTRIBUTE_UNUSED)
2416 return const0_rtx;
2419 /* Subroutine of clear_by_pieces and store_by_pieces.
2420 Generate several move instructions to store LEN bytes of block TO. (A MEM
2421 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2423 static void
2424 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2425 unsigned int align ATTRIBUTE_UNUSED)
2427 rtx to_addr = XEXP (data->to, 0);
2428 unsigned int max_size = STORE_MAX_PIECES + 1;
2429 enum machine_mode mode = VOIDmode, tmode;
2430 enum insn_code icode;
2432 data->offset = 0;
2433 data->to_addr = to_addr;
2434 data->autinc_to
2435 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2436 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2438 data->explicit_inc_to = 0;
2439 data->reverse
2440 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2441 if (data->reverse)
2442 data->offset = data->len;
2444 /* If storing requires more than two move insns,
2445 copy addresses to registers (to make displacements shorter)
2446 and use post-increment if available. */
2447 if (!data->autinc_to
2448 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2450 /* Determine the main mode we'll be using. */
2451 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2452 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2453 if (GET_MODE_SIZE (tmode) < max_size)
2454 mode = tmode;
2456 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2458 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2459 data->autinc_to = 1;
2460 data->explicit_inc_to = -1;
2463 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2464 && ! data->autinc_to)
2466 data->to_addr = copy_addr_to_reg (to_addr);
2467 data->autinc_to = 1;
2468 data->explicit_inc_to = 1;
2471 if ( !data->autinc_to && CONSTANT_P (to_addr))
2472 data->to_addr = copy_addr_to_reg (to_addr);
2475 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2476 if (align >= GET_MODE_ALIGNMENT (tmode))
2477 align = GET_MODE_ALIGNMENT (tmode);
2478 else
2480 enum machine_mode xmode;
2482 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2483 tmode != VOIDmode;
2484 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2485 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2486 || SLOW_UNALIGNED_ACCESS (tmode, align))
2487 break;
2489 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2492 /* First store what we can in the largest integer mode, then go to
2493 successively smaller modes. */
2495 while (max_size > 1)
2497 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2498 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2499 if (GET_MODE_SIZE (tmode) < max_size)
2500 mode = tmode;
2502 if (mode == VOIDmode)
2503 break;
2505 icode = optab_handler (mov_optab, mode)->insn_code;
2506 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2507 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2509 max_size = GET_MODE_SIZE (mode);
2512 /* The code above should have handled everything. */
2513 gcc_assert (!data->len);
2516 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2517 with move instructions for mode MODE. GENFUN is the gen_... function
2518 to make a move insn for that mode. DATA has all the other info. */
2520 static void
2521 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2522 struct store_by_pieces *data)
2524 unsigned int size = GET_MODE_SIZE (mode);
2525 rtx to1, cst;
2527 while (data->len >= size)
2529 if (data->reverse)
2530 data->offset -= size;
2532 if (data->autinc_to)
2533 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2534 data->offset);
2535 else
2536 to1 = adjust_address (data->to, mode, data->offset);
2538 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2539 emit_insn (gen_add2_insn (data->to_addr,
2540 GEN_INT (-(HOST_WIDE_INT) size)));
2542 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2543 emit_insn ((*genfun) (to1, cst));
2545 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2546 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2548 if (! data->reverse)
2549 data->offset += size;
2551 data->len -= size;
2555 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2556 its length in bytes. */
2559 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2560 unsigned int expected_align, HOST_WIDE_INT expected_size)
2562 enum machine_mode mode = GET_MODE (object);
2563 unsigned int align;
2565 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2567 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2568 just move a zero. Otherwise, do this a piece at a time. */
2569 if (mode != BLKmode
2570 && GET_CODE (size) == CONST_INT
2571 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2573 rtx zero = CONST0_RTX (mode);
2574 if (zero != NULL)
2576 emit_move_insn (object, zero);
2577 return NULL;
2580 if (COMPLEX_MODE_P (mode))
2582 zero = CONST0_RTX (GET_MODE_INNER (mode));
2583 if (zero != NULL)
2585 write_complex_part (object, zero, 0);
2586 write_complex_part (object, zero, 1);
2587 return NULL;
2592 if (size == const0_rtx)
2593 return NULL;
2595 align = MEM_ALIGN (object);
2597 if (GET_CODE (size) == CONST_INT
2598 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2599 clear_by_pieces (object, INTVAL (size), align);
2600 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2601 expected_align, expected_size))
2603 else
2604 return set_storage_via_libcall (object, size, const0_rtx,
2605 method == BLOCK_OP_TAILCALL);
2607 return NULL;
2611 clear_storage (rtx object, rtx size, enum block_op_methods method)
2613 return clear_storage_hints (object, size, method, 0, -1);
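/* Editorial sketch, not part of the original expr.c: zeroing a BLKmode
   stack temporary with clear_storage.  The helper name and the 64-byte
   size are hypothetical.  */
#if 0
static void
example_clear_temp (void)
{
  rtx temp = assign_stack_temp (BLKmode, 64, 0);

  clear_storage (temp, GEN_INT (64), BLOCK_OP_NORMAL);
}
#endif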
2617 /* A subroutine of clear_storage. Expand a call to memset.
2618 Return the return value of memset, 0 otherwise. */
2621 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2623 tree call_expr, fn, object_tree, size_tree, val_tree;
2624 enum machine_mode size_mode;
2625 rtx retval;
2627 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2628 place those pseudos into a VAR_DECL and use them later. */
2630 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2632 size_mode = TYPE_MODE (sizetype);
2633 size = convert_to_mode (size_mode, size, 1);
2634 size = copy_to_mode_reg (size_mode, size);
2636 /* It is incorrect to use the libcall calling conventions to call
2637 memset in this context. This could be a user call to memset and
2638 the user may wish to examine the return value from memset. For
2639 targets where libcalls and normal calls have different conventions
2640 for returning pointers, we could end up generating incorrect code. */
2642 object_tree = make_tree (ptr_type_node, object);
2643 if (GET_CODE (val) != CONST_INT)
2644 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2645 size_tree = make_tree (sizetype, size);
2646 val_tree = make_tree (integer_type_node, val);
2648 fn = clear_storage_libcall_fn (true);
2649 call_expr = build_call_expr (fn, 3,
2650 object_tree, integer_zero_node, size_tree);
2651 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2653 retval = expand_normal (call_expr);
2655 return retval;
2658 /* A subroutine of set_storage_via_libcall. Create the tree node
2659 for the function we use for block clears. The first time FOR_CALL
2660 is true, we call assemble_external. */
2662 static GTY(()) tree block_clear_fn;
2664 void
2665 init_block_clear_fn (const char *asmspec)
2667 if (!block_clear_fn)
2669 tree fn, args;
2671 fn = get_identifier ("memset");
2672 args = build_function_type_list (ptr_type_node, ptr_type_node,
2673 integer_type_node, sizetype,
2674 NULL_TREE);
2676 fn = build_decl (FUNCTION_DECL, fn, args);
2677 DECL_EXTERNAL (fn) = 1;
2678 TREE_PUBLIC (fn) = 1;
2679 DECL_ARTIFICIAL (fn) = 1;
2680 TREE_NOTHROW (fn) = 1;
2681 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2682 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2684 block_clear_fn = fn;
2687 if (asmspec)
2688 set_user_assembler_name (block_clear_fn, asmspec);
2691 static tree
2692 clear_storage_libcall_fn (int for_call)
2694 static bool emitted_extern;
2696 if (!block_clear_fn)
2697 init_block_clear_fn (NULL);
2699 if (for_call && !emitted_extern)
2701 emitted_extern = true;
2702 make_decl_rtl (block_clear_fn);
2703 assemble_external (block_clear_fn);
2706 return block_clear_fn;
2709 /* Expand a setmem pattern; return true if successful. */
2711 bool
2712 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2713 unsigned int expected_align, HOST_WIDE_INT expected_size)
2715 /* Try the most limited insn first, because there's no point
2716 including more than one in the machine description unless
2717 the more limited one has some advantage. */
2719 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2720 enum machine_mode mode;
2722 if (expected_align < align)
2723 expected_align = align;
2725 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2726 mode = GET_MODE_WIDER_MODE (mode))
2728 enum insn_code code = setmem_optab[(int) mode];
2729 insn_operand_predicate_fn pred;
2731 if (code != CODE_FOR_nothing
2732 /* We don't need MODE to be narrower than
2733 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2734 the mode mask, as it is returned by the macro, it will
2735 definitely be less than the actual mode mask. */
2736 && ((GET_CODE (size) == CONST_INT
2737 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2738 <= (GET_MODE_MASK (mode) >> 1)))
2739 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2740 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2741 || (*pred) (object, BLKmode))
2742 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2743 || (*pred) (opalign, VOIDmode)))
2745 rtx opsize, opchar;
2746 enum machine_mode char_mode;
2747 rtx last = get_last_insn ();
2748 rtx pat;
2750 opsize = convert_to_mode (mode, size, 1);
2751 pred = insn_data[(int) code].operand[1].predicate;
2752 if (pred != 0 && ! (*pred) (opsize, mode))
2753 opsize = copy_to_mode_reg (mode, opsize);
2755 opchar = val;
2756 char_mode = insn_data[(int) code].operand[2].mode;
2757 if (char_mode != VOIDmode)
2759 opchar = convert_to_mode (char_mode, opchar, 1);
2760 pred = insn_data[(int) code].operand[2].predicate;
2761 if (pred != 0 && ! (*pred) (opchar, char_mode))
2762 opchar = copy_to_mode_reg (char_mode, opchar);
2765 if (insn_data[(int) code].n_operands == 4)
2766 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2767 else
2768 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2769 GEN_INT (expected_align),
2770 GEN_INT (expected_size));
2771 if (pat)
2773 emit_insn (pat);
2774 return true;
2776 else
2777 delete_insns_since (last);
2781 return false;
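/* Editorial sketch, not part of the original expr.c: trying the setmem
   pattern before falling back to the memset libcall, mirroring the logic
   of clear_storage_hints.  The helper name is hypothetical.  */
#if 0
static void
example_clear_block (rtx blk_mem, rtx nbytes)
{
  if (!set_storage_via_setmem (blk_mem, nbytes, const0_rtx,
			       MEM_ALIGN (blk_mem), 0, -1))
    set_storage_via_libcall (blk_mem, nbytes, const0_rtx, false);
}
#endif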
2785 /* Write to one of the components of the complex value CPLX. Write VAL to
2786 the real part if IMAG_P is false, and the imaginary part if it's true. */
2788 static void
2789 write_complex_part (rtx cplx, rtx val, bool imag_p)
2791 enum machine_mode cmode;
2792 enum machine_mode imode;
2793 unsigned ibitsize;
2795 if (GET_CODE (cplx) == CONCAT)
2797 emit_move_insn (XEXP (cplx, imag_p), val);
2798 return;
2801 cmode = GET_MODE (cplx);
2802 imode = GET_MODE_INNER (cmode);
2803 ibitsize = GET_MODE_BITSIZE (imode);
2805 /* For MEMs simplify_gen_subreg may generate an invalid new address
2806 because, e.g., the original address is considered mode-dependent
2807 by the target, which restricts simplify_subreg from invoking
2808 adjust_address_nv. Instead of preparing fallback support for an
2809 invalid address, we call adjust_address_nv directly. */
2810 if (MEM_P (cplx))
2812 emit_move_insn (adjust_address_nv (cplx, imode,
2813 imag_p ? GET_MODE_SIZE (imode) : 0),
2814 val);
2815 return;
2818 /* If the sub-object is at least word sized, then we know that subregging
2819 will work. This special case is important, since store_bit_field
2820 wants to operate on integer modes, and there's rarely an OImode to
2821 correspond to TCmode. */
2822 if (ibitsize >= BITS_PER_WORD
2823 /* For hard regs we have exact predicates. Assume we can split
2824 the original object if it spans an even number of hard regs.
2825 This special case is important for SCmode on 64-bit platforms
2826 where the natural size of floating-point regs is 32-bit. */
2827 || (REG_P (cplx)
2828 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2829 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2831 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2832 imag_p ? GET_MODE_SIZE (imode) : 0);
2833 if (part)
2835 emit_move_insn (part, val);
2836 return;
2838 else
2839 /* simplify_gen_subreg may fail for sub-word MEMs. */
2840 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2843 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2846 /* Extract one of the components of the complex value CPLX. Extract the
2847 real part if IMAG_P is false, and the imaginary part if it's true. */
2849 static rtx
2850 read_complex_part (rtx cplx, bool imag_p)
2852 enum machine_mode cmode, imode;
2853 unsigned ibitsize;
2855 if (GET_CODE (cplx) == CONCAT)
2856 return XEXP (cplx, imag_p);
2858 cmode = GET_MODE (cplx);
2859 imode = GET_MODE_INNER (cmode);
2860 ibitsize = GET_MODE_BITSIZE (imode);
2862 /* Special case reads from complex constants that got spilled to memory. */
2863 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2865 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2866 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2868 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2869 if (CONSTANT_CLASS_P (part))
2870 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2874 /* For MEMs simplify_gen_subreg may generate an invalid new address
2875 because, e.g., the original address is considered mode-dependent
2876 by the target, which restricts simplify_subreg from invoking
2877 adjust_address_nv. Instead of preparing fallback support for an
2878 invalid address, we call adjust_address_nv directly. */
2879 if (MEM_P (cplx))
2880 return adjust_address_nv (cplx, imode,
2881 imag_p ? GET_MODE_SIZE (imode) : 0);
2883 /* If the sub-object is at least word sized, then we know that subregging
2884 will work. This special case is important, since extract_bit_field
2885 wants to operate on integer modes, and there's rarely an OImode to
2886 correspond to TCmode. */
2887 if (ibitsize >= BITS_PER_WORD
2888 /* For hard regs we have exact predicates. Assume we can split
2889 the original object if it spans an even number of hard regs.
2890 This special case is important for SCmode on 64-bit platforms
2891 where the natural size of floating-point regs is 32-bit. */
2892 || (REG_P (cplx)
2893 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2894 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2896 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2897 imag_p ? GET_MODE_SIZE (imode) : 0);
2898 if (ret)
2899 return ret;
2900 else
2901 /* simplify_gen_subreg may fail for sub-word MEMs. */
2902 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2905 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2906 true, NULL_RTX, imode, imode);
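/* Editorial sketch, not part of the original expr.c: using the two helpers
   above.  The helper name is hypothetical; CPLX is assumed to be an SCmode
   value, so its parts are SFmode.  */
#if 0
static rtx
example_complex_parts (rtx cplx)
{
  /* Zero the imaginary part, then hand back the real part.  */
  write_complex_part (cplx, CONST0_RTX (SFmode), true);
  return read_complex_part (cplx, false);
}
#endif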
2909 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2910 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2911 represented in NEW_MODE. If FORCE is true, this will never happen, as
2912 we'll force-create a SUBREG if needed. */
2914 static rtx
2915 emit_move_change_mode (enum machine_mode new_mode,
2916 enum machine_mode old_mode, rtx x, bool force)
2918 rtx ret;
2920 if (push_operand (x, GET_MODE (x)))
2922 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2923 MEM_COPY_ATTRIBUTES (ret, x);
2925 else if (MEM_P (x))
2927 /* We don't have to worry about changing the address since the
2928 size in bytes is supposed to be the same. */
2929 if (reload_in_progress)
2931 /* Copy the MEM to change the mode and move any
2932 substitutions from the old MEM to the new one. */
2933 ret = adjust_address_nv (x, new_mode, 0);
2934 copy_replacements (x, ret);
2936 else
2937 ret = adjust_address (x, new_mode, 0);
2939 else
2941 /* Note that we do want simplify_subreg's behavior of validating
2942 that the new mode is ok for a hard register. If we were to use
2943 simplify_gen_subreg, we would create the subreg, but would
2944 probably run into the target not being able to implement it. */
2945 /* Except, of course, when FORCE is true, when this is exactly what
2946 we want. Which is needed for CCmodes on some targets. */
2947 if (force)
2948 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2949 else
2950 ret = simplify_subreg (new_mode, x, old_mode, 0);
2953 return ret;
2956 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2957 an integer mode of the same size as MODE. Returns the instruction
2958 emitted, or NULL if such a move could not be generated. */
2960 static rtx
2961 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2963 enum machine_mode imode;
2964 enum insn_code code;
2966 /* There must exist a mode of the exact size we require. */
2967 imode = int_mode_for_mode (mode);
2968 if (imode == BLKmode)
2969 return NULL_RTX;
2971 /* The target must support moves in this mode. */
2972 code = optab_handler (mov_optab, imode)->insn_code;
2973 if (code == CODE_FOR_nothing)
2974 return NULL_RTX;
2976 x = emit_move_change_mode (imode, mode, x, force);
2977 if (x == NULL_RTX)
2978 return NULL_RTX;
2979 y = emit_move_change_mode (imode, mode, y, force);
2980 if (y == NULL_RTX)
2981 return NULL_RTX;
2982 return emit_insn (GEN_FCN (code) (x, y));
2985 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2986 Return an equivalent MEM that does not use an auto-increment. */
2988 static rtx
2989 emit_move_resolve_push (enum machine_mode mode, rtx x)
2991 enum rtx_code code = GET_CODE (XEXP (x, 0));
2992 HOST_WIDE_INT adjust;
2993 rtx temp;
2995 adjust = GET_MODE_SIZE (mode);
2996 #ifdef PUSH_ROUNDING
2997 adjust = PUSH_ROUNDING (adjust);
2998 #endif
2999 if (code == PRE_DEC || code == POST_DEC)
3000 adjust = -adjust;
3001 else if (code == PRE_MODIFY || code == POST_MODIFY)
3003 rtx expr = XEXP (XEXP (x, 0), 1);
3004 HOST_WIDE_INT val;
3006 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3007 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
3008 val = INTVAL (XEXP (expr, 1));
3009 if (GET_CODE (expr) == MINUS)
3010 val = -val;
3011 gcc_assert (adjust == val || adjust == -val);
3012 adjust = val;
3015 /* Do not use anti_adjust_stack, since we don't want to update
3016 stack_pointer_delta. */
3017 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3018 GEN_INT (adjust), stack_pointer_rtx,
3019 0, OPTAB_LIB_WIDEN);
3020 if (temp != stack_pointer_rtx)
3021 emit_move_insn (stack_pointer_rtx, temp);
3023 switch (code)
3025 case PRE_INC:
3026 case PRE_DEC:
3027 case PRE_MODIFY:
3028 temp = stack_pointer_rtx;
3029 break;
3030 case POST_INC:
3031 case POST_DEC:
3032 case POST_MODIFY:
3033 temp = plus_constant (stack_pointer_rtx, -adjust);
3034 break;
3035 default:
3036 gcc_unreachable ();
3039 return replace_equiv_address (x, temp);
3042 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3043 X is known to satisfy push_operand, and MODE is known to be complex.
3044 Returns the last instruction emitted. */
3047 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3049 enum machine_mode submode = GET_MODE_INNER (mode);
3050 bool imag_first;
3052 #ifdef PUSH_ROUNDING
3053 unsigned int submodesize = GET_MODE_SIZE (submode);
3055 /* In case we output to the stack, but the size is smaller than the
3056 machine can push exactly, we need to use move instructions. */
3057 if (PUSH_ROUNDING (submodesize) != submodesize)
3059 x = emit_move_resolve_push (mode, x);
3060 return emit_move_insn (x, y);
3062 #endif
3064 /* Note that the real part always precedes the imag part in memory
3065 regardless of machine's endianness. */
3066 switch (GET_CODE (XEXP (x, 0)))
3068 case PRE_DEC:
3069 case POST_DEC:
3070 imag_first = true;
3071 break;
3072 case PRE_INC:
3073 case POST_INC:
3074 imag_first = false;
3075 break;
3076 default:
3077 gcc_unreachable ();
3080 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3081 read_complex_part (y, imag_first));
3082 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3083 read_complex_part (y, !imag_first));
3086 /* A subroutine of emit_move_complex. Perform the move from Y to X
3087 via two moves of the parts. Returns the last instruction emitted. */
3090 emit_move_complex_parts (rtx x, rtx y)
3092 /* Show the output dies here. This is necessary for SUBREGs
3093 of pseudos since we cannot track their lifetimes correctly;
3094 hard regs shouldn't appear here except as return values. */
3095 if (!reload_completed && !reload_in_progress
3096 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3097 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3099 write_complex_part (x, read_complex_part (y, false), false);
3100 write_complex_part (x, read_complex_part (y, true), true);
3102 return get_last_insn ();
3105 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3106 MODE is known to be complex. Returns the last instruction emitted. */
3108 static rtx
3109 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3111 bool try_int;
3113 /* Need to take special care for pushes, to maintain proper ordering
3114 of the data, and possibly extra padding. */
3115 if (push_operand (x, mode))
3116 return emit_move_complex_push (mode, x, y);
3118 /* See if we can coerce the target into moving both values at once. */
3120 /* Move floating point as parts. */
3121 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3122 && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
3123 try_int = false;
3124 /* Not possible if the values are inherently not adjacent. */
3125 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3126 try_int = false;
3127 /* Is possible if both are registers (or subregs of registers). */
3128 else if (register_operand (x, mode) && register_operand (y, mode))
3129 try_int = true;
3130 /* If one of the operands is a memory, and alignment constraints
3131 are friendly enough, we may be able to do combined memory operations.
3132 We do not attempt this if Y is a constant because that combination is
3133 usually better with the by-parts thing below. */
3134 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3135 && (!STRICT_ALIGNMENT
3136 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3137 try_int = true;
3138 else
3139 try_int = false;
3141 if (try_int)
3143 rtx ret;
3145 /* For memory to memory moves, optimal behavior can be had with the
3146 existing block move logic. */
3147 if (MEM_P (x) && MEM_P (y))
3149 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3150 BLOCK_OP_NO_LIBCALL);
3151 return get_last_insn ();
3154 ret = emit_move_via_integer (mode, x, y, true);
3155 if (ret)
3156 return ret;
3159 return emit_move_complex_parts (x, y);
3162 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3163 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3165 static rtx
3166 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3168 rtx ret;
3170 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3171 if (mode != CCmode)
3173 enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3174 if (code != CODE_FOR_nothing)
3176 x = emit_move_change_mode (CCmode, mode, x, true);
3177 y = emit_move_change_mode (CCmode, mode, y, true);
3178 return emit_insn (GEN_FCN (code) (x, y));
3182 /* Otherwise, find the MODE_INT mode of the same width. */
3183 ret = emit_move_via_integer (mode, x, y, false);
3184 gcc_assert (ret != NULL);
3185 return ret;
3188 /* Return true if word I of OP lies entirely in the
3189 undefined bits of a paradoxical subreg. */
3191 static bool
3192 undefined_operand_subword_p (const_rtx op, int i)
3194 enum machine_mode innermode, innermostmode;
3195 int offset;
3196 if (GET_CODE (op) != SUBREG)
3197 return false;
3198 innermode = GET_MODE (op);
3199 innermostmode = GET_MODE (SUBREG_REG (op));
3200 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3201 /* The SUBREG_BYTE represents offset, as if the value were stored in
3202 memory, except for a paradoxical subreg where we define
3203 SUBREG_BYTE to be 0; undo this exception as in
3204 simplify_subreg. */
3205 if (SUBREG_BYTE (op) == 0
3206 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3208 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3209 if (WORDS_BIG_ENDIAN)
3210 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3211 if (BYTES_BIG_ENDIAN)
3212 offset += difference % UNITS_PER_WORD;
3214 if (offset >= GET_MODE_SIZE (innermostmode)
3215 || offset <= -GET_MODE_SIZE (word_mode))
3216 return true;
3217 return false;
3220 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3221 MODE is any multi-word or full-word mode that lacks a move_insn
3222 pattern. Note that you will get better code if you define such
3223 patterns, even if they must turn into multiple assembler instructions. */
3225 static rtx
3226 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3228 rtx last_insn = 0;
3229 rtx seq, inner;
3230 bool need_clobber;
3231 int i;
3233 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3235 /* If X is a push on the stack, do the push now and replace
3236 X with a reference to the stack pointer. */
3237 if (push_operand (x, mode))
3238 x = emit_move_resolve_push (mode, x);
3240 /* If we are in reload, see if either operand is a MEM whose address
3241 is scheduled for replacement. */
3242 if (reload_in_progress && MEM_P (x)
3243 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3244 x = replace_equiv_address_nv (x, inner);
3245 if (reload_in_progress && MEM_P (y)
3246 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3247 y = replace_equiv_address_nv (y, inner);
3249 start_sequence ();
3251 need_clobber = false;
3252 for (i = 0;
3253 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3254 i++)
3256 rtx xpart = operand_subword (x, i, 1, mode);
3257 rtx ypart;
3259 /* Do not generate code for a move if it would come entirely
3260 from the undefined bits of a paradoxical subreg. */
3261 if (undefined_operand_subword_p (y, i))
3262 continue;
3264 ypart = operand_subword (y, i, 1, mode);
3266 /* If we can't get a part of Y, put Y into memory if it is a
3267 constant. Otherwise, force it into a register. Then we must
3268 be able to get a part of Y. */
3269 if (ypart == 0 && CONSTANT_P (y))
3271 y = use_anchored_address (force_const_mem (mode, y));
3272 ypart = operand_subword (y, i, 1, mode);
3274 else if (ypart == 0)
3275 ypart = operand_subword_force (y, i, mode);
3277 gcc_assert (xpart && ypart);
3279 need_clobber |= (GET_CODE (xpart) == SUBREG);
3281 last_insn = emit_move_insn (xpart, ypart);
3284 seq = get_insns ();
3285 end_sequence ();
3287 /* Show the output dies here. This is necessary for SUBREGs
3288 of pseudos since we cannot track their lifetimes correctly;
3289 hard regs shouldn't appear here except as return values.
3290 We never want to emit such a clobber after reload. */
3291 if (x != y
3292 && ! (reload_in_progress || reload_completed)
3293 && need_clobber != 0)
3294 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3296 emit_insn (seq);
3298 return last_insn;
3301 /* Low level part of emit_move_insn.
3302 Called just like emit_move_insn, but assumes X and Y
3303 are basically valid. */
3306 emit_move_insn_1 (rtx x, rtx y)
3308 enum machine_mode mode = GET_MODE (x);
3309 enum insn_code code;
3311 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3313 code = optab_handler (mov_optab, mode)->insn_code;
3314 if (code != CODE_FOR_nothing)
3315 return emit_insn (GEN_FCN (code) (x, y));
3317 /* Expand complex moves by moving real part and imag part. */
3318 if (COMPLEX_MODE_P (mode))
3319 return emit_move_complex (mode, x, y);
3321 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3322 || ALL_FIXED_POINT_MODE_P (mode))
3324 rtx result = emit_move_via_integer (mode, x, y, true);
3326 /* If we can't find an integer mode, use multi words. */
3327 if (result)
3328 return result;
3329 else
3330 return emit_move_multi_word (mode, x, y);
3333 if (GET_MODE_CLASS (mode) == MODE_CC)
3334 return emit_move_ccmode (mode, x, y);
3336 /* Try using a move pattern for the corresponding integer mode. This is
3337 only safe when simplify_subreg can convert MODE constants into integer
3338 constants. At present, it can only do this reliably if the value
3339 fits within a HOST_WIDE_INT. */
3340 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3342 rtx ret = emit_move_via_integer (mode, x, y, false);
3343 if (ret)
3344 return ret;
3347 return emit_move_multi_word (mode, x, y);
3350 /* Generate code to copy Y into X.
3351 Both Y and X must have the same mode, except that
3352 Y can be a constant with VOIDmode.
3353 This mode cannot be BLKmode; use emit_block_move for that.
3355 Return the last instruction emitted. */
3358 emit_move_insn (rtx x, rtx y)
3360 enum machine_mode mode = GET_MODE (x);
3361 rtx y_cst = NULL_RTX;
3362 rtx last_insn, set;
3364 gcc_assert (mode != BLKmode
3365 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3367 if (CONSTANT_P (y))
3369 if (optimize
3370 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3371 && (last_insn = compress_float_constant (x, y)))
3372 return last_insn;
3374 y_cst = y;
3376 if (!LEGITIMATE_CONSTANT_P (y))
3378 y = force_const_mem (mode, y);
3380 /* If the target's cannot_force_const_mem prevented the spill,
3381 assume that the target's move expanders will also take care
3382 of the non-legitimate constant. */
3383 if (!y)
3384 y = y_cst;
3385 else
3386 y = use_anchored_address (y);
3390 /* If X or Y are memory references, verify that their addresses are valid
3391 for the machine. */
3392 if (MEM_P (x)
3393 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3394 && ! push_operand (x, GET_MODE (x)))
3395 || (flag_force_addr
3396 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3397 x = validize_mem (x);
3399 if (MEM_P (y)
3400 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3401 || (flag_force_addr
3402 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3403 y = validize_mem (y);
3405 gcc_assert (mode != BLKmode);
3407 last_insn = emit_move_insn_1 (x, y);
3409 if (y_cst && REG_P (x)
3410 && (set = single_set (last_insn)) != NULL_RTX
3411 && SET_DEST (set) == x
3412 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3413 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3415 return last_insn;
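/* Editorial sketch, not part of the original expr.c: the most common use of
   emit_move_insn, loading a constant into a fresh pseudo.  The helper name
   is hypothetical.  */
#if 0
static rtx
example_load_constant (void)
{
  rtx reg = gen_reg_rtx (SImode);

  emit_move_insn (reg, GEN_INT (42));
  return reg;
}
#endif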
3418 /* If Y is representable exactly in a narrower mode, and the target can
3419 perform the extension directly from constant or memory, then emit the
3420 move as an extension. */
3422 static rtx
3423 compress_float_constant (rtx x, rtx y)
3425 enum machine_mode dstmode = GET_MODE (x);
3426 enum machine_mode orig_srcmode = GET_MODE (y);
3427 enum machine_mode srcmode;
3428 REAL_VALUE_TYPE r;
3429 int oldcost, newcost;
3431 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3433 if (LEGITIMATE_CONSTANT_P (y))
3434 oldcost = rtx_cost (y, SET);
3435 else
3436 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3438 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3439 srcmode != orig_srcmode;
3440 srcmode = GET_MODE_WIDER_MODE (srcmode))
3442 enum insn_code ic;
3443 rtx trunc_y, last_insn;
3445 /* Skip if the target can't extend this way. */
3446 ic = can_extend_p (dstmode, srcmode, 0);
3447 if (ic == CODE_FOR_nothing)
3448 continue;
3450 /* Skip if the narrowed value isn't exact. */
3451 if (! exact_real_truncate (srcmode, &r))
3452 continue;
3454 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3456 if (LEGITIMATE_CONSTANT_P (trunc_y))
3458 /* Skip if the target needs extra instructions to perform
3459 the extension. */
3460 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3461 continue;
3462 /* This is valid, but may not be cheaper than the original. */
3463 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3464 if (oldcost < newcost)
3465 continue;
3467 else if (float_extend_from_mem[dstmode][srcmode])
3469 trunc_y = force_const_mem (srcmode, trunc_y);
3470 /* This is valid, but may not be cheaper than the original. */
3471 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3472 if (oldcost < newcost)
3473 continue;
3474 trunc_y = validize_mem (trunc_y);
3476 else
3477 continue;
3479 /* For CSE's benefit, force the compressed constant pool entry
3480 into a new pseudo. This constant may be used in different modes,
3481 and if not, combine will put things back together for us. */
3482 trunc_y = force_reg (srcmode, trunc_y);
3483 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3484 last_insn = get_last_insn ();
3486 if (REG_P (x))
3487 set_unique_reg_note (last_insn, REG_EQUAL, y);
3489 return last_insn;
3492 return NULL_RTX;
3495 /* Pushing data onto the stack. */
3497 /* Push a block of length SIZE (perhaps variable)
3498 and return an rtx to address the beginning of the block.
3499 The value may be virtual_outgoing_args_rtx.
3501 EXTRA is the number of bytes of padding to push in addition to SIZE.
3502 BELOW nonzero means this padding comes at low addresses;
3503 otherwise, the padding comes at high addresses. */
3506 push_block (rtx size, int extra, int below)
3508 rtx temp;
3510 size = convert_modes (Pmode, ptr_mode, size, 1);
3511 if (CONSTANT_P (size))
3512 anti_adjust_stack (plus_constant (size, extra));
3513 else if (REG_P (size) && extra == 0)
3514 anti_adjust_stack (size);
3515 else
3517 temp = copy_to_mode_reg (Pmode, size);
3518 if (extra != 0)
3519 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3520 temp, 0, OPTAB_LIB_WIDEN);
3521 anti_adjust_stack (temp);
3524 #ifndef STACK_GROWS_DOWNWARD
3525 if (0)
3526 #else
3527 if (1)
3528 #endif
3530 temp = virtual_outgoing_args_rtx;
3531 if (extra != 0 && below)
3532 temp = plus_constant (temp, extra);
3534 else
3536 if (GET_CODE (size) == CONST_INT)
3537 temp = plus_constant (virtual_outgoing_args_rtx,
3538 -INTVAL (size) - (below ? 0 : extra));
3539 else if (extra != 0 && !below)
3540 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3541 negate_rtx (Pmode, plus_constant (size, extra)));
3542 else
3543 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3544 negate_rtx (Pmode, size));
3547 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
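/* Editorial sketch, not part of the original expr.c: reserving outgoing
   stack space with push_block and wrapping the returned address in a
   BLKmode MEM.  The helper name and the 32-byte size are hypothetical.  */
#if 0
static rtx
example_push_block (void)
{
  rtx addr = push_block (GEN_INT (32), 0, 0);

  return gen_rtx_MEM (BLKmode, addr);
}
#endif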
3550 #ifdef PUSH_ROUNDING
3552 /* Emit single push insn. */
3554 static void
3555 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3557 rtx dest_addr;
3558 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3559 rtx dest;
3560 enum insn_code icode;
3561 insn_operand_predicate_fn pred;
3563 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3564 /* If there is a push pattern, use it.  Otherwise try the old way of
3565 throwing a MEM representing the push operation to the move expander. */
3566 icode = optab_handler (push_optab, mode)->insn_code;
3567 if (icode != CODE_FOR_nothing)
3569 if (((pred = insn_data[(int) icode].operand[0].predicate)
3570 && !((*pred) (x, mode))))
3571 x = force_reg (mode, x);
3572 emit_insn (GEN_FCN (icode) (x));
3573 return;
3575 if (GET_MODE_SIZE (mode) == rounded_size)
3576 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3577 /* If we are to pad downward, adjust the stack pointer first and
3578 then store X into the stack location using an offset. This is
3579 because emit_move_insn does not know how to pad; it does not have
3580 access to type. */
3581 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3583 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3584 HOST_WIDE_INT offset;
3586 emit_move_insn (stack_pointer_rtx,
3587 expand_binop (Pmode,
3588 #ifdef STACK_GROWS_DOWNWARD
3589 sub_optab,
3590 #else
3591 add_optab,
3592 #endif
3593 stack_pointer_rtx,
3594 GEN_INT (rounded_size),
3595 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3597 offset = (HOST_WIDE_INT) padding_size;
3598 #ifdef STACK_GROWS_DOWNWARD
3599 if (STACK_PUSH_CODE == POST_DEC)
3600 /* We have already decremented the stack pointer, so get the
3601 previous value. */
3602 offset += (HOST_WIDE_INT) rounded_size;
3603 #else
3604 if (STACK_PUSH_CODE == POST_INC)
3605 /* We have already incremented the stack pointer, so get the
3606 previous value. */
3607 offset -= (HOST_WIDE_INT) rounded_size;
3608 #endif
3609 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3611 else
3613 #ifdef STACK_GROWS_DOWNWARD
3614 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3615 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3616 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3617 #else
3618 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3619 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3620 GEN_INT (rounded_size));
3621 #endif
3622 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3625 dest = gen_rtx_MEM (mode, dest_addr);
3627 if (type != 0)
3629 set_mem_attributes (dest, type, 1);
3631 if (flag_optimize_sibling_calls)
3632 /* Function incoming arguments may overlap with sibling call
3633 outgoing arguments and we cannot allow reordering of reads
3634 from function arguments with stores to outgoing arguments
3635 of sibling calls. */
3636 set_mem_alias_set (dest, 0);
3638 emit_move_insn (dest, x);
3640 #endif
3642 /* Generate code to push X onto the stack, assuming it has mode MODE and
3643 type TYPE.
3644 MODE is redundant except when X is a CONST_INT (since they don't
3645 carry mode info).
3646 SIZE is an rtx for the size of data to be copied (in bytes),
3647 needed only if X is BLKmode.
3649 ALIGN (in bits) is maximum alignment we can assume.
3651 If PARTIAL and REG are both nonzero, then copy that many of the first
3652 bytes of X into registers starting with REG, and push the rest of X.
3653 The amount of space pushed is decreased by PARTIAL bytes.
3654 REG must be a hard register in this case.
3655 If REG is zero but PARTIAL is not, take all other actions for an
3656 argument partially in registers, but do not actually load any
3657 registers.
3659 EXTRA is the amount in bytes of extra space to leave next to this arg.
3660 This is ignored if an argument block has already been allocated.
3662 On a machine that lacks real push insns, ARGS_ADDR is the address of
3663 the bottom of the argument block for this call. We use indexing off there
3664 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3665 argument block has not been preallocated.
3667 ARGS_SO_FAR is the size of args previously pushed for this call.
3669 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3670 for arguments passed in registers. If nonzero, it will be the number
3671 of bytes required. */
3673 void
3674 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3675 unsigned int align, int partial, rtx reg, int extra,
3676 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3677 rtx alignment_pad)
3679 rtx xinner;
3680 enum direction stack_direction
3681 #ifdef STACK_GROWS_DOWNWARD
3682 = downward;
3683 #else
3684 = upward;
3685 #endif
3687 /* Decide where to pad the argument: `downward' for below,
3688 `upward' for above, or `none' for don't pad it.
3689 Default is below for small data on big-endian machines; else above. */
3690 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3692 /* Invert direction if stack is post-decrement.
3693 FIXME: why? */
3694 if (STACK_PUSH_CODE == POST_DEC)
3695 if (where_pad != none)
3696 where_pad = (where_pad == downward ? upward : downward);
3698 xinner = x;
3700 if (mode == BLKmode
3701 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3703 /* Copy a block into the stack, entirely or partially. */
3705 rtx temp;
3706 int used;
3707 int offset;
3708 int skip;
3710 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3711 used = partial - offset;
3713 if (mode != BLKmode)
3715 /* A value is to be stored in an insufficiently aligned
3716 stack slot; copy via a suitably aligned slot if
3717 necessary. */
3718 size = GEN_INT (GET_MODE_SIZE (mode));
3719 if (!MEM_P (xinner))
3721 temp = assign_temp (type, 0, 1, 1);
3722 emit_move_insn (temp, xinner);
3723 xinner = temp;
3727 gcc_assert (size);
3729 /* USED is now the # of bytes we need not copy to the stack
3730 because registers will take care of them. */
3732 if (partial != 0)
3733 xinner = adjust_address (xinner, BLKmode, used);
3735 /* If the partial register-part of the arg counts in its stack size,
3736 skip the part of stack space corresponding to the registers.
3737 Otherwise, start copying to the beginning of the stack space,
3738 by setting SKIP to 0. */
3739 skip = (reg_parm_stack_space == 0) ? 0 : used;
3741 #ifdef PUSH_ROUNDING
3742 /* Do it with several push insns if that doesn't take lots of insns
3743 and if there is no difficulty with push insns that skip bytes
3744 on the stack for alignment purposes. */
3745 if (args_addr == 0
3746 && PUSH_ARGS
3747 && GET_CODE (size) == CONST_INT
3748 && skip == 0
3749 && MEM_ALIGN (xinner) >= align
3750 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3751 /* Here we avoid the case of a structure whose weak alignment
3752 forces many pushes of a small amount of data,
3753 and such small pushes do rounding that causes trouble. */
3754 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3755 || align >= BIGGEST_ALIGNMENT
3756 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3757 == (align / BITS_PER_UNIT)))
3758 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3760 /* Push padding now if padding above and stack grows down,
3761 or if padding below and stack grows up.
3762 But if space already allocated, this has already been done. */
3763 if (extra && args_addr == 0
3764 && where_pad != none && where_pad != stack_direction)
3765 anti_adjust_stack (GEN_INT (extra));
3767 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3769 else
3770 #endif /* PUSH_ROUNDING */
3772 rtx target;
3774 /* Otherwise make space on the stack and copy the data
3775 to the address of that space. */
3777 /* Deduct words put into registers from the size we must copy. */
3778 if (partial != 0)
3780 if (GET_CODE (size) == CONST_INT)
3781 size = GEN_INT (INTVAL (size) - used);
3782 else
3783 size = expand_binop (GET_MODE (size), sub_optab, size,
3784 GEN_INT (used), NULL_RTX, 0,
3785 OPTAB_LIB_WIDEN);
3788 /* Get the address of the stack space.
3789 In this case, we do not deal with EXTRA separately.
3790 A single stack adjust will do. */
3791 if (! args_addr)
3793 temp = push_block (size, extra, where_pad == downward);
3794 extra = 0;
3796 else if (GET_CODE (args_so_far) == CONST_INT)
3797 temp = memory_address (BLKmode,
3798 plus_constant (args_addr,
3799 skip + INTVAL (args_so_far)));
3800 else
3801 temp = memory_address (BLKmode,
3802 plus_constant (gen_rtx_PLUS (Pmode,
3803 args_addr,
3804 args_so_far),
3805 skip));
3807 if (!ACCUMULATE_OUTGOING_ARGS)
3809 /* If the source is referenced relative to the stack pointer,
3810 copy it to another register to stabilize it. We do not need
3811 to do this if we know that we won't be changing sp. */
3813 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3814 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3815 temp = copy_to_reg (temp);
3818 target = gen_rtx_MEM (BLKmode, temp);
3820 /* We do *not* set_mem_attributes here, because incoming arguments
3821 may overlap with sibling call outgoing arguments and we cannot
3822 allow reordering of reads from function arguments with stores
3823 to outgoing arguments of sibling calls. We do, however, want
3824 to record the alignment of the stack slot. */
3825 /* ALIGN may well be better aligned than TYPE, e.g. due to
3826 PARM_BOUNDARY. Assume the caller isn't lying. */
3827 set_mem_align (target, align);
3829 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3832 else if (partial > 0)
3834 /* Scalar partly in registers. */
3836 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3837 int i;
3838 int not_stack;
3839 /* # bytes of start of argument
3840 that we must make space for but need not store. */
3841 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3842 int args_offset = INTVAL (args_so_far);
3843 int skip;
3845 /* Push padding now if padding above and stack grows down,
3846 or if padding below and stack grows up.
3847 But if space already allocated, this has already been done. */
3848 if (extra && args_addr == 0
3849 && where_pad != none && where_pad != stack_direction)
3850 anti_adjust_stack (GEN_INT (extra));
3852 /* If we make space by pushing it, we might as well push
3853 the real data. Otherwise, we can leave OFFSET nonzero
3854 and leave the space uninitialized. */
3855 if (args_addr == 0)
3856 offset = 0;
3858 /* Now NOT_STACK gets the number of words that we don't need to
3859 allocate on the stack. Convert OFFSET to words too. */
3860 not_stack = (partial - offset) / UNITS_PER_WORD;
3861 offset /= UNITS_PER_WORD;
3863 /* If the partial register-part of the arg counts in its stack size,
3864 skip the part of stack space corresponding to the registers.
3865 Otherwise, start copying to the beginning of the stack space,
3866 by setting SKIP to 0. */
3867 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3869 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3870 x = validize_mem (force_const_mem (mode, x));
3872 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3873 SUBREGs of such registers are not allowed. */
3874 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3875 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3876 x = copy_to_reg (x);
3878 /* Loop over all the words allocated on the stack for this arg. */
3879 /* We can do it by words, because any scalar bigger than a word
3880 has a size that is a multiple of a word. */
3881 #ifndef PUSH_ARGS_REVERSED
3882 for (i = not_stack; i < size; i++)
3883 #else
3884 for (i = size - 1; i >= not_stack; i--)
3885 #endif
3886 if (i >= not_stack + offset)
3887 emit_push_insn (operand_subword_force (x, i, mode),
3888 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3889 0, args_addr,
3890 GEN_INT (args_offset + ((i - not_stack + skip)
3891 * UNITS_PER_WORD)),
3892 reg_parm_stack_space, alignment_pad);
3894 else
3896 rtx addr;
3897 rtx dest;
3899 /* Push padding now if padding above and stack grows down,
3900 or if padding below and stack grows up.
3901 But if space already allocated, this has already been done. */
3902 if (extra && args_addr == 0
3903 && where_pad != none && where_pad != stack_direction)
3904 anti_adjust_stack (GEN_INT (extra));
3906 #ifdef PUSH_ROUNDING
3907 if (args_addr == 0 && PUSH_ARGS)
3908 emit_single_push_insn (mode, x, type);
3909 else
3910 #endif
3912 if (GET_CODE (args_so_far) == CONST_INT)
3913 addr
3914 = memory_address (mode,
3915 plus_constant (args_addr,
3916 INTVAL (args_so_far)));
3917 else
3918 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3919 args_so_far));
3920 dest = gen_rtx_MEM (mode, addr);
3922 /* We do *not* set_mem_attributes here, because incoming arguments
3923 may overlap with sibling call outgoing arguments and we cannot
3924 allow reordering of reads from function arguments with stores
3925 to outgoing arguments of sibling calls. We do, however, want
3926 to record the alignment of the stack slot. */
3927 /* ALIGN may well be better aligned than TYPE, e.g. due to
3928 PARM_BOUNDARY. Assume the caller isn't lying. */
3929 set_mem_align (dest, align);
3931 emit_move_insn (dest, x);
3935 /* If part should go in registers, copy that part
3936 into the appropriate registers. Do this now, at the end,
3937 since mem-to-mem copies above may do function calls. */
3938 if (partial > 0 && reg != 0)
3940 /* Handle calls that pass values in multiple non-contiguous locations.
3941 The Irix 6 ABI has examples of this. */
3942 if (GET_CODE (reg) == PARALLEL)
3943 emit_group_load (reg, x, type, -1);
3944 else
3946 gcc_assert (partial % UNITS_PER_WORD == 0);
3947 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3951 if (extra && args_addr == 0 && where_pad == stack_direction)
3952 anti_adjust_stack (GEN_INT (extra));
3954 if (alignment_pad && args_addr == 0)
3955 anti_adjust_stack (alignment_pad);
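/* Worked example (illustrative, not part of expr.c) of the scalar
   partly-in-registers bookkeeping above, assuming UNITS_PER_WORD == 4
   and PARM_BOUNDARY == 32.  For an argument with partial == 8 bytes
   passed in registers:

     offset    = partial % (PARM_BOUNDARY / BITS_PER_UNIT) = 8 % 4 = 0
     not_stack = (partial - offset) / UNITS_PER_WORD       = 8 / 4 = 2

   so the first two words of the argument are never stored, and the
   word loop pushes only the remaining words, starting at word index 2.
   SKIP is also 2 when REG_PARM_STACK_SPACE is nonzero, so the stack
   slots corresponding to the register words are skipped as well.  */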
3958 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3959 operations. */
3961 static rtx
3962 get_subtarget (rtx x)
3964 return (optimize
3965 || x == 0
3966 /* Only registers can be subtargets. */
3967 || !REG_P (x)
3968 /* Don't use hard regs to avoid extending their life. */
3969 || REGNO (x) < FIRST_PSEUDO_REGISTER
3970 ? 0 : x);
3973 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3974 FIELD is a bitfield. Returns true if the optimization was successful,
3975 and there's nothing else to do. */
3977 static bool
3978 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3979 unsigned HOST_WIDE_INT bitpos,
3980 enum machine_mode mode1, rtx str_rtx,
3981 tree to, tree src)
3983 enum machine_mode str_mode = GET_MODE (str_rtx);
3984 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3985 tree op0, op1;
3986 rtx value, result;
3987 optab binop;
3989 if (mode1 != VOIDmode
3990 || bitsize >= BITS_PER_WORD
3991 || str_bitsize > BITS_PER_WORD
3992 || TREE_SIDE_EFFECTS (to)
3993 || TREE_THIS_VOLATILE (to))
3994 return false;
3996 STRIP_NOPS (src);
3997 if (!BINARY_CLASS_P (src)
3998 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3999 return false;
4001 op0 = TREE_OPERAND (src, 0);
4002 op1 = TREE_OPERAND (src, 1);
4003 STRIP_NOPS (op0);
4005 if (!operand_equal_p (to, op0, 0))
4006 return false;
4008 if (MEM_P (str_rtx))
4010 unsigned HOST_WIDE_INT offset1;
4012 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4013 str_mode = word_mode;
4014 str_mode = get_best_mode (bitsize, bitpos,
4015 MEM_ALIGN (str_rtx), str_mode, 0);
4016 if (str_mode == VOIDmode)
4017 return false;
4018 str_bitsize = GET_MODE_BITSIZE (str_mode);
4020 offset1 = bitpos;
4021 bitpos %= str_bitsize;
4022 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4023 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4025 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4026 return false;
4028 /* If the bit field covers the whole REG/MEM, store_field
4029 will likely generate better code. */
4030 if (bitsize >= str_bitsize)
4031 return false;
4033 /* We can't handle fields split across multiple entities. */
4034 if (bitpos + bitsize > str_bitsize)
4035 return false;
4037 if (BYTES_BIG_ENDIAN)
4038 bitpos = str_bitsize - bitpos - bitsize;
4040 switch (TREE_CODE (src))
4042 case PLUS_EXPR:
4043 case MINUS_EXPR:
4044 /* For now, just optimize the case of the topmost bitfield
4045 where we don't need to do any masking, and also
4046 1-bit bitfields where xor can be used.
4047 We might win by one instruction for the other bitfields
4048 too if insv/extv instructions aren't used, so that
4049 can be added later. */
4050 if (bitpos + bitsize != str_bitsize
4051 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4052 break;
4054 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4055 value = convert_modes (str_mode,
4056 TYPE_MODE (TREE_TYPE (op1)), value,
4057 TYPE_UNSIGNED (TREE_TYPE (op1)));
4059 /* We may be accessing data outside the field, which means
4060 we can alias adjacent data. */
4061 if (MEM_P (str_rtx))
4063 str_rtx = shallow_copy_rtx (str_rtx);
4064 set_mem_alias_set (str_rtx, 0);
4065 set_mem_expr (str_rtx, 0);
4068 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4069 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4071 value = expand_and (str_mode, value, const1_rtx, NULL);
4072 binop = xor_optab;
4074 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4075 build_int_cst (NULL_TREE, bitpos),
4076 NULL_RTX, 1);
4077 result = expand_binop (str_mode, binop, str_rtx,
4078 value, str_rtx, 1, OPTAB_WIDEN);
4079 if (result != str_rtx)
4080 emit_move_insn (str_rtx, result);
4081 return true;
4083 case BIT_IOR_EXPR:
4084 case BIT_XOR_EXPR:
4085 if (TREE_CODE (op1) != INTEGER_CST)
4086 break;
4087 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4088 value = convert_modes (GET_MODE (str_rtx),
4089 TYPE_MODE (TREE_TYPE (op1)), value,
4090 TYPE_UNSIGNED (TREE_TYPE (op1)));
4092 /* We may be accessing data outside the field, which means
4093 we can alias adjacent data. */
4094 if (MEM_P (str_rtx))
4096 str_rtx = shallow_copy_rtx (str_rtx);
4097 set_mem_alias_set (str_rtx, 0);
4098 set_mem_expr (str_rtx, 0);
4101 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4102 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4104 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4105 - 1);
4106 value = expand_and (GET_MODE (str_rtx), value, mask,
4107 NULL_RTX);
4109 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4110 build_int_cst (NULL_TREE, bitpos),
4111 NULL_RTX, 1);
4112 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4113 value, str_rtx, 1, OPTAB_WIDEN);
4114 if (result != str_rtx)
4115 emit_move_insn (str_rtx, result);
4116 return true;
4118 default:
4119 break;
4122 return false;
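/* Illustrative, self-contained example (not part of expr.c) of the
   kind of source-level stores the routine above targets.  The field
   layout and whether the optimization actually fires are
   target-dependent; only the shape of the assignments matters here.  */

struct example_bits
{
  unsigned low : 5;
  unsigned top : 27;   /* topmost field: fills the rest of the word */
};

void
example_bitfield_updates (struct example_bits *p)
{
  p->top += 2;   /* topmost bitfield: a plain add, no masking needed */
  p->low ^= 1;   /* constant xor on another field: the constant is
                    masked to 5 bits and shifted into place */
}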
4126 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4127 is true, try generating a nontemporal store. */
4129 void
4130 expand_assignment (tree to, tree from, bool nontemporal)
4132 rtx to_rtx = 0;
4133 rtx result;
4135 /* Don't crash if the lhs of the assignment was erroneous. */
4136 if (TREE_CODE (to) == ERROR_MARK)
4138 result = expand_normal (from);
4139 return;
4142 /* Optimize away no-op moves without side-effects. */
4143 if (operand_equal_p (to, from, 0))
4144 return;
4146 /* Assignment of a structure component needs special treatment
4147 if the structure component's rtx is not simply a MEM.
4148 Assignment of an array element at a constant index, and assignment of
4149 an array element in an unaligned packed structure field, has the same
4150 problem. */
4151 if (handled_component_p (to)
4152 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4154 enum machine_mode mode1;
4155 HOST_WIDE_INT bitsize, bitpos;
4156 tree offset;
4157 int unsignedp;
4158 int volatilep = 0;
4159 tree tem;
4161 push_temp_slots ();
4162 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4163 &unsignedp, &volatilep, true);
4165 /* If we are going to use store_bit_field and extract_bit_field,
4166 make sure to_rtx will be safe for multiple use. */
4168 to_rtx = expand_normal (tem);
4170 if (offset != 0)
4172 rtx offset_rtx;
4174 if (!MEM_P (to_rtx))
4176 /* We can get constant negative offsets into arrays with broken
4177 user code. Translate this to a trap instead of ICEing. */
4178 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4179 expand_builtin_trap ();
4180 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4183 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4184 #ifdef POINTERS_EXTEND_UNSIGNED
4185 if (GET_MODE (offset_rtx) != Pmode)
4186 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4187 #else
4188 if (GET_MODE (offset_rtx) != ptr_mode)
4189 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4190 #endif
4192 /* A constant address in TO_RTX can have VOIDmode, we must not try
4193 to call force_reg for that case. Avoid that case. */
4194 if (MEM_P (to_rtx)
4195 && GET_MODE (to_rtx) == BLKmode
4196 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4197 && bitsize > 0
4198 && (bitpos % bitsize) == 0
4199 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4200 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4202 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4203 bitpos = 0;
4206 to_rtx = offset_address (to_rtx, offset_rtx,
4207 highest_pow2_factor_for_target (to,
4208 offset));
4211 /* Handle expand_expr of a complex value returning a CONCAT. */
4212 if (GET_CODE (to_rtx) == CONCAT)
4214 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4216 gcc_assert (bitpos == 0);
4217 result = store_expr (from, to_rtx, false, nontemporal);
4219 else
4221 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4222 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4223 nontemporal);
4226 else
4228 if (MEM_P (to_rtx))
4230 /* If the field is at offset zero, we could have been given the
4231 DECL_RTX of the parent struct. Don't munge it. */
4232 to_rtx = shallow_copy_rtx (to_rtx);
4234 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4236 /* Deal with volatile and readonly fields. The former is only
4237 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4238 if (volatilep)
4239 MEM_VOLATILE_P (to_rtx) = 1;
4240 if (component_uses_parent_alias_set (to))
4241 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4244 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4245 to_rtx, to, from))
4246 result = NULL;
4247 else
4248 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4249 TREE_TYPE (tem), get_alias_set (to),
4250 nontemporal);
4253 if (result)
4254 preserve_temp_slots (result);
4255 free_temp_slots ();
4256 pop_temp_slots ();
4257 return;
4260 /* If the rhs is a function call and its value is not an aggregate,
4261 call the function before we start to compute the lhs.
4262 This is needed for correct code for cases such as
4263 val = setjmp (buf) on machines where reference to val
4264 requires loading up part of an address in a separate insn.
4266 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4267 since it might be a promoted variable where the zero- or sign- extension
4268 needs to be done. Handling this in the normal way is safe because no
4269 computation is done before the call. */
4270 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4271 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4272 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4273 && REG_P (DECL_RTL (to))))
4275 rtx value;
4277 push_temp_slots ();
4278 value = expand_normal (from);
4279 if (to_rtx == 0)
4280 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4282 /* Handle calls that return values in multiple non-contiguous locations.
4283 The Irix 6 ABI has examples of this. */
4284 if (GET_CODE (to_rtx) == PARALLEL)
4285 emit_group_load (to_rtx, value, TREE_TYPE (from),
4286 int_size_in_bytes (TREE_TYPE (from)));
4287 else if (GET_MODE (to_rtx) == BLKmode)
4288 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4289 else
4291 if (POINTER_TYPE_P (TREE_TYPE (to)))
4292 value = convert_memory_address (GET_MODE (to_rtx), value);
4293 emit_move_insn (to_rtx, value);
4295 preserve_temp_slots (to_rtx);
4296 free_temp_slots ();
4297 pop_temp_slots ();
4298 return;
4301 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4302 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4304 if (to_rtx == 0)
4305 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4307 /* Don't move directly into a return register. */
4308 if (TREE_CODE (to) == RESULT_DECL
4309 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4311 rtx temp;
4313 push_temp_slots ();
4314 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4316 if (GET_CODE (to_rtx) == PARALLEL)
4317 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4318 int_size_in_bytes (TREE_TYPE (from)));
4319 else
4320 emit_move_insn (to_rtx, temp);
4322 preserve_temp_slots (to_rtx);
4323 free_temp_slots ();
4324 pop_temp_slots ();
4325 return;
4328 /* In case we are returning the contents of an object which overlaps
4329 the place the value is being stored, use a safe function when copying
4330 a value through a pointer into a structure value return block. */
4331 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4332 && current_function_returns_struct
4333 && !current_function_returns_pcc_struct)
4335 rtx from_rtx, size;
4337 push_temp_slots ();
4338 size = expr_size (from);
4339 from_rtx = expand_normal (from);
4341 emit_library_call (memmove_libfunc, LCT_NORMAL,
4342 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4343 XEXP (from_rtx, 0), Pmode,
4344 convert_to_mode (TYPE_MODE (sizetype),
4345 size, TYPE_UNSIGNED (sizetype)),
4346 TYPE_MODE (sizetype));
4348 preserve_temp_slots (to_rtx);
4349 free_temp_slots ();
4350 pop_temp_slots ();
4351 return;
4354 /* Compute FROM and store the value in the rtx we got. */
4356 push_temp_slots ();
4357 result = store_expr (from, to_rtx, 0, nontemporal);
4358 preserve_temp_slots (result);
4359 free_temp_slots ();
4360 pop_temp_slots ();
4361 return;
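/* Illustrative example (not part of expr.c) of the component-reference
   path above.  For

     struct s { unsigned a : 5; unsigned b : 11; } x;
     x.b = 3;

   handled_component_p (to) holds, and on a typical little-endian
   target get_inner_reference reports bitsize == 11, bitpos == 5 and
   mode1 == VOIDmode (a bit-field); the store is then performed by
   optimize_bitfield_assignment_op when that helper recognizes the
   right-hand side, and otherwise by store_field.  */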
4364 /* Emits a nontemporal store insn that moves FROM to TO. Returns true if this
4365 succeeded, false otherwise. */
4367 static bool
4368 emit_storent_insn (rtx to, rtx from)
4370 enum machine_mode mode = GET_MODE (to), imode;
4371 enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4372 rtx pattern;
4374 if (code == CODE_FOR_nothing)
4375 return false;
4377 imode = insn_data[code].operand[0].mode;
4378 if (!insn_data[code].operand[0].predicate (to, imode))
4379 return false;
4381 imode = insn_data[code].operand[1].mode;
4382 if (!insn_data[code].operand[1].predicate (from, imode))
4384 from = copy_to_mode_reg (imode, from);
4385 if (!insn_data[code].operand[1].predicate (from, imode))
4386 return false;
4389 pattern = GEN_FCN (code) (to, from);
4390 if (pattern == NULL_RTX)
4391 return false;
4393 emit_insn (pattern);
4394 return true;
4397 /* Generate code for computing expression EXP,
4398 and storing the value into TARGET.
4400 If the mode is BLKmode then we may return TARGET itself.
4401 It turns out that in BLKmode it doesn't cause a problem,
4402 because C has no operators that could combine two different
4403 assignments into the same BLKmode object with different values
4404 with no sequence point. Will other languages need this to
4405 be more thorough?
4407 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4408 stack, and block moves may need to be treated specially.
4410 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4412 rtx
4413 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4415 rtx temp;
4416 rtx alt_rtl = NULL_RTX;
4417 int dont_return_target = 0;
4419 if (VOID_TYPE_P (TREE_TYPE (exp)))
4421 /* C++ can generate ?: expressions with a throw expression in one
4422 branch and an rvalue in the other. Here, we resolve attempts to
4423 store the throw expression's nonexistent result. */
4424 gcc_assert (!call_param_p);
4425 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4426 return NULL_RTX;
4428 if (TREE_CODE (exp) == COMPOUND_EXPR)
4430 /* Perform first part of compound expression, then assign from second
4431 part. */
4432 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4433 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4434 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4435 nontemporal);
4437 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4439 /* For conditional expression, get safe form of the target. Then
4440 test the condition, doing the appropriate assignment on either
4441 side. This avoids the creation of unnecessary temporaries.
4442 For non-BLKmode, it is more efficient not to do this. */
4444 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4446 do_pending_stack_adjust ();
4447 NO_DEFER_POP;
4448 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4449 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4450 nontemporal);
4451 emit_jump_insn (gen_jump (lab2));
4452 emit_barrier ();
4453 emit_label (lab1);
4454 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4455 nontemporal);
4456 emit_label (lab2);
4457 OK_DEFER_POP;
4459 return NULL_RTX;
4461 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4462 /* If this is a scalar in a register that is stored in a wider mode
4463 than the declared mode, compute the result into its declared mode
4464 and then convert to the wider mode. Our value is the computed
4465 expression. */
4467 rtx inner_target = 0;
4469 /* We can do the conversion inside EXP, which will often result
4470 in some optimizations. Do the conversion in two steps: first
4471 change the signedness, if needed, then the extend. But don't
4472 do this if the type of EXP is a subtype of something else
4473 since then the conversion might involve more than just
4474 converting modes. */
4475 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4476 && TREE_TYPE (TREE_TYPE (exp)) == 0
4477 && (!lang_hooks.reduce_bit_field_operations
4478 || (GET_MODE_PRECISION (GET_MODE (target))
4479 == TYPE_PRECISION (TREE_TYPE (exp)))))
4481 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4482 != SUBREG_PROMOTED_UNSIGNED_P (target))
4484 /* Some types, e.g. Fortran's logical*4, won't have a signed
4485 version, so use the mode instead. */
4486 tree ntype
4487 = (signed_or_unsigned_type_for
4488 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4489 if (ntype == NULL)
4490 ntype = lang_hooks.types.type_for_mode
4491 (TYPE_MODE (TREE_TYPE (exp)),
4492 SUBREG_PROMOTED_UNSIGNED_P (target));
4494 exp = fold_convert (ntype, exp);
4497 exp = fold_convert (lang_hooks.types.type_for_mode
4498 (GET_MODE (SUBREG_REG (target)),
4499 SUBREG_PROMOTED_UNSIGNED_P (target)),
4500 exp);
4502 inner_target = SUBREG_REG (target);
4505 temp = expand_expr (exp, inner_target, VOIDmode,
4506 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4508 /* If TEMP is a VOIDmode constant, use convert_modes to make
4509 sure that we properly convert it. */
4510 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4512 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4513 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4514 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4515 GET_MODE (target), temp,
4516 SUBREG_PROMOTED_UNSIGNED_P (target));
4519 convert_move (SUBREG_REG (target), temp,
4520 SUBREG_PROMOTED_UNSIGNED_P (target));
4522 return NULL_RTX;
4524 else if (TREE_CODE (exp) == STRING_CST
4525 && !nontemporal && !call_param_p
4526 && TREE_STRING_LENGTH (exp) > 0
4527 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4529 /* Optimize initialization of an array with a STRING_CST. */
4530 HOST_WIDE_INT exp_len, str_copy_len;
4531 rtx dest_mem;
4533 exp_len = int_expr_size (exp);
4534 if (exp_len <= 0)
4535 goto normal_expr;
4537 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4538 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4539 goto normal_expr;
4541 str_copy_len = TREE_STRING_LENGTH (exp);
4542 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4544 str_copy_len += STORE_MAX_PIECES - 1;
4545 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4547 str_copy_len = MIN (str_copy_len, exp_len);
4548 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4549 (void *) TREE_STRING_POINTER (exp),
4550 MEM_ALIGN (target), false))
4551 goto normal_expr;
4553 dest_mem = target;
4555 dest_mem = store_by_pieces (dest_mem,
4556 str_copy_len, builtin_strncpy_read_str,
4557 (void *) TREE_STRING_POINTER (exp),
4558 MEM_ALIGN (target), false,
4559 exp_len > str_copy_len ? 1 : 0);
4560 if (exp_len > str_copy_len)
4561 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4562 GEN_INT (exp_len - str_copy_len),
4563 BLOCK_OP_NORMAL);
4564 return NULL_RTX;
4566 else
4568 rtx tmp_target;
4570 normal_expr:
4571 /* If we want to use a nontemporal store, force the value to
4572 register first. */
4573 tmp_target = nontemporal ? NULL_RTX : target;
4574 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4575 (call_param_p
4576 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4577 &alt_rtl);
4578 /* Return TARGET if it's a specified hardware register.
4579 If TARGET is a volatile mem ref, either return TARGET
4580 or return a reg copied *from* TARGET; ANSI requires this.
4582 Otherwise, if TEMP is not TARGET, return TEMP
4583 if it is constant (for efficiency),
4584 or if we really want the correct value. */
4585 if (!(target && REG_P (target)
4586 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4587 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4588 && ! rtx_equal_p (temp, target)
4589 && CONSTANT_P (temp))
4590 dont_return_target = 1;
4593 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4594 the same as that of TARGET, adjust the constant. This is needed, for
4595 example, in case it is a CONST_DOUBLE and we want only a word-sized
4596 value. */
4597 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4598 && TREE_CODE (exp) != ERROR_MARK
4599 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4600 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4601 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4603 /* If value was not generated in the target, store it there.
4604 Convert the value to TARGET's type first if necessary and emit the
4605 pending incrementations that have been queued when expanding EXP.
4606 Note that we cannot emit the whole queue blindly because this will
4607 effectively disable the POST_INC optimization later.
4609 If TEMP and TARGET compare equal according to rtx_equal_p, but
4610 one or both of them are volatile memory refs, we have to distinguish
4611 two cases:
4612 - expand_expr has used TARGET. In this case, we must not generate
4613 another copy. This can be detected by TARGET being equal according
4614 to == .
4615 - expand_expr has not used TARGET - that means that the source just
4616 happens to have the same RTX form. Since temp will have been created
4617 by expand_expr, it will compare unequal according to == .
4618 We must generate a copy in this case, to reach the correct number
4619 of volatile memory references. */
4621 if ((! rtx_equal_p (temp, target)
4622 || (temp != target && (side_effects_p (temp)
4623 || side_effects_p (target))))
4624 && TREE_CODE (exp) != ERROR_MARK
4625 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4626 but TARGET is not valid memory reference, TEMP will differ
4627 from TARGET although it is really the same location. */
4628 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4629 /* If there's nothing to copy, don't bother. Don't call
4630 expr_size unless necessary, because some front-ends (C++)
4631 expr_size-hook must not be given objects that are not
4632 supposed to be bit-copied or bit-initialized. */
4633 && expr_size (exp) != const0_rtx)
4635 if (GET_MODE (temp) != GET_MODE (target)
4636 && GET_MODE (temp) != VOIDmode)
4638 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4639 if (dont_return_target)
4641 /* In this case, we will return TEMP,
4642 so make sure it has the proper mode.
4643 But don't forget to store the value into TARGET. */
4644 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4645 emit_move_insn (target, temp);
4647 else if (GET_MODE (target) == BLKmode)
4648 emit_block_move (target, temp, expr_size (exp),
4649 (call_param_p
4650 ? BLOCK_OP_CALL_PARM
4651 : BLOCK_OP_NORMAL));
4652 else
4653 convert_move (target, temp, unsignedp);
4656 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4658 /* Handle copying a string constant into an array. The string
4659 constant may be shorter than the array. So copy just the string's
4660 actual length, and clear the rest. First get the size of the data
4661 type of the string, which is actually the size of the target. */
4662 rtx size = expr_size (exp);
4664 if (GET_CODE (size) == CONST_INT
4665 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4666 emit_block_move (target, temp, size,
4667 (call_param_p
4668 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4669 else
4671 /* Compute the size of the data to copy from the string. */
4672 tree copy_size
4673 = size_binop (MIN_EXPR,
4674 make_tree (sizetype, size),
4675 size_int (TREE_STRING_LENGTH (exp)));
4676 rtx copy_size_rtx
4677 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4678 (call_param_p
4679 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4680 rtx label = 0;
4682 /* Copy that much. */
4683 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4684 TYPE_UNSIGNED (sizetype));
4685 emit_block_move (target, temp, copy_size_rtx,
4686 (call_param_p
4687 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4689 /* Figure out how much is left in TARGET that we have to clear.
4690 Do all calculations in ptr_mode. */
4691 if (GET_CODE (copy_size_rtx) == CONST_INT)
4693 size = plus_constant (size, -INTVAL (copy_size_rtx));
4694 target = adjust_address (target, BLKmode,
4695 INTVAL (copy_size_rtx));
4697 else
4699 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4700 copy_size_rtx, NULL_RTX, 0,
4701 OPTAB_LIB_WIDEN);
4703 #ifdef POINTERS_EXTEND_UNSIGNED
4704 if (GET_MODE (copy_size_rtx) != Pmode)
4705 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4706 TYPE_UNSIGNED (sizetype));
4707 #endif
4709 target = offset_address (target, copy_size_rtx,
4710 highest_pow2_factor (copy_size));
4711 label = gen_label_rtx ();
4712 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4713 GET_MODE (size), 0, label);
4716 if (size != const0_rtx)
4717 clear_storage (target, size, BLOCK_OP_NORMAL);
4719 if (label)
4720 emit_label (label);
4723 /* Handle calls that return values in multiple non-contiguous locations.
4724 The Irix 6 ABI has examples of this. */
4725 else if (GET_CODE (target) == PARALLEL)
4726 emit_group_load (target, temp, TREE_TYPE (exp),
4727 int_size_in_bytes (TREE_TYPE (exp)));
4728 else if (GET_MODE (temp) == BLKmode)
4729 emit_block_move (target, temp, expr_size (exp),
4730 (call_param_p
4731 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4732 else if (nontemporal
4733 && emit_storent_insn (target, temp))
4734 /* If we managed to emit a nontemporal store, there is nothing else to
4735 do. */
4737 else
4739 temp = force_operand (temp, target);
4740 if (temp != target)
4741 emit_move_insn (target, temp);
4745 return NULL_RTX;
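/* Illustrative example (not part of expr.c) of the STRING_CST fast
   path above, for an automatic array larger than its initializer: the
   literal's bytes are copied by pieces (the count possibly rounded up
   to a STORE_MAX_PIECES boundary) and the remaining bytes of the array
   are cleared instead of being copied from anywhere.  */

void
example_string_init (void)
{
  char buf[64] = "ok";   /* copy the short literal, clear the tail */
  (void) buf;
}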
4748 /* Helper for categorize_ctor_elements. Identical interface. */
4750 static bool
4751 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4752 HOST_WIDE_INT *p_elt_count,
4753 bool *p_must_clear)
4755 unsigned HOST_WIDE_INT idx;
4756 HOST_WIDE_INT nz_elts, elt_count;
4757 tree value, purpose;
4759 /* Whether CTOR is a valid constant initializer, in accordance with what
4760 initializer_constant_valid_p does. If inferred from the constructor
4761 elements, true until proven otherwise. */
4762 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4763 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4765 nz_elts = 0;
4766 elt_count = 0;
4768 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4770 HOST_WIDE_INT mult;
4772 mult = 1;
4773 if (TREE_CODE (purpose) == RANGE_EXPR)
4775 tree lo_index = TREE_OPERAND (purpose, 0);
4776 tree hi_index = TREE_OPERAND (purpose, 1);
4778 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4779 mult = (tree_low_cst (hi_index, 1)
4780 - tree_low_cst (lo_index, 1) + 1);
4783 switch (TREE_CODE (value))
4785 case CONSTRUCTOR:
4787 HOST_WIDE_INT nz = 0, ic = 0;
4789 bool const_elt_p
4790 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4792 nz_elts += mult * nz;
4793 elt_count += mult * ic;
4795 if (const_from_elts_p && const_p)
4796 const_p = const_elt_p;
4798 break;
4800 case INTEGER_CST:
4801 case REAL_CST:
4802 case FIXED_CST:
4803 if (!initializer_zerop (value))
4804 nz_elts += mult;
4805 elt_count += mult;
4806 break;
4808 case STRING_CST:
4809 nz_elts += mult * TREE_STRING_LENGTH (value);
4810 elt_count += mult * TREE_STRING_LENGTH (value);
4811 break;
4813 case COMPLEX_CST:
4814 if (!initializer_zerop (TREE_REALPART (value)))
4815 nz_elts += mult;
4816 if (!initializer_zerop (TREE_IMAGPART (value)))
4817 nz_elts += mult;
4818 elt_count += mult;
4819 break;
4821 case VECTOR_CST:
4823 tree v;
4824 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4826 if (!initializer_zerop (TREE_VALUE (v)))
4827 nz_elts += mult;
4828 elt_count += mult;
4831 break;
4833 default:
4834 nz_elts += mult;
4835 elt_count += mult;
4837 if (const_from_elts_p && const_p)
4838 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4839 != NULL_TREE;
4840 break;
4844 if (!*p_must_clear
4845 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4846 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4848 tree init_sub_type;
4849 bool clear_this = true;
4851 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4853 /* We don't expect more than one element of the union to be
4854 initialized. Not sure what we should do otherwise... */
4855 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4856 == 1);
4858 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4859 CONSTRUCTOR_ELTS (ctor),
4860 0)->value);
4862 /* ??? We could look at each element of the union, and find the
4863 largest element. Which would avoid comparing the size of the
4864 initialized element against any tail padding in the union.
4865 Doesn't seem worth the effort... */
4866 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4867 TYPE_SIZE (init_sub_type)) == 1)
4869 /* And now we have to find out if the element itself is fully
4870 constructed. E.g. for union { struct { int a, b; } s; } u
4871 = { .s = { .a = 1 } }. */
4872 if (elt_count == count_type_elements (init_sub_type, false))
4873 clear_this = false;
4877 *p_must_clear = clear_this;
4880 *p_nz_elts += nz_elts;
4881 *p_elt_count += elt_count;
4883 return const_p;
4886 /* Examine CTOR to discover:
4887 * how many scalar fields are set to nonzero values,
4888 and place it in *P_NZ_ELTS;
4889 * how many scalar fields in total are in CTOR,
4890 and place it in *P_ELT_COUNT.
4891 * if a type is a union, and the initializer from the constructor
4892 is not the largest element in the union, then set *p_must_clear.
4894 Return whether or not CTOR is a valid static constant initializer, the same
4895 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4897 bool
4898 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4899 HOST_WIDE_INT *p_elt_count,
4900 bool *p_must_clear)
4902 *p_nz_elts = 0;
4903 *p_elt_count = 0;
4904 *p_must_clear = false;
4906 return
4907 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
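/* Illustrative example (not part of expr.c): a constructor with six
   scalar fields, three of them nonzero.  For this initializer
   categorize_ctor_elements sets *P_ELT_COUNT to 6 and *P_NZ_ELTS to 3,
   and leaves *P_MUST_CLEAR false since no union is involved.  */

struct example_pt { int x, y; };
struct example_pt example_pts[3] = { { 1, 0 }, { 0, 0 }, { 2, 3 } };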
4910 /* Count the number of scalars in TYPE. Return -1 on overflow or if TYPE
4911 is variable-sized. If ALLOW_FLEXARR is true, don't count a flexible
4912 array member at the end of the structure. */
4914 HOST_WIDE_INT
4915 count_type_elements (const_tree type, bool allow_flexarr)
4917 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4918 switch (TREE_CODE (type))
4920 case ARRAY_TYPE:
4922 tree telts = array_type_nelts (type);
4923 if (telts && host_integerp (telts, 1))
4925 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4926 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4927 if (n == 0)
4928 return 0;
4929 else if (max / n > m)
4930 return n * m;
4932 return -1;
4935 case RECORD_TYPE:
4937 HOST_WIDE_INT n = 0, t;
4938 tree f;
4940 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4941 if (TREE_CODE (f) == FIELD_DECL)
4943 t = count_type_elements (TREE_TYPE (f), false);
4944 if (t < 0)
4946 /* Check for structures with flexible array member. */
4947 tree tf = TREE_TYPE (f);
4948 if (allow_flexarr
4949 && TREE_CHAIN (f) == NULL
4950 && TREE_CODE (tf) == ARRAY_TYPE
4951 && TYPE_DOMAIN (tf)
4952 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4953 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4954 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4955 && int_size_in_bytes (type) >= 0)
4956 break;
4958 return -1;
4960 n += t;
4963 return n;
4966 case UNION_TYPE:
4967 case QUAL_UNION_TYPE:
4969 /* Ho hum. How in the world do we guess here? Clearly it isn't
4970 right to count the fields. Guess based on the number of words. */
4971 HOST_WIDE_INT n = int_size_in_bytes (type);
4972 if (n < 0)
4973 return -1;
4974 return n / UNITS_PER_WORD;
4977 case COMPLEX_TYPE:
4978 return 2;
4980 case VECTOR_TYPE:
4981 return TYPE_VECTOR_SUBPARTS (type);
4983 case INTEGER_TYPE:
4984 case REAL_TYPE:
4985 case FIXED_POINT_TYPE:
4986 case ENUMERAL_TYPE:
4987 case BOOLEAN_TYPE:
4988 case POINTER_TYPE:
4989 case OFFSET_TYPE:
4990 case REFERENCE_TYPE:
4991 return 1;
4993 case VOID_TYPE:
4994 case METHOD_TYPE:
4995 case FUNCTION_TYPE:
4996 case LANG_TYPE:
4997 default:
4998 gcc_unreachable ();
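/* Illustrative results (not part of expr.c), assuming a 4-byte int and
   UNITS_PER_WORD == 4:

     int a[10];                        -> 10
     struct { int x, y; } p[4];        -> 8  (4 * 2 scalar fields)
     _Complex double z;                -> 2
     union { int i; char c[8]; } u;    -> 2  (8 bytes / UNITS_PER_WORD)
     struct { int n; int d[]; }        -> 1  with ALLOW_FLEXARR true,
                                          -1 otherwise, since the
                                          trailing array is
                                          variable-sized.  */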
5002 /* Return 1 if EXP contains mostly (3/4) zeros. */
5004 static int
5005 mostly_zeros_p (const_tree exp)
5007 if (TREE_CODE (exp) == CONSTRUCTOR)
5010 HOST_WIDE_INT nz_elts, count, elts;
5011 bool must_clear;
5013 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5014 if (must_clear)
5015 return 1;
5017 elts = count_type_elements (TREE_TYPE (exp), false);
5019 return nz_elts < elts / 4;
5022 return initializer_zerop (exp);
5025 /* Return 1 if EXP contains all zeros. */
5027 static int
5028 all_zeros_p (const_tree exp)
5030 if (TREE_CODE (exp) == CONSTRUCTOR)
5033 HOST_WIDE_INT nz_elts, count;
5034 bool must_clear;
5036 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5037 return nz_elts == 0;
5040 return initializer_zerop (exp);
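/* Illustrative example (not part of expr.c): one nonzero element out
   of eight, so categorize_ctor_elements reports nz_elts == 1 against 8
   type elements and 1 < 8 / 4 makes mostly_zeros_p return 1, while
   all_zeros_p returns 0.  When such an initializer is expanded at run
   time, the expander prefers to clear the whole array and store only
   the nonzero element.  */

void
example_sparse_init (void)
{
  int sparse[8] = { [3] = 5 };   /* mostly zeros */
  (void) sparse;
}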
5043 /* Helper function for store_constructor.
5044 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5045 TYPE is the type of the CONSTRUCTOR, not the element type.
5046 CLEARED is as for store_constructor.
5047 ALIAS_SET is the alias set to use for any stores.
5049 This provides a recursive shortcut back to store_constructor when it isn't
5050 necessary to go through store_field. This is so that we can pass through
5051 the cleared field to let store_constructor know that we may not have to
5052 clear a substructure if the outer structure has already been cleared. */
5054 static void
5055 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5056 HOST_WIDE_INT bitpos, enum machine_mode mode,
5057 tree exp, tree type, int cleared,
5058 alias_set_type alias_set)
5060 if (TREE_CODE (exp) == CONSTRUCTOR
5061 /* We can only call store_constructor recursively if the size and
5062 bit position are on a byte boundary. */
5063 && bitpos % BITS_PER_UNIT == 0
5064 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5065 /* If we have a nonzero bitpos for a register target, then we just
5066 let store_field do the bitfield handling. This is unlikely to
5067 generate unnecessary clear instructions anyways. */
5068 && (bitpos == 0 || MEM_P (target)))
5070 if (MEM_P (target))
5071 target
5072 = adjust_address (target,
5073 GET_MODE (target) == BLKmode
5074 || 0 != (bitpos
5075 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5076 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5079 /* Update the alias set, if required. */
5080 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5081 && MEM_ALIAS_SET (target) != 0)
5083 target = copy_rtx (target);
5084 set_mem_alias_set (target, alias_set);
5087 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5089 else
5090 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5093 /* Store the value of constructor EXP into the rtx TARGET.
5094 TARGET is either a REG or a MEM; we know it cannot conflict, since
5095 safe_from_p has been called.
5096 CLEARED is true if TARGET is known to have been zero'd.
5097 SIZE is the number of bytes of TARGET we are allowed to modify: this
5098 may not be the same as the size of EXP if we are assigning to a field
5099 which has been packed to exclude padding bits. */
5101 static void
5102 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5104 tree type = TREE_TYPE (exp);
5105 #ifdef WORD_REGISTER_OPERATIONS
5106 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5107 #endif
5109 switch (TREE_CODE (type))
5111 case RECORD_TYPE:
5112 case UNION_TYPE:
5113 case QUAL_UNION_TYPE:
5115 unsigned HOST_WIDE_INT idx;
5116 tree field, value;
5118 /* If size is zero or the target is already cleared, do nothing. */
5119 if (size == 0 || cleared)
5120 cleared = 1;
5121 /* We either clear the aggregate or indicate the value is dead. */
5122 else if ((TREE_CODE (type) == UNION_TYPE
5123 || TREE_CODE (type) == QUAL_UNION_TYPE)
5124 && ! CONSTRUCTOR_ELTS (exp))
5125 /* If the constructor is empty, clear the union. */
5127 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5128 cleared = 1;
5131 /* If we are building a static constructor into a register,
5132 set the initial value as zero so we can fold the value into
5133 a constant. But if more than one register is involved,
5134 this probably loses. */
5135 else if (REG_P (target) && TREE_STATIC (exp)
5136 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5138 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5139 cleared = 1;
5142 /* If the constructor has fewer fields than the structure or
5143 if we are initializing the structure to mostly zeros, clear
5144 the whole structure first. Don't do this if TARGET is a
5145 register whose mode size isn't equal to SIZE since
5146 clear_storage can't handle this case. */
5147 else if (size > 0
5148 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5149 != fields_length (type))
5150 || mostly_zeros_p (exp))
5151 && (!REG_P (target)
5152 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5153 == size)))
5155 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5156 cleared = 1;
5159 if (REG_P (target) && !cleared)
5160 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5162 /* Store each element of the constructor into the
5163 corresponding field of TARGET. */
5164 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5166 enum machine_mode mode;
5167 HOST_WIDE_INT bitsize;
5168 HOST_WIDE_INT bitpos = 0;
5169 tree offset;
5170 rtx to_rtx = target;
5172 /* Just ignore missing fields. We cleared the whole
5173 structure, above, if any fields are missing. */
5174 if (field == 0)
5175 continue;
5177 if (cleared && initializer_zerop (value))
5178 continue;
5180 if (host_integerp (DECL_SIZE (field), 1))
5181 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5182 else
5183 bitsize = -1;
5185 mode = DECL_MODE (field);
5186 if (DECL_BIT_FIELD (field))
5187 mode = VOIDmode;
5189 offset = DECL_FIELD_OFFSET (field);
5190 if (host_integerp (offset, 0)
5191 && host_integerp (bit_position (field), 0))
5193 bitpos = int_bit_position (field);
5194 offset = 0;
5196 else
5197 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5199 if (offset)
5201 rtx offset_rtx;
5203 offset
5204 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5205 make_tree (TREE_TYPE (exp),
5206 target));
5208 offset_rtx = expand_normal (offset);
5209 gcc_assert (MEM_P (to_rtx));
5211 #ifdef POINTERS_EXTEND_UNSIGNED
5212 if (GET_MODE (offset_rtx) != Pmode)
5213 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5214 #else
5215 if (GET_MODE (offset_rtx) != ptr_mode)
5216 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5217 #endif
5219 to_rtx = offset_address (to_rtx, offset_rtx,
5220 highest_pow2_factor (offset));
5223 #ifdef WORD_REGISTER_OPERATIONS
5224 /* If this initializes a field that is smaller than a
5225 word, at the start of a word, try to widen it to a full
5226 word. This special case allows us to output C++ member
5227 function initializations in a form that the optimizers
5228 can understand. */
5229 if (REG_P (target)
5230 && bitsize < BITS_PER_WORD
5231 && bitpos % BITS_PER_WORD == 0
5232 && GET_MODE_CLASS (mode) == MODE_INT
5233 && TREE_CODE (value) == INTEGER_CST
5234 && exp_size >= 0
5235 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5237 tree type = TREE_TYPE (value);
5239 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5241 type = lang_hooks.types.type_for_size
5242 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5243 value = fold_convert (type, value);
5246 if (BYTES_BIG_ENDIAN)
5247 value
5248 = fold_build2 (LSHIFT_EXPR, type, value,
5249 build_int_cst (type,
5250 BITS_PER_WORD - bitsize));
5251 bitsize = BITS_PER_WORD;
5252 mode = word_mode;
5254 #endif
5256 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5257 && DECL_NONADDRESSABLE_P (field))
5259 to_rtx = copy_rtx (to_rtx);
5260 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5263 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5264 value, type, cleared,
5265 get_alias_set (TREE_TYPE (field)));
5267 break;
5269 case ARRAY_TYPE:
5271 tree value, index;
5272 unsigned HOST_WIDE_INT i;
5273 int need_to_clear;
5274 tree domain;
5275 tree elttype = TREE_TYPE (type);
5276 int const_bounds_p;
5277 HOST_WIDE_INT minelt = 0;
5278 HOST_WIDE_INT maxelt = 0;
5280 domain = TYPE_DOMAIN (type);
5281 const_bounds_p = (TYPE_MIN_VALUE (domain)
5282 && TYPE_MAX_VALUE (domain)
5283 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5284 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5286 /* If we have constant bounds for the range of the type, get them. */
5287 if (const_bounds_p)
5289 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5290 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5293 /* If the constructor has fewer elements than the array, clear
5294 the whole array first. Similarly if this is a static
5295 constructor of a non-BLKmode object. */
5296 if (cleared)
5297 need_to_clear = 0;
5298 else if (REG_P (target) && TREE_STATIC (exp))
5299 need_to_clear = 1;
5300 else
5302 unsigned HOST_WIDE_INT idx;
5303 tree index, value;
5304 HOST_WIDE_INT count = 0, zero_count = 0;
5305 need_to_clear = ! const_bounds_p;
5307 /* This loop is a more accurate version of the loop in
5308 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5309 is also needed to check for missing elements. */
5310 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5312 HOST_WIDE_INT this_node_count;
5314 if (need_to_clear)
5315 break;
5317 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5319 tree lo_index = TREE_OPERAND (index, 0);
5320 tree hi_index = TREE_OPERAND (index, 1);
5322 if (! host_integerp (lo_index, 1)
5323 || ! host_integerp (hi_index, 1))
5325 need_to_clear = 1;
5326 break;
5329 this_node_count = (tree_low_cst (hi_index, 1)
5330 - tree_low_cst (lo_index, 1) + 1);
5332 else
5333 this_node_count = 1;
5335 count += this_node_count;
5336 if (mostly_zeros_p (value))
5337 zero_count += this_node_count;
5340 /* Clear the entire array first if there are any missing
5341 elements, or if the incidence of zero elements is >=
5342 75%. */
5343 if (! need_to_clear
5344 && (count < maxelt - minelt + 1
5345 || 4 * zero_count >= 3 * count))
5346 need_to_clear = 1;
5349 if (need_to_clear && size > 0)
5351 if (REG_P (target))
5352 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5353 else
5354 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5355 cleared = 1;
5358 if (!cleared && REG_P (target))
5359 /* Inform later passes that the old value is dead. */
5360 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5362 /* Store each element of the constructor into the
5363 corresponding element of TARGET, determined by counting the
5364 elements. */
5365 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5367 enum machine_mode mode;
5368 HOST_WIDE_INT bitsize;
5369 HOST_WIDE_INT bitpos;
5370 int unsignedp;
5371 rtx xtarget = target;
5373 if (cleared && initializer_zerop (value))
5374 continue;
5376 unsignedp = TYPE_UNSIGNED (elttype);
5377 mode = TYPE_MODE (elttype);
5378 if (mode == BLKmode)
5379 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5380 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5381 : -1);
5382 else
5383 bitsize = GET_MODE_BITSIZE (mode);
5385 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5387 tree lo_index = TREE_OPERAND (index, 0);
5388 tree hi_index = TREE_OPERAND (index, 1);
5389 rtx index_r, pos_rtx;
5390 HOST_WIDE_INT lo, hi, count;
5391 tree position;
5393 /* If the range is constant and "small", unroll the loop. */
5394 if (const_bounds_p
5395 && host_integerp (lo_index, 0)
5396 && host_integerp (hi_index, 0)
5397 && (lo = tree_low_cst (lo_index, 0),
5398 hi = tree_low_cst (hi_index, 0),
5399 count = hi - lo + 1,
5400 (!MEM_P (target)
5401 || count <= 2
5402 || (host_integerp (TYPE_SIZE (elttype), 1)
5403 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5404 <= 40 * 8)))))
5406 lo -= minelt; hi -= minelt;
5407 for (; lo <= hi; lo++)
5409 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5411 if (MEM_P (target)
5412 && !MEM_KEEP_ALIAS_SET_P (target)
5413 && TREE_CODE (type) == ARRAY_TYPE
5414 && TYPE_NONALIASED_COMPONENT (type))
5416 target = copy_rtx (target);
5417 MEM_KEEP_ALIAS_SET_P (target) = 1;
5420 store_constructor_field
5421 (target, bitsize, bitpos, mode, value, type, cleared,
5422 get_alias_set (elttype));
5425 else
5427 rtx loop_start = gen_label_rtx ();
5428 rtx loop_end = gen_label_rtx ();
5429 tree exit_cond;
5431 expand_normal (hi_index);
5432 unsignedp = TYPE_UNSIGNED (domain);
5434 index = build_decl (VAR_DECL, NULL_TREE, domain);
5436 index_r
5437 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5438 &unsignedp, 0));
5439 SET_DECL_RTL (index, index_r);
5440 store_expr (lo_index, index_r, 0, false);
5442 /* Build the head of the loop. */
5443 do_pending_stack_adjust ();
5444 emit_label (loop_start);
5446 /* Assign value to element index. */
5447 position =
5448 fold_convert (ssizetype,
5449 fold_build2 (MINUS_EXPR,
5450 TREE_TYPE (index),
5451 index,
5452 TYPE_MIN_VALUE (domain)));
5454 position =
5455 size_binop (MULT_EXPR, position,
5456 fold_convert (ssizetype,
5457 TYPE_SIZE_UNIT (elttype)));
5459 pos_rtx = expand_normal (position);
5460 xtarget = offset_address (target, pos_rtx,
5461 highest_pow2_factor (position));
5462 xtarget = adjust_address (xtarget, mode, 0);
5463 if (TREE_CODE (value) == CONSTRUCTOR)
5464 store_constructor (value, xtarget, cleared,
5465 bitsize / BITS_PER_UNIT);
5466 else
5467 store_expr (value, xtarget, 0, false);
5469 /* Generate a conditional jump to exit the loop. */
5470 exit_cond = build2 (LT_EXPR, integer_type_node,
5471 index, hi_index);
5472 jumpif (exit_cond, loop_end);
5474 /* Update the loop counter, and jump to the head of
5475 the loop. */
5476 expand_assignment (index,
5477 build2 (PLUS_EXPR, TREE_TYPE (index),
5478 index, integer_one_node),
5479 false);
5481 emit_jump (loop_start);
5483 /* Build the end of the loop. */
5484 emit_label (loop_end);
5487 else if ((index != 0 && ! host_integerp (index, 0))
5488 || ! host_integerp (TYPE_SIZE (elttype), 1))
5490 tree position;
5492 if (index == 0)
5493 index = ssize_int (1);
5495 if (minelt)
5496 index = fold_convert (ssizetype,
5497 fold_build2 (MINUS_EXPR,
5498 TREE_TYPE (index),
5499 index,
5500 TYPE_MIN_VALUE (domain)));
5502 position =
5503 size_binop (MULT_EXPR, index,
5504 fold_convert (ssizetype,
5505 TYPE_SIZE_UNIT (elttype)));
5506 xtarget = offset_address (target,
5507 expand_normal (position),
5508 highest_pow2_factor (position));
5509 xtarget = adjust_address (xtarget, mode, 0);
5510 store_expr (value, xtarget, 0, false);
5512 else
5514 if (index != 0)
5515 bitpos = ((tree_low_cst (index, 0) - minelt)
5516 * tree_low_cst (TYPE_SIZE (elttype), 1));
5517 else
5518 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5520 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5521 && TREE_CODE (type) == ARRAY_TYPE
5522 && TYPE_NONALIASED_COMPONENT (type))
5524 target = copy_rtx (target);
5525 MEM_KEEP_ALIAS_SET_P (target) = 1;
5527 store_constructor_field (target, bitsize, bitpos, mode, value,
5528 type, cleared, get_alias_set (elttype));
5531 break;
5534 case VECTOR_TYPE:
5536 unsigned HOST_WIDE_INT idx;
5537 constructor_elt *ce;
5538 int i;
5539 int need_to_clear;
5540 int icode = 0;
5541 tree elttype = TREE_TYPE (type);
5542 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5543 enum machine_mode eltmode = TYPE_MODE (elttype);
5544 HOST_WIDE_INT bitsize;
5545 HOST_WIDE_INT bitpos;
5546 rtvec vector = NULL;
5547 unsigned n_elts;
5549 gcc_assert (eltmode != BLKmode);
5551 n_elts = TYPE_VECTOR_SUBPARTS (type);
5552 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5554 enum machine_mode mode = GET_MODE (target);
5556 icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5557 if (icode != CODE_FOR_nothing)
5559 unsigned int i;
5561 vector = rtvec_alloc (n_elts);
5562 for (i = 0; i < n_elts; i++)
5563 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5567 /* If the constructor has fewer elements than the vector,
5568 clear the whole array first. Similarly if this is a static
5569 constructor of a non-BLKmode object. */
5570 if (cleared)
5571 need_to_clear = 0;
5572 else if (REG_P (target) && TREE_STATIC (exp))
5573 need_to_clear = 1;
5574 else
5576 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5577 tree value;
5579 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5581 int n_elts_here = tree_low_cst
5582 (int_const_binop (TRUNC_DIV_EXPR,
5583 TYPE_SIZE (TREE_TYPE (value)),
5584 TYPE_SIZE (elttype), 0), 1);
5586 count += n_elts_here;
5587 if (mostly_zeros_p (value))
5588 zero_count += n_elts_here;
5591 /* Clear the entire vector first if there are any missing elements,
5592 or if the incidence of zero elements is >= 75%. */
5593 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5596 if (need_to_clear && size > 0 && !vector)
5598 if (REG_P (target))
5599 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5600 else
5601 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5602 cleared = 1;
5605 /* Inform later passes that the old value is dead. */
5606 if (!cleared && !vector && REG_P (target))
5607 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5609 /* Store each element of the constructor into the corresponding
5610 element of TARGET, determined by counting the elements. */
5611 for (idx = 0, i = 0;
5612 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5613 idx++, i += bitsize / elt_size)
5615 HOST_WIDE_INT eltpos;
5616 tree value = ce->value;
5618 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5619 if (cleared && initializer_zerop (value))
5620 continue;
5622 if (ce->index)
5623 eltpos = tree_low_cst (ce->index, 1);
5624 else
5625 eltpos = i;
5627 if (vector)
5629 /* Vector CONSTRUCTORs should only be built from smaller
5630 vectors in the case of BLKmode vectors. */
5631 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5632 RTVEC_ELT (vector, eltpos)
5633 = expand_normal (value);
5635 else
5637 enum machine_mode value_mode =
5638 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5639 ? TYPE_MODE (TREE_TYPE (value))
5640 : eltmode;
5641 bitpos = eltpos * elt_size;
5642 store_constructor_field (target, bitsize, bitpos,
5643 value_mode, value, type,
5644 cleared, get_alias_set (elttype));
5648 if (vector)
5649 emit_insn (GEN_FCN (icode)
5650 (target,
5651 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5652 break;
5655 default:
5656 gcc_unreachable ();
5660 /* Store the value of EXP (an expression tree)
5661 into a subfield of TARGET which has mode MODE and occupies
5662 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5663 If MODE is VOIDmode, it means that we are storing into a bit-field.
5665 Always return const0_rtx unless we have something particular to
5666 return.
5668 TYPE is the type of the underlying object,
5670 ALIAS_SET is the alias set for the destination. This value will
5671 (in general) be different from that for TARGET, since TARGET is a
5672 reference to the containing structure.
5674 If NONTEMPORAL is true, try generating a nontemporal store. */
5676 static rtx
5677 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5678 enum machine_mode mode, tree exp, tree type,
5679 alias_set_type alias_set, bool nontemporal)
5681 HOST_WIDE_INT width_mask = 0;
5683 if (TREE_CODE (exp) == ERROR_MARK)
5684 return const0_rtx;
5686 /* If we have nothing to store, do nothing unless the expression has
5687 side-effects. */
5688 if (bitsize == 0)
5689 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5690 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5691 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5693 /* If we are storing into an unaligned field of an aligned union that is
5694 in a register, we may have the mode of TARGET being an integer mode but
5695 MODE == BLKmode. In that case, get an aligned object whose size and
5696 alignment are the same as TARGET and store TARGET into it (we can avoid
5697 the store if the field being stored is the entire width of TARGET). Then
5698 call ourselves recursively to store the field into a BLKmode version of
5699 that object. Finally, load from the object into TARGET. This is not
5700 very efficient in general, but should only be slightly more expensive
5701 than the otherwise-required unaligned accesses. Perhaps this can be
5702 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5703 twice, once with emit_move_insn and once via store_field. */
5705 if (mode == BLKmode
5706 && (REG_P (target) || GET_CODE (target) == SUBREG))
5708 rtx object = assign_temp (type, 0, 1, 1);
5709 rtx blk_object = adjust_address (object, BLKmode, 0);
5711 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5712 emit_move_insn (object, target);
5714 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5715 nontemporal);
5717 emit_move_insn (target, object);
5719 /* We want to return the BLKmode version of the data. */
5720 return blk_object;
5723 if (GET_CODE (target) == CONCAT)
5725 /* We're storing into a struct containing a single __complex. */
5727 gcc_assert (!bitpos);
5728 return store_expr (exp, target, 0, nontemporal);
5731 /* If the structure is in a register or if the component
5732 is a bit field, we cannot use addressing to access it.
5733 Use bit-field techniques or SUBREG to store in it. */
5735 if (mode == VOIDmode
5736 || (mode != BLKmode && ! direct_store[(int) mode]
5737 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5738 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5739 || REG_P (target)
5740 || GET_CODE (target) == SUBREG
5741 /* If the field isn't aligned enough to store as an ordinary memref,
5742 store it as a bit field. */
5743 || (mode != BLKmode
5744 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5745 || bitpos % GET_MODE_ALIGNMENT (mode))
5746 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5747 || (bitpos % BITS_PER_UNIT != 0)))
5748 /* If the RHS and field are a constant size and the size of the
5749 RHS isn't the same size as the bitfield, we must use bitfield
5750 operations. */
5751 || (bitsize >= 0
5752 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5753 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5755 rtx temp;
5757 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5758 implies a mask operation. If the precision is the same size as
5759 the field we're storing into, that mask is redundant. This is
5760 particularly common with bit field assignments generated by the
5761 C front end. */
5762 if (TREE_CODE (exp) == NOP_EXPR)
5764 tree type = TREE_TYPE (exp);
5765 if (INTEGRAL_TYPE_P (type)
5766 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5767 && bitsize == TYPE_PRECISION (type))
5769 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5770 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5771 exp = TREE_OPERAND (exp, 0);
5775 temp = expand_normal (exp);
5777 /* If BITSIZE is narrower than the size of the type of EXP
5778 we will be narrowing TEMP. Normally, what's wanted are the
5779 low-order bits. However, if EXP's type is a record and this is
5780 a big-endian machine, we want the upper BITSIZE bits. */
5781 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5782 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5783 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5784 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5785 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5786 - bitsize),
5787 NULL_RTX, 1);
5789 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5790 MODE. */
5791 if (mode != VOIDmode && mode != BLKmode
5792 && mode != TYPE_MODE (TREE_TYPE (exp)))
5793 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5795 /* If the modes of TARGET and TEMP are both BLKmode, both
5796 must be in memory and BITPOS must be aligned on a byte
5797 boundary. If so, we simply do a block copy. */
5798 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5800 gcc_assert (MEM_P (target) && MEM_P (temp)
5801 && !(bitpos % BITS_PER_UNIT));
5803 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5804 emit_block_move (target, temp,
5805 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5806 / BITS_PER_UNIT),
5807 BLOCK_OP_NORMAL);
5809 return const0_rtx;
5812 /* Store the value in the bitfield. */
5813 store_bit_field (target, bitsize, bitpos, mode, temp);
5815 return const0_rtx;
5817 else
5819 /* Now build a reference to just the desired component. */
5820 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5822 if (to_rtx == target)
5823 to_rtx = copy_rtx (to_rtx);
5825 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5826 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5827 set_mem_alias_set (to_rtx, alias_set);
5829 return store_expr (exp, to_rtx, 0, nontemporal);
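
/* Illustrative sketch, not part of the original source: the bare
   bit-field store that store_field falls back on above, with the
   right-hand side evaluated first.  The helper name is hypothetical.  */

static void
example_store_into_bitfield (rtx target, HOST_WIDE_INT bitsize,
			     HOST_WIDE_INT bitpos, enum machine_mode mode,
			     tree exp)
{
  /* Evaluate EXP and insert its low-order BITSIZE bits into TARGET
     starting at bit BITPOS.  */
  rtx temp = expand_normal (exp);
  store_bit_field (target, bitsize, bitpos, mode, temp);
}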
5833 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5834 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5835 codes and find the ultimate containing object, which we return.
5837 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5838 bit position, and *PUNSIGNEDP to the signedness of the field.
5839 If the position of the field is variable, we store a tree
5840 giving the variable offset (in units) in *POFFSET.
5841 This offset is in addition to the bit position.
5842 If the position is not variable, we store 0 in *POFFSET.
5844 If any of the extraction expressions is volatile,
5845 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5847 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5848 is a mode that can be used to access the field. In that case, *PBITSIZE
5849 is redundant.
5851 If the field describes a variable-sized object, *PMODE is set to
5852 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5853 this case, but the address of the object can be found.
5855 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5856 look through nodes that serve as markers of a greater alignment than
5857 the one that can be deduced from the expression. These nodes make it
5858 possible for front-ends to prevent temporaries from being created by
5859 the middle-end on alignment considerations. For that purpose, the
5860 normal operating mode at high-level is to always pass FALSE so that
5861 the ultimate containing object is really returned; moreover, the
5862 associated predicate handled_component_p will always return TRUE
5863 on these nodes, thus indicating that they are essentially handled
5864 by get_inner_reference. TRUE should only be passed when the caller
5865 is scanning the expression in order to build another representation
5866 and specifically knows how to handle these nodes; as such, this is
5867 the normal operating mode in the RTL expanders. */
5869 tree
5870 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5871 HOST_WIDE_INT *pbitpos, tree *poffset,
5872 enum machine_mode *pmode, int *punsignedp,
5873 int *pvolatilep, bool keep_aligning)
5875 tree size_tree = 0;
5876 enum machine_mode mode = VOIDmode;
5877 tree offset = size_zero_node;
5878 tree bit_offset = bitsize_zero_node;
5880 /* First get the mode, signedness, and size. We do this from just the
5881 outermost expression. */
5882 if (TREE_CODE (exp) == COMPONENT_REF)
5884 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5885 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5886 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5888 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5890 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5892 size_tree = TREE_OPERAND (exp, 1);
5893 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5895 /* For vector types, with the correct size of access, use the mode of
5896 the inner type. */
5897 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5898 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5899 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5900 mode = TYPE_MODE (TREE_TYPE (exp));
5902 else
5904 mode = TYPE_MODE (TREE_TYPE (exp));
5905 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5907 if (mode == BLKmode)
5908 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5909 else
5910 *pbitsize = GET_MODE_BITSIZE (mode);
5913 if (size_tree != 0)
5915 if (! host_integerp (size_tree, 1))
5916 mode = BLKmode, *pbitsize = -1;
5917 else
5918 *pbitsize = tree_low_cst (size_tree, 1);
5921 *pmode = mode;
5923 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5924 and find the ultimate containing object. */
5925 while (1)
5927 switch (TREE_CODE (exp))
5929 case BIT_FIELD_REF:
5930 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5931 TREE_OPERAND (exp, 2));
5932 break;
5934 case COMPONENT_REF:
5936 tree field = TREE_OPERAND (exp, 1);
5937 tree this_offset = component_ref_field_offset (exp);
5939 /* If this field hasn't been filled in yet, don't go past it.
5940 This should only happen when folding expressions made during
5941 type construction. */
5942 if (this_offset == 0)
5943 break;
5945 offset = size_binop (PLUS_EXPR, offset, this_offset);
5946 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5947 DECL_FIELD_BIT_OFFSET (field));
5949 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5951 break;
5953 case ARRAY_REF:
5954 case ARRAY_RANGE_REF:
5956 tree index = TREE_OPERAND (exp, 1);
5957 tree low_bound = array_ref_low_bound (exp);
5958 tree unit_size = array_ref_element_size (exp);
5960 /* We assume all arrays have sizes that are a multiple of a byte.
5961 First subtract the lower bound, if any, in the type of the
5962 index, then convert to sizetype and multiply by the size of
5963 the array element. */
5964 if (! integer_zerop (low_bound))
5965 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5966 index, low_bound);
5968 offset = size_binop (PLUS_EXPR, offset,
5969 size_binop (MULT_EXPR,
5970 fold_convert (sizetype, index),
5971 unit_size));
5973 break;
5975 case REALPART_EXPR:
5976 break;
5978 case IMAGPART_EXPR:
5979 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5980 bitsize_int (*pbitsize));
5981 break;
5983 case VIEW_CONVERT_EXPR:
5984 if (keep_aligning && STRICT_ALIGNMENT
5985 && (TYPE_ALIGN (TREE_TYPE (exp))
5986 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5987 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5988 < BIGGEST_ALIGNMENT)
5989 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5990 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5991 goto done;
5992 break;
5994 default:
5995 goto done;
5998 /* If any reference in the chain is volatile, the effect is volatile. */
5999 if (TREE_THIS_VOLATILE (exp))
6000 *pvolatilep = 1;
6002 exp = TREE_OPERAND (exp, 0);
6004 done:
6006 /* If OFFSET is constant, see if we can return the whole thing as a
6007 constant bit position. Make sure to handle overflow during
6008 this conversion. */
6009 if (host_integerp (offset, 0))
6011 double_int tem = double_int_mul (tree_to_double_int (offset),
6012 uhwi_to_double_int (BITS_PER_UNIT));
6013 tem = double_int_add (tem, tree_to_double_int (bit_offset));
6014 if (double_int_fits_in_shwi_p (tem))
6016 *pbitpos = double_int_to_shwi (tem);
6017 *poffset = NULL_TREE;
6018 return exp;
6022 /* Otherwise, split it up. */
6023 *pbitpos = tree_low_cst (bit_offset, 0);
6024 *poffset = offset;
6026 return exp;
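
/* Illustrative sketch, not part of the original source: a typical
   caller of get_inner_reference, checking whether a reference starts
   on a byte boundary.  The helper name is hypothetical.  */

static bool
example_ref_starts_on_byte_boundary (tree ref)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep = 0;

  /* Decompose REF; pass FALSE so alignment markers are looked through,
     as the RTL expanders do.  */
  get_inner_reference (ref, &bitsize, &bitpos, &offset,
		       &mode, &unsignedp, &volatilep, false);

  /* A variable OFFSET is measured in whole units, so only the
     constant bit position matters here.  */
  return (bitpos % BITS_PER_UNIT) == 0;
}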
6029 /* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
6030 look for whether EXP or any nested component-refs within EXP is marked
6031 as PACKED. */
6033 bool
6034 contains_packed_reference (const_tree exp)
6036 bool packed_p = false;
6038 while (1)
6040 switch (TREE_CODE (exp))
6042 case COMPONENT_REF:
6044 tree field = TREE_OPERAND (exp, 1);
6045 packed_p = DECL_PACKED (field)
6046 || TYPE_PACKED (TREE_TYPE (field))
6047 || TYPE_PACKED (TREE_TYPE (exp));
6048 if (packed_p)
6049 goto done;
6051 break;
6053 case BIT_FIELD_REF:
6054 case ARRAY_REF:
6055 case ARRAY_RANGE_REF:
6056 case REALPART_EXPR:
6057 case IMAGPART_EXPR:
6058 case VIEW_CONVERT_EXPR:
6059 break;
6061 default:
6062 goto done;
6064 exp = TREE_OPERAND (exp, 0);
6066 done:
6067 return packed_p;
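
/* Illustrative sketch, not part of the original source: one way a
   caller on a strict-alignment target might use the predicate above.
   The helper name is hypothetical.  */

static bool
example_ref_may_be_underaligned (const_tree ref)
{
  /* Packed fields may violate the natural alignment of their type,
     which only matters when the target cannot do unaligned accesses.  */
  return STRICT_ALIGNMENT && contains_packed_reference (ref);
}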
6070 /* Return a tree of sizetype representing the size, in bytes, of the element
6071 of EXP, an ARRAY_REF. */
6073 tree
6074 array_ref_element_size (tree exp)
6076 tree aligned_size = TREE_OPERAND (exp, 3);
6077 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6079 /* If a size was specified in the ARRAY_REF, it's the size measured
6080 in alignment units of the element type. So multiply by that value. */
6081 if (aligned_size)
6083 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6084 sizetype from another type of the same width and signedness. */
6085 if (TREE_TYPE (aligned_size) != sizetype)
6086 aligned_size = fold_convert (sizetype, aligned_size);
6087 return size_binop (MULT_EXPR, aligned_size,
6088 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6091 /* Otherwise, take the size from that of the element type. Substitute
6092 any PLACEHOLDER_EXPR that we have. */
6093 else
6094 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6097 /* Return a tree representing the lower bound of the array mentioned in
6098 EXP, an ARRAY_REF. */
6100 tree
6101 array_ref_low_bound (tree exp)
6103 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6105 /* If a lower bound is specified in EXP, use it. */
6106 if (TREE_OPERAND (exp, 2))
6107 return TREE_OPERAND (exp, 2);
6109 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6110 substituting for a PLACEHOLDER_EXPR as needed. */
6111 if (domain_type && TYPE_MIN_VALUE (domain_type))
6112 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6114 /* Otherwise, return a zero of the appropriate type. */
6115 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
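
/* Illustrative sketch, not part of the original source: computing the
   byte offset of an ARRAY_REF element the same way the ARRAY_REF case
   of get_inner_reference does above.  The helper name is hypothetical.  */

static tree
example_array_ref_byte_offset (tree exp)
{
  tree index = TREE_OPERAND (exp, 1);
  tree low_bound = array_ref_low_bound (exp);
  tree unit_size = array_ref_element_size (exp);

  /* Subtract the lower bound in the type of the index, then convert
     to sizetype and scale by the element size in bytes.  */
  if (! integer_zerop (low_bound))
    index = fold_build2 (MINUS_EXPR, TREE_TYPE (index), index, low_bound);

  return size_binop (MULT_EXPR, fold_convert (sizetype, index), unit_size);
}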
6118 /* Return a tree representing the upper bound of the array mentioned in
6119 EXP, an ARRAY_REF. */
6121 tree
6122 array_ref_up_bound (tree exp)
6124 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6126 /* If there is a domain type and it has an upper bound, use it, substituting
6127 for a PLACEHOLDER_EXPR as needed. */
6128 if (domain_type && TYPE_MAX_VALUE (domain_type))
6129 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6131 /* Otherwise fail. */
6132 return NULL_TREE;
6135 /* Return a tree representing the offset, in bytes, of the field referenced
6136 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6138 tree
6139 component_ref_field_offset (tree exp)
6141 tree aligned_offset = TREE_OPERAND (exp, 2);
6142 tree field = TREE_OPERAND (exp, 1);
6144 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6145 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6146 value. */
6147 if (aligned_offset)
6149 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6150 sizetype from another type of the same width and signedness. */
6151 if (TREE_TYPE (aligned_offset) != sizetype)
6152 aligned_offset = fold_convert (sizetype, aligned_offset);
6153 return size_binop (MULT_EXPR, aligned_offset,
6154 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
6157 /* Otherwise, take the offset from that of the field. Substitute
6158 any PLACEHOLDER_EXPR that we have. */
6159 else
6160 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6163 /* Return 1 if T is an expression that get_inner_reference handles. */
6165 int
6166 handled_component_p (const_tree t)
6168 switch (TREE_CODE (t))
6170 case BIT_FIELD_REF:
6171 case COMPONENT_REF:
6172 case ARRAY_REF:
6173 case ARRAY_RANGE_REF:
6174 case VIEW_CONVERT_EXPR:
6175 case REALPART_EXPR:
6176 case IMAGPART_EXPR:
6177 return 1;
6179 default:
6180 return 0;
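
/* Illustrative sketch, not part of the original source: stripping
   every handled component to reach the base object, the same walk
   get_inner_reference performs above.  The helper name is
   hypothetical.  */

static tree
example_get_base_object (tree exp)
{
  while (handled_component_p (exp))
    exp = TREE_OPERAND (exp, 0);
  return exp;
}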
6184 /* Given an rtx VALUE that may contain additions and multiplications, return
6185 an equivalent value that just refers to a register, memory, or constant.
6186 This is done by generating instructions to perform the arithmetic and
6187 returning a pseudo-register containing the value.
6189 The returned value may be a REG, SUBREG, MEM or constant. */
6191 rtx
6192 force_operand (rtx value, rtx target)
6194 rtx op1, op2;
6195 /* Use subtarget as the target for operand 0 of a binary operation. */
6196 rtx subtarget = get_subtarget (target);
6197 enum rtx_code code = GET_CODE (value);
6199 /* Check for subreg applied to an expression produced by loop optimizer. */
6200 if (code == SUBREG
6201 && !REG_P (SUBREG_REG (value))
6202 && !MEM_P (SUBREG_REG (value)))
6204 value
6205 = simplify_gen_subreg (GET_MODE (value),
6206 force_reg (GET_MODE (SUBREG_REG (value)),
6207 force_operand (SUBREG_REG (value),
6208 NULL_RTX)),
6209 GET_MODE (SUBREG_REG (value)),
6210 SUBREG_BYTE (value));
6211 code = GET_CODE (value);
6214 /* Check for a PIC address load. */
6215 if ((code == PLUS || code == MINUS)
6216 && XEXP (value, 0) == pic_offset_table_rtx
6217 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6218 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6219 || GET_CODE (XEXP (value, 1)) == CONST))
6221 if (!subtarget)
6222 subtarget = gen_reg_rtx (GET_MODE (value));
6223 emit_move_insn (subtarget, value);
6224 return subtarget;
6227 if (ARITHMETIC_P (value))
6229 op2 = XEXP (value, 1);
6230 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6231 subtarget = 0;
6232 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6234 code = PLUS;
6235 op2 = negate_rtx (GET_MODE (value), op2);
6238 /* Check for an addition with OP2 a constant integer and our first
6239 operand a PLUS of a virtual register and something else. In that
6240 case, we want to emit the sum of the virtual register and the
6241 constant first and then add the other value. This allows virtual
6242 register instantiation to simply modify the constant rather than
6243 creating another one around this addition. */
6244 if (code == PLUS && GET_CODE (op2) == CONST_INT
6245 && GET_CODE (XEXP (value, 0)) == PLUS
6246 && REG_P (XEXP (XEXP (value, 0), 0))
6247 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6248 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6250 rtx temp = expand_simple_binop (GET_MODE (value), code,
6251 XEXP (XEXP (value, 0), 0), op2,
6252 subtarget, 0, OPTAB_LIB_WIDEN);
6253 return expand_simple_binop (GET_MODE (value), code, temp,
6254 force_operand (XEXP (XEXP (value,
6255 0), 1), 0),
6256 target, 0, OPTAB_LIB_WIDEN);
6259 op1 = force_operand (XEXP (value, 0), subtarget);
6260 op2 = force_operand (op2, NULL_RTX);
6261 switch (code)
6263 case MULT:
6264 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6265 case DIV:
6266 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6267 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6268 target, 1, OPTAB_LIB_WIDEN);
6269 else
6270 return expand_divmod (0,
6271 FLOAT_MODE_P (GET_MODE (value))
6272 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6273 GET_MODE (value), op1, op2, target, 0);
6274 case MOD:
6275 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6276 target, 0);
6277 case UDIV:
6278 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6279 target, 1);
6280 case UMOD:
6281 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6282 target, 1);
6283 case ASHIFTRT:
6284 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6285 target, 0, OPTAB_LIB_WIDEN);
6286 default:
6287 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6288 target, 1, OPTAB_LIB_WIDEN);
6291 if (UNARY_P (value))
6293 if (!target)
6294 target = gen_reg_rtx (GET_MODE (value));
6295 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6296 switch (code)
6298 case ZERO_EXTEND:
6299 case SIGN_EXTEND:
6300 case TRUNCATE:
6301 case FLOAT_EXTEND:
6302 case FLOAT_TRUNCATE:
6303 convert_move (target, op1, code == ZERO_EXTEND);
6304 return target;
6306 case FIX:
6307 case UNSIGNED_FIX:
6308 expand_fix (target, op1, code == UNSIGNED_FIX);
6309 return target;
6311 case FLOAT:
6312 case UNSIGNED_FLOAT:
6313 expand_float (target, op1, code == UNSIGNED_FLOAT);
6314 return target;
6316 default:
6317 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6321 #ifdef INSN_SCHEDULING
6322 /* On machines that have insn scheduling, we want all memory references to be
6323 explicit, so we need to deal with such paradoxical SUBREGs. */
6324 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6325 && (GET_MODE_SIZE (GET_MODE (value))
6326 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6327 value
6328 = simplify_gen_subreg (GET_MODE (value),
6329 force_reg (GET_MODE (SUBREG_REG (value)),
6330 force_operand (SUBREG_REG (value),
6331 NULL_RTX)),
6332 GET_MODE (SUBREG_REG (value)),
6333 SUBREG_BYTE (value));
6334 #endif
6336 return value;
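
/* Illustrative sketch, not part of the original source: a typical use
   of force_operand, materializing BASE plus a byte offset as a valid
   operand.  The helper name is hypothetical.  */

static rtx
example_force_displaced_address (rtx base, HOST_WIDE_INT offset)
{
  /* plus_constant builds the symbolic sum; force_operand emits any
     needed arithmetic and hands back a REG, SUBREG, MEM or constant.  */
  return force_operand (plus_constant (base, offset), NULL_RTX);
}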
6339 /* Subroutine of expand_expr: return nonzero iff there is no way that
6340 EXP can reference X, which is being modified. TOP_P is nonzero if this
6341 call is going to be used to determine whether we need a temporary
6342 for EXP, as opposed to a recursive call to this function.
6344 It is always safe for this routine to return zero since it merely
6345 searches for optimization opportunities. */
6347 static int
6348 safe_from_p (const_rtx x, tree exp, int top_p)
6350 rtx exp_rtl = 0;
6351 int i, nops;
6353 if (x == 0
6354 /* If EXP has varying size, we MUST use a target since we currently
6355 have no way of allocating temporaries of variable size
6356 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6357 So we assume here that something at a higher level has prevented a
6358 clash. This is somewhat bogus, but the best we can do. Only
6359 do this when X is BLKmode and when we are at the top level. */
6360 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6361 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6362 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6363 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6364 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6365 != INTEGER_CST)
6366 && GET_MODE (x) == BLKmode)
6367 /* If X is in the outgoing argument area, it is always safe. */
6368 || (MEM_P (x)
6369 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6370 || (GET_CODE (XEXP (x, 0)) == PLUS
6371 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6372 return 1;
6374 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6375 find the underlying pseudo. */
6376 if (GET_CODE (x) == SUBREG)
6378 x = SUBREG_REG (x);
6379 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6380 return 0;
6383 /* Now look at our tree code and possibly recurse. */
6384 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6386 case tcc_declaration:
6387 exp_rtl = DECL_RTL_IF_SET (exp);
6388 break;
6390 case tcc_constant:
6391 return 1;
6393 case tcc_exceptional:
6394 if (TREE_CODE (exp) == TREE_LIST)
6396 while (1)
6398 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6399 return 0;
6400 exp = TREE_CHAIN (exp);
6401 if (!exp)
6402 return 1;
6403 if (TREE_CODE (exp) != TREE_LIST)
6404 return safe_from_p (x, exp, 0);
6407 else if (TREE_CODE (exp) == CONSTRUCTOR)
6409 constructor_elt *ce;
6410 unsigned HOST_WIDE_INT idx;
6412 for (idx = 0;
6413 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6414 idx++)
6415 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6416 || !safe_from_p (x, ce->value, 0))
6417 return 0;
6418 return 1;
6420 else if (TREE_CODE (exp) == ERROR_MARK)
6421 return 1; /* An already-visited SAVE_EXPR? */
6422 else
6423 return 0;
6425 case tcc_statement:
6426 /* The only case we look at here is the DECL_INITIAL inside a
6427 DECL_EXPR. */
6428 return (TREE_CODE (exp) != DECL_EXPR
6429 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6430 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6431 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6433 case tcc_binary:
6434 case tcc_comparison:
6435 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6436 return 0;
6437 /* Fall through. */
6439 case tcc_unary:
6440 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6442 case tcc_expression:
6443 case tcc_reference:
6444 case tcc_vl_exp:
6445 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6446 the expression. If it is set, we conflict iff we are that rtx or
6447 both are in memory. Otherwise, we check all operands of the
6448 expression recursively. */
6450 switch (TREE_CODE (exp))
6452 case ADDR_EXPR:
6453 /* If the operand is static or we are static, we can't conflict.
6454 Likewise if we don't conflict with the operand at all. */
6455 if (staticp (TREE_OPERAND (exp, 0))
6456 || TREE_STATIC (exp)
6457 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6458 return 1;
6460 /* Otherwise, the only way this can conflict is if we are taking
6461 the address of a DECL whose address is part of X, which is
6462 very rare. */
6463 exp = TREE_OPERAND (exp, 0);
6464 if (DECL_P (exp))
6466 if (!DECL_RTL_SET_P (exp)
6467 || !MEM_P (DECL_RTL (exp)))
6468 return 0;
6469 else
6470 exp_rtl = XEXP (DECL_RTL (exp), 0);
6472 break;
6474 case MISALIGNED_INDIRECT_REF:
6475 case ALIGN_INDIRECT_REF:
6476 case INDIRECT_REF:
6477 if (MEM_P (x)
6478 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6479 get_alias_set (exp)))
6480 return 0;
6481 break;
6483 case CALL_EXPR:
6484 /* Assume that the call will clobber all hard registers and
6485 all of memory. */
6486 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6487 || MEM_P (x))
6488 return 0;
6489 break;
6491 case WITH_CLEANUP_EXPR:
6492 case CLEANUP_POINT_EXPR:
6493 /* Lowered by gimplify.c. */
6494 gcc_unreachable ();
6496 case SAVE_EXPR:
6497 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6499 default:
6500 break;
6503 /* If we have an rtx, we do not need to scan our operands. */
6504 if (exp_rtl)
6505 break;
6507 nops = TREE_OPERAND_LENGTH (exp);
6508 for (i = 0; i < nops; i++)
6509 if (TREE_OPERAND (exp, i) != 0
6510 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6511 return 0;
6513 break;
6515 case tcc_type:
6516 /* Should never get a type here. */
6517 gcc_unreachable ();
6519 case tcc_gimple_stmt:
6520 gcc_unreachable ();
6523 /* If we have an rtl, find any enclosed object. Then see if we conflict
6524 with it. */
6525 if (exp_rtl)
6527 if (GET_CODE (exp_rtl) == SUBREG)
6529 exp_rtl = SUBREG_REG (exp_rtl);
6530 if (REG_P (exp_rtl)
6531 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6532 return 0;
6535 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6536 are memory and they conflict. */
6537 return ! (rtx_equal_p (x, exp_rtl)
6538 || (MEM_P (x) && MEM_P (exp_rtl)
6539 && true_dependence (exp_rtl, VOIDmode, x,
6540 rtx_addr_varies_p)));
6543 /* If we reach here, it is safe. */
6544 return 1;
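
/* Illustrative sketch, not part of the original source: how the
   expanders above consult safe_from_p before reusing TARGET while
   another operand still remains to be evaluated.  The helper name is
   hypothetical.  */

static rtx
example_reusable_target (rtx target, tree pending_operand)
{
  /* TARGET may only serve as a scratch if evaluating PENDING_OPERAND
     cannot reference it.  */
  return (target != 0 && safe_from_p (target, pending_operand, 1)
	  ? target : NULL_RTX);
}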
6548 /* Return the highest power of two that EXP is known to be a multiple of.
6549 This is used in updating alignment of MEMs in array references. */
6551 unsigned HOST_WIDE_INT
6552 highest_pow2_factor (const_tree exp)
6554 unsigned HOST_WIDE_INT c0, c1;
6556 switch (TREE_CODE (exp))
6558 case INTEGER_CST:
6559 /* We can find the lowest bit that's a one. If the low
6560 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6561 We need to handle this case since we can find it in a COND_EXPR,
6562 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6563 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6564 later ICE. */
6565 if (TREE_OVERFLOW (exp))
6566 return BIGGEST_ALIGNMENT;
6567 else
6569 /* Note: tree_low_cst is intentionally not used here;
6570 we don't care about the upper bits. */
6571 c0 = TREE_INT_CST_LOW (exp);
6572 c0 &= -c0;
6573 return c0 ? c0 : BIGGEST_ALIGNMENT;
6575 break;
6577 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6578 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6579 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6580 return MIN (c0, c1);
6582 case MULT_EXPR:
6583 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6584 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6585 return c0 * c1;
6587 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6588 case CEIL_DIV_EXPR:
6589 if (integer_pow2p (TREE_OPERAND (exp, 1))
6590 && host_integerp (TREE_OPERAND (exp, 1), 1))
6592 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6593 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6594 return MAX (1, c0 / c1);
6596 break;
6598 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6599 case SAVE_EXPR:
6600 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6602 case COMPOUND_EXPR:
6603 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6605 case COND_EXPR:
6606 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6607 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6608 return MIN (c0, c1);
6610 default:
6611 break;
6614 return 1;
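
/* Illustrative sketch, not part of the original source: the usual
   pairing of highest_pow2_factor with offset_address, as in the
   array-element stores above.  The helper name is hypothetical.  */

static rtx
example_element_address (rtx target, tree byte_position,
			 enum machine_mode mode)
{
  /* The largest known power-of-two factor of BYTE_POSITION bounds the
     alignment that may be assumed for the offset address.  */
  rtx addr = offset_address (target, expand_normal (byte_position),
			     highest_pow2_factor (byte_position));
  return adjust_address (addr, mode, 0);
}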
6617 /* Similar, except that the alignment requirements of TARGET are
6618 taken into account. Assume it is at least as aligned as its
6619 type, unless it is a COMPONENT_REF in which case the layout of
6620 the structure gives the alignment. */
6622 static unsigned HOST_WIDE_INT
6623 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6625 unsigned HOST_WIDE_INT target_align, factor;
6627 factor = highest_pow2_factor (exp);
6628 if (TREE_CODE (target) == COMPONENT_REF)
6629 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6630 else
6631 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6632 return MAX (factor, target_align);
6635 /* Return &VAR expression for emulated thread local VAR. */
6637 static tree
6638 emutls_var_address (tree var)
6640 tree emuvar = emutls_decl (var);
6641 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6642 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6643 tree arglist = build_tree_list (NULL_TREE, arg);
6644 tree call = build_function_call_expr (fn, arglist);
6645 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
6648 /* Expands variable VAR. */
6650 void
6651 expand_var (tree var)
6653 if (DECL_EXTERNAL (var))
6654 return;
6656 if (TREE_STATIC (var))
6657 /* If this is an inlined copy of a static local variable,
6658 look up the original decl. */
6659 var = DECL_ORIGIN (var);
6661 if (TREE_STATIC (var)
6662 ? !TREE_ASM_WRITTEN (var)
6663 : !DECL_RTL_SET_P (var))
6665 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6666 /* Should be ignored. */;
6667 else if (lang_hooks.expand_decl (var))
6668 /* OK. */;
6669 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6670 expand_decl (var);
6671 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6672 rest_of_decl_compilation (var, 0, 0);
6673 else
6674 /* No expansion needed. */
6675 gcc_assert (TREE_CODE (var) == TYPE_DECL
6676 || TREE_CODE (var) == CONST_DECL
6677 || TREE_CODE (var) == FUNCTION_DECL
6678 || TREE_CODE (var) == LABEL_DECL);
6682 /* Subroutine of expand_expr. Expand the two operands of a binary
6683 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6684 The value may be stored in TARGET if TARGET is nonzero. The
6685 MODIFIER argument is as documented by expand_expr. */
6687 static void
6688 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6689 enum expand_modifier modifier)
6691 if (! safe_from_p (target, exp1, 1))
6692 target = 0;
6693 if (operand_equal_p (exp0, exp1, 0))
6695 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6696 *op1 = copy_rtx (*op0);
6698 else
6700 /* If we need to preserve evaluation order, copy exp0 into its own
6701 temporary variable so that it can't be clobbered by exp1. */
6702 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6703 exp0 = save_expr (exp0);
6704 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6705 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
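
/* Illustrative sketch, not part of the original source: the usual way
   a binary tree node is expanded with the helper above and then
   combined.  PLUS is just an example opcode; the helper name is
   hypothetical.  */

static rtx
example_expand_plus (tree exp, rtx target, enum machine_mode mode)
{
  rtx op0, op1;

  /* Evaluate both operands; expand_operands refuses to reuse TARGET
     if operand 1 could still read it.  */
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   target, &op0, &op1, EXPAND_NORMAL);
  return expand_simple_binop (mode, PLUS, op0, op1, target,
			      0, OPTAB_LIB_WIDEN);
}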
6710 /* Return a MEM that contains constant EXP. DEFER is as for
6711 output_constant_def and MODIFIER is as for expand_expr. */
6713 static rtx
6714 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6716 rtx mem;
6718 mem = output_constant_def (exp, defer);
6719 if (modifier != EXPAND_INITIALIZER)
6720 mem = use_anchored_address (mem);
6721 return mem;
6724 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6725 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6727 static rtx
6728 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6729 enum expand_modifier modifier)
6731 rtx result, subtarget;
6732 tree inner, offset;
6733 HOST_WIDE_INT bitsize, bitpos;
6734 int volatilep, unsignedp;
6735 enum machine_mode mode1;
6737 /* If we are taking the address of a constant and are at the top level,
6738 we have to use output_constant_def since we can't call force_const_mem
6739 at top level. */
6740 /* ??? This should be considered a front-end bug. We should not be
6741 generating ADDR_EXPR of something that isn't an LVALUE. The only
6742 exception here is STRING_CST. */
6743 if (CONSTANT_CLASS_P (exp))
6744 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6746 /* Everything must be something allowed by is_gimple_addressable. */
6747 switch (TREE_CODE (exp))
6749 case INDIRECT_REF:
6750 /* This case will happen via recursion for &a->b. */
6751 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6753 case CONST_DECL:
6754 /* Recurse and make the output_constant_def clause above handle this. */
6755 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6756 tmode, modifier);
6758 case REALPART_EXPR:
6759 /* The real part of the complex number is always first, therefore
6760 the address is the same as the address of the parent object. */
6761 offset = 0;
6762 bitpos = 0;
6763 inner = TREE_OPERAND (exp, 0);
6764 break;
6766 case IMAGPART_EXPR:
6767 /* The imaginary part of the complex number is always second.
6768 The expression is therefore always offset by the size of the
6769 scalar type. */
6770 offset = 0;
6771 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6772 inner = TREE_OPERAND (exp, 0);
6773 break;
6775 case VAR_DECL:
6776 /* TLS emulation hook - replace __thread VAR's &VAR with
6777 __emutls_get_address (&_emutls.VAR). */
6778 if (! targetm.have_tls
6779 && TREE_CODE (exp) == VAR_DECL
6780 && DECL_THREAD_LOCAL_P (exp))
6782 exp = emutls_var_address (exp);
6783 return expand_expr (exp, target, tmode, modifier);
6785 /* Fall through. */
6787 default:
6788 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6789 expand_expr, as that can have various side effects; LABEL_DECLs for
6790 example, may not have their DECL_RTL set yet. Expand the rtl of
6791 CONSTRUCTORs too, which should yield a memory reference for the
6792 constructor's contents. Assume language specific tree nodes can
6793 be expanded in some interesting way. */
6794 if (DECL_P (exp)
6795 || TREE_CODE (exp) == CONSTRUCTOR
6796 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6798 result = expand_expr (exp, target, tmode,
6799 modifier == EXPAND_INITIALIZER
6800 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6802 /* If the DECL isn't in memory, then the DECL wasn't properly
6803 marked TREE_ADDRESSABLE, which will be either a front-end
6804 or a tree optimizer bug. */
6805 gcc_assert (MEM_P (result));
6806 result = XEXP (result, 0);
6808 /* ??? Is this needed anymore? */
6809 if (DECL_P (exp) && ! TREE_USED (exp))
6811 assemble_external (exp);
6812 TREE_USED (exp) = 1;
6815 if (modifier != EXPAND_INITIALIZER
6816 && modifier != EXPAND_CONST_ADDRESS)
6817 result = force_operand (result, target);
6818 return result;
6821 /* Pass FALSE as the last argument to get_inner_reference although
6822 we are expanding to RTL. The rationale is that we know how to
6823 handle "aligning nodes" here: we can just bypass them because
6824 they won't change the final object whose address will be returned
6825 (they actually exist only for that purpose). */
6826 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6827 &mode1, &unsignedp, &volatilep, false);
6828 break;
6831 /* We must have made progress. */
6832 gcc_assert (inner != exp);
6834 subtarget = offset || bitpos ? NULL_RTX : target;
6835 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6837 if (offset)
6839 rtx tmp;
6841 if (modifier != EXPAND_NORMAL)
6842 result = force_operand (result, NULL);
6843 tmp = expand_expr (offset, NULL_RTX, tmode,
6844 modifier == EXPAND_INITIALIZER
6845 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6847 result = convert_memory_address (tmode, result);
6848 tmp = convert_memory_address (tmode, tmp);
6850 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6851 result = gen_rtx_PLUS (tmode, result, tmp);
6852 else
6854 subtarget = bitpos ? NULL_RTX : target;
6855 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6856 1, OPTAB_LIB_WIDEN);
6860 if (bitpos)
6862 /* Someone beforehand should have rejected taking the address
6863 of such an object. */
6864 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6866 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6867 if (modifier < EXPAND_SUM)
6868 result = force_operand (result, target);
6871 return result;
6874 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6875 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6877 static rtx
6878 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6879 enum expand_modifier modifier)
6881 enum machine_mode rmode;
6882 rtx result;
6884 /* Target mode of VOIDmode says "whatever's natural". */
6885 if (tmode == VOIDmode)
6886 tmode = TYPE_MODE (TREE_TYPE (exp));
6888 /* We can get called with some Weird Things if the user does silliness
6889 like "(short) &a". In that case, convert_memory_address won't do
6890 the right thing, so ignore the given target mode. */
6891 if (tmode != Pmode && tmode != ptr_mode)
6892 tmode = Pmode;
6894 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6895 tmode, modifier);
6897 /* Despite expand_expr's claims concerning ignoring TMODE when not
6898 strictly convenient, stuff breaks if we don't honor it. Note
6899 that combined with the above, we only do this for pointer modes. */
6900 rmode = GET_MODE (result);
6901 if (rmode == VOIDmode)
6902 rmode = tmode;
6903 if (rmode != tmode)
6904 result = convert_memory_address (tmode, result);
6906 return result;
6909 /* Generate code for computing CONSTRUCTOR EXP.
6910 An rtx for the computed value is returned. If AVOID_TEMP_MEM
6911 is TRUE, then instead of creating a temporary variable in memory,
6912 NULL is returned and the caller needs to handle it differently.
6914 static rtx
6915 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
6916 bool avoid_temp_mem)
6918 tree type = TREE_TYPE (exp);
6919 enum machine_mode mode = TYPE_MODE (type);
6921 /* Try to avoid creating a temporary at all. This is possible
6922 if all of the initializer is zero.
6923 FIXME: try to handle all [0..255] initializers we can handle
6924 with memset. */
6925 if (TREE_STATIC (exp)
6926 && !TREE_ADDRESSABLE (exp)
6927 && target != 0 && mode == BLKmode
6928 && all_zeros_p (exp))
6930 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6931 return target;
6934 /* All elts simple constants => refer to a constant in memory. But
6935 if this is a non-BLKmode mode, let it store a field at a time
6936 since that should make a CONST_INT or CONST_DOUBLE when we
6937 fold. Likewise, if we have a target we can use, it is best to
6938 store directly into the target unless the type is large enough
6939 that memcpy will be used. If we are making an initializer and
6940 all operands are constant, put it in memory as well.
6942 FIXME: Avoid trying to fill vector constructors piece-meal.
6943 Output them with output_constant_def below unless we're sure
6944 they're zeros. This should go away when vector initializers
6945 are treated like VECTOR_CST instead of arrays. */
6946 if ((TREE_STATIC (exp)
6947 && ((mode == BLKmode
6948 && ! (target != 0 && safe_from_p (target, exp, 1)))
6949 || TREE_ADDRESSABLE (exp)
6950 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6951 && (! MOVE_BY_PIECES_P
6952 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6953 TYPE_ALIGN (type)))
6954 && ! mostly_zeros_p (exp))))
6955 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
6956 && TREE_CONSTANT (exp)))
6958 rtx constructor;
6960 if (avoid_temp_mem)
6961 return NULL_RTX;
6963 constructor = expand_expr_constant (exp, 1, modifier);
6965 if (modifier != EXPAND_CONST_ADDRESS
6966 && modifier != EXPAND_INITIALIZER
6967 && modifier != EXPAND_SUM)
6968 constructor = validize_mem (constructor);
6970 return constructor;
6973 /* Handle calls that pass values in multiple non-contiguous
6974 locations. The Irix 6 ABI has examples of this. */
6975 if (target == 0 || ! safe_from_p (target, exp, 1)
6976 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
6978 if (avoid_temp_mem)
6979 return NULL_RTX;
6981 target
6982 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
6983 | (TREE_READONLY (exp)
6984 * TYPE_QUAL_CONST))),
6985 0, TREE_ADDRESSABLE (exp), 1);
6988 store_constructor (exp, target, 0, int_expr_size (exp));
6989 return target;
6993 /* expand_expr: generate code for computing expression EXP.
6994 An rtx for the computed value is returned. The value is never null.
6995 In the case of a void EXP, const0_rtx is returned.
6997 The value may be stored in TARGET if TARGET is nonzero.
6998 TARGET is just a suggestion; callers must assume that
6999 the rtx returned may not be the same as TARGET.
7001 If TARGET is CONST0_RTX, it means that the value will be ignored.
7003 If TMODE is not VOIDmode, it suggests generating the
7004 result in mode TMODE. But this is done only when convenient.
7005 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7006 TMODE is just a suggestion; callers must assume that
7007 the rtx returned may not have mode TMODE.
7009 Note that TARGET may have neither TMODE nor MODE. In that case, it
7010 probably will not be used.
7012 If MODIFIER is EXPAND_SUM then when EXP is an addition
7013 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7014 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7015 products as above, or REG or MEM, or constant.
7016 Ordinarily in such cases we would output mul or add instructions
7017 and then return a pseudo reg containing the sum.
7019 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7020 it also marks a label as absolutely required (it can't be dead).
7021 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7022 This is used for outputting expressions used in initializers.
7024 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7025 with a constant address even if that address is not normally legitimate.
7026 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7028 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7029 a call parameter. Such targets require special care as we haven't yet
7030 marked TARGET so that it's safe from being trashed by libcalls. We
7031 don't want to use TARGET for anything but the final result;
7032 intermediate values must go elsewhere. Additionally, calls to
7033 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7035 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7036 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7037 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7038 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7039 recursively. */
7041 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
7042 enum expand_modifier, rtx *);
7044 rtx
7045 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7046 enum expand_modifier modifier, rtx *alt_rtl)
7048 int rn = -1;
7049 rtx ret, last = NULL;
7051 /* Handle ERROR_MARK before anybody tries to access its type. */
7052 if (TREE_CODE (exp) == ERROR_MARK
7053 || (!GIMPLE_TUPLE_P (exp) && TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7055 ret = CONST0_RTX (tmode);
7056 return ret ? ret : const0_rtx;
7059 if (flag_non_call_exceptions)
7061 rn = lookup_stmt_eh_region (exp);
7062 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
7063 if (rn >= 0)
7064 last = get_last_insn ();
7067 /* If this is an expression of some kind and it has an associated line
7068 number, then emit the line number before expanding the expression.
7070 We need to save and restore the file and line information so that
7071 errors discovered during expansion are emitted with the right
7072 information. It would be better if the diagnostic routines
7073 used the file/line information embedded in the tree nodes rather
7074 than globals. */
7075 if (cfun && EXPR_HAS_LOCATION (exp))
7077 location_t saved_location = input_location;
7078 input_location = EXPR_LOCATION (exp);
7079 set_curr_insn_source_location (input_location);
7081 /* Record where the insns produced belong. */
7082 set_curr_insn_block (TREE_BLOCK (exp));
7084 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7086 input_location = saved_location;
7088 else
7090 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7093 /* If using non-call exceptions, mark all insns that may trap.
7094 expand_call() will mark CALL_INSNs before we get to this code,
7095 but it doesn't handle libcalls, and these may trap. */
7096 if (rn >= 0)
7098 rtx insn;
7099 for (insn = next_real_insn (last); insn;
7100 insn = next_real_insn (insn))
7102 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
7103 /* If we want exceptions for non-call insns, any
7104 may_trap_p instruction may throw. */
7105 && GET_CODE (PATTERN (insn)) != CLOBBER
7106 && GET_CODE (PATTERN (insn)) != USE
7107 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
7109 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
7110 REG_NOTES (insn));
7115 return ret;
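
/* Illustrative sketch, not part of the original source: the most
   common entry into the expander above, computing EXP into a fresh
   pseudo register.  Assumes EXP does not have BLKmode; the helper
   name is hypothetical.  */

static rtx
example_expand_into_pseudo (tree exp)
{
  /* Let the expander choose the natural mode and target, then make
     sure the caller gets a register.  */
  rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  return force_reg (TYPE_MODE (TREE_TYPE (exp)), val);
}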
7118 static rtx
7119 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
7120 enum expand_modifier modifier, rtx *alt_rtl)
7122 rtx op0, op1, op2, temp, decl_rtl;
7123 tree type;
7124 int unsignedp;
7125 enum machine_mode mode;
7126 enum tree_code code = TREE_CODE (exp);
7127 optab this_optab;
7128 rtx subtarget, original_target;
7129 int ignore;
7130 tree context, subexp0, subexp1;
7131 bool reduce_bit_field = false;
7132 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
7133 ? reduce_to_bit_field_precision ((expr), \
7134 target, \
7135 type) \
7136 : (expr))
7138 if (GIMPLE_STMT_P (exp))
7140 type = void_type_node;
7141 mode = VOIDmode;
7142 unsignedp = 0;
7144 else
7146 type = TREE_TYPE (exp);
7147 mode = TYPE_MODE (type);
7148 unsignedp = TYPE_UNSIGNED (type);
7150 if (lang_hooks.reduce_bit_field_operations
7151 && TREE_CODE (type) == INTEGER_TYPE
7152 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
7154 /* An operation in what may be a bit-field type needs the
7155 result to be reduced to the precision of the bit-field type,
7156 which is narrower than that of the type's mode. */
7157 reduce_bit_field = true;
7158 if (modifier == EXPAND_STACK_PARM)
7159 target = 0;
7162 /* Use subtarget as the target for operand 0 of a binary operation. */
7163 subtarget = get_subtarget (target);
7164 original_target = target;
7165 ignore = (target == const0_rtx
7166 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
7167 || code == CONVERT_EXPR || code == COND_EXPR
7168 || code == VIEW_CONVERT_EXPR)
7169 && TREE_CODE (type) == VOID_TYPE));
7171 /* If we are going to ignore this result, we need only do something
7172 if there is a side-effect somewhere in the expression. If there
7173 is, short-circuit the most common cases here. Note that we must
7174 not call expand_expr with anything but const0_rtx in case this
7175 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
7177 if (ignore)
7179 if (! TREE_SIDE_EFFECTS (exp))
7180 return const0_rtx;
7182 /* Ensure we reference a volatile object even if value is ignored, but
7183 don't do this if all we are doing is taking its address. */
7184 if (TREE_THIS_VOLATILE (exp)
7185 && TREE_CODE (exp) != FUNCTION_DECL
7186 && mode != VOIDmode && mode != BLKmode
7187 && modifier != EXPAND_CONST_ADDRESS)
7189 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
7190 if (MEM_P (temp))
7191 temp = copy_to_reg (temp);
7192 return const0_rtx;
7195 if (TREE_CODE_CLASS (code) == tcc_unary
7196 || code == COMPONENT_REF || code == INDIRECT_REF)
7197 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7198 modifier);
7200 else if (TREE_CODE_CLASS (code) == tcc_binary
7201 || TREE_CODE_CLASS (code) == tcc_comparison
7202 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
7204 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7205 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7206 return const0_rtx;
7208 else if (code == BIT_FIELD_REF)
7210 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7211 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7212 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
7213 return const0_rtx;
7216 target = 0;
7220 switch (code)
7222 case LABEL_DECL:
7224 tree function = decl_function_context (exp);
7226 temp = label_rtx (exp);
7227 temp = gen_rtx_LABEL_REF (Pmode, temp);
7229 if (function != current_function_decl
7230 && function != 0)
7231 LABEL_REF_NONLOCAL_P (temp) = 1;
7233 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
7234 return temp;
7237 case SSA_NAME:
7238 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
7239 NULL);
7241 case PARM_DECL:
7242 case VAR_DECL:
7243 /* If a static var's type was incomplete when the decl was written,
7244 but the type is complete now, lay out the decl now. */
7245 if (DECL_SIZE (exp) == 0
7246 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
7247 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
7248 layout_decl (exp, 0);
7250 /* TLS emulation hook - replace __thread vars with
7251 *__emutls_get_address (&_emutls.var). */
7252 if (! targetm.have_tls
7253 && TREE_CODE (exp) == VAR_DECL
7254 && DECL_THREAD_LOCAL_P (exp))
7256 exp = build_fold_indirect_ref (emutls_var_address (exp));
7257 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
7260 /* ... fall through ... */
7262 case FUNCTION_DECL:
7263 case RESULT_DECL:
7264 decl_rtl = DECL_RTL (exp);
7265 gcc_assert (decl_rtl);
7266 decl_rtl = copy_rtx (decl_rtl);
7268 /* Ensure variable marked as used even if it doesn't go through
7269 a parser. If it hasn't been used yet, write out an external
7270 definition. */
7271 if (! TREE_USED (exp))
7273 assemble_external (exp);
7274 TREE_USED (exp) = 1;
7277 /* Show we haven't gotten RTL for this yet. */
7278 temp = 0;
7280 /* Variables inherited from containing functions should have
7281 been lowered by this point. */
7282 context = decl_function_context (exp);
7283 gcc_assert (!context
7284 || context == current_function_decl
7285 || TREE_STATIC (exp)
7286 /* ??? C++ creates functions that are not TREE_STATIC. */
7287 || TREE_CODE (exp) == FUNCTION_DECL);
7289 /* This is the case of an array whose size is to be determined
7290 from its initializer, while the initializer is still being parsed.
7291 See expand_decl. */
7293 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
7294 temp = validize_mem (decl_rtl);
7296 /* If DECL_RTL is memory, we are in the normal case and either
7297 the address is not valid or it is not a register and -fforce-addr
7298 is specified, get the address into a register. */
7300 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
7302 if (alt_rtl)
7303 *alt_rtl = decl_rtl;
7304 decl_rtl = use_anchored_address (decl_rtl);
7305 if (modifier != EXPAND_CONST_ADDRESS
7306 && modifier != EXPAND_SUM
7307 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
7308 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
7309 temp = replace_equiv_address (decl_rtl,
7310 copy_rtx (XEXP (decl_rtl, 0)));
7313 /* If we got something, return it. But first, set the alignment
7314 if the address is a register. */
7315 if (temp != 0)
7317 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
7318 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
7320 return temp;
7323 /* If the mode of DECL_RTL does not match that of the decl, it
7324 must be a promoted value. We return a SUBREG of the wanted mode,
7325 but mark it so that we know that it was already extended. */
7327 if (REG_P (decl_rtl)
7328 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7330 enum machine_mode pmode;
7332 /* Get the signedness used for this variable. Ensure we get the
7333 same mode we got when the variable was declared. */
7334 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7335 (TREE_CODE (exp) == RESULT_DECL
7336 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7337 gcc_assert (GET_MODE (decl_rtl) == pmode);
7339 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7340 SUBREG_PROMOTED_VAR_P (temp) = 1;
7341 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7342 return temp;
7345 return decl_rtl;
7347 case INTEGER_CST:
7348 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7349 TREE_INT_CST_HIGH (exp), mode);
7351 return temp;
7353 case VECTOR_CST:
7355 tree tmp = NULL_TREE;
7356 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7357 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
7358 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
7359 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
7360 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
7361 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
7362 return const_vector_from_tree (exp);
7363 if (GET_MODE_CLASS (mode) == MODE_INT)
7365 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7366 if (type_for_mode)
7367 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7369 if (!tmp)
7370 tmp = build_constructor_from_list (type,
7371 TREE_VECTOR_CST_ELTS (exp));
7372 return expand_expr (tmp, ignore ? const0_rtx : target,
7373 tmode, modifier);
7376 case CONST_DECL:
7377 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7379 case REAL_CST:
7380 /* If optimized, generate immediate CONST_DOUBLE
7381 which will be turned into memory by reload if necessary.
7383 We used to force a register so that loop.c could see it. But
7384 this does not allow gen_* patterns to perform optimizations with
7385 the constants. It also produces two insns in cases like "x = 1.0;".
7386 On most machines, floating-point constants are not permitted in
7387 many insns, so we'd end up copying it to a register in any case.
7389 Now, we do the copying in expand_binop, if appropriate. */
7390 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7391 TYPE_MODE (TREE_TYPE (exp)));
7393 case FIXED_CST:
7394 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
7395 TYPE_MODE (TREE_TYPE (exp)));
7397 case COMPLEX_CST:
7398 /* Handle evaluating a complex constant in a CONCAT target. */
7399 if (original_target && GET_CODE (original_target) == CONCAT)
7401 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7402 rtx rtarg, itarg;
7404 rtarg = XEXP (original_target, 0);
7405 itarg = XEXP (original_target, 1);
7407 /* Move the real and imaginary parts separately. */
7408 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
7409 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
7411 if (op0 != rtarg)
7412 emit_move_insn (rtarg, op0);
7413 if (op1 != itarg)
7414 emit_move_insn (itarg, op1);
7416 return original_target;
7419 /* ... fall through ... */
7421 case STRING_CST:
7422 temp = expand_expr_constant (exp, 1, modifier);
7424 /* temp contains a constant address.
7425 On RISC machines where a constant address isn't valid,
7426 make some insns to get that address into a register. */
7427 if (modifier != EXPAND_CONST_ADDRESS
7428 && modifier != EXPAND_INITIALIZER
7429 && modifier != EXPAND_SUM
7430 && (! memory_address_p (mode, XEXP (temp, 0))
7431 || flag_force_addr))
7432 return replace_equiv_address (temp,
7433 copy_rtx (XEXP (temp, 0)));
7434 return temp;
7436 case SAVE_EXPR:
7438 tree val = TREE_OPERAND (exp, 0);
7439 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7441 if (!SAVE_EXPR_RESOLVED_P (exp))
7443 /* We can indeed still hit this case, typically via builtin
7444 expanders calling save_expr immediately before expanding
7445 something. Assume this means that we only have to deal
7446 with non-BLKmode values. */
7447 gcc_assert (GET_MODE (ret) != BLKmode);
7449 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7450 DECL_ARTIFICIAL (val) = 1;
7451 DECL_IGNORED_P (val) = 1;
7452 TREE_OPERAND (exp, 0) = val;
7453 SAVE_EXPR_RESOLVED_P (exp) = 1;
7455 if (!CONSTANT_P (ret))
7456 ret = copy_to_reg (ret);
7457 SET_DECL_RTL (val, ret);
7460 return ret;
7463 case GOTO_EXPR:
7464 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7465 expand_goto (TREE_OPERAND (exp, 0));
7466 else
7467 expand_computed_goto (TREE_OPERAND (exp, 0));
7468 return const0_rtx;
7470 case CONSTRUCTOR:
7471 /* If we don't need the result, just ensure we evaluate any
7472 subexpressions. */
7473 if (ignore)
7475 unsigned HOST_WIDE_INT idx;
7476 tree value;
7478 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7479 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
7481 return const0_rtx;
7484 return expand_constructor (exp, target, modifier, false);
7486 case MISALIGNED_INDIRECT_REF:
7487 case ALIGN_INDIRECT_REF:
7488 case INDIRECT_REF:
7490 tree exp1 = TREE_OPERAND (exp, 0);
7492 if (modifier != EXPAND_WRITE)
7494 tree t;
7496 t = fold_read_from_constant_string (exp);
7497 if (t)
7498 return expand_expr (t, target, tmode, modifier);
7501 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7502 op0 = memory_address (mode, op0);
7504 if (code == ALIGN_INDIRECT_REF)
7506 int align = TYPE_ALIGN_UNIT (type);
7507 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7508 op0 = memory_address (mode, op0);
7511 temp = gen_rtx_MEM (mode, op0);
7513 set_mem_attributes (temp, exp, 0);
7515 /* Resolve the misalignment now, so that we don't have to remember
7516 to resolve it later. Of course, this only works for reads. */
7517 /* ??? When we get around to supporting writes, we'll have to handle
7518 this in store_expr directly. The vectorizer isn't generating
7519 those yet, however. */
7520 if (code == MISALIGNED_INDIRECT_REF)
7522 int icode;
7523 rtx reg, insn;
7525 gcc_assert (modifier == EXPAND_NORMAL
7526 || modifier == EXPAND_STACK_PARM);
7528 /* The vectorizer should have already checked the mode. */
7529 icode = optab_handler (movmisalign_optab, mode)->insn_code;
7530 gcc_assert (icode != CODE_FOR_nothing);
7532 /* We've already validated the memory, and we're creating a
7533 new pseudo destination. The predicates really can't fail. */
7534 reg = gen_reg_rtx (mode);
7536 /* Nor can the insn generator. */
7537 insn = GEN_FCN (icode) (reg, temp);
7538 emit_insn (insn);
7540 return reg;
7543 return temp;
7546 case TARGET_MEM_REF:
7548 struct mem_address addr;
7550 get_address_description (exp, &addr);
7551 op0 = addr_for_mem_ref (&addr, true);
7552 op0 = memory_address (mode, op0);
7553 temp = gen_rtx_MEM (mode, op0);
7554 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7556 return temp;
7558 case ARRAY_REF:
7561 tree array = TREE_OPERAND (exp, 0);
7562 tree index = TREE_OPERAND (exp, 1);
7564 /* Fold an expression like: "foo"[2].
7565 This is not done in fold so it won't happen inside &.
7566 Don't fold if this is for wide characters since it's too
7567 difficult to do correctly and this is a very rare case. */
7569 if (modifier != EXPAND_CONST_ADDRESS
7570 && modifier != EXPAND_INITIALIZER
7571 && modifier != EXPAND_MEMORY)
7573 tree t = fold_read_from_constant_string (exp);
7575 if (t)
7576 return expand_expr (t, target, tmode, modifier);
7579 /* If this is a constant index into a constant array,
7580 just get the value from the array. Handle both the cases when
7581 we have an explicit constructor and when our operand is a variable
7582 that was declared const. */
7584 if (modifier != EXPAND_CONST_ADDRESS
7585 && modifier != EXPAND_INITIALIZER
7586 && modifier != EXPAND_MEMORY
7587 && TREE_CODE (array) == CONSTRUCTOR
7588 && ! TREE_SIDE_EFFECTS (array)
7589 && TREE_CODE (index) == INTEGER_CST)
7591 unsigned HOST_WIDE_INT ix;
7592 tree field, value;
7594 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7595 field, value)
7596 if (tree_int_cst_equal (field, index))
7598 if (!TREE_SIDE_EFFECTS (value))
7599 return expand_expr (fold (value), target, tmode, modifier);
7600 break;
7604 else if (optimize >= 1
7605 && modifier != EXPAND_CONST_ADDRESS
7606 && modifier != EXPAND_INITIALIZER
7607 && modifier != EXPAND_MEMORY
7608 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7609 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7610 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7611 && targetm.binds_local_p (array))
7613 if (TREE_CODE (index) == INTEGER_CST)
7615 tree init = DECL_INITIAL (array);
7617 if (TREE_CODE (init) == CONSTRUCTOR)
7619 unsigned HOST_WIDE_INT ix;
7620 tree field, value;
7622 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7623 field, value)
7624 if (tree_int_cst_equal (field, index))
7626 if (TREE_SIDE_EFFECTS (value))
7627 break;
7629 if (TREE_CODE (value) == CONSTRUCTOR)
7631 /* If VALUE is a CONSTRUCTOR, this
7632 optimization is only useful if
7633 this doesn't store the CONSTRUCTOR
7634 into memory. If it does, it is more
7635 efficient to just load the data from
7636 the array directly. */
7637 rtx ret = expand_constructor (value, target,
7638 modifier, true);
7639 if (ret == NULL_RTX)
7640 break;
7643 return expand_expr (fold (value), target, tmode,
7644 modifier);
7647 else if (TREE_CODE (init) == STRING_CST)
7649 tree index1 = index;
7650 tree low_bound = array_ref_low_bound (exp);
7651 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7653 /* Optimize the special-case of a zero lower bound.
7655 We convert the low_bound to sizetype to avoid some problems
7656 with constant folding. (E.g. suppose the lower bound is 1,
7657 and its mode is QI. Without the conversion, (ARRAY
7658 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7659 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7661 if (! integer_zerop (low_bound))
7662 index1 = size_diffop (index1, fold_convert (sizetype,
7663 low_bound));
7665 if (0 > compare_tree_int (index1,
7666 TREE_STRING_LENGTH (init)))
7668 tree type = TREE_TYPE (TREE_TYPE (init));
7669 enum machine_mode mode = TYPE_MODE (type);
7671 if (GET_MODE_CLASS (mode) == MODE_INT
7672 && GET_MODE_SIZE (mode) == 1)
7673 return gen_int_mode (TREE_STRING_POINTER (init)
7674 [TREE_INT_CST_LOW (index1)],
7675 mode);
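/* Worked example (editor's sketch): with

       static const char msg[] = "foo";

   a use of msg[2] reaches this point at -O1 or above with INIT being the
   STRING_CST "foo" and INDEX1 == 2, so the whole ARRAY_REF folds to the
   constant 'o' via gen_int_mode, provided the element mode is a one-byte
   integer mode as checked just above.  */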
7681 goto normal_inner_ref;
7683 case COMPONENT_REF:
7684 /* If the operand is a CONSTRUCTOR, we can just extract the
7685 appropriate field if it is present. */
7686 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7688 unsigned HOST_WIDE_INT idx;
7689 tree field, value;
7691 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7692 idx, field, value)
7693 if (field == TREE_OPERAND (exp, 1)
7694 /* We can normally use the value of the field in the
7695 CONSTRUCTOR. However, if this is a bitfield in
7696 an integral mode that we can fit in a HOST_WIDE_INT,
7697 we must mask only the number of bits in the bitfield,
7698 since this is done implicitly by the constructor. If
7699 the bitfield does not meet either of those conditions,
7700 we can't do this optimization. */
7701 && (! DECL_BIT_FIELD (field)
7702 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7703 && (GET_MODE_BITSIZE (DECL_MODE (field))
7704 <= HOST_BITS_PER_WIDE_INT))))
7706 if (DECL_BIT_FIELD (field)
7707 && modifier == EXPAND_STACK_PARM)
7708 target = 0;
7709 op0 = expand_expr (value, target, tmode, modifier);
7710 if (DECL_BIT_FIELD (field))
7712 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7713 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7715 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7717 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7718 op0 = expand_and (imode, op0, op1, target);
7720 else
7722 tree count
7723 = build_int_cst (NULL_TREE,
7724 GET_MODE_BITSIZE (imode) - bitsize);
7726 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7727 target, 0);
7728 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7729 target, 0);
7733 return op0;
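/* Worked example of the masking above (editor's sketch): for a 3-bit field
   whose CONSTRUCTOR value may carry extra high bits, bitsize == 3 and IMODE
   is the field's integral mode, say 32-bit SImode:

       unsigned field:  op0 & ((1 << 3) - 1)            -> keep the low 3 bits
       signed field:    (op0 << (32 - 3)) >> (32 - 3)   -> arithmetic shift,
                                                            sign-extends bit 2

   which is exactly the expand_and / double expand_shift pair emitted above.  */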
7736 goto normal_inner_ref;
7738 case BIT_FIELD_REF:
7739 case ARRAY_RANGE_REF:
7740 normal_inner_ref:
7742 enum machine_mode mode1;
7743 HOST_WIDE_INT bitsize, bitpos;
7744 tree offset;
7745 int volatilep = 0;
7746 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7747 &mode1, &unsignedp, &volatilep, true);
7748 rtx orig_op0;
7750 /* If we got back the original object, something is wrong. Perhaps
7751 we are evaluating an expression too early. In any event, don't
7752 infinitely recurse. */
7753 gcc_assert (tem != exp);
7755 /* If TEM's type is a union of variable size, pass TARGET to the inner
7756 computation, since it will need a temporary and TARGET is known
7757 to be usable for that. This occurs in unchecked conversion in Ada. */
7759 orig_op0 = op0
7760 = expand_expr (tem,
7761 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7762 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7763 != INTEGER_CST)
7764 && modifier != EXPAND_STACK_PARM
7765 ? target : NULL_RTX),
7766 VOIDmode,
7767 (modifier == EXPAND_INITIALIZER
7768 || modifier == EXPAND_CONST_ADDRESS
7769 || modifier == EXPAND_STACK_PARM)
7770 ? modifier : EXPAND_NORMAL);
7772 /* If this is a constant, put it into a register if it is a legitimate
7773 constant, OFFSET is 0, and we won't try to extract outside the
7774 register (in case we were passed a partially uninitialized object
7775 or a view_conversion to a larger size). Force the constant to
7776 memory otherwise. */
7777 if (CONSTANT_P (op0))
7779 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7780 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7781 && offset == 0
7782 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7783 op0 = force_reg (mode, op0);
7784 else
7785 op0 = validize_mem (force_const_mem (mode, op0));
7788 /* Otherwise, if this object is not in memory and we either have an
7789 offset, a BLKmode result, or a reference outside the object, put it
7790 there. Such cases can occur in Ada if we have unchecked conversion
7791 of an expression from a scalar type to an array or record type or
7792 for an ARRAY_RANGE_REF whose type is BLKmode. */
7793 else if (!MEM_P (op0)
7794 && (offset != 0
7795 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7796 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7798 tree nt = build_qualified_type (TREE_TYPE (tem),
7799 (TYPE_QUALS (TREE_TYPE (tem))
7800 | TYPE_QUAL_CONST));
7801 rtx memloc = assign_temp (nt, 1, 1, 1);
7803 emit_move_insn (memloc, op0);
7804 op0 = memloc;
7807 if (offset != 0)
7809 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7810 EXPAND_SUM);
7812 gcc_assert (MEM_P (op0));
7814 #ifdef POINTERS_EXTEND_UNSIGNED
7815 if (GET_MODE (offset_rtx) != Pmode)
7816 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7817 #else
7818 if (GET_MODE (offset_rtx) != ptr_mode)
7819 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7820 #endif
7822 if (GET_MODE (op0) == BLKmode
7823 /* A constant address in OP0 can have VOIDmode, we must
7824 not try to call force_reg in that case. */
7825 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7826 && bitsize != 0
7827 && (bitpos % bitsize) == 0
7828 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7829 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7831 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7832 bitpos = 0;
7835 op0 = offset_address (op0, offset_rtx,
7836 highest_pow2_factor (offset));
7839 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7840 record its alignment as BIGGEST_ALIGNMENT. */
7841 if (MEM_P (op0) && bitpos == 0 && offset != 0
7842 && is_aligning_offset (offset, tem))
7843 set_mem_align (op0, BIGGEST_ALIGNMENT);
7845 /* Don't forget about volatility even if this is a bitfield. */
7846 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7848 if (op0 == orig_op0)
7849 op0 = copy_rtx (op0);
7851 MEM_VOLATILE_P (op0) = 1;
7854 /* The following code doesn't handle CONCAT.
7855 Assume only bitpos == 0 can be used for CONCAT, due to
7856 one-element arrays having the same mode as their element. */
7857 if (GET_CODE (op0) == CONCAT)
7859 gcc_assert (bitpos == 0
7860 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7861 return op0;
7864 /* In cases where an aligned union has an unaligned object
7865 as a field, we might be extracting a BLKmode value from
7866 an integer-mode (e.g., SImode) object. Handle this case
7867 by doing the extract into an object as wide as the field
7868 (which we know to be the width of a basic mode), then
7869 storing into memory, and changing the mode to BLKmode. */
7870 if (mode1 == VOIDmode
7871 || REG_P (op0) || GET_CODE (op0) == SUBREG
7872 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7873 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7874 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7875 && modifier != EXPAND_CONST_ADDRESS
7876 && modifier != EXPAND_INITIALIZER)
7877 /* If the field isn't aligned enough to fetch as a memref,
7878 fetch it as a bit field. */
7879 || (mode1 != BLKmode
7880 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7881 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7882 || (MEM_P (op0)
7883 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7884 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7885 && ((modifier == EXPAND_CONST_ADDRESS
7886 || modifier == EXPAND_INITIALIZER)
7887 ? STRICT_ALIGNMENT
7888 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7889 || (bitpos % BITS_PER_UNIT != 0)))
7890 /* If the type and the field are a constant size and the
7891 size of the type isn't the same size as the bitfield,
7892 we must use bitfield operations. */
7893 || (bitsize >= 0
7894 && TYPE_SIZE (TREE_TYPE (exp))
7895 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7896 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7897 bitsize)))
7899 enum machine_mode ext_mode = mode;
7901 if (ext_mode == BLKmode
7902 && ! (target != 0 && MEM_P (op0)
7903 && MEM_P (target)
7904 && bitpos % BITS_PER_UNIT == 0))
7905 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7907 if (ext_mode == BLKmode)
7909 if (target == 0)
7910 target = assign_temp (type, 0, 1, 1);
7912 if (bitsize == 0)
7913 return target;
7915 /* In this case, BITPOS must start at a byte boundary and
7916 TARGET, if specified, must be a MEM. */
7917 gcc_assert (MEM_P (op0)
7918 && (!target || MEM_P (target))
7919 && !(bitpos % BITS_PER_UNIT));
7921 emit_block_move (target,
7922 adjust_address (op0, VOIDmode,
7923 bitpos / BITS_PER_UNIT),
7924 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7925 / BITS_PER_UNIT),
7926 (modifier == EXPAND_STACK_PARM
7927 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7929 return target;
7932 op0 = validize_mem (op0);
7934 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7935 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7937 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7938 (modifier == EXPAND_STACK_PARM
7939 ? NULL_RTX : target),
7940 ext_mode, ext_mode);
7942 /* If the result is a record type and BITSIZE is narrower than
7943 the mode of OP0, an integral mode, and this is a big endian
7944 machine, we must put the field into the high-order bits. */
7945 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7946 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7947 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7948 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7949 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7950 - bitsize),
7951 op0, 1);
7953 /* If the result type is BLKmode, store the data into a temporary
7954 of the appropriate type, but with the mode corresponding to the
7955 mode for the data we have (op0's mode). It's tempting to make
7956 this a constant type, since we know it's only being stored once,
7957 but that can cause problems if we are taking the address of this
7958 COMPONENT_REF because the MEM of any reference via that address
7959 will have flags corresponding to the type, which will not
7960 necessarily be constant. */
7961 if (mode == BLKmode)
7963 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
7964 rtx new;
7966 /* If the reference doesn't use the alias set of its type,
7967 we cannot create the temporary using that type. */
7968 if (component_uses_parent_alias_set (exp))
7970 new = assign_stack_local (ext_mode, size, 0);
7971 set_mem_alias_set (new, get_alias_set (exp));
7973 else
7974 new = assign_stack_temp_for_type (ext_mode, size, 0, type);
7976 emit_move_insn (new, op0);
7977 op0 = copy_rtx (new);
7978 PUT_MODE (op0, BLKmode);
7979 set_mem_attributes (op0, exp, 1);
7982 return op0;
7985 /* If the result is BLKmode, use that to access the object
7986 now as well. */
7987 if (mode == BLKmode)
7988 mode1 = BLKmode;
7990 /* Get a reference to just this component. */
7991 if (modifier == EXPAND_CONST_ADDRESS
7992 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7993 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7994 else
7995 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7997 if (op0 == orig_op0)
7998 op0 = copy_rtx (op0);
8000 set_mem_attributes (op0, exp, 0);
8001 if (REG_P (XEXP (op0, 0)))
8002 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
8004 MEM_VOLATILE_P (op0) |= volatilep;
8005 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
8006 || modifier == EXPAND_CONST_ADDRESS
8007 || modifier == EXPAND_INITIALIZER)
8008 return op0;
8009 else if (target == 0)
8010 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8012 convert_move (target, op0, unsignedp);
8013 return target;
8016 case OBJ_TYPE_REF:
8017 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
8019 case CALL_EXPR:
8020 /* All valid uses of __builtin_va_arg_pack () are removed during
8021 inlining. */
8022 if (CALL_EXPR_VA_ARG_PACK (exp))
8023 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
8025 tree fndecl = get_callee_fndecl (exp), attr;
8027 if (fndecl
8028 && (attr = lookup_attribute ("error",
8029 DECL_ATTRIBUTES (fndecl))) != NULL)
8030 error ("%Kcall to %qs declared with attribute error: %s",
8031 exp, lang_hooks.decl_printable_name (fndecl, 1),
8032 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
8033 if (fndecl
8034 && (attr = lookup_attribute ("warning",
8035 DECL_ATTRIBUTES (fndecl))) != NULL)
8036 warning (0, "%Kcall to %qs declared with attribute warning: %s",
8037 exp, lang_hooks.decl_printable_name (fndecl, 1),
8038 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
8040 /* Check for a built-in function. */
8041 if (fndecl && DECL_BUILT_IN (fndecl))
8043 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_FRONTEND)
8044 return lang_hooks.expand_expr (exp, original_target,
8045 tmode, modifier, alt_rtl);
8046 else
8047 return expand_builtin (exp, target, subtarget, tmode, ignore);
8050 return expand_call (exp, target, ignore);
8052 case NON_LVALUE_EXPR:
8053 case NOP_EXPR:
8054 case CONVERT_EXPR:
8055 if (TREE_OPERAND (exp, 0) == error_mark_node)
8056 return const0_rtx;
8058 if (TREE_CODE (type) == UNION_TYPE)
8060 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
8062 /* If both input and output are BLKmode, this conversion isn't doing
8063 anything except possibly changing memory attribute. */
8064 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8066 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
8067 modifier);
8069 result = copy_rtx (result);
8070 set_mem_attributes (result, exp, 0);
8071 return result;
8074 if (target == 0)
8076 if (TYPE_MODE (type) != BLKmode)
8077 target = gen_reg_rtx (TYPE_MODE (type));
8078 else
8079 target = assign_temp (type, 0, 1, 1);
8082 if (MEM_P (target))
8083 /* Store data into beginning of memory target. */
8084 store_expr (TREE_OPERAND (exp, 0),
8085 adjust_address (target, TYPE_MODE (valtype), 0),
8086 modifier == EXPAND_STACK_PARM,
8087 false);
8089 else
8091 gcc_assert (REG_P (target));
8093 /* Store this field into a union of the proper type. */
8094 store_field (target,
8095 MIN ((int_size_in_bytes (TREE_TYPE
8096 (TREE_OPERAND (exp, 0)))
8097 * BITS_PER_UNIT),
8098 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8099 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
8100 type, 0, false);
8103 /* Return the entire union. */
8104 return target;
8107 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8109 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
8110 modifier);
8112 /* If the signedness of the conversion differs and OP0 is
8113 a promoted SUBREG, clear that indication since we now
8114 have to do the proper extension. */
8115 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
8116 && GET_CODE (op0) == SUBREG)
8117 SUBREG_PROMOTED_VAR_P (op0) = 0;
8119 return REDUCE_BIT_FIELD (op0);
8122 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
8123 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8124 if (GET_MODE (op0) == mode)
8127 /* If OP0 is a constant, just convert it into the proper mode. */
8128 else if (CONSTANT_P (op0))
8130 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8131 enum machine_mode inner_mode = TYPE_MODE (inner_type);
8133 if (modifier == EXPAND_INITIALIZER)
8134 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8135 subreg_lowpart_offset (mode,
8136 inner_mode));
8137 else
8138 op0 = convert_modes (mode, inner_mode, op0,
8139 TYPE_UNSIGNED (inner_type));
8142 else if (modifier == EXPAND_INITIALIZER)
8143 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8145 else if (target == 0)
8146 op0 = convert_to_mode (mode, op0,
8147 TYPE_UNSIGNED (TREE_TYPE
8148 (TREE_OPERAND (exp, 0))));
8149 else
8151 convert_move (target, op0,
8152 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8153 op0 = target;
8156 return REDUCE_BIT_FIELD (op0);
8158 case VIEW_CONVERT_EXPR:
8159 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
8161 /* If the input and output modes are both the same, we are done. */
8162 if (TYPE_MODE (type) == GET_MODE (op0))
8164 /* If neither mode is BLKmode, and both modes are the same size
8165 then we can use gen_lowpart. */
8166 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
8167 && GET_MODE_SIZE (TYPE_MODE (type))
8168 == GET_MODE_SIZE (GET_MODE (op0)))
8170 if (GET_CODE (op0) == SUBREG)
8171 op0 = force_reg (GET_MODE (op0), op0);
8172 op0 = gen_lowpart (TYPE_MODE (type), op0);
8174 /* If both modes are integral, then we can convert from one to the
8175 other. */
8176 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
8177 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
8178 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
8179 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8180 /* As a last resort, spill op0 to memory, and reload it in a
8181 different mode. */
8182 else if (!MEM_P (op0))
8184 /* If the operand is not a MEM, force it into memory. Since we
8185 are going to be changing the mode of the MEM, don't call
8186 force_const_mem for constants because we don't allow pool
8187 constants to change mode. */
8188 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8190 gcc_assert (!TREE_ADDRESSABLE (exp));
8192 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
8193 target
8194 = assign_stack_temp_for_type
8195 (TYPE_MODE (inner_type),
8196 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
8198 emit_move_insn (target, op0);
8199 op0 = target;
8202 /* At this point, OP0 is in the correct mode. If the output type is such
8203 that the operand is known to be aligned, indicate that it is.
8204 Otherwise, we need only be concerned about alignment for non-BLKmode
8205 results. */
8206 if (MEM_P (op0))
8208 op0 = copy_rtx (op0);
8210 if (TYPE_ALIGN_OK (type))
8211 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8212 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8213 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8215 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8216 HOST_WIDE_INT temp_size
8217 = MAX (int_size_in_bytes (inner_type),
8218 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8219 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8220 temp_size, 0, type);
8221 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8223 gcc_assert (!TREE_ADDRESSABLE (exp));
8225 if (GET_MODE (op0) == BLKmode)
8226 emit_block_move (new_with_op0_mode, op0,
8227 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8228 (modifier == EXPAND_STACK_PARM
8229 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8230 else
8231 emit_move_insn (new_with_op0_mode, op0);
8233 op0 = new;
8236 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8239 return op0;
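/* Editor's sketch of the strategy above (not in the original): for a
   same-size reinterpretation such as viewing a 'float' as a 32-bit 'int',
   both modes are non-BLKmode and equal in size, so gen_lowpart handles it
   directly.  Integral-to-integral views of differing size go through
   convert_modes.  Everything else (e.g. a BLKmode aggregate viewed as an
   integer) is spilled to a stack temporary and re-read in the target mode,
   with the alignment copy above guarding strict-alignment targets.  */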
8241 case POINTER_PLUS_EXPR:
8242 /* Even though the sizetype mode and the pointer's mode can be different,
8243 expand is able to handle this correctly and get the correct result out
8244 of the PLUS_EXPR code. */
8245 case PLUS_EXPR:
8247 /* Check if this is a case for multiplication and addition. */
8248 if ((TREE_CODE (type) == INTEGER_TYPE
8249 || TREE_CODE (type) == FIXED_POINT_TYPE)
8250 && TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR)
8252 tree subsubexp0, subsubexp1;
8253 enum tree_code code0, code1, this_code;
8255 subexp0 = TREE_OPERAND (exp, 0);
8256 subsubexp0 = TREE_OPERAND (subexp0, 0);
8257 subsubexp1 = TREE_OPERAND (subexp0, 1);
8258 code0 = TREE_CODE (subsubexp0);
8259 code1 = TREE_CODE (subsubexp1);
8260 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8261 : FIXED_CONVERT_EXPR;
8262 if (code0 == this_code && code1 == this_code
8263 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8264 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8265 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8266 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8267 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8268 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8270 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8271 enum machine_mode innermode = TYPE_MODE (op0type);
8272 bool zextend_p = TYPE_UNSIGNED (op0type);
8273 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
8274 if (sat_p == 0)
8275 this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
8276 else
8277 this_optab = zextend_p ? usmadd_widen_optab
8278 : ssmadd_widen_optab;
8279 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8280 && (optab_handler (this_optab, mode)->insn_code
8281 != CODE_FOR_nothing))
8283 expand_operands (TREE_OPERAND (subsubexp0, 0),
8284 TREE_OPERAND (subsubexp1, 0),
8285 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8286 op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget,
8287 VOIDmode, EXPAND_NORMAL);
8288 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8289 target, unsignedp);
8290 gcc_assert (temp);
8291 return REDUCE_BIT_FIELD (temp);
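/* Example of the pattern matched above (editor's sketch): with 32-bit 'int'
   and 64-bit 'long long',

       long long acc;  int a, b;
       acc = (long long) a * (long long) b + acc;

   is a PLUS_EXPR whose first operand is a MULT_EXPR of two widening NOP_EXPRs
   from the same narrower precision, so on a target providing
   smadd_widen_optab (a multiply-accumulate pattern) the whole expression is
   emitted as one widening madd instead of a widening multiply followed by an
   add.  The FIXED_CONVERT_EXPR branch does the same for fixed-point types.  */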
8296 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8297 something else, make sure we add the register to the constant and
8298 then to the other thing. This case can occur during strength
8299 reduction and doing it this way will produce better code if the
8300 frame pointer or argument pointer is eliminated.
8302 fold-const.c will ensure that the constant is always in the inner
8303 PLUS_EXPR, so the only case we need to do anything about is if
8304 sp, ap, or fp is our second argument, in which case we must swap
8305 the innermost first argument and our second argument. */
8307 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8308 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8309 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
8310 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8311 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8312 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8314 tree t = TREE_OPERAND (exp, 1);
8316 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8317 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8320 /* If the result is to be ptr_mode and we are adding an integer to
8321 something, we might be forming a constant. So try to use
8322 plus_constant. If it produces a sum and we can't accept it,
8323 use force_operand. This allows P = &ARR[const] to generate
8324 efficient code on machines where a SYMBOL_REF is not a valid
8325 address.
8327 If this is an EXPAND_SUM call, always return the sum. */
8328 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8329 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8331 if (modifier == EXPAND_STACK_PARM)
8332 target = 0;
8333 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8334 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8335 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8337 rtx constant_part;
8339 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8340 EXPAND_SUM);
8341 /* Use immed_double_const to ensure that the constant is
8342 truncated according to the mode of OP1, then sign extended
8343 to a HOST_WIDE_INT. Using the constant directly can result
8344 in non-canonical RTL in a 64x32 cross compile. */
8345 constant_part
8346 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8347 (HOST_WIDE_INT) 0,
8348 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8349 op1 = plus_constant (op1, INTVAL (constant_part));
8350 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8351 op1 = force_operand (op1, target);
8352 return REDUCE_BIT_FIELD (op1);
8355 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8356 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8357 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8359 rtx constant_part;
8361 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8362 (modifier == EXPAND_INITIALIZER
8363 ? EXPAND_INITIALIZER : EXPAND_SUM));
8364 if (! CONSTANT_P (op0))
8366 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8367 VOIDmode, modifier);
8368 /* Return a PLUS if modifier says it's OK. */
8369 if (modifier == EXPAND_SUM
8370 || modifier == EXPAND_INITIALIZER)
8371 return simplify_gen_binary (PLUS, mode, op0, op1);
8372 goto binop2;
8374 /* Use immed_double_const to ensure that the constant is
8375 truncated according to the mode of OP0, then sign extended
8376 to a HOST_WIDE_INT. Using the constant directly can result
8377 in non-canonical RTL in a 64x32 cross compile. */
8378 constant_part
8379 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8380 (HOST_WIDE_INT) 0,
8381 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8382 op0 = plus_constant (op0, INTVAL (constant_part));
8383 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8384 op0 = force_operand (op0, target);
8385 return REDUCE_BIT_FIELD (op0);
8389 /* No sense saving up arithmetic to be done
8390 if it's all in the wrong mode to form part of an address.
8391 And force_operand won't know whether to sign-extend or
8392 zero-extend. */
8393 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8394 || mode != ptr_mode)
8396 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8397 subtarget, &op0, &op1, 0);
8398 if (op0 == const0_rtx)
8399 return op1;
8400 if (op1 == const0_rtx)
8401 return op0;
8402 goto binop2;
8405 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8406 subtarget, &op0, &op1, modifier);
8407 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8409 case MINUS_EXPR:
8410 /* Check if this is a case for multiplication and subtraction. */
8411 if ((TREE_CODE (type) == INTEGER_TYPE
8412 || TREE_CODE (type) == FIXED_POINT_TYPE)
8413 && TREE_CODE (TREE_OPERAND (exp, 1)) == MULT_EXPR)
8415 tree subsubexp0, subsubexp1;
8416 enum tree_code code0, code1, this_code;
8418 subexp1 = TREE_OPERAND (exp, 1);
8419 subsubexp0 = TREE_OPERAND (subexp1, 0);
8420 subsubexp1 = TREE_OPERAND (subexp1, 1);
8421 code0 = TREE_CODE (subsubexp0);
8422 code1 = TREE_CODE (subsubexp1);
8423 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8424 : FIXED_CONVERT_EXPR;
8425 if (code0 == this_code && code1 == this_code
8426 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8427 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8428 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8429 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8430 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8431 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8433 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8434 enum machine_mode innermode = TYPE_MODE (op0type);
8435 bool zextend_p = TYPE_UNSIGNED (op0type);
8436 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
8437 if (sat_p == 0)
8438 this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
8439 else
8440 this_optab = zextend_p ? usmsub_widen_optab
8441 : ssmsub_widen_optab;
8442 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8443 && (optab_handler (this_optab, mode)->insn_code
8444 != CODE_FOR_nothing))
8446 expand_operands (TREE_OPERAND (subsubexp0, 0),
8447 TREE_OPERAND (subsubexp1, 0),
8448 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8449 op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8450 VOIDmode, EXPAND_NORMAL);
8451 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8452 target, unsignedp);
8453 gcc_assert (temp);
8454 return REDUCE_BIT_FIELD (temp);
8459 /* For initializers, we are allowed to return a MINUS of two
8460 symbolic constants. Here we handle all cases when both operands
8461 are constant. */
8462 /* Handle difference of two symbolic constants,
8463 for the sake of an initializer. */
8464 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8465 && really_constant_p (TREE_OPERAND (exp, 0))
8466 && really_constant_p (TREE_OPERAND (exp, 1)))
8468 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8469 NULL_RTX, &op0, &op1, modifier);
8471 /* If the last operand is a CONST_INT, use plus_constant of
8472 the negated constant. Else make the MINUS. */
8473 if (GET_CODE (op1) == CONST_INT)
8474 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8475 else
8476 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8479 /* No sense saving up arithmetic to be done
8480 if it's all in the wrong mode to form part of an address.
8481 And force_operand won't know whether to sign-extend or
8482 zero-extend. */
8483 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8484 || mode != ptr_mode)
8485 goto binop;
8487 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8488 subtarget, &op0, &op1, modifier);
8490 /* Convert A - const to A + (-const). */
8491 if (GET_CODE (op1) == CONST_INT)
8493 op1 = negate_rtx (mode, op1);
8494 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8497 goto binop2;
8499 case MULT_EXPR:
8500 /* If this is a fixed-point operation, then we cannot use the code
8501 below because "expand_mult" doesn't support sat/no-sat fixed-point
8502 multiplications. */
8503 if (ALL_FIXED_POINT_MODE_P (mode))
8504 goto binop;
8506 /* If first operand is constant, swap them.
8507 Thus the following special case checks need only
8508 check the second operand. */
8509 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8511 tree t1 = TREE_OPERAND (exp, 0);
8512 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8513 TREE_OPERAND (exp, 1) = t1;
8516 /* Attempt to return something suitable for generating an
8517 indexed address, for machines that support that. */
8519 if (modifier == EXPAND_SUM && mode == ptr_mode
8520 && host_integerp (TREE_OPERAND (exp, 1), 0))
8522 tree exp1 = TREE_OPERAND (exp, 1);
8524 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8525 EXPAND_SUM);
8527 if (!REG_P (op0))
8528 op0 = force_operand (op0, NULL_RTX);
8529 if (!REG_P (op0))
8530 op0 = copy_to_mode_reg (mode, op0);
8532 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8533 gen_int_mode (tree_low_cst (exp1, 0),
8534 TYPE_MODE (TREE_TYPE (exp1)))));
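/* Editor's sketch (not in the original): this EXPAND_SUM path is roughly what
   lets an address computation like &p[i], i.e. p + i * 4 with 4-byte
   elements, come back as (plus (reg p) (mult (reg i) (const_int 4))), so that
   memory_address can later match a scaled-index addressing mode instead of
   materializing the product in a separate register.  */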
8537 if (modifier == EXPAND_STACK_PARM)
8538 target = 0;
8540 /* Check for multiplying things that have been extended
8541 from a narrower type. If this machine supports multiplying
8542 in that narrower type with a result in the desired type,
8543 do it that way, and avoid the explicit type-conversion. */
8545 subexp0 = TREE_OPERAND (exp, 0);
8546 subexp1 = TREE_OPERAND (exp, 1);
8547 /* First, check if we have a multiplication of one signed and one
8548 unsigned operand. */
8549 if (TREE_CODE (subexp0) == NOP_EXPR
8550 && TREE_CODE (subexp1) == NOP_EXPR
8551 && TREE_CODE (type) == INTEGER_TYPE
8552 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8553 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8554 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8555 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8556 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8557 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8559 enum machine_mode innermode
8560 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8561 this_optab = usmul_widen_optab;
8562 if (mode == GET_MODE_WIDER_MODE (innermode))
8564 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8566 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8567 expand_operands (TREE_OPERAND (subexp0, 0),
8568 TREE_OPERAND (subexp1, 0),
8569 NULL_RTX, &op0, &op1, 0);
8570 else
8571 expand_operands (TREE_OPERAND (subexp0, 0),
8572 TREE_OPERAND (subexp1, 0),
8573 NULL_RTX, &op1, &op0, 0);
8575 goto binop3;
8579 /* Check for a multiplication with matching signedness. */
8580 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8581 && TREE_CODE (type) == INTEGER_TYPE
8582 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8583 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8584 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8585 && int_fits_type_p (TREE_OPERAND (exp, 1),
8586 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8587 /* Don't use a widening multiply if a shift will do. */
8588 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8589 > HOST_BITS_PER_WIDE_INT)
8590 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8591 ||
8592 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8593 && (TYPE_PRECISION (TREE_TYPE
8594 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8595 == TYPE_PRECISION (TREE_TYPE
8596 (TREE_OPERAND
8597 (TREE_OPERAND (exp, 0), 0))))
8598 /* If both operands are extended, they must either both
8599 be zero-extended or both be sign-extended. */
8600 && (TYPE_UNSIGNED (TREE_TYPE
8601 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8602 == TYPE_UNSIGNED (TREE_TYPE
8603 (TREE_OPERAND
8604 (TREE_OPERAND (exp, 0), 0)))))))
8606 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8607 enum machine_mode innermode = TYPE_MODE (op0type);
8608 bool zextend_p = TYPE_UNSIGNED (op0type);
8609 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8610 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8612 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8614 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8616 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8617 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8618 TREE_OPERAND (exp, 1),
8619 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8620 else
8621 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8622 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8623 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8624 goto binop3;
8626 else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
8627 && innermode == word_mode)
8629 rtx htem, hipart;
8630 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8631 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8632 op1 = convert_modes (innermode, mode,
8633 expand_normal (TREE_OPERAND (exp, 1)),
8634 unsignedp);
8635 else
8636 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8637 temp = expand_binop (mode, other_optab, op0, op1, target,
8638 unsignedp, OPTAB_LIB_WIDEN);
8639 hipart = gen_highpart (innermode, temp);
8640 htem = expand_mult_highpart_adjust (innermode, hipart,
8641 op0, op1, hipart,
8642 zextend_p);
8643 if (htem != hipart)
8644 emit_move_insn (hipart, htem);
8645 return REDUCE_BIT_FIELD (temp);
8649 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8650 subtarget, &op0, &op1, 0);
8651 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8653 case TRUNC_DIV_EXPR:
8654 case FLOOR_DIV_EXPR:
8655 case CEIL_DIV_EXPR:
8656 case ROUND_DIV_EXPR:
8657 case EXACT_DIV_EXPR:
8658 /* If this is a fixed-point operation, then we cannot use the code
8659 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8660 divisions. */
8661 if (ALL_FIXED_POINT_MODE_P (mode))
8662 goto binop;
8664 if (modifier == EXPAND_STACK_PARM)
8665 target = 0;
8666 /* Possible optimization: compute the dividend with EXPAND_SUM
8667 then if the divisor is constant can optimize the case
8668 where some terms of the dividend have coeffs divisible by it. */
8669 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8670 subtarget, &op0, &op1, 0);
8671 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8673 case RDIV_EXPR:
8674 goto binop;
8676 case TRUNC_MOD_EXPR:
8677 case FLOOR_MOD_EXPR:
8678 case CEIL_MOD_EXPR:
8679 case ROUND_MOD_EXPR:
8680 if (modifier == EXPAND_STACK_PARM)
8681 target = 0;
8682 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8683 subtarget, &op0, &op1, 0);
8684 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8686 case FIXED_CONVERT_EXPR:
8687 op0 = expand_normal (TREE_OPERAND (exp, 0));
8688 if (target == 0 || modifier == EXPAND_STACK_PARM)
8689 target = gen_reg_rtx (mode);
8691 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == INTEGER_TYPE
8692 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
8693 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8694 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8695 else
8696 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8697 return target;
8699 case FIX_TRUNC_EXPR:
8700 op0 = expand_normal (TREE_OPERAND (exp, 0));
8701 if (target == 0 || modifier == EXPAND_STACK_PARM)
8702 target = gen_reg_rtx (mode);
8703 expand_fix (target, op0, unsignedp);
8704 return target;
8706 case FLOAT_EXPR:
8707 op0 = expand_normal (TREE_OPERAND (exp, 0));
8708 if (target == 0 || modifier == EXPAND_STACK_PARM)
8709 target = gen_reg_rtx (mode);
8710 /* expand_float can't figure out what to do if FROM has VOIDmode.
8711 So give it the correct mode. With -O, cse will optimize this. */
8712 if (GET_MODE (op0) == VOIDmode)
8713 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8714 op0);
8715 expand_float (target, op0,
8716 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8717 return target;
8719 case NEGATE_EXPR:
8720 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8721 VOIDmode, EXPAND_NORMAL);
8722 if (modifier == EXPAND_STACK_PARM)
8723 target = 0;
8724 temp = expand_unop (mode,
8725 optab_for_tree_code (NEGATE_EXPR, type),
8726 op0, target, 0);
8727 gcc_assert (temp);
8728 return REDUCE_BIT_FIELD (temp);
8730 case ABS_EXPR:
8731 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8732 VOIDmode, EXPAND_NORMAL);
8733 if (modifier == EXPAND_STACK_PARM)
8734 target = 0;
8736 /* ABS_EXPR is not valid for complex arguments. */
8737 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8738 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8740 /* Unsigned abs is simply the operand. Testing here means we don't
8741 risk generating incorrect code below. */
8742 if (TYPE_UNSIGNED (type))
8743 return op0;
8745 return expand_abs (mode, op0, target, unsignedp,
8746 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8748 case MAX_EXPR:
8749 case MIN_EXPR:
8750 target = original_target;
8751 if (target == 0
8752 || modifier == EXPAND_STACK_PARM
8753 || (MEM_P (target) && MEM_VOLATILE_P (target))
8754 || GET_MODE (target) != mode
8755 || (REG_P (target)
8756 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8757 target = gen_reg_rtx (mode);
8758 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8759 target, &op0, &op1, 0);
8761 /* First try to do it with a special MIN or MAX instruction.
8762 If that does not win, use a conditional jump to select the proper
8763 value. */
8764 this_optab = optab_for_tree_code (code, type);
8765 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8766 OPTAB_WIDEN);
8767 if (temp != 0)
8768 return temp;
8770 /* At this point, a MEM target is no longer useful; we will get better
8771 code without it. */
8773 if (! REG_P (target))
8774 target = gen_reg_rtx (mode);
8776 /* If op1 was placed in target, swap op0 and op1. */
8777 if (target != op0 && target == op1)
8779 temp = op0;
8780 op0 = op1;
8781 op1 = temp;
8784 /* We generate better code and avoid problems with op1 mentioning
8785 target by forcing op1 into a pseudo if it isn't a constant. */
8786 if (! CONSTANT_P (op1))
8787 op1 = force_reg (mode, op1);
8790 enum rtx_code comparison_code;
8791 rtx cmpop1 = op1;
8793 if (code == MAX_EXPR)
8794 comparison_code = unsignedp ? GEU : GE;
8795 else
8796 comparison_code = unsignedp ? LEU : LE;
8798 /* Canonicalize to comparisons against 0. */
8799 if (op1 == const1_rtx)
8801 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8802 or (a != 0 ? a : 1) for unsigned.
8803 For MIN we are safe converting (a <= 1 ? a : 1)
8804 into (a <= 0 ? a : 1) */
8805 cmpop1 = const0_rtx;
8806 if (code == MAX_EXPR)
8807 comparison_code = unsignedp ? NE : GT;
8809 if (op1 == constm1_rtx && !unsignedp)
8811 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8812 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8813 cmpop1 = const0_rtx;
8814 if (code == MIN_EXPR)
8815 comparison_code = LT;
8817 #ifdef HAVE_conditional_move
8818 /* Use a conditional move if possible. */
8819 if (can_conditionally_move_p (mode))
8821 rtx insn;
8823 /* ??? Same problem as in expmed.c: emit_conditional_move
8824 forces a stack adjustment via compare_from_rtx, and we
8825 lose the stack adjustment if the sequence we are about
8826 to create is discarded. */
8827 do_pending_stack_adjust ();
8829 start_sequence ();
8831 /* Try to emit the conditional move. */
8832 insn = emit_conditional_move (target, comparison_code,
8833 op0, cmpop1, mode,
8834 op0, op1, mode,
8835 unsignedp);
8837 /* If we could do the conditional move, emit the sequence,
8838 and return. */
8839 if (insn)
8841 rtx seq = get_insns ();
8842 end_sequence ();
8843 emit_insn (seq);
8844 return target;
8847 /* Otherwise discard the sequence and fall back to code with
8848 branches. */
8849 end_sequence ();
8851 #endif
8852 if (target != op0)
8853 emit_move_insn (target, op0);
8855 temp = gen_label_rtx ();
8856 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8857 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8859 emit_move_insn (target, op1);
8860 emit_label (temp);
8861 return target;
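/* Editor's sketch of the branchy fallback above (not in the original): for
   target = MAX_EXPR <a, b> with neither a max instruction nor a usable
   conditional move, the emitted sequence is roughly

       target = a;
       if (target >= b) goto L;    (GE/GEU per signedness, adjusted by the
       target = b;                  canonicalizations against 0 above)
     L:

   i.e. one compare-and-branch around the move of the losing operand.  */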
8863 case BIT_NOT_EXPR:
8864 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8865 VOIDmode, EXPAND_NORMAL);
8866 if (modifier == EXPAND_STACK_PARM)
8867 target = 0;
8868 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8869 gcc_assert (temp);
8870 return temp;
8872 /* ??? Can optimize bitwise operations with one arg constant.
8873 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8874 and (a bitwise1 b) bitwise2 b (etc)
8875 but that is probably not worth while. */
8877 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8878 boolean values when we want in all cases to compute both of them. In
8879 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8880 as actual zero-or-1 values and then bitwise anding. In cases where
8881 there cannot be any side effects, better code would be made by
8882 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8883 how to recognize those cases. */
8885 case TRUTH_AND_EXPR:
8886 code = BIT_AND_EXPR;
8887 case BIT_AND_EXPR:
8888 goto binop;
8890 case TRUTH_OR_EXPR:
8891 code = BIT_IOR_EXPR;
8892 case BIT_IOR_EXPR:
8893 goto binop;
8895 case TRUTH_XOR_EXPR:
8896 code = BIT_XOR_EXPR;
8897 case BIT_XOR_EXPR:
8898 goto binop;
8900 case LSHIFT_EXPR:
8901 case RSHIFT_EXPR:
8902 case LROTATE_EXPR:
8903 case RROTATE_EXPR:
8904 /* If this is a fixed-point operation, then we cannot use the code
8905 below because "expand_shift" doesn't support sat/no-sat fixed-point
8906 shifts. */
8907 if (ALL_FIXED_POINT_MODE_P (mode))
8908 goto binop;
8910 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8911 subtarget = 0;
8912 if (modifier == EXPAND_STACK_PARM)
8913 target = 0;
8914 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8915 VOIDmode, EXPAND_NORMAL);
8916 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8917 unsignedp);
8919 /* Could determine the answer when only additive constants differ. Also,
8920 the addition of one can be handled by changing the condition. */
8921 case LT_EXPR:
8922 case LE_EXPR:
8923 case GT_EXPR:
8924 case GE_EXPR:
8925 case EQ_EXPR:
8926 case NE_EXPR:
8927 case UNORDERED_EXPR:
8928 case ORDERED_EXPR:
8929 case UNLT_EXPR:
8930 case UNLE_EXPR:
8931 case UNGT_EXPR:
8932 case UNGE_EXPR:
8933 case UNEQ_EXPR:
8934 case LTGT_EXPR:
8935 temp = do_store_flag (exp,
8936 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8937 tmode != VOIDmode ? tmode : mode, 0);
8938 if (temp != 0)
8939 return temp;
8941 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8942 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8943 && original_target
8944 && REG_P (original_target)
8945 && (GET_MODE (original_target)
8946 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8948 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8949 VOIDmode, EXPAND_NORMAL);
8951 /* If temp is constant, we can just compute the result. */
8952 if (GET_CODE (temp) == CONST_INT)
8954 if (INTVAL (temp) != 0)
8955 emit_move_insn (target, const1_rtx);
8956 else
8957 emit_move_insn (target, const0_rtx);
8959 return target;
8962 if (temp != original_target)
8964 enum machine_mode mode1 = GET_MODE (temp);
8965 if (mode1 == VOIDmode)
8966 mode1 = tmode != VOIDmode ? tmode : mode;
8968 temp = copy_to_mode_reg (mode1, temp);
8971 op1 = gen_label_rtx ();
8972 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8973 GET_MODE (temp), unsignedp, op1);
8974 emit_move_insn (temp, const1_rtx);
8975 emit_label (op1);
8976 return temp;
8979 /* If no set-flag instruction, must generate a conditional store
8980 into a temporary variable. Drop through and handle this
8981 like && and ||. */
8983 if (! ignore
8984 && (target == 0
8985 || modifier == EXPAND_STACK_PARM
8986 || ! safe_from_p (target, exp, 1)
8987 /* Make sure we don't have a hard reg (such as function's return
8988 value) live across basic blocks, if not optimizing. */
8989 || (!optimize && REG_P (target)
8990 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8991 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8993 if (target)
8994 emit_move_insn (target, const0_rtx);
8996 op1 = gen_label_rtx ();
8997 jumpifnot (exp, op1);
8999 if (target)
9000 emit_move_insn (target, const1_rtx);
9002 emit_label (op1);
9003 return ignore ? const0_rtx : target;
9005 case TRUTH_NOT_EXPR:
9006 if (modifier == EXPAND_STACK_PARM)
9007 target = 0;
9008 op0 = expand_expr (TREE_OPERAND (exp, 0), target,
9009 VOIDmode, EXPAND_NORMAL);
9010 /* The parser is careful to generate TRUTH_NOT_EXPR
9011 only with operands that are always zero or one. */
9012 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
9013 target, 1, OPTAB_LIB_WIDEN);
9014 gcc_assert (temp);
9015 return temp;
9017 case STATEMENT_LIST:
9019 tree_stmt_iterator iter;
9021 gcc_assert (ignore);
9023 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9024 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9026 return const0_rtx;
9028 case COND_EXPR:
9029 /* A COND_EXPR with its type being VOID_TYPE represents a
9030 conditional jump and is handled in
9031 expand_gimple_cond_expr. */
9032 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
9034 /* Note that COND_EXPRs whose type is a structure or union
9035 are required to be constructed to contain assignments of
9036 a temporary variable, so that we can evaluate them here
9037 for side effect only. If type is void, we must do likewise. */
9039 gcc_assert (!TREE_ADDRESSABLE (type)
9040 && !ignore
9041 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
9042 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
9044 /* If we are not to produce a result, we have no target. Otherwise,
9045 if a target was specified use it; it will not be used as an
9046 intermediate target unless it is safe. If no target, use a
9047 temporary. */
9049 if (modifier != EXPAND_STACK_PARM
9050 && original_target
9051 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
9052 && GET_MODE (original_target) == mode
9053 #ifdef HAVE_conditional_move
9054 && (! can_conditionally_move_p (mode)
9055 || REG_P (original_target))
9056 #endif
9057 && !MEM_P (original_target))
9058 temp = original_target;
9059 else
9060 temp = assign_temp (type, 0, 0, 1);
9062 do_pending_stack_adjust ();
9063 NO_DEFER_POP;
9064 op0 = gen_label_rtx ();
9065 op1 = gen_label_rtx ();
9066 jumpifnot (TREE_OPERAND (exp, 0), op0);
9067 store_expr (TREE_OPERAND (exp, 1), temp,
9068 modifier == EXPAND_STACK_PARM,
9069 false);
9071 emit_jump_insn (gen_jump (op1));
9072 emit_barrier ();
9073 emit_label (op0);
9074 store_expr (TREE_OPERAND (exp, 2), temp,
9075 modifier == EXPAND_STACK_PARM,
9076 false);
9078 emit_label (op1);
9079 OK_DEFER_POP;
9080 return temp;
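/* Editor's sketch of the expansion above (not in the original): for
   temp = (cond ? then : else) the generated control flow is

       if (!cond) goto op0;
       temp = then;
       goto op1;
     op0:
       temp = else;
     op1:

   with do_pending_stack_adjust / NO_DEFER_POP bracketing the region so that
   pending stack pops are not lost across the branches.  */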
9082 case VEC_COND_EXPR:
9083 target = expand_vec_cond_expr (exp, target);
9084 return target;
9086 case MODIFY_EXPR:
9088 tree lhs = TREE_OPERAND (exp, 0);
9089 tree rhs = TREE_OPERAND (exp, 1);
9090 gcc_assert (ignore);
9091 expand_assignment (lhs, rhs, false);
9092 return const0_rtx;
9095 case GIMPLE_MODIFY_STMT:
9097 tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
9098 tree rhs = GIMPLE_STMT_OPERAND (exp, 1);
9100 gcc_assert (ignore);
9102 /* Check for |= or &= of a bitfield of size 1 into another bitfield
9103 of size 1. In this case (unless we need the result of the
9104 assignment), we can do this more efficiently with a
9105 test followed by an assignment, if necessary.
9107 ??? At this point we cannot get a BIT_FIELD_REF here. But if
9108 things change so that we do, this code should be enhanced to
9109 support it. */
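/* Editorial sketch, not part of the original source: given
     struct s { unsigned int a : 1; unsigned int b : 1; } x;
   the statement  x.a |= x.b;  matches the test below and expands as
     if (x.b) x.a = 1;
   while  x.a &= x.b;  expands as
     if (!x.b) x.a = 0;
   avoiding a read-modify-write of the destination bitfield.  */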
9110 if (TREE_CODE (lhs) == COMPONENT_REF
9111 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9112 || TREE_CODE (rhs) == BIT_AND_EXPR)
9113 && TREE_OPERAND (rhs, 0) == lhs
9114 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9115 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9116 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9118 rtx label = gen_label_rtx ();
9119 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9120 do_jump (TREE_OPERAND (rhs, 1),
9121 value ? label : 0,
9122 value ? 0 : label);
9123 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9124 MOVE_NONTEMPORAL (exp));
9125 do_pending_stack_adjust ();
9126 emit_label (label);
9127 return const0_rtx;
9130 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9131 return const0_rtx;
9134 case RETURN_EXPR:
9135 if (!TREE_OPERAND (exp, 0))
9136 expand_null_return ();
9137 else
9138 expand_return (TREE_OPERAND (exp, 0));
9139 return const0_rtx;
9141 case ADDR_EXPR:
9142 return expand_expr_addr_expr (exp, target, tmode, modifier);
9144 case COMPLEX_EXPR:
9145 /* Get the rtx code of the operands. */
9146 op0 = expand_normal (TREE_OPERAND (exp, 0));
9147 op1 = expand_normal (TREE_OPERAND (exp, 1));
9149 if (!target)
9150 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9152 /* Move the real (op0) and imaginary (op1) parts to their location. */
9153 write_complex_part (target, op0, false);
9154 write_complex_part (target, op1, true);
9156 return target;
9158 case REALPART_EXPR:
9159 op0 = expand_normal (TREE_OPERAND (exp, 0));
9160 return read_complex_part (op0, false);
9162 case IMAGPART_EXPR:
9163 op0 = expand_normal (TREE_OPERAND (exp, 0));
9164 return read_complex_part (op0, true);
9166 case RESX_EXPR:
9167 expand_resx_expr (exp);
9168 return const0_rtx;
9170 case TRY_CATCH_EXPR:
9171 case CATCH_EXPR:
9172 case EH_FILTER_EXPR:
9173 case TRY_FINALLY_EXPR:
9174 /* Lowered by tree-eh.c. */
9175 gcc_unreachable ();
9177 case WITH_CLEANUP_EXPR:
9178 case CLEANUP_POINT_EXPR:
9179 case TARGET_EXPR:
9180 case CASE_LABEL_EXPR:
9181 case VA_ARG_EXPR:
9182 case BIND_EXPR:
9183 case INIT_EXPR:
9184 case CONJ_EXPR:
9185 case COMPOUND_EXPR:
9186 case PREINCREMENT_EXPR:
9187 case PREDECREMENT_EXPR:
9188 case POSTINCREMENT_EXPR:
9189 case POSTDECREMENT_EXPR:
9190 case LOOP_EXPR:
9191 case EXIT_EXPR:
9192 case TRUTH_ANDIF_EXPR:
9193 case TRUTH_ORIF_EXPR:
9194 /* Lowered by gimplify.c. */
9195 gcc_unreachable ();
9197 case CHANGE_DYNAMIC_TYPE_EXPR:
9198 /* This is ignored at the RTL level. The tree level set
9199 DECL_POINTER_ALIAS_SET of any variable to be 0, which is
9200 overkill for the RTL layer but is all that we can
9201 represent. */
9202 return const0_rtx;
9204 case EXC_PTR_EXPR:
9205 return get_exception_pointer (cfun);
9207 case FILTER_EXPR:
9208 return get_exception_filter (cfun);
9210 case FDESC_EXPR:
9211 /* Function descriptors are not valid except as
9212 initialization constants, and should not be expanded. */
9213 gcc_unreachable ();
9215 case SWITCH_EXPR:
9216 expand_case (exp);
9217 return const0_rtx;
9219 case LABEL_EXPR:
9220 expand_label (TREE_OPERAND (exp, 0));
9221 return const0_rtx;
9223 case ASM_EXPR:
9224 expand_asm_expr (exp);
9225 return const0_rtx;
9227 case WITH_SIZE_EXPR:
9228 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9229 have pulled out the size to use in whatever context it needed. */
9230 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
9231 modifier, alt_rtl);
9233 case REALIGN_LOAD_EXPR:
9235 tree oprnd0 = TREE_OPERAND (exp, 0);
9236 tree oprnd1 = TREE_OPERAND (exp, 1);
9237 tree oprnd2 = TREE_OPERAND (exp, 2);
9238 rtx op2;
9240 this_optab = optab_for_tree_code (code, type);
9241 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9242 op2 = expand_normal (oprnd2);
9243 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9244 target, unsignedp);
9245 gcc_assert (temp);
9246 return temp;
9249 case DOT_PROD_EXPR:
9251 tree oprnd0 = TREE_OPERAND (exp, 0);
9252 tree oprnd1 = TREE_OPERAND (exp, 1);
9253 tree oprnd2 = TREE_OPERAND (exp, 2);
9254 rtx op2;
9256 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9257 op2 = expand_normal (oprnd2);
9258 target = expand_widen_pattern_expr (exp, op0, op1, op2,
9259 target, unsignedp);
9260 return target;
9263 case WIDEN_SUM_EXPR:
9265 tree oprnd0 = TREE_OPERAND (exp, 0);
9266 tree oprnd1 = TREE_OPERAND (exp, 1);
9268 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9269 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
9270 target, unsignedp);
9271 return target;
9274 case REDUC_MAX_EXPR:
9275 case REDUC_MIN_EXPR:
9276 case REDUC_PLUS_EXPR:
9278 op0 = expand_normal (TREE_OPERAND (exp, 0));
9279 this_optab = optab_for_tree_code (code, type);
9280 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9281 gcc_assert (temp);
9282 return temp;
9285 case VEC_EXTRACT_EVEN_EXPR:
9286 case VEC_EXTRACT_ODD_EXPR:
9288 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9289 NULL_RTX, &op0, &op1, 0);
9290 this_optab = optab_for_tree_code (code, type);
9291 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9292 OPTAB_WIDEN);
9293 gcc_assert (temp);
9294 return temp;
9297 case VEC_INTERLEAVE_HIGH_EXPR:
9298 case VEC_INTERLEAVE_LOW_EXPR:
9300 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9301 NULL_RTX, &op0, &op1, 0);
9302 this_optab = optab_for_tree_code (code, type);
9303 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9304 OPTAB_WIDEN);
9305 gcc_assert (temp);
9306 return temp;
9309 case VEC_LSHIFT_EXPR:
9310 case VEC_RSHIFT_EXPR:
9312 target = expand_vec_shift_expr (exp, target);
9313 return target;
9316 case VEC_UNPACK_HI_EXPR:
9317 case VEC_UNPACK_LO_EXPR:
9319 op0 = expand_normal (TREE_OPERAND (exp, 0));
9320 this_optab = optab_for_tree_code (code, type);
9321 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
9322 target, unsignedp);
9323 gcc_assert (temp);
9324 return temp;
9327 case VEC_UNPACK_FLOAT_HI_EXPR:
9328 case VEC_UNPACK_FLOAT_LO_EXPR:
9330 op0 = expand_normal (TREE_OPERAND (exp, 0));
9331 /* The signedness is determined from the input operand. */
9332 this_optab = optab_for_tree_code (code,
9333 TREE_TYPE (TREE_OPERAND (exp, 0)));
9334 temp = expand_widen_pattern_expr
9335 (exp, op0, NULL_RTX, NULL_RTX,
9336 target, TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
9338 gcc_assert (temp);
9339 return temp;
9342 case VEC_WIDEN_MULT_HI_EXPR:
9343 case VEC_WIDEN_MULT_LO_EXPR:
9345 tree oprnd0 = TREE_OPERAND (exp, 0);
9346 tree oprnd1 = TREE_OPERAND (exp, 1);
9348 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9349 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
9350 target, unsignedp);
9351 gcc_assert (target);
9352 return target;
9355 case VEC_PACK_TRUNC_EXPR:
9356 case VEC_PACK_SAT_EXPR:
9357 case VEC_PACK_FIX_TRUNC_EXPR:
9359 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9360 goto binop;
9363 default:
9364 return lang_hooks.expand_expr (exp, original_target, tmode,
9365 modifier, alt_rtl);
9368 /* Here to do an ordinary binary operator. */
9369 binop:
9370 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9371 subtarget, &op0, &op1, 0);
9372 binop2:
9373 this_optab = optab_for_tree_code (code, type);
9374 binop3:
9375 if (modifier == EXPAND_STACK_PARM)
9376 target = 0;
9377 temp = expand_binop (mode, this_optab, op0, op1, target,
9378 unsignedp, OPTAB_LIB_WIDEN);
9379 gcc_assert (temp);
9380 return REDUCE_BIT_FIELD (temp);
9382 #undef REDUCE_BIT_FIELD
9384 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9385 signedness of TYPE), possibly returning the result in TARGET. */
9386 static rtx
9387 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9389 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9390 if (target && GET_MODE (target) != GET_MODE (exp))
9391 target = 0;
9392 /* For constant values, reduce using build_int_cst_type. */
9393 if (GET_CODE (exp) == CONST_INT)
9395 HOST_WIDE_INT value = INTVAL (exp);
9396 tree t = build_int_cst_type (type, value);
9397 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9399 else if (TYPE_UNSIGNED (type))
9401 rtx mask;
9402 if (prec < HOST_BITS_PER_WIDE_INT)
9403 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
9404 GET_MODE (exp));
9405 else
9406 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
9407 ((unsigned HOST_WIDE_INT) 1
9408 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
9409 GET_MODE (exp));
9410 return expand_and (GET_MODE (exp), exp, mask, target);
9412 else
9414 tree count = build_int_cst (NULL_TREE,
9415 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9416 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9417 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
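/* Editorial example, not part of the original source (assuming 32-bit
   SImode): reducing EXP to a signed 5-bit precision uses the shift pair
   above,  (x << 27) >> 27 , where the arithmetic right shift re-extends
   the sign from bit 4; the unsigned case simply masks,  x & 0x1f .  */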
9421 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9422 when applied to the address of EXP produces an address known to be
9423 aligned to more than BIGGEST_ALIGNMENT. */
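/* Editorial sketch, not part of the original source (intptr_t and ALIGN
   are illustrative names): the pattern matched below typically comes from
   manual alignment code such as
     offset = (- (intptr_t) &exp) & (ALIGN - 1);
   with ALIGN a power of 2; adding that offset to the address of EXP
   rounds it up to the next ALIGN-byte boundary.  */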
9425 static int
9426 is_aligning_offset (const_tree offset, const_tree exp)
9428 /* Strip off any conversions. */
9429 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9430 || TREE_CODE (offset) == NOP_EXPR
9431 || TREE_CODE (offset) == CONVERT_EXPR)
9432 offset = TREE_OPERAND (offset, 0);
9434 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9435 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9436 if (TREE_CODE (offset) != BIT_AND_EXPR
9437 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9438 || compare_tree_int (TREE_OPERAND (offset, 1),
9439 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9440 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9441 return 0;
9443 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9444 It must be NEGATE_EXPR. Then strip any more conversions. */
9445 offset = TREE_OPERAND (offset, 0);
9446 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9447 || TREE_CODE (offset) == NOP_EXPR
9448 || TREE_CODE (offset) == CONVERT_EXPR)
9449 offset = TREE_OPERAND (offset, 0);
9451 if (TREE_CODE (offset) != NEGATE_EXPR)
9452 return 0;
9454 offset = TREE_OPERAND (offset, 0);
9455 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9456 || TREE_CODE (offset) == NOP_EXPR
9457 || TREE_CODE (offset) == CONVERT_EXPR)
9458 offset = TREE_OPERAND (offset, 0);
9460 /* This must now be the address of EXP. */
9461 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9464 /* Return the tree node if ARG corresponds to a string constant, or zero
9465 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9466 in bytes within the string that ARG is accessing. The type of the
9467 offset will be `sizetype'. */
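/* Editorial example, not part of the original source: for an argument of
   the form  &"hello"[2]  (or, equivalently,  "hello" + 2 ) this returns
   the STRING_CST "hello" and sets *PTR_OFFSET to a sizetype constant 2;
   for a read-only, locally bound array initialized from a string literal
   it returns the DECL_INITIAL of the array instead.  */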
9469 tree
9470 string_constant (tree arg, tree *ptr_offset)
9472 tree array, offset, lower_bound;
9473 STRIP_NOPS (arg);
9475 if (TREE_CODE (arg) == ADDR_EXPR)
9477 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9479 *ptr_offset = size_zero_node;
9480 return TREE_OPERAND (arg, 0);
9482 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9484 array = TREE_OPERAND (arg, 0);
9485 offset = size_zero_node;
9487 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9489 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9490 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9491 if (TREE_CODE (array) != STRING_CST
9492 && TREE_CODE (array) != VAR_DECL)
9493 return 0;
9495 /* Check if the array has a nonzero lower bound. */
9496 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9497 if (!integer_zerop (lower_bound))
9499 /* If the offset and base aren't both constants, return 0. */
9500 if (TREE_CODE (lower_bound) != INTEGER_CST)
9501 return 0;
9502 if (TREE_CODE (offset) != INTEGER_CST)
9503 return 0;
9504 /* Adjust offset by the lower bound. */
9505 offset = size_diffop (fold_convert (sizetype, offset),
9506 fold_convert (sizetype, lower_bound));
9509 else
9510 return 0;
9512 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9514 tree arg0 = TREE_OPERAND (arg, 0);
9515 tree arg1 = TREE_OPERAND (arg, 1);
9517 STRIP_NOPS (arg0);
9518 STRIP_NOPS (arg1);
9520 if (TREE_CODE (arg0) == ADDR_EXPR
9521 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9522 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9524 array = TREE_OPERAND (arg0, 0);
9525 offset = arg1;
9527 else if (TREE_CODE (arg1) == ADDR_EXPR
9528 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9529 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9531 array = TREE_OPERAND (arg1, 0);
9532 offset = arg0;
9534 else
9535 return 0;
9537 else
9538 return 0;
9540 if (TREE_CODE (array) == STRING_CST)
9542 *ptr_offset = fold_convert (sizetype, offset);
9543 return array;
9545 else if (TREE_CODE (array) == VAR_DECL)
9547 int length;
9549 /* Variables initialized to string literals can be handled too. */
9550 if (DECL_INITIAL (array) == NULL_TREE
9551 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9552 return 0;
9554 /* Only handle arrays that are read-only, free of side effects, and bind locally. */
9555 if (! TREE_READONLY (array)
9556 || TREE_SIDE_EFFECTS (array)
9557 || ! targetm.binds_local_p (array))
9558 return 0;
9560 /* Avoid const char foo[4] = "abcde"; */
9561 if (DECL_SIZE_UNIT (array) == NULL_TREE
9562 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9563 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9564 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9565 return 0;
9567 /* If the variable is bigger than the string literal, OFFSET must be constant
9568 and within the bounds of the string literal. */
9569 offset = fold_convert (sizetype, offset);
9570 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9571 && (! host_integerp (offset, 1)
9572 || compare_tree_int (offset, length) >= 0))
9573 return 0;
9575 *ptr_offset = offset;
9576 return DECL_INITIAL (array);
9579 return 0;
9582 /* Generate code to calculate EXP using a store-flag instruction
9583 and return an rtx for the result. EXP is either a comparison
9584 or a TRUTH_NOT_EXPR whose operand is a comparison.
9586 If TARGET is nonzero, store the result there if convenient.
9588 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9589 cheap.
9591 Return zero if there is no suitable set-flag instruction
9592 available on this machine.
9594 Once expand_expr has been called on the arguments of the comparison,
9595 we are committed to doing the store flag, since it is not safe to
9596 re-evaluate the expression. We emit the store-flag insn by calling
9597 emit_store_flag, but only expand the arguments if we have a reason
9598 to believe that emit_store_flag will be successful. If we think that
9599 it will, but it isn't, we have to simulate the store-flag with a
9600 set/jump/set sequence. */
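/* Editorial sketch, not part of the original source: for  r = (a < b)
   the set/jump/set fallback at the end of this function emits roughly
       r = 1;  if (a < b) goto L;  r = 0;  L:;
   with the two constants swapped when the result must be inverted.  */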
9602 static rtx
9603 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9605 enum rtx_code code;
9606 tree arg0, arg1, type;
9607 tree tem;
9608 enum machine_mode operand_mode;
9609 int invert = 0;
9610 int unsignedp;
9611 rtx op0, op1;
9612 enum insn_code icode;
9613 rtx subtarget = target;
9614 rtx result, label;
9616 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9617 result at the end. We can't simply invert the test since it would
9618 have already been inverted if it were valid. This case occurs for
9619 some floating-point comparisons. */
9621 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9622 invert = 1, exp = TREE_OPERAND (exp, 0);
9624 arg0 = TREE_OPERAND (exp, 0);
9625 arg1 = TREE_OPERAND (exp, 1);
9627 /* Don't crash if the comparison was erroneous. */
9628 if (arg0 == error_mark_node || arg1 == error_mark_node)
9629 return const0_rtx;
9631 type = TREE_TYPE (arg0);
9632 operand_mode = TYPE_MODE (type);
9633 unsignedp = TYPE_UNSIGNED (type);
9635 /* We won't bother with BLKmode store-flag operations because it would mean
9636 passing a lot of information to emit_store_flag. */
9637 if (operand_mode == BLKmode)
9638 return 0;
9640 /* We won't bother with store-flag operations involving function pointers
9641 when function pointers must be canonicalized before comparisons. */
9642 #ifdef HAVE_canonicalize_funcptr_for_compare
9643 if (HAVE_canonicalize_funcptr_for_compare
9644 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9645 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9646 == FUNCTION_TYPE))
9647 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9648 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9649 == FUNCTION_TYPE))))
9650 return 0;
9651 #endif
9653 STRIP_NOPS (arg0);
9654 STRIP_NOPS (arg1);
9656 /* Get the rtx comparison code to use. We know that EXP is a comparison
9657 operation of some type. Some comparisons against 1 and -1 can be
9658 converted to comparisons with zero. Do so here so that the tests
9659 below will be aware that we have a comparison with zero. These
9660 tests will not catch constants in the first operand, but constants
9661 are rarely passed as the first operand. */
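/* Editorial examples, not part of the original source, of the signed
   conversions performed below:
     x <  1   becomes   x <= 0        x >= 1   becomes   x >  0
     x <= -1  becomes   x <  0        x >  -1  becomes   x >= 0           */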
9663 switch (TREE_CODE (exp))
9665 case EQ_EXPR:
9666 code = EQ;
9667 break;
9668 case NE_EXPR:
9669 code = NE;
9670 break;
9671 case LT_EXPR:
9672 if (integer_onep (arg1))
9673 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9674 else
9675 code = unsignedp ? LTU : LT;
9676 break;
9677 case LE_EXPR:
9678 if (! unsignedp && integer_all_onesp (arg1))
9679 arg1 = integer_zero_node, code = LT;
9680 else
9681 code = unsignedp ? LEU : LE;
9682 break;
9683 case GT_EXPR:
9684 if (! unsignedp && integer_all_onesp (arg1))
9685 arg1 = integer_zero_node, code = GE;
9686 else
9687 code = unsignedp ? GTU : GT;
9688 break;
9689 case GE_EXPR:
9690 if (integer_onep (arg1))
9691 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9692 else
9693 code = unsignedp ? GEU : GE;
9694 break;
9696 case UNORDERED_EXPR:
9697 code = UNORDERED;
9698 break;
9699 case ORDERED_EXPR:
9700 code = ORDERED;
9701 break;
9702 case UNLT_EXPR:
9703 code = UNLT;
9704 break;
9705 case UNLE_EXPR:
9706 code = UNLE;
9707 break;
9708 case UNGT_EXPR:
9709 code = UNGT;
9710 break;
9711 case UNGE_EXPR:
9712 code = UNGE;
9713 break;
9714 case UNEQ_EXPR:
9715 code = UNEQ;
9716 break;
9717 case LTGT_EXPR:
9718 code = LTGT;
9719 break;
9721 default:
9722 gcc_unreachable ();
9725 /* Put a constant second. */
9726 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
9727 || TREE_CODE (arg0) == FIXED_CST)
9729 tem = arg0; arg0 = arg1; arg1 = tem;
9730 code = swap_condition (code);
9733 /* If this is an equality or inequality test of a single bit, we can
9734 do this by shifting the bit being tested to the low-order bit and
9735 masking the result with the constant 1. If the condition was EQ,
9736 we xor it with 1. This does not require an scc insn and is faster
9737 than an scc insn even if we have it.
9739 The code to make this transformation was moved into fold_single_bit_test,
9740 so we just call into the folder and expand its result. */
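/* Editorial example, not part of the original source: for  (x & 8) != 0
   the folder produces  (x >> 3) & 1 , and for  (x & 8) == 0  the result
   is additionally XORed with 1, so no scc instruction is required.  */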
9742 if ((code == NE || code == EQ)
9743 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9744 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9746 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9747 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9748 arg0, arg1, type),
9749 target, VOIDmode, EXPAND_NORMAL);
9752 /* Now see if we are likely to be able to do this. Return if not. */
9753 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9754 return 0;
9756 icode = setcc_gen_code[(int) code];
9758 if (icode == CODE_FOR_nothing)
9760 enum machine_mode wmode;
9762 for (wmode = operand_mode;
9763 icode == CODE_FOR_nothing && wmode != VOIDmode;
9764 wmode = GET_MODE_WIDER_MODE (wmode))
9765 icode = optab_handler (cstore_optab, wmode)->insn_code;
9768 if (icode == CODE_FOR_nothing
9769 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9771 /* We can only do this if it is one of the special cases that
9772 can be handled without an scc insn. */
9773 if ((code == LT && integer_zerop (arg1))
9774 || (! only_cheap && code == GE && integer_zerop (arg1)))
9776 else if (! only_cheap && (code == NE || code == EQ)
9777 && TREE_CODE (type) != REAL_TYPE
9778 && ((optab_handler (abs_optab, operand_mode)->insn_code
9779 != CODE_FOR_nothing)
9780 || (optab_handler (ffs_optab, operand_mode)->insn_code
9781 != CODE_FOR_nothing)))
9783 else
9784 return 0;
9787 if (! get_subtarget (target)
9788 || GET_MODE (subtarget) != operand_mode)
9789 subtarget = 0;
9791 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9793 if (target == 0)
9794 target = gen_reg_rtx (mode);
9796 result = emit_store_flag (target, code, op0, op1,
9797 operand_mode, unsignedp, 1);
9799 if (result)
9801 if (invert)
9802 result = expand_binop (mode, xor_optab, result, const1_rtx,
9803 result, 0, OPTAB_LIB_WIDEN);
9804 return result;
9807 /* If this failed, we have to do this with set/compare/jump/set code. */
9808 if (!REG_P (target)
9809 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9810 target = gen_reg_rtx (GET_MODE (target));
9812 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9813 label = gen_label_rtx ();
9814 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9815 NULL_RTX, label);
9817 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9818 emit_label (label);
9820 return target;
9824 /* Stubs in case we haven't got a casesi insn. */
9825 #ifndef HAVE_casesi
9826 # define HAVE_casesi 0
9827 # define gen_casesi(a, b, c, d, e) (0)
9828 # define CODE_FOR_casesi CODE_FOR_nothing
9829 #endif
9831 /* If the machine does not have a case insn that compares the bounds,
9832 this means extra overhead for dispatch tables, which raises the
9833 threshold for using them. */
9834 #ifndef CASE_VALUES_THRESHOLD
9835 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9836 #endif /* CASE_VALUES_THRESHOLD */
9838 unsigned int
9839 case_values_threshold (void)
9841 return CASE_VALUES_THRESHOLD;
9844 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9845 0 otherwise (i.e. if there is no casesi instruction). */
9847 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9848 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9850 enum machine_mode index_mode = SImode;
9851 int index_bits = GET_MODE_BITSIZE (index_mode);
9852 rtx op1, op2, index;
9853 enum machine_mode op_mode;
9855 if (! HAVE_casesi)
9856 return 0;
9858 /* Convert the index to SImode. */
9859 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9861 enum machine_mode omode = TYPE_MODE (index_type);
9862 rtx rangertx = expand_normal (range);
9864 /* We must handle the endpoints in the original mode. */
9865 index_expr = build2 (MINUS_EXPR, index_type,
9866 index_expr, minval);
9867 minval = integer_zero_node;
9868 index = expand_normal (index_expr);
9869 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9870 omode, 1, default_label);
9871 /* Now we can safely truncate. */
9872 index = convert_to_mode (index_mode, index, 0);
9874 else
9876 if (TYPE_MODE (index_type) != index_mode)
9878 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9879 index_expr = fold_convert (index_type, index_expr);
9882 index = expand_normal (index_expr);
9885 do_pending_stack_adjust ();
9887 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9888 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9889 (index, op_mode))
9890 index = copy_to_mode_reg (op_mode, index);
9892 op1 = expand_normal (minval);
9894 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9895 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9896 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9897 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9898 (op1, op_mode))
9899 op1 = copy_to_mode_reg (op_mode, op1);
9901 op2 = expand_normal (range);
9903 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9904 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9905 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9906 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9907 (op2, op_mode))
9908 op2 = copy_to_mode_reg (op_mode, op2);
9910 emit_jump_insn (gen_casesi (index, op1, op2,
9911 table_label, default_label));
9912 return 1;
9915 /* Attempt to generate a tablejump instruction; same concept. */
9916 #ifndef HAVE_tablejump
9917 #define HAVE_tablejump 0
9918 #define gen_tablejump(x, y) (0)
9919 #endif
9921 /* Subroutine of the next function.
9923 INDEX is the value being switched on, with the lowest value
9924 in the table already subtracted.
9925 MODE is its expected mode (needed if INDEX is constant).
9926 RANGE is the length of the jump table.
9927 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9929 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9930 index value is out of range. */
9932 static void
9933 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9934 rtx default_label)
9936 rtx temp, vector;
9938 if (INTVAL (range) > cfun->max_jumptable_ents)
9939 cfun->max_jumptable_ents = INTVAL (range);
9941 /* Do an unsigned comparison (in the proper mode) between the index
9942 expression and the value which represents the length of the range.
9943 Since we just finished subtracting the lower bound of the range
9944 from the index expression, this comparison allows us to simultaneously
9945 check that the original index expression value is both greater than
9946 or equal to the minimum value of the range and less than or equal to
9947 the maximum value of the range. */
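/* Editorial example, not part of the original source: with case values
   5..9, INDEX already holds  i - 5  and RANGE is 4, so the single
   unsigned test  (unsigned) (i - 5) > 4  rejects both  i < 5  (the
   subtraction wraps to a huge value) and  i > 9 .  */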
9949 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9950 default_label);
9952 /* If index is in range, it must fit in Pmode.
9953 Convert to Pmode so we can index with it. */
9954 if (mode != Pmode)
9955 index = convert_to_mode (Pmode, index, 1);
9957 /* Don't let a MEM slip through, because then INDEX that comes
9958 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9959 and break_out_memory_refs will go to work on it and mess it up. */
9960 #ifdef PIC_CASE_VECTOR_ADDRESS
9961 if (flag_pic && !REG_P (index))
9962 index = copy_to_mode_reg (Pmode, index);
9963 #endif
9965 /* If flag_force_addr were to affect this address
9966 it could interfere with the tricky assumptions made
9967 about addresses that contain label-refs,
9968 which may be valid only very near the tablejump itself. */
9969 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9970 GET_MODE_SIZE, because this indicates how large insns are. The other
9971 uses should all be Pmode, because they are addresses. This code
9972 could fail if addresses and insns are not the same size. */
9973 index = gen_rtx_PLUS (Pmode,
9974 gen_rtx_MULT (Pmode, index,
9975 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9976 gen_rtx_LABEL_REF (Pmode, table_label));
9977 #ifdef PIC_CASE_VECTOR_ADDRESS
9978 if (flag_pic)
9979 index = PIC_CASE_VECTOR_ADDRESS (index);
9980 else
9981 #endif
9982 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9983 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9984 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9985 convert_move (temp, vector, 0);
9987 emit_jump_insn (gen_tablejump (temp, table_label));
9989 /* If we are generating PIC code or if the table is PC-relative, the
9990 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9991 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9992 emit_barrier ();
9996 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9997 rtx table_label, rtx default_label)
9999 rtx index;
10001 if (! HAVE_tablejump)
10002 return 0;
10004 index_expr = fold_build2 (MINUS_EXPR, index_type,
10005 fold_convert (index_type, index_expr),
10006 fold_convert (index_type, minval));
10007 index = expand_normal (index_expr);
10008 do_pending_stack_adjust ();
10010 do_tablejump (index, TYPE_MODE (index_type),
10011 convert_modes (TYPE_MODE (index_type),
10012 TYPE_MODE (TREE_TYPE (range)),
10013 expand_normal (range),
10014 TYPE_UNSIGNED (TREE_TYPE (range))),
10015 table_label, default_label);
10016 return 1;
10019 /* Nonzero if the mode is a valid vector mode for this architecture.
10020 This returns nonzero even if there is no hardware support for the
10021 vector mode, but we can emulate with narrower modes. */
10024 vector_mode_valid_p (enum machine_mode mode)
10026 enum mode_class class = GET_MODE_CLASS (mode);
10027 enum machine_mode innermode;
10029 /* Doh! What's going on? */
10030 if (class != MODE_VECTOR_INT
10031 && class != MODE_VECTOR_FLOAT
10032 && class != MODE_VECTOR_FRACT
10033 && class != MODE_VECTOR_UFRACT
10034 && class != MODE_VECTOR_ACCUM
10035 && class != MODE_VECTOR_UACCUM)
10036 return 0;
10038 /* Hardware support. Woo hoo! */
10039 if (targetm.vector_mode_supported_p (mode))
10040 return 1;
10042 innermode = GET_MODE_INNER (mode);
10044 /* We should probably return 1 if requesting V4DI and we have no DI,
10045 but do have V2DI; that case is probably very unlikely, though. */
10047 /* If we have support for the inner mode, we can safely emulate it.
10048 We may not have V2DI, but we can emulate it with a pair of DIs. */
10049 return targetm.scalar_mode_supported_p (innermode);
10052 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10053 static rtx
10054 const_vector_from_tree (tree exp)
10056 rtvec v;
10057 int units, i;
10058 tree link, elt;
10059 enum machine_mode inner, mode;
10061 mode = TYPE_MODE (TREE_TYPE (exp));
10063 if (initializer_zerop (exp))
10064 return CONST0_RTX (mode);
10066 units = GET_MODE_NUNITS (mode);
10067 inner = GET_MODE_INNER (mode);
10069 v = rtvec_alloc (units);
10071 link = TREE_VECTOR_CST_ELTS (exp);
10072 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10074 elt = TREE_VALUE (link);
10076 if (TREE_CODE (elt) == REAL_CST)
10077 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10078 inner);
10079 else if (TREE_CODE (elt) == FIXED_CST)
10080 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10081 inner);
10082 else
10083 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10084 TREE_INT_CST_HIGH (elt),
10085 inner);
10088 /* Initialize remaining elements to 0. */
10089 for (; i < units; ++i)
10090 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10092 return gen_rtx_CONST_VECTOR (mode, v);
10094 #include "gt-expr.h"