Update my e-mail address for new employer.
[official-gcc.git] / gcc / expr.c
blob8c4b03dfa0193f20afa274c6dcb4c8481fe30912
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "machmode.h"
28 #include "real.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "reload.h"
44 #include "output.h"
45 #include "typeclass.h"
46 #include "toplev.h"
47 #include "ggc.h"
48 #include "langhooks.h"
49 #include "intl.h"
50 #include "tm_p.h"
51 #include "tree-iterator.h"
52 #include "tree-pass.h"
53 #include "tree-flow.h"
54 #include "target.h"
55 #include "timevar.h"
57 /* Decide whether a function's arguments should be processed
58 from first to last or from last to first.
60 They should if the stack and args grow in opposite directions, but
61 only if we have push insns. */
63 #ifdef PUSH_ROUNDING
65 #ifndef PUSH_ARGS_REVERSED
66 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
67 #define PUSH_ARGS_REVERSED /* If it's last to first. */
68 #endif
69 #endif
71 #endif
73 #ifndef STACK_PUSH_CODE
74 #ifdef STACK_GROWS_DOWNWARD
75 #define STACK_PUSH_CODE PRE_DEC
76 #else
77 #define STACK_PUSH_CODE PRE_INC
78 #endif
79 #endif
82 /* If this is nonzero, we do not bother generating VOLATILE
83 around volatile memory references, and we are willing to
84 output indirect addresses. If cse is to follow, we reject
85 indirect addresses so a useful potential cse is generated;
86 if it is used only once, instruction combination will produce
87 the same indirect address eventually. */
88 int cse_not_expected;
90 /* This structure is used by move_by_pieces to describe the move to
91 be performed. */
92 struct move_by_pieces
94 rtx to;
95 rtx to_addr;
96 int autinc_to;
97 int explicit_inc_to;
98 rtx from;
99 rtx from_addr;
100 int autinc_from;
101 int explicit_inc_from;
102 unsigned HOST_WIDE_INT len;
103 HOST_WIDE_INT offset;
104 int reverse;
107 /* This structure is used by store_by_pieces to describe the clear to
108 be performed. */
110 struct store_by_pieces
112 rtx to;
113 rtx to_addr;
114 int autinc_to;
115 int explicit_inc_to;
116 unsigned HOST_WIDE_INT len;
117 HOST_WIDE_INT offset;
118 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
119 void *constfundata;
120 int reverse;
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124 unsigned int,
125 unsigned int);
126 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *);
128 static bool block_move_libcall_safe_for_call_parm (void);
129 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static tree clear_storage_libcall_fn (int);
138 static rtx compress_float_constant (rtx, rtx);
139 static rtx get_subtarget (rtx);
140 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
141 HOST_WIDE_INT, enum machine_mode,
142 tree, tree, int, int);
143 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
144 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
145 tree, tree, int);
147 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
149 static int is_aligning_offset (tree, tree);
150 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
151 enum expand_modifier);
152 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
153 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
154 #ifdef PUSH_ROUNDING
155 static void emit_single_push_insn (enum machine_mode, rtx, tree);
156 #endif
157 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
158 static rtx const_vector_from_tree (tree);
159 static void write_complex_part (rtx, rtx, bool);
161 /* Record for each mode whether we can move a register directly to or
162 from an object of that mode in memory. If we can't, we won't try
163 to use that mode directly when accessing a field of that mode. */
165 static char direct_load[NUM_MACHINE_MODES];
166 static char direct_store[NUM_MACHINE_MODES];
168 /* Record for each mode whether we can float-extend from memory. */
170 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
172 /* This macro is used to determine whether move_by_pieces should be called
173 to perform a structure copy. */
174 #ifndef MOVE_BY_PIECES_P
175 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
176 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
177 < (unsigned int) MOVE_RATIO)
178 #endif
180 /* This macro is used to determine whether clear_by_pieces should be
181 called to clear storage. */
182 #ifndef CLEAR_BY_PIECES_P
183 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
185 < (unsigned int) CLEAR_RATIO)
186 #endif
188 /* This macro is used to determine whether store_by_pieces should be
189 called to "memset" storage with byte values other than zero, or
190 to "memcpy" storage when the source is a constant string. */
191 #ifndef STORE_BY_PIECES_P
192 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
193 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
194 < (unsigned int) MOVE_RATIO)
195 #endif
197 /* This array records the insn_code of insns to perform block moves. */
198 enum insn_code movmem_optab[NUM_MACHINE_MODES];
200 /* This array records the insn_code of insns to perform block sets. */
201 enum insn_code setmem_optab[NUM_MACHINE_MODES];
203 /* These arrays record the insn_code of three different kinds of insns
204 to perform block compares. */
205 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
206 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
207 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
209 /* Synchronization primitives. */
210 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
211 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
212 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
213 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
214 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
215 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
216 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
217 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
229 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
230 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
231 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
233 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
235 #ifndef SLOW_UNALIGNED_ACCESS
236 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
237 #endif
239 /* This is run once per compilation to set up which modes can be used
240 directly in memory and to initialize the block move optab. */
242 void
243 init_expr_once (void)
245 rtx insn, pat;
246 enum machine_mode mode;
247 int num_clobbers;
248 rtx mem, mem1;
249 rtx reg;
251 /* Try indexing by frame ptr and try by stack ptr.
252 It is known that on the Convex the stack ptr isn't a valid index.
253 With luck, one or the other is valid on any machine. */
254 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
255 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
257 /* A scratch register we can modify in-place below to avoid
258 useless RTL allocations. */
259 reg = gen_rtx_REG (VOIDmode, -1);
261 insn = rtx_alloc (INSN);
262 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
263 PATTERN (insn) = pat;
265 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
266 mode = (enum machine_mode) ((int) mode + 1))
268 int regno;
270 direct_load[(int) mode] = direct_store[(int) mode] = 0;
271 PUT_MODE (mem, mode);
272 PUT_MODE (mem1, mode);
273 PUT_MODE (reg, mode);
275 /* See if there is some register that can be used in this mode and
276 directly loaded or stored from memory. */
278 if (mode != VOIDmode && mode != BLKmode)
279 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
280 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
281 regno++)
283 if (! HARD_REGNO_MODE_OK (regno, mode))
284 continue;
286 REGNO (reg) = regno;
288 SET_SRC (pat) = mem;
289 SET_DEST (pat) = reg;
290 if (recog (pat, insn, &num_clobbers) >= 0)
291 direct_load[(int) mode] = 1;
293 SET_SRC (pat) = mem1;
294 SET_DEST (pat) = reg;
295 if (recog (pat, insn, &num_clobbers) >= 0)
296 direct_load[(int) mode] = 1;
298 SET_SRC (pat) = reg;
299 SET_DEST (pat) = mem;
300 if (recog (pat, insn, &num_clobbers) >= 0)
301 direct_store[(int) mode] = 1;
303 SET_SRC (pat) = reg;
304 SET_DEST (pat) = mem1;
305 if (recog (pat, insn, &num_clobbers) >= 0)
306 direct_store[(int) mode] = 1;
310 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
312 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
313 mode = GET_MODE_WIDER_MODE (mode))
315 enum machine_mode srcmode;
316 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
317 srcmode = GET_MODE_WIDER_MODE (srcmode))
319 enum insn_code ic;
321 ic = can_extend_p (mode, srcmode, 0);
322 if (ic == CODE_FOR_nothing)
323 continue;
325 PUT_MODE (mem, srcmode);
327 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
328 float_extend_from_mem[mode][srcmode] = true;
333 /* This is run at the start of compiling a function. */
335 void
336 init_expr (void)
338 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
341 /* Copy data from FROM to TO, where the machine modes are not the same.
342 Both modes may be integer, or both may be floating.
343 UNSIGNEDP should be nonzero if FROM is an unsigned type.
344 This causes zero-extension instead of sign-extension. */
346 void
347 convert_move (rtx to, rtx from, int unsignedp)
349 enum machine_mode to_mode = GET_MODE (to);
350 enum machine_mode from_mode = GET_MODE (from);
351 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
352 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
353 enum insn_code code;
354 rtx libcall;
356 /* rtx code for making an equivalent value. */
357 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
358 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
361 gcc_assert (to_real == from_real);
363 /* If the source and destination are already the same, then there's
364 nothing to do. */
365 if (to == from)
366 return;
368 /* If FROM is a SUBREG that indicates that we have already done at least
369 the required extension, strip it. We don't handle such SUBREGs as
370 TO here. */
372 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
373 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
374 >= GET_MODE_SIZE (to_mode))
375 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
376 from = gen_lowpart (to_mode, from), from_mode = to_mode;
378 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
380 if (to_mode == from_mode
381 || (from_mode == VOIDmode && CONSTANT_P (from)))
383 emit_move_insn (to, from);
384 return;
387 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
389 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
391 if (VECTOR_MODE_P (to_mode))
392 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
393 else
394 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
396 emit_move_insn (to, from);
397 return;
400 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
402 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
403 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
404 return;
407 if (to_real)
409 rtx value, insns;
410 convert_optab tab;
412 gcc_assert ((GET_MODE_PRECISION (from_mode)
413 != GET_MODE_PRECISION (to_mode))
414 || (DECIMAL_FLOAT_MODE_P (from_mode)
415 != DECIMAL_FLOAT_MODE_P (to_mode)));
417 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
418 /* Conversion between decimal float and binary float, same size. */
419 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
420 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
421 tab = sext_optab;
422 else
423 tab = trunc_optab;
425 /* Try converting directly if the insn is supported. */
427 code = tab->handlers[to_mode][from_mode].insn_code;
428 if (code != CODE_FOR_nothing)
430 emit_unop_insn (code, to, from,
431 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
432 return;
435 /* Otherwise use a libcall. */
436 libcall = tab->handlers[to_mode][from_mode].libfunc;
438 /* Is this conversion implemented yet? */
439 gcc_assert (libcall);
441 start_sequence ();
442 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
443 1, from, from_mode);
444 insns = get_insns ();
445 end_sequence ();
446 emit_libcall_block (insns, to, value,
447 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
448 from)
449 : gen_rtx_FLOAT_EXTEND (to_mode, from));
450 return;
453 /* Handle pointer conversion. */ /* SPEE 900220. */
454 /* Targets are expected to provide conversion insns between PxImode and
455 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
456 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
458 enum machine_mode full_mode
459 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
461 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
462 != CODE_FOR_nothing);
464 if (full_mode != from_mode)
465 from = convert_to_mode (full_mode, from, unsignedp);
466 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
467 to, from, UNKNOWN);
468 return;
470 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
472 rtx new_from;
473 enum machine_mode full_mode
474 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
476 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
477 != CODE_FOR_nothing);
479 if (to_mode == full_mode)
481 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
482 to, from, UNKNOWN);
483 return;
486 new_from = gen_reg_rtx (full_mode);
487 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
488 new_from, from, UNKNOWN);
490 /* else proceed to integer conversions below. */
491 from_mode = full_mode;
492 from = new_from;
495 /* Now both modes are integers. */
497 /* Handle expanding beyond a word. */
498 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
499 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
501 rtx insns;
502 rtx lowpart;
503 rtx fill_value;
504 rtx lowfrom;
505 int i;
506 enum machine_mode lowpart_mode;
507 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
509 /* Try converting directly if the insn is supported. */
510 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
511 != CODE_FOR_nothing)
513 /* If FROM is a SUBREG, put it into a register. Do this
514 so that we always generate the same set of insns for
515 better cse'ing; if an intermediate assignment occurred,
516 we won't be doing the operation directly on the SUBREG. */
517 if (optimize > 0 && GET_CODE (from) == SUBREG)
518 from = force_reg (from_mode, from);
519 emit_unop_insn (code, to, from, equiv_code);
520 return;
522 /* Next, try converting via full word. */
523 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
524 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
525 != CODE_FOR_nothing))
527 if (REG_P (to))
529 if (reg_overlap_mentioned_p (to, from))
530 from = force_reg (from_mode, from);
531 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
533 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
534 emit_unop_insn (code, to,
535 gen_lowpart (word_mode, to), equiv_code);
536 return;
539 /* No special multiword conversion insn; do it by hand. */
540 start_sequence ();
542 /* Since we will turn this into a no conflict block, we must ensure
543 that the source does not overlap the target. */
545 if (reg_overlap_mentioned_p (to, from))
546 from = force_reg (from_mode, from);
548 /* Get a copy of FROM widened to a word, if necessary. */
549 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
550 lowpart_mode = word_mode;
551 else
552 lowpart_mode = from_mode;
554 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
556 lowpart = gen_lowpart (lowpart_mode, to);
557 emit_move_insn (lowpart, lowfrom);
559 /* Compute the value to put in each remaining word. */
560 if (unsignedp)
561 fill_value = const0_rtx;
562 else
564 #ifdef HAVE_slt
565 if (HAVE_slt
566 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
567 && STORE_FLAG_VALUE == -1)
569 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
570 lowpart_mode, 0);
571 fill_value = gen_reg_rtx (word_mode);
572 emit_insn (gen_slt (fill_value));
574 else
575 #endif
577 fill_value
578 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
579 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
580 NULL_RTX, 0);
581 fill_value = convert_to_mode (word_mode, fill_value, 1);
585 /* Fill the remaining words. */
586 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
588 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
589 rtx subword = operand_subword (to, index, 1, to_mode);
591 gcc_assert (subword);
593 if (fill_value != subword)
594 emit_move_insn (subword, fill_value);
597 insns = get_insns ();
598 end_sequence ();
600 emit_no_conflict_block (insns, to, from, NULL_RTX,
601 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
602 return;
605 /* Truncating multi-word to a word or less. */
606 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
607 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
609 if (!((MEM_P (from)
610 && ! MEM_VOLATILE_P (from)
611 && direct_load[(int) to_mode]
612 && ! mode_dependent_address_p (XEXP (from, 0)))
613 || REG_P (from)
614 || GET_CODE (from) == SUBREG))
615 from = force_reg (from_mode, from);
616 convert_move (to, gen_lowpart (word_mode, from), 0);
617 return;
620 /* Now follow all the conversions between integers
621 no more than a word long. */
623 /* For truncation, usually we can just refer to FROM in a narrower mode. */
624 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
625 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
626 GET_MODE_BITSIZE (from_mode)))
628 if (!((MEM_P (from)
629 && ! MEM_VOLATILE_P (from)
630 && direct_load[(int) to_mode]
631 && ! mode_dependent_address_p (XEXP (from, 0)))
632 || REG_P (from)
633 || GET_CODE (from) == SUBREG))
634 from = force_reg (from_mode, from);
635 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
636 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
637 from = copy_to_reg (from);
638 emit_move_insn (to, gen_lowpart (to_mode, from));
639 return;
642 /* Handle extension. */
643 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
645 /* Convert directly if that works. */
646 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
647 != CODE_FOR_nothing)
649 emit_unop_insn (code, to, from, equiv_code);
650 return;
652 else
654 enum machine_mode intermediate;
655 rtx tmp;
656 tree shift_amount;
658 /* Search for a mode to convert via. */
659 for (intermediate = from_mode; intermediate != VOIDmode;
660 intermediate = GET_MODE_WIDER_MODE (intermediate))
661 if (((can_extend_p (to_mode, intermediate, unsignedp)
662 != CODE_FOR_nothing)
663 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
664 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
665 GET_MODE_BITSIZE (intermediate))))
666 && (can_extend_p (intermediate, from_mode, unsignedp)
667 != CODE_FOR_nothing))
669 convert_move (to, convert_to_mode (intermediate, from,
670 unsignedp), unsignedp);
671 return;
674 /* No suitable intermediate mode.
675 Generate what we need with shifts. */
676 shift_amount = build_int_cst (NULL_TREE,
677 GET_MODE_BITSIZE (to_mode)
678 - GET_MODE_BITSIZE (from_mode));
679 from = gen_lowpart (to_mode, force_reg (from_mode, from));
680 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
681 to, unsignedp);
682 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
683 to, unsignedp);
684 if (tmp != to)
685 emit_move_insn (to, tmp);
686 return;
690 /* Support special truncate insns for certain modes. */
691 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
693 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
694 to, from, UNKNOWN);
695 return;
698 /* Handle truncation of volatile memrefs, and so on;
699 the things that couldn't be truncated directly,
700 and for which there was no special instruction.
702 ??? Code above formerly short-circuited this, for most integer
703 mode pairs, with a force_reg in from_mode followed by a recursive
704 call to this routine. Appears always to have been wrong. */
705 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
707 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
708 emit_move_insn (to, temp);
709 return;
712 /* Mode combination is not recognized. */
713 gcc_unreachable ();
716 /* Return an rtx for a value that would result
717 from converting X to mode MODE.
718 Both X and MODE may be floating, or both integer.
719 UNSIGNEDP is nonzero if X is an unsigned value.
720 This can be done by referring to a part of X in place
721 or by copying to a new temporary with conversion. */
724 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
726 return convert_modes (mode, VOIDmode, x, unsignedp);
729 /* Return an rtx for a value that would result
730 from converting X from mode OLDMODE to mode MODE.
731 Both modes may be floating, or both integer.
732 UNSIGNEDP is nonzero if X is an unsigned value.
734 This can be done by referring to a part of X in place
735 or by copying to a new temporary with conversion.
737 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
740 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
742 rtx temp;
744 /* If FROM is a SUBREG that indicates that we have already done at least
745 the required extension, strip it. */
747 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
748 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
749 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
750 x = gen_lowpart (mode, x);
752 if (GET_MODE (x) != VOIDmode)
753 oldmode = GET_MODE (x);
755 if (mode == oldmode)
756 return x;
758 /* There is one case that we must handle specially: If we are converting
759 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
760 we are to interpret the constant as unsigned, gen_lowpart will do
761 the wrong if the constant appears negative. What we want to do is
762 make the high-order word of the constant zero, not all ones. */
764 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
765 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
766 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
768 HOST_WIDE_INT val = INTVAL (x);
770 if (oldmode != VOIDmode
771 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
773 int width = GET_MODE_BITSIZE (oldmode);
775 /* We need to zero extend VAL. */
776 val &= ((HOST_WIDE_INT) 1 << width) - 1;
779 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
782 /* We can do this with a gen_lowpart if both desired and current modes
783 are integer, and this is either a constant integer, a register, or a
784 non-volatile MEM. Except for the constant case where MODE is no
785 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
787 if ((GET_CODE (x) == CONST_INT
788 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
789 || (GET_MODE_CLASS (mode) == MODE_INT
790 && GET_MODE_CLASS (oldmode) == MODE_INT
791 && (GET_CODE (x) == CONST_DOUBLE
792 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
793 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
794 && direct_load[(int) mode])
795 || (REG_P (x)
796 && (! HARD_REGISTER_P (x)
797 || HARD_REGNO_MODE_OK (REGNO (x), mode))
798 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
799 GET_MODE_BITSIZE (GET_MODE (x)))))))))
801 /* ?? If we don't know OLDMODE, we have to assume here that
802 X does not need sign- or zero-extension. This may not be
803 the case, but it's the best we can do. */
804 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
805 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
807 HOST_WIDE_INT val = INTVAL (x);
808 int width = GET_MODE_BITSIZE (oldmode);
810 /* We must sign or zero-extend in this case. Start by
811 zero-extending, then sign extend if we need to. */
812 val &= ((HOST_WIDE_INT) 1 << width) - 1;
813 if (! unsignedp
814 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
815 val |= (HOST_WIDE_INT) (-1) << width;
817 return gen_int_mode (val, mode);
820 return gen_lowpart (mode, x);
823 /* Converting from integer constant into mode is always equivalent to an
824 subreg operation. */
825 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
827 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
828 return simplify_gen_subreg (mode, x, oldmode, 0);
831 temp = gen_reg_rtx (mode);
832 convert_move (temp, x, unsignedp);
833 return temp;
836 /* STORE_MAX_PIECES is the number of bytes at a time that we can
837 store efficiently. Due to internal GCC limitations, this is
838 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
839 for an immediate constant. */
841 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
843 /* Determine whether the LEN bytes can be moved by using several move
844 instructions. Return nonzero if a call to move_by_pieces should
845 succeed. */
848 can_move_by_pieces (unsigned HOST_WIDE_INT len,
849 unsigned int align ATTRIBUTE_UNUSED)
851 return MOVE_BY_PIECES_P (len, align);
854 /* Generate several move instructions to copy LEN bytes from block FROM to
855 block TO. (These are MEM rtx's with BLKmode).
857 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
858 used to push FROM to the stack.
860 ALIGN is maximum stack alignment we can assume.
862 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
863 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
864 stpcpy. */
867 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
868 unsigned int align, int endp)
870 struct move_by_pieces data;
871 rtx to_addr, from_addr = XEXP (from, 0);
872 unsigned int max_size = MOVE_MAX_PIECES + 1;
873 enum machine_mode mode = VOIDmode, tmode;
874 enum insn_code icode;
876 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
878 data.offset = 0;
879 data.from_addr = from_addr;
880 if (to)
882 to_addr = XEXP (to, 0);
883 data.to = to;
884 data.autinc_to
885 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
886 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
887 data.reverse
888 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
890 else
892 to_addr = NULL_RTX;
893 data.to = NULL_RTX;
894 data.autinc_to = 1;
895 #ifdef STACK_GROWS_DOWNWARD
896 data.reverse = 1;
897 #else
898 data.reverse = 0;
899 #endif
901 data.to_addr = to_addr;
902 data.from = from;
903 data.autinc_from
904 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
905 || GET_CODE (from_addr) == POST_INC
906 || GET_CODE (from_addr) == POST_DEC);
908 data.explicit_inc_from = 0;
909 data.explicit_inc_to = 0;
910 if (data.reverse) data.offset = len;
911 data.len = len;
913 /* If copying requires more than two move insns,
914 copy addresses to registers (to make displacements shorter)
915 and use post-increment if available. */
916 if (!(data.autinc_from && data.autinc_to)
917 && move_by_pieces_ninsns (len, align, max_size) > 2)
919 /* Find the mode of the largest move... */
920 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
921 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
922 if (GET_MODE_SIZE (tmode) < max_size)
923 mode = tmode;
925 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
927 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
928 data.autinc_from = 1;
929 data.explicit_inc_from = -1;
931 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
933 data.from_addr = copy_addr_to_reg (from_addr);
934 data.autinc_from = 1;
935 data.explicit_inc_from = 1;
937 if (!data.autinc_from && CONSTANT_P (from_addr))
938 data.from_addr = copy_addr_to_reg (from_addr);
939 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
941 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
942 data.autinc_to = 1;
943 data.explicit_inc_to = -1;
945 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
947 data.to_addr = copy_addr_to_reg (to_addr);
948 data.autinc_to = 1;
949 data.explicit_inc_to = 1;
951 if (!data.autinc_to && CONSTANT_P (to_addr))
952 data.to_addr = copy_addr_to_reg (to_addr);
955 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
956 if (align >= GET_MODE_ALIGNMENT (tmode))
957 align = GET_MODE_ALIGNMENT (tmode);
958 else
960 enum machine_mode xmode;
962 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
963 tmode != VOIDmode;
964 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
965 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
966 || SLOW_UNALIGNED_ACCESS (tmode, align))
967 break;
969 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
972 /* First move what we can in the largest integer mode, then go to
973 successively smaller modes. */
975 while (max_size > 1)
977 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
978 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
979 if (GET_MODE_SIZE (tmode) < max_size)
980 mode = tmode;
982 if (mode == VOIDmode)
983 break;
985 icode = mov_optab->handlers[(int) mode].insn_code;
986 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
987 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
989 max_size = GET_MODE_SIZE (mode);
992 /* The code above should have handled everything. */
993 gcc_assert (!data.len);
995 if (endp)
997 rtx to1;
999 gcc_assert (!data.reverse);
1000 if (data.autinc_to)
1002 if (endp == 2)
1004 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1005 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1006 else
1007 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1008 -1));
1010 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1011 data.offset);
1013 else
1015 if (endp == 2)
1016 --data.offset;
1017 to1 = adjust_address (data.to, QImode, data.offset);
1019 return to1;
1021 else
1022 return data.to;
1025 /* Return number of insns required to move L bytes by pieces.
1026 ALIGN (in bits) is maximum alignment we can assume. */
1028 static unsigned HOST_WIDE_INT
1029 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1030 unsigned int max_size)
1032 unsigned HOST_WIDE_INT n_insns = 0;
1033 enum machine_mode tmode;
1035 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1036 if (align >= GET_MODE_ALIGNMENT (tmode))
1037 align = GET_MODE_ALIGNMENT (tmode);
1038 else
1040 enum machine_mode tmode, xmode;
1042 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1043 tmode != VOIDmode;
1044 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1045 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1046 || SLOW_UNALIGNED_ACCESS (tmode, align))
1047 break;
1049 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1052 while (max_size > 1)
1054 enum machine_mode mode = VOIDmode;
1055 enum insn_code icode;
1057 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1058 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1059 if (GET_MODE_SIZE (tmode) < max_size)
1060 mode = tmode;
1062 if (mode == VOIDmode)
1063 break;
1065 icode = mov_optab->handlers[(int) mode].insn_code;
1066 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1067 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1069 max_size = GET_MODE_SIZE (mode);
1072 gcc_assert (!l);
1073 return n_insns;
1076 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1077 with move instructions for mode MODE. GENFUN is the gen_... function
1078 to make a move insn for that mode. DATA has all the other info. */
1080 static void
1081 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1082 struct move_by_pieces *data)
1084 unsigned int size = GET_MODE_SIZE (mode);
1085 rtx to1 = NULL_RTX, from1;
1087 while (data->len >= size)
1089 if (data->reverse)
1090 data->offset -= size;
1092 if (data->to)
1094 if (data->autinc_to)
1095 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1096 data->offset);
1097 else
1098 to1 = adjust_address (data->to, mode, data->offset);
1101 if (data->autinc_from)
1102 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1103 data->offset);
1104 else
1105 from1 = adjust_address (data->from, mode, data->offset);
1107 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1108 emit_insn (gen_add2_insn (data->to_addr,
1109 GEN_INT (-(HOST_WIDE_INT)size)));
1110 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1111 emit_insn (gen_add2_insn (data->from_addr,
1112 GEN_INT (-(HOST_WIDE_INT)size)));
1114 if (data->to)
1115 emit_insn ((*genfun) (to1, from1));
1116 else
1118 #ifdef PUSH_ROUNDING
1119 emit_single_push_insn (mode, from1, NULL);
1120 #else
1121 gcc_unreachable ();
1122 #endif
1125 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1126 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1127 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1128 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1130 if (! data->reverse)
1131 data->offset += size;
1133 data->len -= size;
1137 /* Emit code to move a block Y to a block X. This may be done with
1138 string-move instructions, with multiple scalar move instructions,
1139 or with a library call.
1141 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1142 SIZE is an rtx that says how long they are.
1143 ALIGN is the maximum alignment we can assume they have.
1144 METHOD describes what kind of copy this is, and what mechanisms may be used.
1146 Return the address of the new block, if memcpy is called and returns it,
1147 0 otherwise. */
1150 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1151 unsigned int expected_align, HOST_WIDE_INT expected_size)
1153 bool may_use_call;
1154 rtx retval = 0;
1155 unsigned int align;
1157 switch (method)
1159 case BLOCK_OP_NORMAL:
1160 case BLOCK_OP_TAILCALL:
1161 may_use_call = true;
1162 break;
1164 case BLOCK_OP_CALL_PARM:
1165 may_use_call = block_move_libcall_safe_for_call_parm ();
1167 /* Make inhibit_defer_pop nonzero around the library call
1168 to force it to pop the arguments right away. */
1169 NO_DEFER_POP;
1170 break;
1172 case BLOCK_OP_NO_LIBCALL:
1173 may_use_call = false;
1174 break;
1176 default:
1177 gcc_unreachable ();
1180 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1182 gcc_assert (MEM_P (x));
1183 gcc_assert (MEM_P (y));
1184 gcc_assert (size);
1186 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1187 block copy is more efficient for other large modes, e.g. DCmode. */
1188 x = adjust_address (x, BLKmode, 0);
1189 y = adjust_address (y, BLKmode, 0);
1191 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1192 can be incorrect is coming from __builtin_memcpy. */
1193 if (GET_CODE (size) == CONST_INT)
1195 if (INTVAL (size) == 0)
1196 return 0;
1198 x = shallow_copy_rtx (x);
1199 y = shallow_copy_rtx (y);
1200 set_mem_size (x, size);
1201 set_mem_size (y, size);
1204 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1205 move_by_pieces (x, y, INTVAL (size), align, 0);
1206 else if (emit_block_move_via_movmem (x, y, size, align,
1207 expected_align, expected_size))
1209 else if (may_use_call)
1210 retval = emit_block_move_via_libcall (x, y, size,
1211 method == BLOCK_OP_TAILCALL);
1212 else
1213 emit_block_move_via_loop (x, y, size, align);
1215 if (method == BLOCK_OP_CALL_PARM)
1216 OK_DEFER_POP;
1218 return retval;
1222 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1224 return emit_block_move_hints (x, y, size, method, 0, -1);
1227 /* A subroutine of emit_block_move. Returns true if calling the
1228 block move libcall will not clobber any parameters which may have
1229 already been placed on the stack. */
1231 static bool
1232 block_move_libcall_safe_for_call_parm (void)
1234 /* If arguments are pushed on the stack, then they're safe. */
1235 if (PUSH_ARGS)
1236 return true;
1238 /* If registers go on the stack anyway, any argument is sure to clobber
1239 an outgoing argument. */
1240 #if defined (REG_PARM_STACK_SPACE)
1241 if (OUTGOING_REG_PARM_STACK_SPACE)
1243 tree fn;
1244 fn = emit_block_move_libcall_fn (false);
1245 if (REG_PARM_STACK_SPACE (fn) != 0)
1246 return false;
1248 #endif
1250 /* If any argument goes in memory, then it might clobber an outgoing
1251 argument. */
1253 CUMULATIVE_ARGS args_so_far;
1254 tree fn, arg;
1256 fn = emit_block_move_libcall_fn (false);
1257 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1259 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1260 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1262 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1263 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1264 if (!tmp || !REG_P (tmp))
1265 return false;
1266 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1267 return false;
1268 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1271 return true;
1274 /* A subroutine of emit_block_move. Expand a movmem pattern;
1275 return true if successful. */
1277 static bool
1278 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1279 unsigned int expected_align, HOST_WIDE_INT expected_size)
1281 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1282 int save_volatile_ok = volatile_ok;
1283 enum machine_mode mode;
1285 if (expected_align < align)
1286 expected_align = align;
1288 /* Since this is a move insn, we don't care about volatility. */
1289 volatile_ok = 1;
1291 /* Try the most limited insn first, because there's no point
1292 including more than one in the machine description unless
1293 the more limited one has some advantage. */
1295 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1296 mode = GET_MODE_WIDER_MODE (mode))
1298 enum insn_code code = movmem_optab[(int) mode];
1299 insn_operand_predicate_fn pred;
1301 if (code != CODE_FOR_nothing
1302 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1303 here because if SIZE is less than the mode mask, as it is
1304 returned by the macro, it will definitely be less than the
1305 actual mode mask. */
1306 && ((GET_CODE (size) == CONST_INT
1307 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1308 <= (GET_MODE_MASK (mode) >> 1)))
1309 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1310 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1311 || (*pred) (x, BLKmode))
1312 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1313 || (*pred) (y, BLKmode))
1314 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1315 || (*pred) (opalign, VOIDmode)))
1317 rtx op2;
1318 rtx last = get_last_insn ();
1319 rtx pat;
1321 op2 = convert_to_mode (mode, size, 1);
1322 pred = insn_data[(int) code].operand[2].predicate;
1323 if (pred != 0 && ! (*pred) (op2, mode))
1324 op2 = copy_to_mode_reg (mode, op2);
1326 /* ??? When called via emit_block_move_for_call, it'd be
1327 nice if there were some way to inform the backend, so
1328 that it doesn't fail the expansion because it thinks
1329 emitting the libcall would be more efficient. */
1331 if (insn_data[(int) code].n_operands == 4)
1332 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1333 else
1334 pat = GEN_FCN ((int) code) (x, y, op2, opalign,
1335 GEN_INT (expected_align),
1336 GEN_INT (expected_size));
1337 if (pat)
1339 emit_insn (pat);
1340 volatile_ok = save_volatile_ok;
1341 return true;
1343 else
1344 delete_insns_since (last);
1348 volatile_ok = save_volatile_ok;
1349 return false;
1352 /* A subroutine of emit_block_move. Expand a call to memcpy.
1353 Return the return value from memcpy, 0 otherwise. */
1356 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1358 rtx dst_addr, src_addr;
1359 tree call_expr, fn, src_tree, dst_tree, size_tree;
1360 enum machine_mode size_mode;
1361 rtx retval;
1363 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1364 pseudos. We can then place those new pseudos into a VAR_DECL and
1365 use them later. */
1367 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1368 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1370 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1371 src_addr = convert_memory_address (ptr_mode, src_addr);
1373 dst_tree = make_tree (ptr_type_node, dst_addr);
1374 src_tree = make_tree (ptr_type_node, src_addr);
1376 size_mode = TYPE_MODE (sizetype);
1378 size = convert_to_mode (size_mode, size, 1);
1379 size = copy_to_mode_reg (size_mode, size);
1381 /* It is incorrect to use the libcall calling conventions to call
1382 memcpy in this context. This could be a user call to memcpy and
1383 the user may wish to examine the return value from memcpy. For
1384 targets where libcalls and normal calls have different conventions
1385 for returning pointers, we could end up generating incorrect code. */
1387 size_tree = make_tree (sizetype, size);
1389 fn = emit_block_move_libcall_fn (true);
1390 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1391 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1393 retval = expand_normal (call_expr);
1395 return retval;
1398 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1399 for the function we use for block copies. The first time FOR_CALL
1400 is true, we call assemble_external. */
1402 static GTY(()) tree block_move_fn;
1404 void
1405 init_block_move_fn (const char *asmspec)
1407 if (!block_move_fn)
1409 tree args, fn;
1411 fn = get_identifier ("memcpy");
1412 args = build_function_type_list (ptr_type_node, ptr_type_node,
1413 const_ptr_type_node, sizetype,
1414 NULL_TREE);
1416 fn = build_decl (FUNCTION_DECL, fn, args);
1417 DECL_EXTERNAL (fn) = 1;
1418 TREE_PUBLIC (fn) = 1;
1419 DECL_ARTIFICIAL (fn) = 1;
1420 TREE_NOTHROW (fn) = 1;
1421 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1422 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1424 block_move_fn = fn;
1427 if (asmspec)
1428 set_user_assembler_name (block_move_fn, asmspec);
1431 static tree
1432 emit_block_move_libcall_fn (int for_call)
1434 static bool emitted_extern;
1436 if (!block_move_fn)
1437 init_block_move_fn (NULL);
1439 if (for_call && !emitted_extern)
1441 emitted_extern = true;
1442 make_decl_rtl (block_move_fn);
1443 assemble_external (block_move_fn);
1446 return block_move_fn;
1449 /* A subroutine of emit_block_move. Copy the data via an explicit
1450 loop. This is used only when libcalls are forbidden. */
1451 /* ??? It'd be nice to copy in hunks larger than QImode. */
1453 static void
1454 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1455 unsigned int align ATTRIBUTE_UNUSED)
1457 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1458 enum machine_mode iter_mode;
1460 iter_mode = GET_MODE (size);
1461 if (iter_mode == VOIDmode)
1462 iter_mode = word_mode;
1464 top_label = gen_label_rtx ();
1465 cmp_label = gen_label_rtx ();
1466 iter = gen_reg_rtx (iter_mode);
1468 emit_move_insn (iter, const0_rtx);
1470 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1471 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1472 do_pending_stack_adjust ();
1474 emit_jump (cmp_label);
1475 emit_label (top_label);
1477 tmp = convert_modes (Pmode, iter_mode, iter, true);
1478 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1479 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1480 x = change_address (x, QImode, x_addr);
1481 y = change_address (y, QImode, y_addr);
1483 emit_move_insn (x, y);
1485 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1486 true, OPTAB_LIB_WIDEN);
1487 if (tmp != iter)
1488 emit_move_insn (iter, tmp);
1490 emit_label (cmp_label);
1492 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1493 true, top_label);
1496 /* Copy all or part of a value X into registers starting at REGNO.
1497 The number of registers to be filled is NREGS. */
1499 void
1500 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1502 int i;
1503 #ifdef HAVE_load_multiple
1504 rtx pat;
1505 rtx last;
1506 #endif
1508 if (nregs == 0)
1509 return;
1511 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1512 x = validize_mem (force_const_mem (mode, x));
1514 /* See if the machine can do this with a load multiple insn. */
1515 #ifdef HAVE_load_multiple
1516 if (HAVE_load_multiple)
1518 last = get_last_insn ();
1519 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1520 GEN_INT (nregs));
1521 if (pat)
1523 emit_insn (pat);
1524 return;
1526 else
1527 delete_insns_since (last);
1529 #endif
1531 for (i = 0; i < nregs; i++)
1532 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1533 operand_subword_force (x, i, mode));
1536 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1537 The number of registers to be filled is NREGS. */
1539 void
1540 move_block_from_reg (int regno, rtx x, int nregs)
1542 int i;
1544 if (nregs == 0)
1545 return;
1547 /* See if the machine can do this with a store multiple insn. */
1548 #ifdef HAVE_store_multiple
1549 if (HAVE_store_multiple)
1551 rtx last = get_last_insn ();
1552 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1553 GEN_INT (nregs));
1554 if (pat)
1556 emit_insn (pat);
1557 return;
1559 else
1560 delete_insns_since (last);
1562 #endif
1564 for (i = 0; i < nregs; i++)
1566 rtx tem = operand_subword (x, i, 1, BLKmode);
1568 gcc_assert (tem);
1570 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1574 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1575 ORIG, where ORIG is a non-consecutive group of registers represented by
1576 a PARALLEL. The clone is identical to the original except in that the
1577 original set of registers is replaced by a new set of pseudo registers.
1578 The new set has the same modes as the original set. */
1581 gen_group_rtx (rtx orig)
1583 int i, length;
1584 rtx *tmps;
1586 gcc_assert (GET_CODE (orig) == PARALLEL);
1588 length = XVECLEN (orig, 0);
1589 tmps = alloca (sizeof (rtx) * length);
1591 /* Skip a NULL entry in first slot. */
1592 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1594 if (i)
1595 tmps[0] = 0;
1597 for (; i < length; i++)
1599 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1600 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1602 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1605 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1608 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1609 except that values are placed in TMPS[i], and must later be moved
1610 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1612 static void
1613 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1615 rtx src;
1616 int start, i;
1617 enum machine_mode m = GET_MODE (orig_src);
1619 gcc_assert (GET_CODE (dst) == PARALLEL);
1621 if (m != VOIDmode
1622 && !SCALAR_INT_MODE_P (m)
1623 && !MEM_P (orig_src)
1624 && GET_CODE (orig_src) != CONCAT)
1626 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1627 if (imode == BLKmode)
1628 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1629 else
1630 src = gen_reg_rtx (imode);
1631 if (imode != BLKmode)
1632 src = gen_lowpart (GET_MODE (orig_src), src);
1633 emit_move_insn (src, orig_src);
1634 /* ...and back again. */
1635 if (imode != BLKmode)
1636 src = gen_lowpart (imode, src);
1637 emit_group_load_1 (tmps, dst, src, type, ssize);
1638 return;
1641 /* Check for a NULL entry, used to indicate that the parameter goes
1642 both on the stack and in registers. */
1643 if (XEXP (XVECEXP (dst, 0, 0), 0))
1644 start = 0;
1645 else
1646 start = 1;
1648 /* Process the pieces. */
1649 for (i = start; i < XVECLEN (dst, 0); i++)
1651 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1652 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1653 unsigned int bytelen = GET_MODE_SIZE (mode);
1654 int shift = 0;
1656 /* Handle trailing fragments that run over the size of the struct. */
1657 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1659 /* Arrange to shift the fragment to where it belongs.
1660 extract_bit_field loads to the lsb of the reg. */
1661 if (
1662 #ifdef BLOCK_REG_PADDING
1663 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1664 == (BYTES_BIG_ENDIAN ? upward : downward)
1665 #else
1666 BYTES_BIG_ENDIAN
1667 #endif
1669 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1670 bytelen = ssize - bytepos;
1671 gcc_assert (bytelen > 0);
1674 /* If we won't be loading directly from memory, protect the real source
1675 from strange tricks we might play; but make sure that the source can
1676 be loaded directly into the destination. */
1677 src = orig_src;
1678 if (!MEM_P (orig_src)
1679 && (!CONSTANT_P (orig_src)
1680 || (GET_MODE (orig_src) != mode
1681 && GET_MODE (orig_src) != VOIDmode)))
1683 if (GET_MODE (orig_src) == VOIDmode)
1684 src = gen_reg_rtx (mode);
1685 else
1686 src = gen_reg_rtx (GET_MODE (orig_src));
1688 emit_move_insn (src, orig_src);
1691 /* Optimize the access just a bit. */
1692 if (MEM_P (src)
1693 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1694 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1695 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1696 && bytelen == GET_MODE_SIZE (mode))
1698 tmps[i] = gen_reg_rtx (mode);
1699 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1701 else if (COMPLEX_MODE_P (mode)
1702 && GET_MODE (src) == mode
1703 && bytelen == GET_MODE_SIZE (mode))
1704 /* Let emit_move_complex do the bulk of the work. */
1705 tmps[i] = src;
1706 else if (GET_CODE (src) == CONCAT)
1708 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1709 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1711 if ((bytepos == 0 && bytelen == slen0)
1712 || (bytepos != 0 && bytepos + bytelen <= slen))
1714 /* The following assumes that the concatenated objects all
1715 have the same size. In this case, a simple calculation
1716 can be used to determine the object and the bit field
1717 to be extracted. */
1718 tmps[i] = XEXP (src, bytepos / slen0);
1719 if (! CONSTANT_P (tmps[i])
1720 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1721 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1722 (bytepos % slen0) * BITS_PER_UNIT,
1723 1, NULL_RTX, mode, mode);
1725 else
1727 rtx mem;
1729 gcc_assert (!bytepos);
1730 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1731 emit_move_insn (mem, src);
1732 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1733 0, 1, NULL_RTX, mode, mode);
1736 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1737 SIMD register, which is currently broken. While we get GCC
1738 to emit proper RTL for these cases, let's dump to memory. */
1739 else if (VECTOR_MODE_P (GET_MODE (dst))
1740 && REG_P (src))
1742 int slen = GET_MODE_SIZE (GET_MODE (src));
1743 rtx mem;
1745 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1746 emit_move_insn (mem, src);
1747 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1749 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1750 && XVECLEN (dst, 0) > 1)
1751 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1752 else if (CONSTANT_P (src)
1753 || (REG_P (src) && GET_MODE (src) == mode))
1754 tmps[i] = src;
1755 else
1756 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1757 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1758 mode, mode);
1760 if (shift)
1761 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1762 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1766 /* Emit code to move a block SRC of type TYPE to a block DST,
1767 where DST is non-consecutive registers represented by a PARALLEL.
1768 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1769 if not known. */
1771 void
1772 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1774 rtx *tmps;
1775 int i;
1777 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1778 emit_group_load_1 (tmps, dst, src, type, ssize);
1780 /* Copy the extracted pieces into the proper (probable) hard regs. */
1781 for (i = 0; i < XVECLEN (dst, 0); i++)
1783 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1784 if (d == NULL)
1785 continue;
1786 emit_move_insn (d, tmps[i]);
1790 /* Similar, but load SRC into new pseudos in a format that looks like
1791 PARALLEL. This can later be fed to emit_group_move to get things
1792 in the right place. */
1795 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1797 rtvec vec;
1798 int i;
1800 vec = rtvec_alloc (XVECLEN (parallel, 0));
1801 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1803 /* Convert the vector to look just like the original PARALLEL, except
1804 with the computed values. */
1805 for (i = 0; i < XVECLEN (parallel, 0); i++)
1807 rtx e = XVECEXP (parallel, 0, i);
1808 rtx d = XEXP (e, 0);
1810 if (d)
1812 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1813 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1815 RTVEC_ELT (vec, i) = e;
1818 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1821 /* Emit code to move a block SRC to block DST, where SRC and DST are
1822 non-consecutive groups of registers, each represented by a PARALLEL. */
1824 void
1825 emit_group_move (rtx dst, rtx src)
1827 int i;
1829 gcc_assert (GET_CODE (src) == PARALLEL
1830 && GET_CODE (dst) == PARALLEL
1831 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1833 /* Skip first entry if NULL. */
1834 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1835 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1836 XEXP (XVECEXP (src, 0, i), 0));
1839 /* Move a group of registers represented by a PARALLEL into pseudos. */
1842 emit_group_move_into_temps (rtx src)
1844 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1845 int i;
1847 for (i = 0; i < XVECLEN (src, 0); i++)
1849 rtx e = XVECEXP (src, 0, i);
1850 rtx d = XEXP (e, 0);
1852 if (d)
1853 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1854 RTVEC_ELT (vec, i) = e;
1857 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1860 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1861 where SRC is non-consecutive registers represented by a PARALLEL.
1862 SSIZE represents the total size of block ORIG_DST, or -1 if not
1863 known. */
1865 void
1866 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1868 rtx *tmps, dst;
1869 int start, finish, i;
1870 enum machine_mode m = GET_MODE (orig_dst);
1872 gcc_assert (GET_CODE (src) == PARALLEL);
1874 if (!SCALAR_INT_MODE_P (m)
1875 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1877 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1878 if (imode == BLKmode)
1879 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1880 else
1881 dst = gen_reg_rtx (imode);
1882 emit_group_store (dst, src, type, ssize);
1883 if (imode != BLKmode)
1884 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1885 emit_move_insn (orig_dst, dst);
1886 return;
1889 /* Check for a NULL entry, used to indicate that the parameter goes
1890 both on the stack and in registers. */
1891 if (XEXP (XVECEXP (src, 0, 0), 0))
1892 start = 0;
1893 else
1894 start = 1;
1895 finish = XVECLEN (src, 0);
1897 tmps = alloca (sizeof (rtx) * finish);
1899 /* Copy the (probable) hard regs into pseudos. */
1900 for (i = start; i < finish; i++)
1902 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1903 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1905 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1906 emit_move_insn (tmps[i], reg);
1908 else
1909 tmps[i] = reg;
1912 /* If we won't be storing directly into memory, protect the real destination
1913 from strange tricks we might play. */
1914 dst = orig_dst;
1915 if (GET_CODE (dst) == PARALLEL)
1917 rtx temp;
1919 /* We can get a PARALLEL dst if there is a conditional expression in
1920 a return statement. In that case, the dst and src are the same,
1921 so no action is necessary. */
1922 if (rtx_equal_p (dst, src))
1923 return;
1925 /* It is unclear if we can ever reach here, but we may as well handle
1926 it. Allocate a temporary, and split this into a store/load to/from
1927 the temporary. */
1929 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1930 emit_group_store (temp, src, type, ssize);
1931 emit_group_load (dst, temp, type, ssize);
1932 return;
1934 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1936 enum machine_mode outer = GET_MODE (dst);
1937 enum machine_mode inner;
1938 HOST_WIDE_INT bytepos;
1939 bool done = false;
1940 rtx temp;
1942 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1943 dst = gen_reg_rtx (outer);
1945 /* Make life a bit easier for combine. */
1946 /* If the first element of the vector is the low part
1947 of the destination mode, use a paradoxical subreg to
1948 initialize the destination. */
1949 if (start < finish)
1951 inner = GET_MODE (tmps[start]);
1952 bytepos = subreg_lowpart_offset (inner, outer);
1953 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1955 temp = simplify_gen_subreg (outer, tmps[start],
1956 inner, 0);
1957 if (temp)
1959 emit_move_insn (dst, temp);
1960 done = true;
1961 start++;
1966 /* If the first element wasn't the low part, try the last. */
1967 if (!done
1968 && start < finish - 1)
1970 inner = GET_MODE (tmps[finish - 1]);
1971 bytepos = subreg_lowpart_offset (inner, outer);
1972 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1974 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1975 inner, 0);
1976 if (temp)
1978 emit_move_insn (dst, temp);
1979 done = true;
1980 finish--;
1985 /* Otherwise, simply initialize the result to zero. */
1986 if (!done)
1987 emit_move_insn (dst, CONST0_RTX (outer));
1990 /* Process the pieces. */
1991 for (i = start; i < finish; i++)
1993 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1994 enum machine_mode mode = GET_MODE (tmps[i]);
1995 unsigned int bytelen = GET_MODE_SIZE (mode);
1996 rtx dest = dst;
1998 /* Handle trailing fragments that run over the size of the struct. */
1999 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2001 /* store_bit_field always takes its value from the lsb.
2002 Move the fragment to the lsb if it's not already there. */
2003 if (
2004 #ifdef BLOCK_REG_PADDING
2005 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2006 == (BYTES_BIG_ENDIAN ? upward : downward)
2007 #else
2008 BYTES_BIG_ENDIAN
2009 #endif
2012 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2013 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2014 build_int_cst (NULL_TREE, shift),
2015 tmps[i], 0);
2017 bytelen = ssize - bytepos;
2020 if (GET_CODE (dst) == CONCAT)
2022 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2023 dest = XEXP (dst, 0);
2024 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2026 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2027 dest = XEXP (dst, 1);
2029 else
2031 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2032 dest = assign_stack_temp (GET_MODE (dest),
2033 GET_MODE_SIZE (GET_MODE (dest)), 0);
2034 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2035 tmps[i]);
2036 dst = dest;
2037 break;
2041 /* Optimize the access just a bit. */
2042 if (MEM_P (dest)
2043 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2044 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2045 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2046 && bytelen == GET_MODE_SIZE (mode))
2047 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2048 else
2049 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2050 mode, tmps[i]);
2053 /* Copy from the pseudo into the (probable) hard reg. */
2054 if (orig_dst != dst)
2055 emit_move_insn (orig_dst, dst);
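/* Illustrative sketch (hypothetical names): the inverse direction, gathering
   a value that arrives scattered across registers, e.g. a function result
   described by a PARALLEL, back into ordinary storage:

     if (GET_CODE (result_desc) == PARALLEL)
       emit_group_store (target_mem, result_desc, result_type,
                         int_size_in_bytes (result_type));
     else
       emit_move_insn (target_mem, result_desc);
*/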
2058 /* Generate code to copy a BLKmode object of TYPE out of a
2059 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2060 is null, a stack temporary is created. TGTBLK is returned.
2062 The purpose of this routine is to handle functions that return
2063 BLKmode structures in registers. Some machines (the PA for example)
2064 want to return all small structures in registers regardless of the
2065 structure's alignment. */
2068 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2070 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2071 rtx src = NULL, dst = NULL;
2072 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2073 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2075 if (tgtblk == 0)
2077 tgtblk = assign_temp (build_qualified_type (type,
2078 (TYPE_QUALS (type)
2079 | TYPE_QUAL_CONST)),
2080 0, 1, 1);
2081 preserve_temp_slots (tgtblk);
2084 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2085 into a new pseudo which is a full word. */
2087 if (GET_MODE (srcreg) != BLKmode
2088 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2089 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2091 /* If the structure doesn't take up a whole number of words, see whether
2092 SRCREG is padded on the left or on the right. If it's on the left,
2093 set PADDING_CORRECTION to the number of bits to skip.
2095 In most ABIs, the structure will be returned at the least significant end of
2096 the register, which translates to right padding on little-endian
2097 targets and left padding on big-endian targets. The opposite
2098 holds if the structure is returned at the most significant
2099 end of the register. */
2100 if (bytes % UNITS_PER_WORD != 0
2101 && (targetm.calls.return_in_msb (type)
2102 ? !BYTES_BIG_ENDIAN
2103 : BYTES_BIG_ENDIAN))
2104 padding_correction
2105 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2107 /* Copy the structure BITSIZE bits at a time.
2109 We could probably emit more efficient code for machines which do not use
2110 strict alignment, but it doesn't seem worth the effort at the current
2111 time. */
2112 for (bitpos = 0, xbitpos = padding_correction;
2113 bitpos < bytes * BITS_PER_UNIT;
2114 bitpos += bitsize, xbitpos += bitsize)
2116 /* We need a new source operand each time xbitpos is on a
2117 word boundary and when xbitpos == padding_correction
2118 (the first time through). */
2119 if (xbitpos % BITS_PER_WORD == 0
2120 || xbitpos == padding_correction)
2121 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2122 GET_MODE (srcreg));
2124 /* We need a new destination operand each time bitpos is on
2125 a word boundary. */
2126 if (bitpos % BITS_PER_WORD == 0)
2127 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2129 /* Use xbitpos for the source extraction (right justified) and
2130 bitpos for the destination store (left justified). */
2131 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2132 extract_bit_field (src, bitsize,
2133 xbitpos % BITS_PER_WORD, 1,
2134 NULL_RTX, word_mode, word_mode));
2137 return tgtblk;
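/* Illustrative sketch (hypothetical names): a typical use is copying a
   BLKmode function result out of the return register into memory:

     rtx blk = copy_blkmode_from_reg (NULL_RTX, hard_return_reg, return_type);

   Passing NULL_RTX for TGTBLK lets the routine allocate the stack temporary
   itself, as described above.  */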
2140 /* Add a USE expression for REG to the (possibly empty) list pointed
2141 to by CALL_FUSAGE. REG must denote a hard register. */
2143 void
2144 use_reg (rtx *call_fusage, rtx reg)
2146 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2148 *call_fusage
2149 = gen_rtx_EXPR_LIST (VOIDmode,
2150 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2153 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2154 starting at REGNO. All of these registers must be hard registers. */
2156 void
2157 use_regs (rtx *call_fusage, int regno, int nregs)
2159 int i;
2161 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2163 for (i = 0; i < nregs; i++)
2164 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2167 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2168 PARALLEL REGS. This is for calls that pass values in multiple
2169 non-contiguous locations. The Irix 6 ABI has examples of this. */
2171 void
2172 use_group_regs (rtx *call_fusage, rtx regs)
2174 int i;
2176 for (i = 0; i < XVECLEN (regs, 0); i++)
2178 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2180 /* A NULL entry means the parameter goes both on the stack and in
2181 registers. This can also be a MEM for targets that pass values
2182 partially on the stack and partially in registers. */
2183 if (reg != 0 && REG_P (reg))
2184 use_reg (call_fusage, reg);
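/* Illustrative sketch (hypothetical names): call expansion collects USEs of
   the registers an argument occupies and later attaches the list to the
   call insn:

     rtx call_fusage = NULL_RTX;

     if (GET_CODE (arg_reg) == PARALLEL)
       use_group_regs (&call_fusage, arg_reg);
     else if (REG_P (arg_reg))
       use_reg (&call_fusage, arg_reg);
*/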
2189 /* Determine whether the LEN bytes generated by CONSTFUN can be
2190 stored to memory using several move instructions. CONSTFUNDATA is
2191 a pointer which will be passed as argument in every CONSTFUN call.
2192 ALIGN is maximum alignment we can assume. Return nonzero if a
2193 call to store_by_pieces should succeed. */
2196 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2197 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2198 void *constfundata, unsigned int align)
2200 unsigned HOST_WIDE_INT l;
2201 unsigned int max_size;
2202 HOST_WIDE_INT offset = 0;
2203 enum machine_mode mode, tmode;
2204 enum insn_code icode;
2205 int reverse;
2206 rtx cst;
2208 if (len == 0)
2209 return 1;
2211 if (! STORE_BY_PIECES_P (len, align))
2212 return 0;
2214 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2215 if (align >= GET_MODE_ALIGNMENT (tmode))
2216 align = GET_MODE_ALIGNMENT (tmode);
2217 else
2219 enum machine_mode xmode;
2221 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2222 tmode != VOIDmode;
2223 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2224 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2225 || SLOW_UNALIGNED_ACCESS (tmode, align))
2226 break;
2228 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2231 /* We would first store what we can in the largest integer mode, then go to
2232 successively smaller modes. */
2234 for (reverse = 0;
2235 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2236 reverse++)
2238 l = len;
2239 mode = VOIDmode;
2240 max_size = STORE_MAX_PIECES + 1;
2241 while (max_size > 1)
2243 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2244 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2245 if (GET_MODE_SIZE (tmode) < max_size)
2246 mode = tmode;
2248 if (mode == VOIDmode)
2249 break;
2251 icode = mov_optab->handlers[(int) mode].insn_code;
2252 if (icode != CODE_FOR_nothing
2253 && align >= GET_MODE_ALIGNMENT (mode))
2255 unsigned int size = GET_MODE_SIZE (mode);
2257 while (l >= size)
2259 if (reverse)
2260 offset -= size;
2262 cst = (*constfun) (constfundata, offset, mode);
2263 if (!LEGITIMATE_CONSTANT_P (cst))
2264 return 0;
2266 if (!reverse)
2267 offset += size;
2269 l -= size;
2273 max_size = GET_MODE_SIZE (mode);
2276 /* The code above should have handled everything. */
2277 gcc_assert (!l);
2280 return 1;
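/* Illustrative sketch, not part of the original source: a CONSTFUN callback
   is handed CONSTFUNDATA, the byte OFFSET being stored and the MODE to
   produce, and must return a constant rtx of that mode.  A hypothetical
   callback that replicates a single byte value might look like:

     static rtx
     repeat_byte_1 (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                    enum machine_mode mode)
     {
       unsigned HOST_WIDE_INT byte = *(unsigned char *) data;
       unsigned HOST_WIDE_INT val = 0;
       unsigned int i;

       for (i = 0; i < GET_MODE_SIZE (mode); i++)
         val = (val << BITS_PER_UNIT) | byte;
       return gen_int_mode (val, mode);
     }
*/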
2283 /* Generate several move instructions to store LEN bytes generated by
2284 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2285 pointer which will be passed as argument in every CONSTFUN call.
2286 ALIGN is maximum alignment we can assume.
2287 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2288 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2289 stpcpy. */
2292 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2293 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2294 void *constfundata, unsigned int align, int endp)
2296 struct store_by_pieces data;
2298 if (len == 0)
2300 gcc_assert (endp != 2);
2301 return to;
2304 gcc_assert (STORE_BY_PIECES_P (len, align));
2305 data.constfun = constfun;
2306 data.constfundata = constfundata;
2307 data.len = len;
2308 data.to = to;
2309 store_by_pieces_1 (&data, align);
2310 if (endp)
2312 rtx to1;
2314 gcc_assert (!data.reverse);
2315 if (data.autinc_to)
2317 if (endp == 2)
2319 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2320 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2321 else
2322 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2323 -1));
2325 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2326 data.offset);
2328 else
2330 if (endp == 2)
2331 --data.offset;
2332 to1 = adjust_address (data.to, QImode, data.offset);
2334 return to1;
2336 else
2337 return data.to;
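/* Illustrative sketch (using the hypothetical repeat_byte_1 callback shown
   after can_store_by_pieces): callers are expected to check
   can_store_by_pieces first and fall back to another strategy otherwise:

     if (can_store_by_pieces (len, repeat_byte_1, &byte_val, align))
       dest_mem = store_by_pieces (dest_mem, len, repeat_byte_1, &byte_val,
                                   align, 0);
     else
       ... use a setmem pattern or a libcall instead ...
*/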
2340 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2341 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2343 static void
2344 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2346 struct store_by_pieces data;
2348 if (len == 0)
2349 return;
2351 data.constfun = clear_by_pieces_1;
2352 data.constfundata = NULL;
2353 data.len = len;
2354 data.to = to;
2355 store_by_pieces_1 (&data, align);
2358 /* Callback routine for clear_by_pieces.
2359 Return const0_rtx unconditionally. */
2361 static rtx
2362 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2363 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2364 enum machine_mode mode ATTRIBUTE_UNUSED)
2366 return const0_rtx;
2369 /* Subroutine of clear_by_pieces and store_by_pieces.
2370 Generate several move instructions to store LEN bytes of block TO. (A MEM
2371 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2373 static void
2374 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2375 unsigned int align ATTRIBUTE_UNUSED)
2377 rtx to_addr = XEXP (data->to, 0);
2378 unsigned int max_size = STORE_MAX_PIECES + 1;
2379 enum machine_mode mode = VOIDmode, tmode;
2380 enum insn_code icode;
2382 data->offset = 0;
2383 data->to_addr = to_addr;
2384 data->autinc_to
2385 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2386 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2388 data->explicit_inc_to = 0;
2389 data->reverse
2390 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2391 if (data->reverse)
2392 data->offset = data->len;
2394 /* If storing requires more than two move insns,
2395 copy addresses to registers (to make displacements shorter)
2396 and use post-increment if available. */
2397 if (!data->autinc_to
2398 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2400 /* Determine the main mode we'll be using. */
2401 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2402 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2403 if (GET_MODE_SIZE (tmode) < max_size)
2404 mode = tmode;
2406 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2408 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2409 data->autinc_to = 1;
2410 data->explicit_inc_to = -1;
2413 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2414 && ! data->autinc_to)
2416 data->to_addr = copy_addr_to_reg (to_addr);
2417 data->autinc_to = 1;
2418 data->explicit_inc_to = 1;
2421 if ( !data->autinc_to && CONSTANT_P (to_addr))
2422 data->to_addr = copy_addr_to_reg (to_addr);
2425 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2426 if (align >= GET_MODE_ALIGNMENT (tmode))
2427 align = GET_MODE_ALIGNMENT (tmode);
2428 else
2430 enum machine_mode xmode;
2432 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2433 tmode != VOIDmode;
2434 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2435 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2436 || SLOW_UNALIGNED_ACCESS (tmode, align))
2437 break;
2439 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2442 /* First store what we can in the largest integer mode, then go to
2443 successively smaller modes. */
2445 while (max_size > 1)
2447 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2448 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2449 if (GET_MODE_SIZE (tmode) < max_size)
2450 mode = tmode;
2452 if (mode == VOIDmode)
2453 break;
2455 icode = mov_optab->handlers[(int) mode].insn_code;
2456 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2457 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2459 max_size = GET_MODE_SIZE (mode);
2462 /* The code above should have handled everything. */
2463 gcc_assert (!data->len);
2466 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2467 with move instructions for mode MODE. GENFUN is the gen_... function
2468 to make a move insn for that mode. DATA has all the other info. */
2470 static void
2471 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2472 struct store_by_pieces *data)
2474 unsigned int size = GET_MODE_SIZE (mode);
2475 rtx to1, cst;
2477 while (data->len >= size)
2479 if (data->reverse)
2480 data->offset -= size;
2482 if (data->autinc_to)
2483 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2484 data->offset);
2485 else
2486 to1 = adjust_address (data->to, mode, data->offset);
2488 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2489 emit_insn (gen_add2_insn (data->to_addr,
2490 GEN_INT (-(HOST_WIDE_INT) size)));
2492 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2493 emit_insn ((*genfun) (to1, cst));
2495 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2496 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2498 if (! data->reverse)
2499 data->offset += size;
2501 data->len -= size;
2505 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2506 its length in bytes. */
2509 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2510 unsigned int expected_align, HOST_WIDE_INT expected_size)
2512 enum machine_mode mode = GET_MODE (object);
2513 unsigned int align;
2515 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2517 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2518 just move a zero. Otherwise, do this a piece at a time. */
2519 if (mode != BLKmode
2520 && GET_CODE (size) == CONST_INT
2521 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2523 rtx zero = CONST0_RTX (mode);
2524 if (zero != NULL)
2526 emit_move_insn (object, zero);
2527 return NULL;
2530 if (COMPLEX_MODE_P (mode))
2532 zero = CONST0_RTX (GET_MODE_INNER (mode));
2533 if (zero != NULL)
2535 write_complex_part (object, zero, 0);
2536 write_complex_part (object, zero, 1);
2537 return NULL;
2542 if (size == const0_rtx)
2543 return NULL;
2545 align = MEM_ALIGN (object);
2547 if (GET_CODE (size) == CONST_INT
2548 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2549 clear_by_pieces (object, INTVAL (size), align);
2550 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2551 expected_align, expected_size))
2553 else
2554 return set_storage_via_libcall (object, size, const0_rtx,
2555 method == BLOCK_OP_TAILCALL);
2557 return NULL;
2561 clear_storage (rtx object, rtx size, enum block_op_methods method)
2563 return clear_storage_hints (object, size, method, 0, -1);
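/* Illustrative sketch (hypothetical names): zeroing an aggregate held in
   memory, letting clear_storage pick between clear_by_pieces, a setmem
   pattern and the memset libcall:

     clear_storage (target_mem, GEN_INT (int_size_in_bytes (type)),
                    BLOCK_OP_NORMAL);
*/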
2567 /* A subroutine of clear_storage. Expand a call to memset.
2568 Return the return value of memset, 0 otherwise. */
2571 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2573 tree call_expr, fn, object_tree, size_tree, val_tree;
2574 enum machine_mode size_mode;
2575 rtx retval;
2577 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2578 place those pseudos into a VAR_DECL and use them later. */
2580 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2582 size_mode = TYPE_MODE (sizetype);
2583 size = convert_to_mode (size_mode, size, 1);
2584 size = copy_to_mode_reg (size_mode, size);
2586 /* It is incorrect to use the libcall calling conventions to call
2587 memset in this context. This could be a user call to memset and
2588 the user may wish to examine the return value from memset. For
2589 targets where libcalls and normal calls have different conventions
2590 for returning pointers, we could end up generating incorrect code. */
2592 object_tree = make_tree (ptr_type_node, object);
2593 if (GET_CODE (val) != CONST_INT)
2594 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2595 size_tree = make_tree (sizetype, size);
2596 val_tree = make_tree (integer_type_node, val);
2598 fn = clear_storage_libcall_fn (true);
2599 call_expr = build_call_expr (fn, 3,
2600 object_tree, val_tree, size_tree);
2601 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2603 retval = expand_normal (call_expr);
2605 return retval;
2608 /* A subroutine of set_storage_via_libcall. Create the tree node
2609 for the function we use for block clears. The first time FOR_CALL
2610 is true, we call assemble_external. */
2612 static GTY(()) tree block_clear_fn;
2614 void
2615 init_block_clear_fn (const char *asmspec)
2617 if (!block_clear_fn)
2619 tree fn, args;
2621 fn = get_identifier ("memset");
2622 args = build_function_type_list (ptr_type_node, ptr_type_node,
2623 integer_type_node, sizetype,
2624 NULL_TREE);
2626 fn = build_decl (FUNCTION_DECL, fn, args);
2627 DECL_EXTERNAL (fn) = 1;
2628 TREE_PUBLIC (fn) = 1;
2629 DECL_ARTIFICIAL (fn) = 1;
2630 TREE_NOTHROW (fn) = 1;
2631 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2632 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2634 block_clear_fn = fn;
2637 if (asmspec)
2638 set_user_assembler_name (block_clear_fn, asmspec);
2641 static tree
2642 clear_storage_libcall_fn (int for_call)
2644 static bool emitted_extern;
2646 if (!block_clear_fn)
2647 init_block_clear_fn (NULL);
2649 if (for_call && !emitted_extern)
2651 emitted_extern = true;
2652 make_decl_rtl (block_clear_fn);
2653 assemble_external (block_clear_fn);
2656 return block_clear_fn;
2659 /* Expand a setmem pattern; return true if successful. */
2661 bool
2662 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2663 unsigned int expected_align, HOST_WIDE_INT expected_size)
2665 /* Try the most limited insn first, because there's no point
2666 including more than one in the machine description unless
2667 the more limited one has some advantage. */
2669 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2670 enum machine_mode mode;
2672 if (expected_align < align)
2673 expected_align = align;
2675 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2676 mode = GET_MODE_WIDER_MODE (mode))
2678 enum insn_code code = setmem_optab[(int) mode];
2679 insn_operand_predicate_fn pred;
2681 if (code != CODE_FOR_nothing
2682 /* We don't need MODE to be narrower than
2683 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2684 the mode mask, as it is returned by the macro, it will
2685 definitely be less than the actual mode mask. */
2686 && ((GET_CODE (size) == CONST_INT
2687 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2688 <= (GET_MODE_MASK (mode) >> 1)))
2689 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2690 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2691 || (*pred) (object, BLKmode))
2692 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2693 || (*pred) (opalign, VOIDmode)))
2695 rtx opsize, opchar;
2696 enum machine_mode char_mode;
2697 rtx last = get_last_insn ();
2698 rtx pat;
2700 opsize = convert_to_mode (mode, size, 1);
2701 pred = insn_data[(int) code].operand[1].predicate;
2702 if (pred != 0 && ! (*pred) (opsize, mode))
2703 opsize = copy_to_mode_reg (mode, opsize);
2705 opchar = val;
2706 char_mode = insn_data[(int) code].operand[2].mode;
2707 if (char_mode != VOIDmode)
2709 opchar = convert_to_mode (char_mode, opchar, 1);
2710 pred = insn_data[(int) code].operand[2].predicate;
2711 if (pred != 0 && ! (*pred) (opchar, char_mode))
2712 opchar = copy_to_mode_reg (char_mode, opchar);
2715 if (insn_data[(int) code].n_operands == 4)
2716 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2717 else
2718 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2719 GEN_INT (expected_align),
2720 GEN_INT (expected_size));
2721 if (pat)
2723 emit_insn (pat);
2724 return true;
2726 else
2727 delete_insns_since (last);
2731 return false;
2735 /* Write to one of the components of the complex value CPLX. Write VAL to
2736 the real part if IMAG_P is false, and the imaginary part if it's true. */
2738 static void
2739 write_complex_part (rtx cplx, rtx val, bool imag_p)
2741 enum machine_mode cmode;
2742 enum machine_mode imode;
2743 unsigned ibitsize;
2745 if (GET_CODE (cplx) == CONCAT)
2747 emit_move_insn (XEXP (cplx, imag_p), val);
2748 return;
2751 cmode = GET_MODE (cplx);
2752 imode = GET_MODE_INNER (cmode);
2753 ibitsize = GET_MODE_BITSIZE (imode);
2755 /* For MEMs simplify_gen_subreg may generate an invalid new address
2756 because, e.g., the original address is considered mode-dependent
2757 by the target, which restricts simplify_subreg from invoking
2758 adjust_address_nv. Instead of preparing fallback support for an
2759 invalid address, we call adjust_address_nv directly. */
2760 if (MEM_P (cplx))
2762 emit_move_insn (adjust_address_nv (cplx, imode,
2763 imag_p ? GET_MODE_SIZE (imode) : 0),
2764 val);
2765 return;
2768 /* If the sub-object is at least word sized, then we know that subregging
2769 will work. This special case is important, since store_bit_field
2770 wants to operate on integer modes, and there's rarely an OImode to
2771 correspond to TCmode. */
2772 if (ibitsize >= BITS_PER_WORD
2773 /* For hard regs we have exact predicates. Assume we can split
2774 the original object if it spans an even number of hard regs.
2775 This special case is important for SCmode on 64-bit platforms
2776 where the natural size of floating-point regs is 32-bit. */
2777 || (REG_P (cplx)
2778 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2779 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2781 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2782 imag_p ? GET_MODE_SIZE (imode) : 0);
2783 if (part)
2785 emit_move_insn (part, val);
2786 return;
2788 else
2789 /* simplify_gen_subreg may fail for sub-word MEMs. */
2790 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2793 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2796 /* Extract one of the components of the complex value CPLX. Extract the
2797 real part if IMAG_P is false, and the imaginary part if it's true. */
2799 static rtx
2800 read_complex_part (rtx cplx, bool imag_p)
2802 enum machine_mode cmode, imode;
2803 unsigned ibitsize;
2805 if (GET_CODE (cplx) == CONCAT)
2806 return XEXP (cplx, imag_p);
2808 cmode = GET_MODE (cplx);
2809 imode = GET_MODE_INNER (cmode);
2810 ibitsize = GET_MODE_BITSIZE (imode);
2812 /* Special case reads from complex constants that got spilled to memory. */
2813 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2815 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2816 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2818 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2819 if (CONSTANT_CLASS_P (part))
2820 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2824 /* For MEMs simplify_gen_subreg may generate an invalid new address
2825 because, e.g., the original address is considered mode-dependent
2826 by the target, which restricts simplify_subreg from invoking
2827 adjust_address_nv. Instead of preparing fallback support for an
2828 invalid address, we call adjust_address_nv directly. */
2829 if (MEM_P (cplx))
2830 return adjust_address_nv (cplx, imode,
2831 imag_p ? GET_MODE_SIZE (imode) : 0);
2833 /* If the sub-object is at least word sized, then we know that subregging
2834 will work. This special case is important, since extract_bit_field
2835 wants to operate on integer modes, and there's rarely an OImode to
2836 correspond to TCmode. */
2837 if (ibitsize >= BITS_PER_WORD
2838 /* For hard regs we have exact predicates. Assume we can split
2839 the original object if it spans an even number of hard regs.
2840 This special case is important for SCmode on 64-bit platforms
2841 where the natural size of floating-point regs is 32-bit. */
2842 || (REG_P (cplx)
2843 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2844 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2846 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2847 imag_p ? GET_MODE_SIZE (imode) : 0);
2848 if (ret)
2849 return ret;
2850 else
2851 /* simplify_gen_subreg may fail for sub-word MEMs. */
2852 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2855 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2856 true, NULL_RTX, imode, imode);
2859 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2860 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2861 represented in NEW_MODE. If FORCE is true, this will never happen, as
2862 we'll force-create a SUBREG if needed. */
2864 static rtx
2865 emit_move_change_mode (enum machine_mode new_mode,
2866 enum machine_mode old_mode, rtx x, bool force)
2868 rtx ret;
2870 if (push_operand (x, GET_MODE (x)))
2872 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2873 MEM_COPY_ATTRIBUTES (ret, x);
2875 else if (MEM_P (x))
2877 /* We don't have to worry about changing the address since the
2878 size in bytes is supposed to be the same. */
2879 if (reload_in_progress)
2881 /* Copy the MEM to change the mode and move any
2882 substitutions from the old MEM to the new one. */
2883 ret = adjust_address_nv (x, new_mode, 0);
2884 copy_replacements (x, ret);
2886 else
2887 ret = adjust_address (x, new_mode, 0);
2889 else
2891 /* Note that we do want simplify_subreg's behavior of validating
2892 that the new mode is ok for a hard register. If we were to use
2893 simplify_gen_subreg, we would create the subreg, but would
2894 probably run into the target not being able to implement it. */
2895 /* Except, of course, when FORCE is true, when this is exactly what
2896 we want. Which is needed for CCmodes on some targets. */
2897 if (force)
2898 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2899 else
2900 ret = simplify_subreg (new_mode, x, old_mode, 0);
2903 return ret;
2906 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2907 an integer mode of the same size as MODE. Returns the instruction
2908 emitted, or NULL if such a move could not be generated. */
2910 static rtx
2911 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2913 enum machine_mode imode;
2914 enum insn_code code;
2916 /* There must exist a mode of the exact size we require. */
2917 imode = int_mode_for_mode (mode);
2918 if (imode == BLKmode)
2919 return NULL_RTX;
2921 /* The target must support moves in this mode. */
2922 code = mov_optab->handlers[imode].insn_code;
2923 if (code == CODE_FOR_nothing)
2924 return NULL_RTX;
2926 x = emit_move_change_mode (imode, mode, x, force);
2927 if (x == NULL_RTX)
2928 return NULL_RTX;
2929 y = emit_move_change_mode (imode, mode, y, force);
2930 if (y == NULL_RTX)
2931 return NULL_RTX;
2932 return emit_insn (GEN_FCN (code) (x, y));
2935 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2936 Return an equivalent MEM that does not use an auto-increment. */
2938 static rtx
2939 emit_move_resolve_push (enum machine_mode mode, rtx x)
2941 enum rtx_code code = GET_CODE (XEXP (x, 0));
2942 HOST_WIDE_INT adjust;
2943 rtx temp;
2945 adjust = GET_MODE_SIZE (mode);
2946 #ifdef PUSH_ROUNDING
2947 adjust = PUSH_ROUNDING (adjust);
2948 #endif
2949 if (code == PRE_DEC || code == POST_DEC)
2950 adjust = -adjust;
2951 else if (code == PRE_MODIFY || code == POST_MODIFY)
2953 rtx expr = XEXP (XEXP (x, 0), 1);
2954 HOST_WIDE_INT val;
2956 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2957 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2958 val = INTVAL (XEXP (expr, 1));
2959 if (GET_CODE (expr) == MINUS)
2960 val = -val;
2961 gcc_assert (adjust == val || adjust == -val);
2962 adjust = val;
2965 /* Do not use anti_adjust_stack, since we don't want to update
2966 stack_pointer_delta. */
2967 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2968 GEN_INT (adjust), stack_pointer_rtx,
2969 0, OPTAB_LIB_WIDEN);
2970 if (temp != stack_pointer_rtx)
2971 emit_move_insn (stack_pointer_rtx, temp);
2973 switch (code)
2975 case PRE_INC:
2976 case PRE_DEC:
2977 case PRE_MODIFY:
2978 temp = stack_pointer_rtx;
2979 break;
2980 case POST_INC:
2981 case POST_DEC:
2982 case POST_MODIFY:
2983 temp = plus_constant (stack_pointer_rtx, -adjust);
2984 break;
2985 default:
2986 gcc_unreachable ();
2989 return replace_equiv_address (x, temp);
2992 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2993 X is known to satisfy push_operand, and MODE is known to be complex.
2994 Returns the last instruction emitted. */
2997 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2999 enum machine_mode submode = GET_MODE_INNER (mode);
3000 bool imag_first;
3002 #ifdef PUSH_ROUNDING
3003 unsigned int submodesize = GET_MODE_SIZE (submode);
3005 /* In case we output to the stack, but the size is smaller than what the
3006 machine can push exactly, we need to use move instructions. */
3007 if (PUSH_ROUNDING (submodesize) != submodesize)
3009 x = emit_move_resolve_push (mode, x);
3010 return emit_move_insn (x, y);
3012 #endif
3014 /* Note that the real part always precedes the imag part in memory
3015 regardless of machine's endianness. */
3016 switch (GET_CODE (XEXP (x, 0)))
3018 case PRE_DEC:
3019 case POST_DEC:
3020 imag_first = true;
3021 break;
3022 case PRE_INC:
3023 case POST_INC:
3024 imag_first = false;
3025 break;
3026 default:
3027 gcc_unreachable ();
3030 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3031 read_complex_part (y, imag_first));
3032 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3033 read_complex_part (y, !imag_first));
3036 /* A subroutine of emit_move_complex. Perform the move from Y to X
3037 via two moves of the parts. Returns the last instruction emitted. */
3040 emit_move_complex_parts (rtx x, rtx y)
3042 /* Show the output dies here. This is necessary for SUBREGs
3043 of pseudos since we cannot track their lifetimes correctly;
3044 hard regs shouldn't appear here except as return values. */
3045 if (!reload_completed && !reload_in_progress
3046 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3047 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3049 write_complex_part (x, read_complex_part (y, false), false);
3050 write_complex_part (x, read_complex_part (y, true), true);
3052 return get_last_insn ();
3055 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3056 MODE is known to be complex. Returns the last instruction emitted. */
3058 static rtx
3059 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3061 bool try_int;
3063 /* Need to take special care for pushes, to maintain proper ordering
3064 of the data, and possibly extra padding. */
3065 if (push_operand (x, mode))
3066 return emit_move_complex_push (mode, x, y);
3068 /* See if we can coerce the target into moving both values at once. */
3070 /* Move floating point as parts. */
3071 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3072 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
3073 try_int = false;
3074 /* Not possible if the values are inherently not adjacent. */
3075 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3076 try_int = false;
3077 /* Is possible if both are registers (or subregs of registers). */
3078 else if (register_operand (x, mode) && register_operand (y, mode))
3079 try_int = true;
3080 /* If one of the operands is a memory, and alignment constraints
3081 are friendly enough, we may be able to do combined memory operations.
3082 We do not attempt this if Y is a constant because that combination is
3083 usually handled better by the by-parts code below. */
3084 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3085 && (!STRICT_ALIGNMENT
3086 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3087 try_int = true;
3088 else
3089 try_int = false;
3091 if (try_int)
3093 rtx ret;
3095 /* For memory to memory moves, optimal behavior can be had with the
3096 existing block move logic. */
3097 if (MEM_P (x) && MEM_P (y))
3099 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3100 BLOCK_OP_NO_LIBCALL);
3101 return get_last_insn ();
3104 ret = emit_move_via_integer (mode, x, y, true);
3105 if (ret)
3106 return ret;
3109 return emit_move_complex_parts (x, y);
3112 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3113 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3115 static rtx
3116 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3118 rtx ret;
3120 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3121 if (mode != CCmode)
3123 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3124 if (code != CODE_FOR_nothing)
3126 x = emit_move_change_mode (CCmode, mode, x, true);
3127 y = emit_move_change_mode (CCmode, mode, y, true);
3128 return emit_insn (GEN_FCN (code) (x, y));
3132 /* Otherwise, find the MODE_INT mode of the same width. */
3133 ret = emit_move_via_integer (mode, x, y, false);
3134 gcc_assert (ret != NULL);
3135 return ret;
3138 /* Return true if word I of OP lies entirely in the
3139 undefined bits of a paradoxical subreg. */
3141 static bool
3142 undefined_operand_subword_p (rtx op, int i)
3144 enum machine_mode innermode, innermostmode;
3145 int offset;
3146 if (GET_CODE (op) != SUBREG)
3147 return false;
3148 innermode = GET_MODE (op);
3149 innermostmode = GET_MODE (SUBREG_REG (op));
3150 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3151 /* The SUBREG_BYTE represents offset, as if the value were stored in
3152 memory, except for a paradoxical subreg where we define
3153 SUBREG_BYTE to be 0; undo this exception as in
3154 simplify_subreg. */
3155 if (SUBREG_BYTE (op) == 0
3156 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3158 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3159 if (WORDS_BIG_ENDIAN)
3160 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3161 if (BYTES_BIG_ENDIAN)
3162 offset += difference % UNITS_PER_WORD;
3164 if (offset >= GET_MODE_SIZE (innermostmode)
3165 || offset <= -GET_MODE_SIZE (word_mode))
3166 return true;
3167 return false;
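/* For example (illustration only): on a 32-bit little-endian target,
   (subreg:DI (reg:SI x) 0) is paradoxical; word 0 overlaps the SImode
   register while word 1 lies entirely in the undefined bits, so this
   function returns false for I == 0 and true for I == 1.  */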
3170 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3171 MODE is any multi-word or full-word mode that lacks a move_insn
3172 pattern. Note that you will get better code if you define such
3173 patterns, even if they must turn into multiple assembler instructions. */
3175 static rtx
3176 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3178 rtx last_insn = 0;
3179 rtx seq, inner;
3180 bool need_clobber;
3181 int i;
3183 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3185 /* If X is a push on the stack, do the push now and replace
3186 X with a reference to the stack pointer. */
3187 if (push_operand (x, mode))
3188 x = emit_move_resolve_push (mode, x);
3190 /* If we are in reload, see if either operand is a MEM whose address
3191 is scheduled for replacement. */
3192 if (reload_in_progress && MEM_P (x)
3193 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3194 x = replace_equiv_address_nv (x, inner);
3195 if (reload_in_progress && MEM_P (y)
3196 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3197 y = replace_equiv_address_nv (y, inner);
3199 start_sequence ();
3201 need_clobber = false;
3202 for (i = 0;
3203 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3204 i++)
3206 rtx xpart = operand_subword (x, i, 1, mode);
3207 rtx ypart;
3209 /* Do not generate code for a move if it would come entirely
3210 from the undefined bits of a paradoxical subreg. */
3211 if (undefined_operand_subword_p (y, i))
3212 continue;
3214 ypart = operand_subword (y, i, 1, mode);
3216 /* If we can't get a part of Y, put Y into memory if it is a
3217 constant. Otherwise, force it into a register. Then we must
3218 be able to get a part of Y. */
3219 if (ypart == 0 && CONSTANT_P (y))
3221 y = use_anchored_address (force_const_mem (mode, y));
3222 ypart = operand_subword (y, i, 1, mode);
3224 else if (ypart == 0)
3225 ypart = operand_subword_force (y, i, mode);
3227 gcc_assert (xpart && ypart);
3229 need_clobber |= (GET_CODE (xpart) == SUBREG);
3231 last_insn = emit_move_insn (xpart, ypart);
3234 seq = get_insns ();
3235 end_sequence ();
3237 /* Show the output dies here. This is necessary for SUBREGs
3238 of pseudos since we cannot track their lifetimes correctly;
3239 hard regs shouldn't appear here except as return values.
3240 We never want to emit such a clobber after reload. */
3241 if (x != y
3242 && ! (reload_in_progress || reload_completed)
3243 && need_clobber != 0)
3244 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3246 emit_insn (seq);
3248 return last_insn;
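/* For example (illustration only): on a 32-bit target without a movti
   pattern, a TImode copy between pseudos is emitted here as four word_mode
   moves, preceded by a CLOBBER of the destination when some piece is a
   SUBREG, as explained above.  */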
3251 /* Low level part of emit_move_insn.
3252 Called just like emit_move_insn, but assumes X and Y
3253 are basically valid. */
3256 emit_move_insn_1 (rtx x, rtx y)
3258 enum machine_mode mode = GET_MODE (x);
3259 enum insn_code code;
3261 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3263 code = mov_optab->handlers[mode].insn_code;
3264 if (code != CODE_FOR_nothing)
3265 return emit_insn (GEN_FCN (code) (x, y));
3267 /* Expand complex moves by moving real part and imag part. */
3268 if (COMPLEX_MODE_P (mode))
3269 return emit_move_complex (mode, x, y);
3271 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3273 rtx result = emit_move_via_integer (mode, x, y, true);
3275 /* If we can't find an integer mode, use multi words. */
3276 if (result)
3277 return result;
3278 else
3279 return emit_move_multi_word (mode, x, y);
3282 if (GET_MODE_CLASS (mode) == MODE_CC)
3283 return emit_move_ccmode (mode, x, y);
3285 /* Try using a move pattern for the corresponding integer mode. This is
3286 only safe when simplify_subreg can convert MODE constants into integer
3287 constants. At present, it can only do this reliably if the value
3288 fits within a HOST_WIDE_INT. */
3289 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3291 rtx ret = emit_move_via_integer (mode, x, y, false);
3292 if (ret)
3293 return ret;
3296 return emit_move_multi_word (mode, x, y);
3299 /* Generate code to copy Y into X.
3300 Both Y and X must have the same mode, except that
3301 Y can be a constant with VOIDmode.
3302 This mode cannot be BLKmode; use emit_block_move for that.
3304 Return the last instruction emitted. */
3307 emit_move_insn (rtx x, rtx y)
3309 enum machine_mode mode = GET_MODE (x);
3310 rtx y_cst = NULL_RTX;
3311 rtx last_insn, set;
3313 gcc_assert (mode != BLKmode
3314 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3316 if (CONSTANT_P (y))
3318 if (optimize
3319 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3320 && (last_insn = compress_float_constant (x, y)))
3321 return last_insn;
3323 y_cst = y;
3325 if (!LEGITIMATE_CONSTANT_P (y))
3327 y = force_const_mem (mode, y);
3329 /* If the target's cannot_force_const_mem prevented the spill,
3330 assume that the target's move expanders will also take care
3331 of the non-legitimate constant. */
3332 if (!y)
3333 y = y_cst;
3334 else
3335 y = use_anchored_address (y);
3339 /* If X or Y are memory references, verify that their addresses are valid
3340 for the machine. */
3341 if (MEM_P (x)
3342 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3343 && ! push_operand (x, GET_MODE (x)))
3344 || (flag_force_addr
3345 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3346 x = validize_mem (x);
3348 if (MEM_P (y)
3349 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3350 || (flag_force_addr
3351 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3352 y = validize_mem (y);
3354 gcc_assert (mode != BLKmode);
3356 last_insn = emit_move_insn_1 (x, y);
3358 if (y_cst && REG_P (x)
3359 && (set = single_set (last_insn)) != NULL_RTX
3360 && SET_DEST (set) == x
3361 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3362 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3364 return last_insn;
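/* Illustrative sketch (hypothetical names): the canonical way to copy one
   value into another of the same mode:

     rtx tmp = gen_reg_rtx (SImode);
     emit_move_insn (tmp, src_val);

   Constants that are not LEGITIMATE_CONSTANT_P are spilled to the constant
   pool (or left to the target's move expander) by the code above.  */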
3367 /* If Y is representable exactly in a narrower mode, and the target can
3368 perform the extension directly from constant or memory, then emit the
3369 move as an extension. */
3371 static rtx
3372 compress_float_constant (rtx x, rtx y)
3374 enum machine_mode dstmode = GET_MODE (x);
3375 enum machine_mode orig_srcmode = GET_MODE (y);
3376 enum machine_mode srcmode;
3377 REAL_VALUE_TYPE r;
3378 int oldcost, newcost;
3380 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3382 if (LEGITIMATE_CONSTANT_P (y))
3383 oldcost = rtx_cost (y, SET);
3384 else
3385 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3387 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3388 srcmode != orig_srcmode;
3389 srcmode = GET_MODE_WIDER_MODE (srcmode))
3391 enum insn_code ic;
3392 rtx trunc_y, last_insn;
3394 /* Skip if the target can't extend this way. */
3395 ic = can_extend_p (dstmode, srcmode, 0);
3396 if (ic == CODE_FOR_nothing)
3397 continue;
3399 /* Skip if the narrowed value isn't exact. */
3400 if (! exact_real_truncate (srcmode, &r))
3401 continue;
3403 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3405 if (LEGITIMATE_CONSTANT_P (trunc_y))
3407 /* Skip if the target needs extra instructions to perform
3408 the extension. */
3409 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3410 continue;
3411 /* This is valid, but may not be cheaper than the original. */
3412 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3413 if (oldcost < newcost)
3414 continue;
3416 else if (float_extend_from_mem[dstmode][srcmode])
3418 trunc_y = force_const_mem (srcmode, trunc_y);
3419 /* This is valid, but may not be cheaper than the original. */
3420 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3421 if (oldcost < newcost)
3422 continue;
3423 trunc_y = validize_mem (trunc_y);
3425 else
3426 continue;
3428 /* For CSE's benefit, force the compressed constant pool entry
3429 into a new pseudo. This constant may be used in different modes,
3430 and if not, combine will put things back together for us. */
3431 trunc_y = force_reg (srcmode, trunc_y);
3432 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3433 last_insn = get_last_insn ();
3435 if (REG_P (x))
3436 set_unique_reg_note (last_insn, REG_EQUAL, y);
3438 return last_insn;
3441 return NULL_RTX;
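/* For example (illustration only): when moving the DFmode constant 1.0 into
   a register on a target that can extend directly from an SFmode constant
   or memory, the loop above may emit the move as a float_extend of the
   SFmode value 1.0, since the truncation is exact and the narrower
   constant-pool entry is usually cheaper.  */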
3444 /* Pushing data onto the stack. */
3446 /* Push a block of length SIZE (perhaps variable)
3447 and return an rtx to address the beginning of the block.
3448 The value may be virtual_outgoing_args_rtx.
3450 EXTRA is the number of bytes of padding to push in addition to SIZE.
3451 BELOW nonzero means this padding comes at low addresses;
3452 otherwise, the padding comes at high addresses. */
3455 push_block (rtx size, int extra, int below)
3457 rtx temp;
3459 size = convert_modes (Pmode, ptr_mode, size, 1);
3460 if (CONSTANT_P (size))
3461 anti_adjust_stack (plus_constant (size, extra));
3462 else if (REG_P (size) && extra == 0)
3463 anti_adjust_stack (size);
3464 else
3466 temp = copy_to_mode_reg (Pmode, size);
3467 if (extra != 0)
3468 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3469 temp, 0, OPTAB_LIB_WIDEN);
3470 anti_adjust_stack (temp);
3473 #ifndef STACK_GROWS_DOWNWARD
3474 if (0)
3475 #else
3476 if (1)
3477 #endif
3479 temp = virtual_outgoing_args_rtx;
3480 if (extra != 0 && below)
3481 temp = plus_constant (temp, extra);
3483 else
3485 if (GET_CODE (size) == CONST_INT)
3486 temp = plus_constant (virtual_outgoing_args_rtx,
3487 -INTVAL (size) - (below ? 0 : extra));
3488 else if (extra != 0 && !below)
3489 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3490 negate_rtx (Pmode, plus_constant (size, extra)));
3491 else
3492 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3493 negate_rtx (Pmode, size));
3496 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3499 #ifdef PUSH_ROUNDING
3501 /* Emit single push insn. */
3503 static void
3504 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3506 rtx dest_addr;
3507 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3508 rtx dest;
3509 enum insn_code icode;
3510 insn_operand_predicate_fn pred;
3512 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3513 /* If there is a push pattern, use it. Otherwise try the old way of
3514 throwing a MEM representing the push operation to the move expander. */
3515 icode = push_optab->handlers[(int) mode].insn_code;
3516 if (icode != CODE_FOR_nothing)
3518 if (((pred = insn_data[(int) icode].operand[0].predicate)
3519 && !((*pred) (x, mode))))
3520 x = force_reg (mode, x);
3521 emit_insn (GEN_FCN (icode) (x));
3522 return;
3524 if (GET_MODE_SIZE (mode) == rounded_size)
3525 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3526 /* If we are to pad downward, adjust the stack pointer first and
3527 then store X into the stack location using an offset. This is
3528 because emit_move_insn does not know how to pad; it does not have
3529 access to type. */
3530 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3532 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3533 HOST_WIDE_INT offset;
3535 emit_move_insn (stack_pointer_rtx,
3536 expand_binop (Pmode,
3537 #ifdef STACK_GROWS_DOWNWARD
3538 sub_optab,
3539 #else
3540 add_optab,
3541 #endif
3542 stack_pointer_rtx,
3543 GEN_INT (rounded_size),
3544 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3546 offset = (HOST_WIDE_INT) padding_size;
3547 #ifdef STACK_GROWS_DOWNWARD
3548 if (STACK_PUSH_CODE == POST_DEC)
3549 /* We have already decremented the stack pointer, so get the
3550 previous value. */
3551 offset += (HOST_WIDE_INT) rounded_size;
3552 #else
3553 if (STACK_PUSH_CODE == POST_INC)
3554 /* We have already incremented the stack pointer, so get the
3555 previous value. */
3556 offset -= (HOST_WIDE_INT) rounded_size;
3557 #endif
3558 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3560 else
3562 #ifdef STACK_GROWS_DOWNWARD
3563 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3564 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3565 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3566 #else
3567 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3568 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3569 GEN_INT (rounded_size));
3570 #endif
3571 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3574 dest = gen_rtx_MEM (mode, dest_addr);
3576 if (type != 0)
3578 set_mem_attributes (dest, type, 1);
3580 if (flag_optimize_sibling_calls)
3581 /* Function incoming arguments may overlap with sibling call
3582 outgoing arguments and we cannot allow reordering of reads
3583 from function arguments with stores to outgoing arguments
3584 of sibling calls. */
3585 set_mem_alias_set (dest, 0);
3587 emit_move_insn (dest, x);
3589 #endif
3591 /* Generate code to push X onto the stack, assuming it has mode MODE and
3592 type TYPE.
3593 MODE is redundant except when X is a CONST_INT (since they don't
3594 carry mode info).
3595 SIZE is an rtx for the size of data to be copied (in bytes),
3596 needed only if X is BLKmode.
3598 ALIGN (in bits) is maximum alignment we can assume.
3600 If PARTIAL and REG are both nonzero, then copy that many of the first
3601 bytes of X into registers starting with REG, and push the rest of X.
3602 The amount of space pushed is decreased by PARTIAL bytes.
3603 REG must be a hard register in this case.
3604 If REG is zero but PARTIAL is not, take all other actions for an
3605 argument partially in registers, but do not actually load any
3606 registers.
3608 EXTRA is the amount in bytes of extra space to leave next to this arg.
3609 This is ignored if an argument block has already been allocated.
3611 On a machine that lacks real push insns, ARGS_ADDR is the address of
3612 the bottom of the argument block for this call. We use indexing off there
3613 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3614 argument block has not been preallocated.
3616 ARGS_SO_FAR is the size of args previously pushed for this call.
3618 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3619 for arguments passed in registers. If nonzero, it will be the number
3620 of bytes required. */
3622 void
3623 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3624 unsigned int align, int partial, rtx reg, int extra,
3625 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3626 rtx alignment_pad)
3628 rtx xinner;
3629 enum direction stack_direction
3630 #ifdef STACK_GROWS_DOWNWARD
3631 = downward;
3632 #else
3633 = upward;
3634 #endif
3636 /* Decide where to pad the argument: `downward' for below,
3637 `upward' for above, or `none' for don't pad it.
3638 Default is below for small data on big-endian machines; else above. */
3639 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3641 /* Invert direction if stack is post-decrement.
3642 FIXME: why? */
3643 if (STACK_PUSH_CODE == POST_DEC)
3644 if (where_pad != none)
3645 where_pad = (where_pad == downward ? upward : downward);
3647 xinner = x;
3649 if (mode == BLKmode
3650 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3652 /* Copy a block into the stack, entirely or partially. */
3654 rtx temp;
3655 int used;
3656 int offset;
3657 int skip;
3659 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3660 used = partial - offset;
3662 if (mode != BLKmode)
3664 /* A value is to be stored in an insufficiently aligned
3665 stack slot; copy via a suitably aligned slot if
3666 necessary. */
3667 size = GEN_INT (GET_MODE_SIZE (mode));
3668 if (!MEM_P (xinner))
3670 temp = assign_temp (type, 0, 1, 1);
3671 emit_move_insn (temp, xinner);
3672 xinner = temp;
3676 gcc_assert (size);
3678 /* USED is now the # of bytes we need not copy to the stack
3679 because registers will take care of them. */
3681 if (partial != 0)
3682 xinner = adjust_address (xinner, BLKmode, used);
3684 /* If the partial register-part of the arg counts in its stack size,
3685 skip the part of stack space corresponding to the registers.
3686 Otherwise, start copying to the beginning of the stack space,
3687 by setting SKIP to 0. */
3688 skip = (reg_parm_stack_space == 0) ? 0 : used;
3690 #ifdef PUSH_ROUNDING
3691 /* Do it with several push insns if that doesn't take lots of insns
3692 and if there is no difficulty with push insns that skip bytes
3693 on the stack for alignment purposes. */
3694 if (args_addr == 0
3695 && PUSH_ARGS
3696 && GET_CODE (size) == CONST_INT
3697 && skip == 0
3698 && MEM_ALIGN (xinner) >= align
3699 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3700 /* Here we avoid the case of a structure whose weak alignment
3701 forces many pushes of a small amount of data,
3702 and such small pushes do rounding that causes trouble. */
3703 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3704 || align >= BIGGEST_ALIGNMENT
3705 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3706 == (align / BITS_PER_UNIT)))
3707 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3709 /* Push padding now if padding above and stack grows down,
3710 or if padding below and stack grows up.
3711 But if space already allocated, this has already been done. */
3712 if (extra && args_addr == 0
3713 && where_pad != none && where_pad != stack_direction)
3714 anti_adjust_stack (GEN_INT (extra));
3716 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3718 else
3719 #endif /* PUSH_ROUNDING */
3721 rtx target;
3723 /* Otherwise make space on the stack and copy the data
3724 to the address of that space. */
3726 /* Deduct words put into registers from the size we must copy. */
3727 if (partial != 0)
3729 if (GET_CODE (size) == CONST_INT)
3730 size = GEN_INT (INTVAL (size) - used);
3731 else
3732 size = expand_binop (GET_MODE (size), sub_optab, size,
3733 GEN_INT (used), NULL_RTX, 0,
3734 OPTAB_LIB_WIDEN);
3737 /* Get the address of the stack space.
3738 In this case, we do not deal with EXTRA separately.
3739 A single stack adjust will do. */
3740 if (! args_addr)
3742 temp = push_block (size, extra, where_pad == downward);
3743 extra = 0;
3745 else if (GET_CODE (args_so_far) == CONST_INT)
3746 temp = memory_address (BLKmode,
3747 plus_constant (args_addr,
3748 skip + INTVAL (args_so_far)));
3749 else
3750 temp = memory_address (BLKmode,
3751 plus_constant (gen_rtx_PLUS (Pmode,
3752 args_addr,
3753 args_so_far),
3754 skip));
3756 if (!ACCUMULATE_OUTGOING_ARGS)
3758 /* If the source is referenced relative to the stack pointer,
3759 copy it to another register to stabilize it. We do not need
3760 to do this if we know that we won't be changing sp. */
3762 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3763 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3764 temp = copy_to_reg (temp);
3767 target = gen_rtx_MEM (BLKmode, temp);
3769 /* We do *not* set_mem_attributes here, because incoming arguments
3770 may overlap with sibling call outgoing arguments and we cannot
3771 allow reordering of reads from function arguments with stores
3772 to outgoing arguments of sibling calls. We do, however, want
3773 to record the alignment of the stack slot. */
3774 /* ALIGN may well be better aligned than TYPE, e.g. due to
3775 PARM_BOUNDARY. Assume the caller isn't lying. */
3776 set_mem_align (target, align);
3778 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3781 else if (partial > 0)
3783 /* Scalar partly in registers. */
3785 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3786 int i;
3787 int not_stack;
3788 /* # bytes of start of argument
3789 that we must make space for but need not store. */
3790 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3791 int args_offset = INTVAL (args_so_far);
3792 int skip;
3794 /* Push padding now if padding above and stack grows down,
3795 or if padding below and stack grows up.
3796 But if space already allocated, this has already been done. */
3797 if (extra && args_addr == 0
3798 && where_pad != none && where_pad != stack_direction)
3799 anti_adjust_stack (GEN_INT (extra));
3801 /* If we make space by pushing it, we might as well push
3802 the real data. Otherwise, we can leave OFFSET nonzero
3803 and leave the space uninitialized. */
3804 if (args_addr == 0)
3805 offset = 0;
3807 /* Now NOT_STACK gets the number of words that we don't need to
3808 allocate on the stack. Convert OFFSET to words too. */
3809 not_stack = (partial - offset) / UNITS_PER_WORD;
3810 offset /= UNITS_PER_WORD;
3812 /* If the partial register-part of the arg counts in its stack size,
3813 skip the part of stack space corresponding to the registers.
3814 Otherwise, start copying to the beginning of the stack space,
3815 by setting SKIP to 0. */
3816 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3818 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3819 x = validize_mem (force_const_mem (mode, x));
3821 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3822 SUBREGs of such registers are not allowed. */
3823 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3824 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3825 x = copy_to_reg (x);
3827 /* Loop over all the words allocated on the stack for this arg. */
3828 /* We can do it by words, because any scalar bigger than a word
3829 has a size that is a multiple of a word. */
3830 #ifndef PUSH_ARGS_REVERSED
3831 for (i = not_stack; i < size; i++)
3832 #else
3833 for (i = size - 1; i >= not_stack; i--)
3834 #endif
3835 if (i >= not_stack + offset)
3836 emit_push_insn (operand_subword_force (x, i, mode),
3837 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3838 0, args_addr,
3839 GEN_INT (args_offset + ((i - not_stack + skip)
3840 * UNITS_PER_WORD)),
3841 reg_parm_stack_space, alignment_pad);
3843 else
3845 rtx addr;
3846 rtx dest;
3848 /* Push padding now if padding above and stack grows down,
3849 or if padding below and stack grows up.
3850 But if space already allocated, this has already been done. */
3851 if (extra && args_addr == 0
3852 && where_pad != none && where_pad != stack_direction)
3853 anti_adjust_stack (GEN_INT (extra));
3855 #ifdef PUSH_ROUNDING
3856 if (args_addr == 0 && PUSH_ARGS)
3857 emit_single_push_insn (mode, x, type);
3858 else
3859 #endif
3861 if (GET_CODE (args_so_far) == CONST_INT)
3862 addr
3863 = memory_address (mode,
3864 plus_constant (args_addr,
3865 INTVAL (args_so_far)));
3866 else
3867 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3868 args_so_far));
3869 dest = gen_rtx_MEM (mode, addr);
3871 /* We do *not* set_mem_attributes here, because incoming arguments
3872 may overlap with sibling call outgoing arguments and we cannot
3873 allow reordering of reads from function arguments with stores
3874 to outgoing arguments of sibling calls. We do, however, want
3875 to record the alignment of the stack slot. */
3876 /* ALIGN may well be better aligned than TYPE, e.g. due to
3877 PARM_BOUNDARY. Assume the caller isn't lying. */
3878 set_mem_align (dest, align);
3880 emit_move_insn (dest, x);
3884 /* If part should go in registers, copy that part
3885 into the appropriate registers. Do this now, at the end,
3886 since mem-to-mem copies above may do function calls. */
3887 if (partial > 0 && reg != 0)
3889 /* Handle calls that pass values in multiple non-contiguous locations.
3890 The Irix 6 ABI has examples of this. */
3891 if (GET_CODE (reg) == PARALLEL)
3892 emit_group_load (reg, x, type, -1);
3893 else
3895 gcc_assert (partial % UNITS_PER_WORD == 0);
3896 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3900 if (extra && args_addr == 0 && where_pad == stack_direction)
3901 anti_adjust_stack (GEN_INT (extra));
3903 if (alignment_pad && args_addr == 0)
3904 anti_adjust_stack (alignment_pad);
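/* A rough worked example of the PARTIAL bookkeeping above, assuming a
   hypothetical target with PARM_BOUNDARY == 64, BITS_PER_UNIT == 8 and
   UNITS_PER_WORD == 4: an argument with PARTIAL == 12 bytes already in
   registers gives

       offset    = 12 % (64 / 8)   = 4
       used      = 12 - 4          = 8
       not_stack = (12 - 4) / 4    = 2   (scalar case, in words)

   so the block-move path starts copying 8 bytes into the value, and the
   scalar path treats the first 2 words as not needing stack slots.  */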
3907 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3908 operations. */
3910 static rtx
3911 get_subtarget (rtx x)
3913 return (optimize
3914 || x == 0
3915 /* Only registers can be subtargets. */
3916 || !REG_P (x)
3917 /* Don't use hard regs to avoid extending their life. */
3918 || REGNO (x) < FIRST_PSEUDO_REGISTER
3919 ? 0 : x);
3922 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3923 FIELD is a bitfield. Returns true if the optimization was successful,
3924 and there's nothing else to do. */
3926 static bool
3927 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3928 unsigned HOST_WIDE_INT bitpos,
3929 enum machine_mode mode1, rtx str_rtx,
3930 tree to, tree src)
3932 enum machine_mode str_mode = GET_MODE (str_rtx);
3933 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3934 tree op0, op1;
3935 rtx value, result;
3936 optab binop;
3938 if (mode1 != VOIDmode
3939 || bitsize >= BITS_PER_WORD
3940 || str_bitsize > BITS_PER_WORD
3941 || TREE_SIDE_EFFECTS (to)
3942 || TREE_THIS_VOLATILE (to))
3943 return false;
3945 STRIP_NOPS (src);
3946 if (!BINARY_CLASS_P (src)
3947 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3948 return false;
3950 op0 = TREE_OPERAND (src, 0);
3951 op1 = TREE_OPERAND (src, 1);
3952 STRIP_NOPS (op0);
3954 if (!operand_equal_p (to, op0, 0))
3955 return false;
3957 if (MEM_P (str_rtx))
3959 unsigned HOST_WIDE_INT offset1;
3961 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3962 str_mode = word_mode;
3963 str_mode = get_best_mode (bitsize, bitpos,
3964 MEM_ALIGN (str_rtx), str_mode, 0);
3965 if (str_mode == VOIDmode)
3966 return false;
3967 str_bitsize = GET_MODE_BITSIZE (str_mode);
3969 offset1 = bitpos;
3970 bitpos %= str_bitsize;
3971 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3972 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3974 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3975 return false;
3977 /* If the bit field covers the whole REG/MEM, store_field
3978 will likely generate better code. */
3979 if (bitsize >= str_bitsize)
3980 return false;
3982 /* We can't handle fields split across multiple entities. */
3983 if (bitpos + bitsize > str_bitsize)
3984 return false;
3986 if (BYTES_BIG_ENDIAN)
3987 bitpos = str_bitsize - bitpos - bitsize;
3989 switch (TREE_CODE (src))
3991 case PLUS_EXPR:
3992 case MINUS_EXPR:
3993 /* For now, optimize only the case of the topmost bitfield,
3994 where we don't need to do any masking, and of 1-bit
3995 bitfields, where xor can be used.
3996 We might win by one instruction for the other bitfields
3997 too if insv/extv instructions aren't used, so that
3998 can be added later. */
3999 if (bitpos + bitsize != str_bitsize
4000 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4001 break;
4003 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4004 value = convert_modes (str_mode,
4005 TYPE_MODE (TREE_TYPE (op1)), value,
4006 TYPE_UNSIGNED (TREE_TYPE (op1)));
4008 /* We may be accessing data outside the field, which means
4009 we can alias adjacent data. */
4010 if (MEM_P (str_rtx))
4012 str_rtx = shallow_copy_rtx (str_rtx);
4013 set_mem_alias_set (str_rtx, 0);
4014 set_mem_expr (str_rtx, 0);
4017 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4018 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4020 value = expand_and (str_mode, value, const1_rtx, NULL);
4021 binop = xor_optab;
4023 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4024 build_int_cst (NULL_TREE, bitpos),
4025 NULL_RTX, 1);
4026 result = expand_binop (str_mode, binop, str_rtx,
4027 value, str_rtx, 1, OPTAB_WIDEN);
4028 if (result != str_rtx)
4029 emit_move_insn (str_rtx, result);
4030 return true;
4032 case BIT_IOR_EXPR:
4033 case BIT_XOR_EXPR:
4034 if (TREE_CODE (op1) != INTEGER_CST)
4035 break;
4036 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4037 value = convert_modes (GET_MODE (str_rtx),
4038 TYPE_MODE (TREE_TYPE (op1)), value,
4039 TYPE_UNSIGNED (TREE_TYPE (op1)));
4041 /* We may be accessing data outside the field, which means
4042 we can alias adjacent data. */
4043 if (MEM_P (str_rtx))
4045 str_rtx = shallow_copy_rtx (str_rtx);
4046 set_mem_alias_set (str_rtx, 0);
4047 set_mem_expr (str_rtx, 0);
4050 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4051 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4053 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4054 - 1);
4055 value = expand_and (GET_MODE (str_rtx), value, mask,
4056 NULL_RTX);
4058 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4059 build_int_cst (NULL_TREE, bitpos),
4060 NULL_RTX, 1);
4061 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4062 value, str_rtx, 1, OPTAB_WIDEN);
4063 if (result != str_rtx)
4064 emit_move_insn (str_rtx, result);
4065 return true;
4067 default:
4068 break;
4071 return false;
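/* As a source-level illustration, for a hypothetical struct such as

       struct st { unsigned mode : 3; unsigned busy : 1; } s;

   stores of the form `s.busy ^= 1' or `s.mode |= 4' (ior/xor with a
   constant) can be rewritten here into a single ior/xor of the word
   containing the field with a suitably masked and shifted constant,
   instead of the extract/modify/insert sequence that store_field would
   otherwise emit.  Whether the rewrite fires still depends on the checks
   above: no volatility, the field must fit within the word being
   operated on, and so on.  */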
4075 /* Expand an assignment that stores the value of FROM into TO. */
4077 void
4078 expand_assignment (tree to, tree from)
4080 rtx to_rtx = 0;
4081 rtx result;
4083 /* Don't crash if the lhs of the assignment was erroneous. */
4084 if (TREE_CODE (to) == ERROR_MARK)
4086 result = expand_normal (from);
4087 return;
4090 /* Optimize away no-op moves without side-effects. */
4091 if (operand_equal_p (to, from, 0))
4092 return;
4094 /* Assignment of a structure component needs special treatment
4095 if the structure component's rtx is not simply a MEM.
4096 Assignment of an array element at a constant index, and assignment of
4097 an array element in an unaligned packed structure field, have the same
4098 problem. */
4099 if (handled_component_p (to)
4100 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4102 enum machine_mode mode1;
4103 HOST_WIDE_INT bitsize, bitpos;
4104 tree offset;
4105 int unsignedp;
4106 int volatilep = 0;
4107 tree tem;
4109 push_temp_slots ();
4110 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4111 &unsignedp, &volatilep, true);
4113 /* If we are going to use store_bit_field and extract_bit_field,
4114 make sure to_rtx will be safe for multiple use. */
4116 to_rtx = expand_normal (tem);
4118 if (offset != 0)
4120 rtx offset_rtx;
4122 if (!MEM_P (to_rtx))
4124 /* We can get constant negative offsets into arrays with broken
4125 user code. Translate this to a trap instead of ICEing. */
4126 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4127 expand_builtin_trap ();
4128 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4131 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4132 #ifdef POINTERS_EXTEND_UNSIGNED
4133 if (GET_MODE (offset_rtx) != Pmode)
4134 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4135 #else
4136 if (GET_MODE (offset_rtx) != ptr_mode)
4137 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4138 #endif
4140 /* A constant address in TO_RTX can have VOIDmode; we must not try
4141 to call force_reg in that case, so avoid it. */
4142 if (MEM_P (to_rtx)
4143 && GET_MODE (to_rtx) == BLKmode
4144 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4145 && bitsize > 0
4146 && (bitpos % bitsize) == 0
4147 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4148 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4150 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4151 bitpos = 0;
4154 to_rtx = offset_address (to_rtx, offset_rtx,
4155 highest_pow2_factor_for_target (to,
4156 offset));
4159 /* Handle expand_expr of a complex value returning a CONCAT. */
4160 if (GET_CODE (to_rtx) == CONCAT)
4162 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4164 gcc_assert (bitpos == 0);
4165 result = store_expr (from, to_rtx, false);
4167 else
4169 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4170 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
4173 else
4175 if (MEM_P (to_rtx))
4177 /* If the field is at offset zero, we could have been given the
4178 DECL_RTX of the parent struct. Don't munge it. */
4179 to_rtx = shallow_copy_rtx (to_rtx);
4181 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4183 /* Deal with volatile and readonly fields. The former is only
4184 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4185 if (volatilep)
4186 MEM_VOLATILE_P (to_rtx) = 1;
4187 if (component_uses_parent_alias_set (to))
4188 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4191 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4192 to_rtx, to, from))
4193 result = NULL;
4194 else
4195 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4196 TREE_TYPE (tem), get_alias_set (to));
4199 if (result)
4200 preserve_temp_slots (result);
4201 free_temp_slots ();
4202 pop_temp_slots ();
4203 return;
4206 /* If the rhs is a function call and its value is not an aggregate,
4207 call the function before we start to compute the lhs.
4208 This is needed for correct code for cases such as
4209 val = setjmp (buf) on machines where reference to val
4210 requires loading up part of an address in a separate insn.
4212 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is a REG,
4213 since it might be a promoted variable where the zero- or sign-extension
4214 needs to be done. Handling this in the normal way is safe because no
4215 computation is done before the call. */
4216 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4217 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4218 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4219 && REG_P (DECL_RTL (to))))
4221 rtx value;
4223 push_temp_slots ();
4224 value = expand_normal (from);
4225 if (to_rtx == 0)
4226 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4228 /* Handle calls that return values in multiple non-contiguous locations.
4229 The Irix 6 ABI has examples of this. */
4230 if (GET_CODE (to_rtx) == PARALLEL)
4231 emit_group_load (to_rtx, value, TREE_TYPE (from),
4232 int_size_in_bytes (TREE_TYPE (from)));
4233 else if (GET_MODE (to_rtx) == BLKmode)
4234 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4235 else
4237 if (POINTER_TYPE_P (TREE_TYPE (to)))
4238 value = convert_memory_address (GET_MODE (to_rtx), value);
4239 emit_move_insn (to_rtx, value);
4241 preserve_temp_slots (to_rtx);
4242 free_temp_slots ();
4243 pop_temp_slots ();
4244 return;
4247 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4248 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4250 if (to_rtx == 0)
4251 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4253 /* Don't move directly into a return register. */
4254 if (TREE_CODE (to) == RESULT_DECL
4255 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4257 rtx temp;
4259 push_temp_slots ();
4260 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4262 if (GET_CODE (to_rtx) == PARALLEL)
4263 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4264 int_size_in_bytes (TREE_TYPE (from)));
4265 else
4266 emit_move_insn (to_rtx, temp);
4268 preserve_temp_slots (to_rtx);
4269 free_temp_slots ();
4270 pop_temp_slots ();
4271 return;
4274 /* In case we are returning the contents of an object which overlaps
4275 the place the value is being stored, use a safe function when copying
4276 a value through a pointer into a structure value return block. */
4277 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4278 && current_function_returns_struct
4279 && !current_function_returns_pcc_struct)
4281 rtx from_rtx, size;
4283 push_temp_slots ();
4284 size = expr_size (from);
4285 from_rtx = expand_normal (from);
4287 emit_library_call (memmove_libfunc, LCT_NORMAL,
4288 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4289 XEXP (from_rtx, 0), Pmode,
4290 convert_to_mode (TYPE_MODE (sizetype),
4291 size, TYPE_UNSIGNED (sizetype)),
4292 TYPE_MODE (sizetype));
4294 preserve_temp_slots (to_rtx);
4295 free_temp_slots ();
4296 pop_temp_slots ();
4297 return;
4300 /* Compute FROM and store the value in the rtx we got. */
4302 push_temp_slots ();
4303 result = store_expr (from, to_rtx, 0);
4304 preserve_temp_slots (result);
4305 free_temp_slots ();
4306 pop_temp_slots ();
4307 return;
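/* For example, for a hypothetical

       struct s { int a : 3; int b : 5; } *p;
       ...
       p->b = 7;

   the store goes through the handled_component_p path above:
   get_inner_reference reports the field's size and position (here
   BITSIZE == 5, with BITPOS depending on the target's bit-field layout),
   optimize_bitfield_assignment_op gets a first look (it only helps for
   forms such as `p->b += 1'), and otherwise store_field emits the
   bit-field store.  */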
4310 /* Generate code for computing expression EXP,
4311 and storing the value into TARGET.
4313 If the mode is BLKmode then we may return TARGET itself.
4314 It turns out that in BLKmode it doesn't cause a problem,
4315 because C has no operators that could combine two different
4316 assignments into the same BLKmode object with different values
4317 with no sequence point. Will other languages need this to
4318 be more thorough?
4320 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4321 stack, and block moves may need to be treated specially. */
4323 rtx
4324 store_expr (tree exp, rtx target, int call_param_p)
4326 rtx temp;
4327 rtx alt_rtl = NULL_RTX;
4328 int dont_return_target = 0;
4330 if (VOID_TYPE_P (TREE_TYPE (exp)))
4332 /* C++ can generate ?: expressions with a throw expression in one
4333 branch and an rvalue in the other. Here, we resolve attempts to
4334 store the throw expression's nonexistent result. */
4335 gcc_assert (!call_param_p);
4336 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4337 return NULL_RTX;
4339 if (TREE_CODE (exp) == COMPOUND_EXPR)
4341 /* Perform first part of compound expression, then assign from second
4342 part. */
4343 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4344 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4345 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4347 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4349 /* For a conditional expression, get a safe form of the target. Then
4350 test the condition, doing the appropriate assignment on either
4351 side. This avoids the creation of unnecessary temporaries.
4352 For non-BLKmode, it is more efficient not to do this. */
4354 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4356 do_pending_stack_adjust ();
4357 NO_DEFER_POP;
4358 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4359 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4360 emit_jump_insn (gen_jump (lab2));
4361 emit_barrier ();
4362 emit_label (lab1);
4363 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4364 emit_label (lab2);
4365 OK_DEFER_POP;
4367 return NULL_RTX;
4369 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4370 /* If this is a scalar in a register that is stored in a wider mode
4371 than the declared mode, compute the result into its declared mode
4372 and then convert to the wider mode. Our value is the computed
4373 expression. */
4375 rtx inner_target = 0;
4377 /* We can do the conversion inside EXP, which will often result
4378 in some optimizations. Do the conversion in two steps: first
4379 change the signedness, if needed, then the extension. But don't
4380 do this if the type of EXP is a subtype of something else
4381 since then the conversion might involve more than just
4382 converting modes. */
4383 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4384 && TREE_TYPE (TREE_TYPE (exp)) == 0
4385 && (!lang_hooks.reduce_bit_field_operations
4386 || (GET_MODE_PRECISION (GET_MODE (target))
4387 == TYPE_PRECISION (TREE_TYPE (exp)))))
4389 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4390 != SUBREG_PROMOTED_UNSIGNED_P (target))
4392 /* Some types, e.g. Fortran's logical*4, won't have a signed
4393 version, so use the mode instead. */
4394 tree ntype
4395 = (get_signed_or_unsigned_type
4396 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4397 if (ntype == NULL)
4398 ntype = lang_hooks.types.type_for_mode
4399 (TYPE_MODE (TREE_TYPE (exp)),
4400 SUBREG_PROMOTED_UNSIGNED_P (target));
4402 exp = fold_convert (ntype, exp);
4405 exp = fold_convert (lang_hooks.types.type_for_mode
4406 (GET_MODE (SUBREG_REG (target)),
4407 SUBREG_PROMOTED_UNSIGNED_P (target)),
4408 exp);
4410 inner_target = SUBREG_REG (target);
4413 temp = expand_expr (exp, inner_target, VOIDmode,
4414 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4416 /* If TEMP is a VOIDmode constant, use convert_modes to make
4417 sure that we properly convert it. */
4418 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4420 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4421 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4422 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4423 GET_MODE (target), temp,
4424 SUBREG_PROMOTED_UNSIGNED_P (target));
4427 convert_move (SUBREG_REG (target), temp,
4428 SUBREG_PROMOTED_UNSIGNED_P (target));
4430 return NULL_RTX;
4432 else
4434 temp = expand_expr_real (exp, target, GET_MODE (target),
4435 (call_param_p
4436 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4437 &alt_rtl);
4438 /* Return TARGET if it's a specified hardware register.
4439 If TARGET is a volatile mem ref, either return TARGET
4440 or return a reg copied *from* TARGET; ANSI requires this.
4442 Otherwise, if TEMP is not TARGET, return TEMP
4443 if it is constant (for efficiency),
4444 or if we really want the correct value. */
4445 if (!(target && REG_P (target)
4446 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4447 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4448 && ! rtx_equal_p (temp, target)
4449 && CONSTANT_P (temp))
4450 dont_return_target = 1;
4453 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4454 the same as that of TARGET, adjust the constant. This is needed, for
4455 example, in case it is a CONST_DOUBLE and we want only a word-sized
4456 value. */
4457 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4458 && TREE_CODE (exp) != ERROR_MARK
4459 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4460 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4461 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4463 /* If value was not generated in the target, store it there.
4464 Convert the value to TARGET's type first if necessary and emit the
4465 pending incrementations that have been queued when expanding EXP.
4466 Note that we cannot emit the whole queue blindly because this will
4467 effectively disable the POST_INC optimization later.
4469 If TEMP and TARGET compare equal according to rtx_equal_p, but
4470 one or both of them are volatile memory refs, we have to distinguish
4471 two cases:
4472 - expand_expr has used TARGET. In this case, we must not generate
4473 another copy. This can be detected by TARGET being equal according
4474 to == .
4475 - expand_expr has not used TARGET - that means that the source just
4476 happens to have the same RTX form. Since temp will have been created
4477 by expand_expr, it will compare unequal according to == .
4478 We must generate a copy in this case, to reach the correct number
4479 of volatile memory references. */
4481 if ((! rtx_equal_p (temp, target)
4482 || (temp != target && (side_effects_p (temp)
4483 || side_effects_p (target))))
4484 && TREE_CODE (exp) != ERROR_MARK
4485 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4486 but TARGET is not a valid memory reference, TEMP will differ
4487 from TARGET although it is really the same location. */
4488 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4489 /* If there's nothing to copy, don't bother. Don't call
4490 expr_size unless necessary, because some front ends' (C++)
4491 expr_size hook must not be given objects that are not
4492 supposed to be bit-copied or bit-initialized. */
4493 && expr_size (exp) != const0_rtx)
4495 if (GET_MODE (temp) != GET_MODE (target)
4496 && GET_MODE (temp) != VOIDmode)
4498 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4499 if (dont_return_target)
4501 /* In this case, we will return TEMP,
4502 so make sure it has the proper mode.
4503 But don't forget to store the value into TARGET. */
4504 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4505 emit_move_insn (target, temp);
4507 else
4508 convert_move (target, temp, unsignedp);
4511 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4513 /* Handle copying a string constant into an array. The string
4514 constant may be shorter than the array. So copy just the string's
4515 actual length, and clear the rest. First get the size of the data
4516 type of the string, which is actually the size of the target. */
4517 rtx size = expr_size (exp);
4519 if (GET_CODE (size) == CONST_INT
4520 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4521 emit_block_move (target, temp, size,
4522 (call_param_p
4523 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4524 else
4526 /* Compute the size of the data to copy from the string. */
4527 tree copy_size
4528 = size_binop (MIN_EXPR,
4529 make_tree (sizetype, size),
4530 size_int (TREE_STRING_LENGTH (exp)));
4531 rtx copy_size_rtx
4532 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4533 (call_param_p
4534 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4535 rtx label = 0;
4537 /* Copy that much. */
4538 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4539 TYPE_UNSIGNED (sizetype));
4540 emit_block_move (target, temp, copy_size_rtx,
4541 (call_param_p
4542 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4544 /* Figure out how much is left in TARGET that we have to clear.
4545 Do all calculations in ptr_mode. */
4546 if (GET_CODE (copy_size_rtx) == CONST_INT)
4548 size = plus_constant (size, -INTVAL (copy_size_rtx));
4549 target = adjust_address (target, BLKmode,
4550 INTVAL (copy_size_rtx));
4552 else
4554 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4555 copy_size_rtx, NULL_RTX, 0,
4556 OPTAB_LIB_WIDEN);
4558 #ifdef POINTERS_EXTEND_UNSIGNED
4559 if (GET_MODE (copy_size_rtx) != Pmode)
4560 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4561 TYPE_UNSIGNED (sizetype));
4562 #endif
4564 target = offset_address (target, copy_size_rtx,
4565 highest_pow2_factor (copy_size));
4566 label = gen_label_rtx ();
4567 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4568 GET_MODE (size), 0, label);
4571 if (size != const0_rtx)
4572 clear_storage (target, size, BLOCK_OP_NORMAL);
4574 if (label)
4575 emit_label (label);
4578 /* Handle calls that return values in multiple non-contiguous locations.
4579 The Irix 6 ABI has examples of this. */
4580 else if (GET_CODE (target) == PARALLEL)
4581 emit_group_load (target, temp, TREE_TYPE (exp),
4582 int_size_in_bytes (TREE_TYPE (exp)));
4583 else if (GET_MODE (temp) == BLKmode)
4584 emit_block_move (target, temp, expr_size (exp),
4585 (call_param_p
4586 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4587 else
4589 temp = force_operand (temp, target);
4590 if (temp != target)
4591 emit_move_insn (target, temp);
4595 return NULL_RTX;
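/* As an example of the STRING_CST arm above, for a hypothetical

       char buf[8] = "hi";

   expr_size of the string is the size of the target (8 bytes) while
   TREE_STRING_LENGTH is 3 (the two characters plus the terminating nul),
   so emit_block_move copies 3 bytes and clear_storage zeroes the
   remaining 5.  */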
4598 /* Helper for categorize_ctor_elements. Identical interface. */
4600 static bool
4601 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4602 HOST_WIDE_INT *p_elt_count,
4603 bool *p_must_clear)
4605 unsigned HOST_WIDE_INT idx;
4606 HOST_WIDE_INT nz_elts, elt_count;
4607 tree value, purpose;
4609 /* Whether CTOR is a valid constant initializer, in accordance with what
4610 initializer_constant_valid_p does. If inferred from the constructor
4611 elements, true until proven otherwise. */
4612 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4613 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4615 nz_elts = 0;
4616 elt_count = 0;
4618 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4620 HOST_WIDE_INT mult;
4622 mult = 1;
4623 if (TREE_CODE (purpose) == RANGE_EXPR)
4625 tree lo_index = TREE_OPERAND (purpose, 0);
4626 tree hi_index = TREE_OPERAND (purpose, 1);
4628 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4629 mult = (tree_low_cst (hi_index, 1)
4630 - tree_low_cst (lo_index, 1) + 1);
4633 switch (TREE_CODE (value))
4635 case CONSTRUCTOR:
4637 HOST_WIDE_INT nz = 0, ic = 0;
4639 bool const_elt_p
4640 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4642 nz_elts += mult * nz;
4643 elt_count += mult * ic;
4645 if (const_from_elts_p && const_p)
4646 const_p = const_elt_p;
4648 break;
4650 case INTEGER_CST:
4651 case REAL_CST:
4652 if (!initializer_zerop (value))
4653 nz_elts += mult;
4654 elt_count += mult;
4655 break;
4657 case STRING_CST:
4658 nz_elts += mult * TREE_STRING_LENGTH (value);
4659 elt_count += mult * TREE_STRING_LENGTH (value);
4660 break;
4662 case COMPLEX_CST:
4663 if (!initializer_zerop (TREE_REALPART (value)))
4664 nz_elts += mult;
4665 if (!initializer_zerop (TREE_IMAGPART (value)))
4666 nz_elts += mult;
4667 elt_count += mult;
4668 break;
4670 case VECTOR_CST:
4672 tree v;
4673 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4675 if (!initializer_zerop (TREE_VALUE (v)))
4676 nz_elts += mult;
4677 elt_count += mult;
4680 break;
4682 default:
4683 nz_elts += mult;
4684 elt_count += mult;
4686 if (const_from_elts_p && const_p)
4687 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4688 != NULL_TREE;
4689 break;
4693 if (!*p_must_clear
4694 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4695 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4697 tree init_sub_type;
4698 bool clear_this = true;
4700 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4702 /* We don't expect more than one element of the union to be
4703 initialized. Not sure what we should do otherwise... */
4704 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4705 == 1);
4707 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4708 CONSTRUCTOR_ELTS (ctor),
4709 0)->value);
4711 /* ??? We could look at each element of the union, and find the
4712 largest element. Which would avoid comparing the size of the
4713 initialized element against any tail padding in the union.
4714 Doesn't seem worth the effort... */
4715 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4716 TYPE_SIZE (init_sub_type)) == 1)
4718 /* And now we have to find out if the element itself is fully
4719 constructed. E.g. for union { struct { int a, b; } s; } u
4720 = { .s = { .a = 1 } }. */
4721 if (elt_count == count_type_elements (init_sub_type, false))
4722 clear_this = false;
4726 *p_must_clear = clear_this;
4729 *p_nz_elts += nz_elts;
4730 *p_elt_count += elt_count;
4732 return const_p;
4735 /* Examine CTOR to discover:
4736 * how many scalar fields are set to nonzero values,
4737 and place it in *P_NZ_ELTS;
4738 * how many scalar fields in total are in CTOR,
4739 and place it in *P_ELT_COUNT.
4740 * if a type is a union, and the initializer from the constructor
4741 is not the largest element in the union, then set *p_must_clear.
4743 Return whether or not CTOR is a valid static constant initializer, the same
4744 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4746 bool
4747 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4748 HOST_WIDE_INT *p_elt_count,
4749 bool *p_must_clear)
4751 *p_nz_elts = 0;
4752 *p_elt_count = 0;
4753 *p_must_clear = false;
4755 return
4756 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
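/* For a CONSTRUCTOR holding, say, the three explicit elements of the
   hypothetical initializer

       int v[8] = { 1, 0, 2 };

   the walk above counts every recorded scalar and the nonzero ones among
   them, so it sets *P_ELT_COUNT to 3 and *P_NZ_ELTS to 2; *P_MUST_CLEAR
   stays false because the type is not a union, and the return value is
   true, matching initializer_constant_valid_p.  */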
4759 /* Count the number of scalars in TYPE. Return -1 on overflow or
4760 if TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a
4761 flexible array member at the end of the structure. */
4763 HOST_WIDE_INT
4764 count_type_elements (tree type, bool allow_flexarr)
4766 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4767 switch (TREE_CODE (type))
4769 case ARRAY_TYPE:
4771 tree telts = array_type_nelts (type);
4772 if (telts && host_integerp (telts, 1))
4774 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4775 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4776 if (n == 0)
4777 return 0;
4778 else if (max / n > m)
4779 return n * m;
4781 return -1;
4784 case RECORD_TYPE:
4786 HOST_WIDE_INT n = 0, t;
4787 tree f;
4789 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4790 if (TREE_CODE (f) == FIELD_DECL)
4792 t = count_type_elements (TREE_TYPE (f), false);
4793 if (t < 0)
4795 /* Check for structures with flexible array member. */
4796 tree tf = TREE_TYPE (f);
4797 if (allow_flexarr
4798 && TREE_CHAIN (f) == NULL
4799 && TREE_CODE (tf) == ARRAY_TYPE
4800 && TYPE_DOMAIN (tf)
4801 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4802 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4803 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4804 && int_size_in_bytes (type) >= 0)
4805 break;
4807 return -1;
4809 n += t;
4812 return n;
4815 case UNION_TYPE:
4816 case QUAL_UNION_TYPE:
4818 /* Ho hum. How in the world do we guess here? Clearly it isn't
4819 right to count the fields. Guess based on the number of words. */
4820 HOST_WIDE_INT n = int_size_in_bytes (type);
4821 if (n < 0)
4822 return -1;
4823 return n / UNITS_PER_WORD;
4826 case COMPLEX_TYPE:
4827 return 2;
4829 case VECTOR_TYPE:
4830 return TYPE_VECTOR_SUBPARTS (type);
4832 case INTEGER_TYPE:
4833 case REAL_TYPE:
4834 case ENUMERAL_TYPE:
4835 case BOOLEAN_TYPE:
4836 case POINTER_TYPE:
4837 case OFFSET_TYPE:
4838 case REFERENCE_TYPE:
4839 return 1;
4841 case VOID_TYPE:
4842 case METHOD_TYPE:
4843 case FUNCTION_TYPE:
4844 case LANG_TYPE:
4845 default:
4846 gcc_unreachable ();
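/* A couple of hypothetical examples of the counting above:

       struct pt { int x, y; };      scalars: 2
       struct pt grid[10];           scalars: 10 * 2 == 20
       int vla[n];                   scalars: -1 (bounds not constant)

   The RECORD_TYPE case sums the counts of its FIELD_DECLs, the
   ARRAY_TYPE case multiplies by the constant number of elements, and a
   type whose element count cannot be determined at compile time yields
   -1.  */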
4850 /* Return 1 if EXP contains mostly (3/4) zeros. */
4852 static int
4853 mostly_zeros_p (tree exp)
4855 if (TREE_CODE (exp) == CONSTRUCTOR)
4858 HOST_WIDE_INT nz_elts, count, elts;
4859 bool must_clear;
4861 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4862 if (must_clear)
4863 return 1;
4865 elts = count_type_elements (TREE_TYPE (exp), false);
4867 return nz_elts < elts / 4;
4870 return initializer_zerop (exp);
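/* For a hypothetical

       int a[100] = { [2] = 5 };

   the constructor records one nonzero scalar while the type holds 100,
   and 1 < 100 / 4, so the initializer counts as mostly zeros: callers
   such as store_constructor will clear the whole object first and then
   store only the element at index 2.  */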
4873 /* Return 1 if EXP contains all zeros. */
4875 static int
4876 all_zeros_p (tree exp)
4878 if (TREE_CODE (exp) == CONSTRUCTOR)
4881 HOST_WIDE_INT nz_elts, count;
4882 bool must_clear;
4884 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4885 return nz_elts == 0;
4888 return initializer_zerop (exp);
4891 /* Helper function for store_constructor.
4892 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4893 TYPE is the type of the CONSTRUCTOR, not the element type.
4894 CLEARED is as for store_constructor.
4895 ALIAS_SET is the alias set to use for any stores.
4897 This provides a recursive shortcut back to store_constructor when it isn't
4898 necessary to go through store_field. This is so that we can pass through
4899 the cleared field to let store_constructor know that we may not have to
4900 clear a substructure if the outer structure has already been cleared. */
4902 static void
4903 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4904 HOST_WIDE_INT bitpos, enum machine_mode mode,
4905 tree exp, tree type, int cleared, int alias_set)
4907 if (TREE_CODE (exp) == CONSTRUCTOR
4908 /* We can only call store_constructor recursively if the size and
4909 bit position are on a byte boundary. */
4910 && bitpos % BITS_PER_UNIT == 0
4911 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4912 /* If we have a nonzero bitpos for a register target, then we just
4913 let store_field do the bitfield handling. This is unlikely to
4914 generate unnecessary clear instructions anyway. */
4915 && (bitpos == 0 || MEM_P (target)))
4917 if (MEM_P (target))
4918 target
4919 = adjust_address (target,
4920 GET_MODE (target) == BLKmode
4921 || 0 != (bitpos
4922 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4923 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4926 /* Update the alias set, if required. */
4927 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4928 && MEM_ALIAS_SET (target) != 0)
4930 target = copy_rtx (target);
4931 set_mem_alias_set (target, alias_set);
4934 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4936 else
4937 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
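/* So for a nested constructor like the hypothetical

       struct inner { int a, b; };
       struct outer { struct inner i; int c; } o = { { 1, 2 }, 3 };

   the `{ 1, 2 }' sub-constructor sits on a byte boundary and recurses
   straight into store_constructor, carrying CLEARED along, so that when
   the outer aggregate has already been zeroed any zero-valued fields of
   the inner constructor are simply skipped rather than stored again.  */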
4940 /* Store the value of constructor EXP into the rtx TARGET.
4941 TARGET is either a REG or a MEM; we know it cannot conflict, since
4942 safe_from_p has been called.
4943 CLEARED is true if TARGET is known to have been zero'd.
4944 SIZE is the number of bytes of TARGET we are allowed to modify: this
4945 may not be the same as the size of EXP if we are assigning to a field
4946 which has been packed to exclude padding bits. */
4948 static void
4949 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4951 tree type = TREE_TYPE (exp);
4952 #ifdef WORD_REGISTER_OPERATIONS
4953 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4954 #endif
4956 switch (TREE_CODE (type))
4958 case RECORD_TYPE:
4959 case UNION_TYPE:
4960 case QUAL_UNION_TYPE:
4962 unsigned HOST_WIDE_INT idx;
4963 tree field, value;
4965 /* If size is zero or the target is already cleared, do nothing. */
4966 if (size == 0 || cleared)
4967 cleared = 1;
4968 /* We either clear the aggregate or indicate the value is dead. */
4969 else if ((TREE_CODE (type) == UNION_TYPE
4970 || TREE_CODE (type) == QUAL_UNION_TYPE)
4971 && ! CONSTRUCTOR_ELTS (exp))
4972 /* If the constructor is empty, clear the union. */
4974 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4975 cleared = 1;
4978 /* If we are building a static constructor into a register,
4979 set the initial value as zero so we can fold the value into
4980 a constant. But if more than one register is involved,
4981 this probably loses. */
4982 else if (REG_P (target) && TREE_STATIC (exp)
4983 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4985 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4986 cleared = 1;
4989 /* If the constructor has fewer fields than the structure or
4990 if we are initializing the structure to mostly zeros, clear
4991 the whole structure first. Don't do this if TARGET is a
4992 register whose mode size isn't equal to SIZE since
4993 clear_storage can't handle this case. */
4994 else if (size > 0
4995 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4996 != fields_length (type))
4997 || mostly_zeros_p (exp))
4998 && (!REG_P (target)
4999 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5000 == size)))
5002 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5003 cleared = 1;
5006 if (! cleared)
5007 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5009 /* Store each element of the constructor into the
5010 corresponding field of TARGET. */
5011 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5013 enum machine_mode mode;
5014 HOST_WIDE_INT bitsize;
5015 HOST_WIDE_INT bitpos = 0;
5016 tree offset;
5017 rtx to_rtx = target;
5019 /* Just ignore missing fields. We cleared the whole
5020 structure, above, if any fields are missing. */
5021 if (field == 0)
5022 continue;
5024 if (cleared && initializer_zerop (value))
5025 continue;
5027 if (host_integerp (DECL_SIZE (field), 1))
5028 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5029 else
5030 bitsize = -1;
5032 mode = DECL_MODE (field);
5033 if (DECL_BIT_FIELD (field))
5034 mode = VOIDmode;
5036 offset = DECL_FIELD_OFFSET (field);
5037 if (host_integerp (offset, 0)
5038 && host_integerp (bit_position (field), 0))
5040 bitpos = int_bit_position (field);
5041 offset = 0;
5043 else
5044 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5046 if (offset)
5048 rtx offset_rtx;
5050 offset
5051 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5052 make_tree (TREE_TYPE (exp),
5053 target));
5055 offset_rtx = expand_normal (offset);
5056 gcc_assert (MEM_P (to_rtx));
5058 #ifdef POINTERS_EXTEND_UNSIGNED
5059 if (GET_MODE (offset_rtx) != Pmode)
5060 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5061 #else
5062 if (GET_MODE (offset_rtx) != ptr_mode)
5063 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5064 #endif
5066 to_rtx = offset_address (to_rtx, offset_rtx,
5067 highest_pow2_factor (offset));
5070 #ifdef WORD_REGISTER_OPERATIONS
5071 /* If this initializes a field that is smaller than a
5072 word, at the start of a word, try to widen it to a full
5073 word. This special case allows us to output C++ member
5074 function initializations in a form that the optimizers
5075 can understand. */
5076 if (REG_P (target)
5077 && bitsize < BITS_PER_WORD
5078 && bitpos % BITS_PER_WORD == 0
5079 && GET_MODE_CLASS (mode) == MODE_INT
5080 && TREE_CODE (value) == INTEGER_CST
5081 && exp_size >= 0
5082 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5084 tree type = TREE_TYPE (value);
5086 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5088 type = lang_hooks.types.type_for_size
5089 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5090 value = fold_convert (type, value);
5093 if (BYTES_BIG_ENDIAN)
5094 value
5095 = fold_build2 (LSHIFT_EXPR, type, value,
5096 build_int_cst (type,
5097 BITS_PER_WORD - bitsize));
5098 bitsize = BITS_PER_WORD;
5099 mode = word_mode;
5101 #endif
5103 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5104 && DECL_NONADDRESSABLE_P (field))
5106 to_rtx = copy_rtx (to_rtx);
5107 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5110 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5111 value, type, cleared,
5112 get_alias_set (TREE_TYPE (field)));
5114 break;
5116 case ARRAY_TYPE:
5118 tree value, index;
5119 unsigned HOST_WIDE_INT i;
5120 int need_to_clear;
5121 tree domain;
5122 tree elttype = TREE_TYPE (type);
5123 int const_bounds_p;
5124 HOST_WIDE_INT minelt = 0;
5125 HOST_WIDE_INT maxelt = 0;
5127 domain = TYPE_DOMAIN (type);
5128 const_bounds_p = (TYPE_MIN_VALUE (domain)
5129 && TYPE_MAX_VALUE (domain)
5130 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5131 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5133 /* If we have constant bounds for the range of the type, get them. */
5134 if (const_bounds_p)
5136 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5137 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5140 /* If the constructor has fewer elements than the array, clear
5141 the whole array first. Similarly if this is a static
5142 constructor of a non-BLKmode object. */
5143 if (cleared)
5144 need_to_clear = 0;
5145 else if (REG_P (target) && TREE_STATIC (exp))
5146 need_to_clear = 1;
5147 else
5149 unsigned HOST_WIDE_INT idx;
5150 tree index, value;
5151 HOST_WIDE_INT count = 0, zero_count = 0;
5152 need_to_clear = ! const_bounds_p;
5154 /* This loop is a more accurate version of the loop in
5155 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5156 is also needed to check for missing elements. */
5157 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5159 HOST_WIDE_INT this_node_count;
5161 if (need_to_clear)
5162 break;
5164 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5166 tree lo_index = TREE_OPERAND (index, 0);
5167 tree hi_index = TREE_OPERAND (index, 1);
5169 if (! host_integerp (lo_index, 1)
5170 || ! host_integerp (hi_index, 1))
5172 need_to_clear = 1;
5173 break;
5176 this_node_count = (tree_low_cst (hi_index, 1)
5177 - tree_low_cst (lo_index, 1) + 1);
5179 else
5180 this_node_count = 1;
5182 count += this_node_count;
5183 if (mostly_zeros_p (value))
5184 zero_count += this_node_count;
5187 /* Clear the entire array first if there are any missing
5188 elements, or if the incidence of zero elements is >=
5189 75%. */
5190 if (! need_to_clear
5191 && (count < maxelt - minelt + 1
5192 || 4 * zero_count >= 3 * count))
5193 need_to_clear = 1;
5196 if (need_to_clear && size > 0)
5198 if (REG_P (target))
5199 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5200 else
5201 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5202 cleared = 1;
5205 if (!cleared && REG_P (target))
5206 /* Inform later passes that the old value is dead. */
5207 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5209 /* Store each element of the constructor into the
5210 corresponding element of TARGET, determined by counting the
5211 elements. */
5212 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5214 enum machine_mode mode;
5215 HOST_WIDE_INT bitsize;
5216 HOST_WIDE_INT bitpos;
5217 int unsignedp;
5218 rtx xtarget = target;
5220 if (cleared && initializer_zerop (value))
5221 continue;
5223 unsignedp = TYPE_UNSIGNED (elttype);
5224 mode = TYPE_MODE (elttype);
5225 if (mode == BLKmode)
5226 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5227 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5228 : -1);
5229 else
5230 bitsize = GET_MODE_BITSIZE (mode);
5232 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5234 tree lo_index = TREE_OPERAND (index, 0);
5235 tree hi_index = TREE_OPERAND (index, 1);
5236 rtx index_r, pos_rtx;
5237 HOST_WIDE_INT lo, hi, count;
5238 tree position;
5240 /* If the range is constant and "small", unroll the loop. */
5241 if (const_bounds_p
5242 && host_integerp (lo_index, 0)
5243 && host_integerp (hi_index, 0)
5244 && (lo = tree_low_cst (lo_index, 0),
5245 hi = tree_low_cst (hi_index, 0),
5246 count = hi - lo + 1,
5247 (!MEM_P (target)
5248 || count <= 2
5249 || (host_integerp (TYPE_SIZE (elttype), 1)
5250 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5251 <= 40 * 8)))))
5253 lo -= minelt; hi -= minelt;
5254 for (; lo <= hi; lo++)
5256 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5258 if (MEM_P (target)
5259 && !MEM_KEEP_ALIAS_SET_P (target)
5260 && TREE_CODE (type) == ARRAY_TYPE
5261 && TYPE_NONALIASED_COMPONENT (type))
5263 target = copy_rtx (target);
5264 MEM_KEEP_ALIAS_SET_P (target) = 1;
5267 store_constructor_field
5268 (target, bitsize, bitpos, mode, value, type, cleared,
5269 get_alias_set (elttype));
5272 else
5274 rtx loop_start = gen_label_rtx ();
5275 rtx loop_end = gen_label_rtx ();
5276 tree exit_cond;
5278 expand_normal (hi_index);
5279 unsignedp = TYPE_UNSIGNED (domain);
5281 index = build_decl (VAR_DECL, NULL_TREE, domain);
5283 index_r
5284 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5285 &unsignedp, 0));
5286 SET_DECL_RTL (index, index_r);
5287 store_expr (lo_index, index_r, 0);
5289 /* Build the head of the loop. */
5290 do_pending_stack_adjust ();
5291 emit_label (loop_start);
5293 /* Assign value to element index. */
5294 position =
5295 fold_convert (ssizetype,
5296 fold_build2 (MINUS_EXPR,
5297 TREE_TYPE (index),
5298 index,
5299 TYPE_MIN_VALUE (domain)));
5301 position =
5302 size_binop (MULT_EXPR, position,
5303 fold_convert (ssizetype,
5304 TYPE_SIZE_UNIT (elttype)));
5306 pos_rtx = expand_normal (position);
5307 xtarget = offset_address (target, pos_rtx,
5308 highest_pow2_factor (position));
5309 xtarget = adjust_address (xtarget, mode, 0);
5310 if (TREE_CODE (value) == CONSTRUCTOR)
5311 store_constructor (value, xtarget, cleared,
5312 bitsize / BITS_PER_UNIT);
5313 else
5314 store_expr (value, xtarget, 0);
5316 /* Generate a conditional jump to exit the loop. */
5317 exit_cond = build2 (LT_EXPR, integer_type_node,
5318 index, hi_index);
5319 jumpif (exit_cond, loop_end);
5321 /* Update the loop counter, and jump to the head of
5322 the loop. */
5323 expand_assignment (index,
5324 build2 (PLUS_EXPR, TREE_TYPE (index),
5325 index, integer_one_node));
5327 emit_jump (loop_start);
5329 /* Build the end of the loop. */
5330 emit_label (loop_end);
5333 else if ((index != 0 && ! host_integerp (index, 0))
5334 || ! host_integerp (TYPE_SIZE (elttype), 1))
5336 tree position;
5338 if (index == 0)
5339 index = ssize_int (1);
5341 if (minelt)
5342 index = fold_convert (ssizetype,
5343 fold_build2 (MINUS_EXPR,
5344 TREE_TYPE (index),
5345 index,
5346 TYPE_MIN_VALUE (domain)));
5348 position =
5349 size_binop (MULT_EXPR, index,
5350 fold_convert (ssizetype,
5351 TYPE_SIZE_UNIT (elttype)));
5352 xtarget = offset_address (target,
5353 expand_normal (position),
5354 highest_pow2_factor (position));
5355 xtarget = adjust_address (xtarget, mode, 0);
5356 store_expr (value, xtarget, 0);
5358 else
5360 if (index != 0)
5361 bitpos = ((tree_low_cst (index, 0) - minelt)
5362 * tree_low_cst (TYPE_SIZE (elttype), 1));
5363 else
5364 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5366 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5367 && TREE_CODE (type) == ARRAY_TYPE
5368 && TYPE_NONALIASED_COMPONENT (type))
5370 target = copy_rtx (target);
5371 MEM_KEEP_ALIAS_SET_P (target) = 1;
5373 store_constructor_field (target, bitsize, bitpos, mode, value,
5374 type, cleared, get_alias_set (elttype));
5377 break;
5380 case VECTOR_TYPE:
5382 unsigned HOST_WIDE_INT idx;
5383 constructor_elt *ce;
5384 int i;
5385 int need_to_clear;
5386 int icode = 0;
5387 tree elttype = TREE_TYPE (type);
5388 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5389 enum machine_mode eltmode = TYPE_MODE (elttype);
5390 HOST_WIDE_INT bitsize;
5391 HOST_WIDE_INT bitpos;
5392 rtvec vector = NULL;
5393 unsigned n_elts;
5395 gcc_assert (eltmode != BLKmode);
5397 n_elts = TYPE_VECTOR_SUBPARTS (type);
5398 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5400 enum machine_mode mode = GET_MODE (target);
5402 icode = (int) vec_init_optab->handlers[mode].insn_code;
5403 if (icode != CODE_FOR_nothing)
5405 unsigned int i;
5407 vector = rtvec_alloc (n_elts);
5408 for (i = 0; i < n_elts; i++)
5409 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5413 /* If the constructor has fewer elements than the vector,
5414 clear the whole vector first. Similarly if this is a static
5415 constructor of a non-BLKmode object. */
5416 if (cleared)
5417 need_to_clear = 0;
5418 else if (REG_P (target) && TREE_STATIC (exp))
5419 need_to_clear = 1;
5420 else
5422 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5423 tree value;
5425 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5427 int n_elts_here = tree_low_cst
5428 (int_const_binop (TRUNC_DIV_EXPR,
5429 TYPE_SIZE (TREE_TYPE (value)),
5430 TYPE_SIZE (elttype), 0), 1);
5432 count += n_elts_here;
5433 if (mostly_zeros_p (value))
5434 zero_count += n_elts_here;
5437 /* Clear the entire vector first if there are any missing elements,
5438 or if the incidence of zero elements is >= 75%. */
5439 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5442 if (need_to_clear && size > 0 && !vector)
5444 if (REG_P (target))
5445 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5446 else
5447 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5448 cleared = 1;
5451 /* Inform later passes that the old value is dead. */
5452 if (!cleared && !vector && REG_P (target))
5453 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5455 /* Store each element of the constructor into the corresponding
5456 element of TARGET, determined by counting the elements. */
5457 for (idx = 0, i = 0;
5458 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5459 idx++, i += bitsize / elt_size)
5461 HOST_WIDE_INT eltpos;
5462 tree value = ce->value;
5464 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5465 if (cleared && initializer_zerop (value))
5466 continue;
5468 if (ce->index)
5469 eltpos = tree_low_cst (ce->index, 1);
5470 else
5471 eltpos = i;
5473 if (vector)
5475 /* Vector CONSTRUCTORs should only be built from smaller
5476 vectors in the case of BLKmode vectors. */
5477 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5478 RTVEC_ELT (vector, eltpos)
5479 = expand_normal (value);
5481 else
5483 enum machine_mode value_mode =
5484 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5485 ? TYPE_MODE (TREE_TYPE (value))
5486 : eltmode;
5487 bitpos = eltpos * elt_size;
5488 store_constructor_field (target, bitsize, bitpos,
5489 value_mode, value, type,
5490 cleared, get_alias_set (elttype));
5494 if (vector)
5495 emit_insn (GEN_FCN (icode)
5496 (target,
5497 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5498 break;
5501 default:
5502 gcc_unreachable ();
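/* As an illustration of the RANGE_EXPR handling in the ARRAY_TYPE case
   above, a GNU C range designator such as the hypothetical

       int a[6] = { [0 ... 3] = 7 };

   has constant bounds and, assuming a 32-bit int, 4 * 32 bits of data,
   well under the 40 * 8 bit limit, so the range is unrolled into four
   individual stores; a large or non-constant range instead gets the
   run-time loop built around loop_start/loop_end.  */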
5506 /* Store the value of EXP (an expression tree)
5507 into a subfield of TARGET which has mode MODE and occupies
5508 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5509 If MODE is VOIDmode, it means that we are storing into a bit-field.
5511 Always return const0_rtx unless we have something particular to
5512 return.
5514 TYPE is the type of the underlying object.
5516 ALIAS_SET is the alias set for the destination. This value will
5517 (in general) be different from that for TARGET, since TARGET is a
5518 reference to the containing structure. */
5520 static rtx
5521 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5522 enum machine_mode mode, tree exp, tree type, int alias_set)
5524 HOST_WIDE_INT width_mask = 0;
5526 if (TREE_CODE (exp) == ERROR_MARK)
5527 return const0_rtx;
5529 /* If we have nothing to store, do nothing unless the expression has
5530 side-effects. */
5531 if (bitsize == 0)
5532 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5533 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5534 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5536 /* If we are storing into an unaligned field of an aligned union that is
5537 in a register, we may have the mode of TARGET being an integer mode but
5538 MODE == BLKmode. In that case, get an aligned object whose size and
5539 alignment are the same as TARGET and store TARGET into it (we can avoid
5540 the store if the field being stored is the entire width of TARGET). Then
5541 call ourselves recursively to store the field into a BLKmode version of
5542 that object. Finally, load from the object into TARGET. This is not
5543 very efficient in general, but should only be slightly more expensive
5544 than the otherwise-required unaligned accesses. Perhaps this can be
5545 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5546 twice, once with emit_move_insn and once via store_field. */
5548 if (mode == BLKmode
5549 && (REG_P (target) || GET_CODE (target) == SUBREG))
5551 rtx object = assign_temp (type, 0, 1, 1);
5552 rtx blk_object = adjust_address (object, BLKmode, 0);
5554 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5555 emit_move_insn (object, target);
5557 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5559 emit_move_insn (target, object);
5561 /* We want to return the BLKmode version of the data. */
5562 return blk_object;
5565 if (GET_CODE (target) == CONCAT)
5567 /* We're storing into a struct containing a single __complex. */
5569 gcc_assert (!bitpos);
5570 return store_expr (exp, target, 0);
5573 /* If the structure is in a register or if the component
5574 is a bit field, we cannot use addressing to access it.
5575 Use bit-field techniques or SUBREG to store in it. */
5577 if (mode == VOIDmode
5578 || (mode != BLKmode && ! direct_store[(int) mode]
5579 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5580 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5581 || REG_P (target)
5582 || GET_CODE (target) == SUBREG
5583 /* If the field isn't aligned enough to store as an ordinary memref,
5584 store it as a bit field. */
5585 || (mode != BLKmode
5586 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5587 || bitpos % GET_MODE_ALIGNMENT (mode))
5588 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5589 || (bitpos % BITS_PER_UNIT != 0)))
5590 /* If the RHS and field are a constant size and the size of the
5591 RHS isn't the same size as the bitfield, we must use bitfield
5592 operations. */
5593 || (bitsize >= 0
5594 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5595 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5597 rtx temp;
5599 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5600 implies a mask operation. If the precision is the same size as
5601 the field we're storing into, that mask is redundant. This is
5602 particularly common with bit field assignments generated by the
5603 C front end. */
5604 if (TREE_CODE (exp) == NOP_EXPR)
5606 tree type = TREE_TYPE (exp);
5607 if (INTEGRAL_TYPE_P (type)
5608 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5609 && bitsize == TYPE_PRECISION (type))
5611 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5612 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5613 exp = TREE_OPERAND (exp, 0);
5617 temp = expand_normal (exp);
5619 /* If BITSIZE is narrower than the size of the type of EXP
5620 we will be narrowing TEMP. Normally, what's wanted are the
5621 low-order bits. However, if EXP's type is a record and this is a
5622 big-endian machine, we want the upper BITSIZE bits. */
5623 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5624 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5625 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5626 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5627 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5628 - bitsize),
5629 NULL_RTX, 1);
5631 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5632 MODE. */
5633 if (mode != VOIDmode && mode != BLKmode
5634 && mode != TYPE_MODE (TREE_TYPE (exp)))
5635 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5637 /* If the modes of TARGET and TEMP are both BLKmode, both
5638 must be in memory and BITPOS must be aligned on a byte
5639 boundary. If so, we simply do a block copy. */
5640 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5642 gcc_assert (MEM_P (target) && MEM_P (temp)
5643 && !(bitpos % BITS_PER_UNIT));
5645 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5646 emit_block_move (target, temp,
5647 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5648 / BITS_PER_UNIT),
5649 BLOCK_OP_NORMAL);
5651 return const0_rtx;
5654 /* Store the value in the bitfield. */
5655 store_bit_field (target, bitsize, bitpos, mode, temp);
5657 return const0_rtx;
5659 else
5661 /* Now build a reference to just the desired component. */
5662 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5664 if (to_rtx == target)
5665 to_rtx = copy_rtx (to_rtx);
5667 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5668 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5669 set_mem_alias_set (to_rtx, alias_set);
5671 return store_expr (exp, to_rtx, 0);
5675 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5676 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5677 codes and find the ultimate containing object, which we return.
5679 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5680 bit position, and *PUNSIGNEDP to the signedness of the field.
5681 If the position of the field is variable, we store a tree
5682 giving the variable offset (in units) in *POFFSET.
5683 This offset is in addition to the bit position.
5684 If the position is not variable, we store 0 in *POFFSET.
5686 If any of the extraction expressions is volatile,
5687 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5689 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5690 is a mode that can be used to access the field. In that case, *PBITSIZE
5691 is redundant.
5693 If the field describes a variable-sized object, *PMODE is set to
5694 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5695 this case, but the address of the object can be found.
5697 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5698 look through nodes that serve as markers of a greater alignment than
5699 the one that can be deduced from the expression. These nodes make it
5700 possible for front-ends to prevent temporaries from being created by
5701 the middle-end on alignment considerations. For that purpose, the
5702 normal operating mode at high-level is to always pass FALSE so that
5703 the ultimate containing object is really returned; moreover, the
5704 associated predicate handled_component_p will always return TRUE
5705 on these nodes, thus indicating that they are essentially handled
5706 by get_inner_reference. TRUE should only be passed when the caller
5707 is scanning the expression in order to build another representation
5708 and specifically knows how to handle these nodes; as such, this is
5709 the normal operating mode in the RTL expanders. */
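/* Sketch of a typical call (the local variable names are hypothetical):

       HOST_WIDE_INT bitsize, bitpos;
       tree offset;
       enum machine_mode mode1;
       int unsignedp, volatilep = 0;

       tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                        &mode1, &unsignedp, &volatilep,
                                        false);

   For a COMPONENT_REF such as s.f, where F sits at a constant byte offset
   of 4 and is not a bit-field, BASE is the expression for S, *PBITPOS is
   32 (assuming 8-bit units), *POFFSET is 0 and *PMODE is F's mode.  */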
5711 tree
5712 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5713 HOST_WIDE_INT *pbitpos, tree *poffset,
5714 enum machine_mode *pmode, int *punsignedp,
5715 int *pvolatilep, bool keep_aligning)
5717 tree size_tree = 0;
5718 enum machine_mode mode = VOIDmode;
5719 tree offset = size_zero_node;
5720 tree bit_offset = bitsize_zero_node;
5721 tree tem;
5723 /* First get the mode, signedness, and size. We do this from just the
5724 outermost expression. */
5725 if (TREE_CODE (exp) == COMPONENT_REF)
5727 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5728 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5729 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5731 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5733 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5735 size_tree = TREE_OPERAND (exp, 1);
5736 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5738 /* For vector types, when the access has the correct size, use the mode of
5739 the inner type. */
5740 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5741 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5742 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5743 mode = TYPE_MODE (TREE_TYPE (exp));
5745 else
5747 mode = TYPE_MODE (TREE_TYPE (exp));
5748 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5750 if (mode == BLKmode)
5751 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5752 else
5753 *pbitsize = GET_MODE_BITSIZE (mode);
5756 if (size_tree != 0)
5758 if (! host_integerp (size_tree, 1))
5759 mode = BLKmode, *pbitsize = -1;
5760 else
5761 *pbitsize = tree_low_cst (size_tree, 1);
5764 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5765 and find the ultimate containing object. */
5766 while (1)
5768 switch (TREE_CODE (exp))
5770 case BIT_FIELD_REF:
5771 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5772 TREE_OPERAND (exp, 2));
5773 break;
5775 case COMPONENT_REF:
5777 tree field = TREE_OPERAND (exp, 1);
5778 tree this_offset = component_ref_field_offset (exp);
5780 /* If this field hasn't been filled in yet, don't go past it.
5781 This should only happen when folding expressions made during
5782 type construction. */
5783 if (this_offset == 0)
5784 break;
5786 offset = size_binop (PLUS_EXPR, offset, this_offset);
5787 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5788 DECL_FIELD_BIT_OFFSET (field));
5790 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5792 break;
5794 case ARRAY_REF:
5795 case ARRAY_RANGE_REF:
5797 tree index = TREE_OPERAND (exp, 1);
5798 tree low_bound = array_ref_low_bound (exp);
5799 tree unit_size = array_ref_element_size (exp);
5801 /* We assume all arrays have sizes that are a multiple of a byte.
5802 First subtract the lower bound, if any, in the type of the
5803 index, then convert to sizetype and multiply by the size of
5804 the array element. */
5805 if (! integer_zerop (low_bound))
5806 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5807 index, low_bound);
5809 offset = size_binop (PLUS_EXPR, offset,
5810 size_binop (MULT_EXPR,
5811 fold_convert (sizetype, index),
5812 unit_size));
5814 break;
5816 case REALPART_EXPR:
5817 break;
5819 case IMAGPART_EXPR:
5820 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5821 bitsize_int (*pbitsize));
5822 break;
5824 case VIEW_CONVERT_EXPR:
5825 if (keep_aligning && STRICT_ALIGNMENT
5826 && (TYPE_ALIGN (TREE_TYPE (exp))
5827 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5828 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5829 < BIGGEST_ALIGNMENT)
5830 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5831 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5832 goto done;
5833 break;
5835 default:
5836 goto done;
5839 /* If any reference in the chain is volatile, the effect is volatile. */
5840 if (TREE_THIS_VOLATILE (exp))
5841 *pvolatilep = 1;
5843 exp = TREE_OPERAND (exp, 0);
5845 done:
5847 /* If OFFSET is constant, see if we can return the whole thing as a
5848 constant bit position. Otherwise, split it up. */
5849 if (host_integerp (offset, 0)
5850 && 0 != (tem = size_binop (MULT_EXPR,
5851 fold_convert (bitsizetype, offset),
5852 bitsize_unit_node))
5853 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5854 && host_integerp (tem, 0))
5855 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5856 else
5857 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5859 *pmode = mode;
5860 return exp;
5863 /* Return a tree of sizetype representing the size, in bytes, of the element
5864 of EXP, an ARRAY_REF. */
5866 tree
5867 array_ref_element_size (tree exp)
5869 tree aligned_size = TREE_OPERAND (exp, 3);
5870 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5872 /* If a size was specified in the ARRAY_REF, it's the size measured
5873 in alignment units of the element type. So multiply by that value. */
5874 if (aligned_size)
5876 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5877 sizetype from another type of the same width and signedness. */
5878 if (TREE_TYPE (aligned_size) != sizetype)
5879 aligned_size = fold_convert (sizetype, aligned_size);
5880 return size_binop (MULT_EXPR, aligned_size,
5881 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5884 /* Otherwise, take the size from that of the element type. Substitute
5885 any PLACEHOLDER_EXPR that we have. */
5886 else
5887 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5890 /* Return a tree representing the lower bound of the array mentioned in
5891 EXP, an ARRAY_REF. */
5893 tree
5894 array_ref_low_bound (tree exp)
5896 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5898 /* If a lower bound is specified in EXP, use it. */
5899 if (TREE_OPERAND (exp, 2))
5900 return TREE_OPERAND (exp, 2);
5902 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5903 substituting for a PLACEHOLDER_EXPR as needed. */
5904 if (domain_type && TYPE_MIN_VALUE (domain_type))
5905 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5907 /* Otherwise, return a zero of the appropriate type. */
5908 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5911 /* Return a tree representing the upper bound of the array mentioned in
5912 EXP, an ARRAY_REF. */
5914 tree
5915 array_ref_up_bound (tree exp)
5917 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5919 /* If there is a domain type and it has an upper bound, use it, substituting
5920 for a PLACEHOLDER_EXPR as needed. */
5921 if (domain_type && TYPE_MAX_VALUE (domain_type))
5922 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5924 /* Otherwise fail. */
5925 return NULL_TREE;
5928 /* Return a tree representing the offset, in bytes, of the field referenced
5929 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5931 tree
5932 component_ref_field_offset (tree exp)
5934 tree aligned_offset = TREE_OPERAND (exp, 2);
5935 tree field = TREE_OPERAND (exp, 1);
5937 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5938 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5939 value. */
5940 if (aligned_offset)
5942 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5943 sizetype from another type of the same width and signedness. */
5944 if (TREE_TYPE (aligned_offset) != sizetype)
5945 aligned_offset = fold_convert (sizetype, aligned_offset);
5946 return size_binop (MULT_EXPR, aligned_offset,
5947 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5950 /* Otherwise, take the offset from that of the field. Substitute
5951 any PLACEHOLDER_EXPR that we have. */
5952 else
5953 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5956 /* Return 1 if T is an expression that get_inner_reference handles. */
5959 handled_component_p (tree t)
5961 switch (TREE_CODE (t))
5963 case BIT_FIELD_REF:
5964 case COMPONENT_REF:
5965 case ARRAY_REF:
5966 case ARRAY_RANGE_REF:
5967 case VIEW_CONVERT_EXPR:
5968 case REALPART_EXPR:
5969 case IMAGPART_EXPR:
5970 return 1;
5972 default:
5973 return 0;
5977 /* Given an rtx VALUE that may contain additions and multiplications, return
5978 an equivalent value that just refers to a register, memory, or constant.
5979 This is done by generating instructions to perform the arithmetic and
5980 returning a pseudo-register containing the value.
5982 The returned value may be a REG, SUBREG, MEM or constant. */
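/* Hedged example (the rtl and the pseudo shown are hypothetical): if VALUE
   is (plus:SI (reg:SI 60) (const_int 4)), a call such as

       x = force_operand (gen_rtx_PLUS (SImode, some_pseudo, GEN_INT (4)),
                          NULL_RTX);

   emits an add and returns a pseudo register holding the sum, whereas a
   VALUE that is already a REG, MEM or constant simply falls through to the
   final "return value" below.  */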
5985 force_operand (rtx value, rtx target)
5987 rtx op1, op2;
5988 /* Use subtarget as the target for operand 0 of a binary operation. */
5989 rtx subtarget = get_subtarget (target);
5990 enum rtx_code code = GET_CODE (value);
5992 /* Check for subreg applied to an expression produced by loop optimizer. */
5993 if (code == SUBREG
5994 && !REG_P (SUBREG_REG (value))
5995 && !MEM_P (SUBREG_REG (value)))
5997 value = simplify_gen_subreg (GET_MODE (value),
5998 force_reg (GET_MODE (SUBREG_REG (value)),
5999 force_operand (SUBREG_REG (value),
6000 NULL_RTX)),
6001 GET_MODE (SUBREG_REG (value)),
6002 SUBREG_BYTE (value));
6003 code = GET_CODE (value);
6006 /* Check for a PIC address load. */
6007 if ((code == PLUS || code == MINUS)
6008 && XEXP (value, 0) == pic_offset_table_rtx
6009 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6010 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6011 || GET_CODE (XEXP (value, 1)) == CONST))
6013 if (!subtarget)
6014 subtarget = gen_reg_rtx (GET_MODE (value));
6015 emit_move_insn (subtarget, value);
6016 return subtarget;
6019 if (ARITHMETIC_P (value))
6021 op2 = XEXP (value, 1);
6022 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6023 subtarget = 0;
6024 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6026 code = PLUS;
6027 op2 = negate_rtx (GET_MODE (value), op2);
6030 /* Check for an addition with OP2 a constant integer and our first
6031 operand a PLUS of a virtual register and something else. In that
6032 case, we want to emit the sum of the virtual register and the
6033 constant first and then add the other value. This allows virtual
6034 register instantiation to simply modify the constant rather than
6035 creating another one around this addition. */
6036 if (code == PLUS && GET_CODE (op2) == CONST_INT
6037 && GET_CODE (XEXP (value, 0)) == PLUS
6038 && REG_P (XEXP (XEXP (value, 0), 0))
6039 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6040 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6042 rtx temp = expand_simple_binop (GET_MODE (value), code,
6043 XEXP (XEXP (value, 0), 0), op2,
6044 subtarget, 0, OPTAB_LIB_WIDEN);
6045 return expand_simple_binop (GET_MODE (value), code, temp,
6046 force_operand (XEXP (XEXP (value,
6047 0), 1), 0),
6048 target, 0, OPTAB_LIB_WIDEN);
6051 op1 = force_operand (XEXP (value, 0), subtarget);
6052 op2 = force_operand (op2, NULL_RTX);
6053 switch (code)
6055 case MULT:
6056 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6057 case DIV:
6058 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6059 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6060 target, 1, OPTAB_LIB_WIDEN);
6061 else
6062 return expand_divmod (0,
6063 FLOAT_MODE_P (GET_MODE (value))
6064 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6065 GET_MODE (value), op1, op2, target, 0);
6066 break;
6067 case MOD:
6068 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6069 target, 0);
6070 break;
6071 case UDIV:
6072 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6073 target, 1);
6074 break;
6075 case UMOD:
6076 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6077 target, 1);
6078 break;
6079 case ASHIFTRT:
6080 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6081 target, 0, OPTAB_LIB_WIDEN);
6082 break;
6083 default:
6084 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6085 target, 1, OPTAB_LIB_WIDEN);
6088 if (UNARY_P (value))
6090 if (!target)
6091 target = gen_reg_rtx (GET_MODE (value));
6092 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6093 switch (code)
6095 case ZERO_EXTEND:
6096 case SIGN_EXTEND:
6097 case TRUNCATE:
6098 case FLOAT_EXTEND:
6099 case FLOAT_TRUNCATE:
6100 convert_move (target, op1, code == ZERO_EXTEND);
6101 return target;
6103 case FIX:
6104 case UNSIGNED_FIX:
6105 expand_fix (target, op1, code == UNSIGNED_FIX);
6106 return target;
6108 case FLOAT:
6109 case UNSIGNED_FLOAT:
6110 expand_float (target, op1, code == UNSIGNED_FLOAT);
6111 return target;
6113 default:
6114 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6118 #ifdef INSN_SCHEDULING
6119 /* On machines that have insn scheduling, we want all memory references to be
6120 explicit, so we need to deal with such paradoxical SUBREGs. */
6121 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6122 && (GET_MODE_SIZE (GET_MODE (value))
6123 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6124 value
6125 = simplify_gen_subreg (GET_MODE (value),
6126 force_reg (GET_MODE (SUBREG_REG (value)),
6127 force_operand (SUBREG_REG (value),
6128 NULL_RTX)),
6129 GET_MODE (SUBREG_REG (value)),
6130 SUBREG_BYTE (value));
6131 #endif
6133 return value;
6136 /* Subroutine of expand_expr: return nonzero iff there is no way that
6137 EXP can reference X, which is being modified. TOP_P is nonzero if this
6138 call is going to be used to determine whether we need a temporary
6139 for EXP, as opposed to a recursive call to this function.
6141 It is always safe for this routine to return zero since it merely
6142 searches for optimization opportunities. */
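/* Illustration (hypothetical operands): if X is the DECL_RTL of a variable
   V and EXP reads V, the tcc_declaration case below sets EXP_RTL to that
   rtl and the final rtx_equal_p/true_dependence check reports a conflict
   (return 0); if EXP is a constant, or only mentions objects whose rtl is
   unrelated to X, the walk finds nothing and the function returns 1.  */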
6145 safe_from_p (rtx x, tree exp, int top_p)
6147 rtx exp_rtl = 0;
6148 int i, nops;
6150 if (x == 0
6151 /* If EXP has varying size, we MUST use a target since we currently
6152 have no way of allocating temporaries of variable size
6153 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6154 So we assume here that something at a higher level has prevented a
6155 clash. This is somewhat bogus, but the best we can do. Only
6156 do this when X is BLKmode and when we are at the top level. */
6157 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6158 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6159 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6160 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6161 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6162 != INTEGER_CST)
6163 && GET_MODE (x) == BLKmode)
6164 /* If X is in the outgoing argument area, it is always safe. */
6165 || (MEM_P (x)
6166 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6167 || (GET_CODE (XEXP (x, 0)) == PLUS
6168 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6169 return 1;
6171 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6172 find the underlying pseudo. */
6173 if (GET_CODE (x) == SUBREG)
6175 x = SUBREG_REG (x);
6176 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6177 return 0;
6180 /* Now look at our tree code and possibly recurse. */
6181 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6183 case tcc_declaration:
6184 exp_rtl = DECL_RTL_IF_SET (exp);
6185 break;
6187 case tcc_constant:
6188 return 1;
6190 case tcc_exceptional:
6191 if (TREE_CODE (exp) == TREE_LIST)
6193 while (1)
6195 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6196 return 0;
6197 exp = TREE_CHAIN (exp);
6198 if (!exp)
6199 return 1;
6200 if (TREE_CODE (exp) != TREE_LIST)
6201 return safe_from_p (x, exp, 0);
6204 else if (TREE_CODE (exp) == CONSTRUCTOR)
6206 constructor_elt *ce;
6207 unsigned HOST_WIDE_INT idx;
6209 for (idx = 0;
6210 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6211 idx++)
6212 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6213 || !safe_from_p (x, ce->value, 0))
6214 return 0;
6215 return 1;
6217 else if (TREE_CODE (exp) == ERROR_MARK)
6218 return 1; /* An already-visited SAVE_EXPR? */
6219 else
6220 return 0;
6222 case tcc_statement:
6223 /* The only case we look at here is the DECL_INITIAL inside a
6224 DECL_EXPR. */
6225 return (TREE_CODE (exp) != DECL_EXPR
6226 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6227 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6228 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6230 case tcc_binary:
6231 case tcc_comparison:
6232 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6233 return 0;
6234 /* Fall through. */
6236 case tcc_unary:
6237 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6239 case tcc_expression:
6240 case tcc_reference:
6241 case tcc_vl_exp:
6242 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6243 the expression. If it is set, we conflict iff we are that rtx or
6244 both are in memory. Otherwise, we check all operands of the
6245 expression recursively. */
6247 switch (TREE_CODE (exp))
6249 case ADDR_EXPR:
6250 /* If the operand is static or we are static, we can't conflict.
6251 Likewise if we don't conflict with the operand at all. */
6252 if (staticp (TREE_OPERAND (exp, 0))
6253 || TREE_STATIC (exp)
6254 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6255 return 1;
6257 /* Otherwise, the only way this can conflict is if we are taking
6258 the address of a DECL whose address is part of X, which is
6259 very rare. */
6260 exp = TREE_OPERAND (exp, 0);
6261 if (DECL_P (exp))
6263 if (!DECL_RTL_SET_P (exp)
6264 || !MEM_P (DECL_RTL (exp)))
6265 return 0;
6266 else
6267 exp_rtl = XEXP (DECL_RTL (exp), 0);
6269 break;
6271 case MISALIGNED_INDIRECT_REF:
6272 case ALIGN_INDIRECT_REF:
6273 case INDIRECT_REF:
6274 if (MEM_P (x)
6275 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6276 get_alias_set (exp)))
6277 return 0;
6278 break;
6280 case CALL_EXPR:
6281 /* Assume that the call will clobber all hard registers and
6282 all of memory. */
6283 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6284 || MEM_P (x))
6285 return 0;
6286 break;
6288 case WITH_CLEANUP_EXPR:
6289 case CLEANUP_POINT_EXPR:
6290 /* Lowered by gimplify.c. */
6291 gcc_unreachable ();
6293 case SAVE_EXPR:
6294 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6296 default:
6297 break;
6300 /* If we have an rtx, we do not need to scan our operands. */
6301 if (exp_rtl)
6302 break;
6304 nops = TREE_OPERAND_LENGTH (exp);
6305 for (i = 0; i < nops; i++)
6306 if (TREE_OPERAND (exp, i) != 0
6307 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6308 return 0;
6310 break;
6312 case tcc_type:
6313 /* Should never get a type here. */
6314 gcc_unreachable ();
6316 case tcc_gimple_stmt:
6317 gcc_unreachable ();
6320 /* If we have an rtl, find any enclosed object. Then see if we conflict
6321 with it. */
6322 if (exp_rtl)
6324 if (GET_CODE (exp_rtl) == SUBREG)
6326 exp_rtl = SUBREG_REG (exp_rtl);
6327 if (REG_P (exp_rtl)
6328 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6329 return 0;
6332 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6333 are memory and they conflict. */
6334 return ! (rtx_equal_p (x, exp_rtl)
6335 || (MEM_P (x) && MEM_P (exp_rtl)
6336 && true_dependence (exp_rtl, VOIDmode, x,
6337 rtx_addr_varies_p)));
6340 /* If we reach here, it is safe. */
6341 return 1;
6345 /* Return the highest power of two that EXP is known to be a multiple of.
6346 This is used in updating alignment of MEMs in array references. */
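/* Worked examples (hypothetical operands): for the INTEGER_CST 24 the
   result is 8, the lowest set bit; for i * 24 the MULT_EXPR case
   multiplies the factors of the operands, 1 (the default for the variable
   i) and 8, giving 8; for i + 24 the PLUS_EXPR case takes
   MIN (1, 8) = 1.  */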
6348 unsigned HOST_WIDE_INT
6349 highest_pow2_factor (tree exp)
6351 unsigned HOST_WIDE_INT c0, c1;
6353 switch (TREE_CODE (exp))
6355 case INTEGER_CST:
6356 /* We can find the lowest bit that's a one. If the low
6357 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6358 We need to handle this case since we can find it in a COND_EXPR,
6359 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6360 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6361 later ICE. */
6362 if (TREE_OVERFLOW (exp))
6363 return BIGGEST_ALIGNMENT;
6364 else
6366 /* Note: tree_low_cst is intentionally not used here,
6367 we don't care about the upper bits. */
6368 c0 = TREE_INT_CST_LOW (exp);
6369 c0 &= -c0;
6370 return c0 ? c0 : BIGGEST_ALIGNMENT;
6372 break;
6374 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6375 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6376 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6377 return MIN (c0, c1);
6379 case MULT_EXPR:
6380 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6381 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6382 return c0 * c1;
6384 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6385 case CEIL_DIV_EXPR:
6386 if (integer_pow2p (TREE_OPERAND (exp, 1))
6387 && host_integerp (TREE_OPERAND (exp, 1), 1))
6389 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6390 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6391 return MAX (1, c0 / c1);
6393 break;
6395 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6396 case SAVE_EXPR:
6397 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6399 case COMPOUND_EXPR:
6400 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6402 case COND_EXPR:
6403 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6404 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6405 return MIN (c0, c1);
6407 default:
6408 break;
6411 return 1;
6414 /* Similar, except that the alignment requirements of TARGET are
6415 taken into account. Assume it is at least as aligned as its
6416 type, unless it is a COMPONENT_REF in which case the layout of
6417 the structure gives the alignment. */
6419 static unsigned HOST_WIDE_INT
6420 highest_pow2_factor_for_target (tree target, tree exp)
6422 unsigned HOST_WIDE_INT target_align, factor;
6424 factor = highest_pow2_factor (exp);
6425 if (TREE_CODE (target) == COMPONENT_REF)
6426 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6427 else
6428 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6429 return MAX (factor, target_align);
6432 /* Return &VAR expression for emulated thread local VAR. */
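/* Sketch of the rewrite this helper supports (see also its uses below):
   for a __thread variable VAR on a target without native TLS support,
   &VAR is replaced by roughly

       (TYPE-of-VAR *) __emutls_get_address (&_emutls.VAR)

   where the control variable comes from emutls_decl and the call is the
   BUILT_IN_EMUTLS_GET_ADDRESS builtin; the "_emutls.VAR" spelling is the
   informal one used in the comments below, not the exact mangled name.  */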
6434 static tree
6435 emutls_var_address (tree var)
6437 tree emuvar = emutls_decl (var);
6438 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6439 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6440 tree arglist = build_tree_list (NULL_TREE, arg);
6441 tree call = build_function_call_expr (fn, arglist);
6442 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
6445 /* Expands variable VAR. */
6447 void
6448 expand_var (tree var)
6450 if (DECL_EXTERNAL (var))
6451 return;
6453 if (TREE_STATIC (var))
6454 /* If this is an inlined copy of a static local variable,
6455 look up the original decl. */
6456 var = DECL_ORIGIN (var);
6458 if (TREE_STATIC (var)
6459 ? !TREE_ASM_WRITTEN (var)
6460 : !DECL_RTL_SET_P (var))
6462 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6463 /* Should be ignored. */;
6464 else if (lang_hooks.expand_decl (var))
6465 /* OK. */;
6466 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6467 expand_decl (var);
6468 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6469 rest_of_decl_compilation (var, 0, 0);
6470 else
6471 /* No expansion needed. */
6472 gcc_assert (TREE_CODE (var) == TYPE_DECL
6473 || TREE_CODE (var) == CONST_DECL
6474 || TREE_CODE (var) == FUNCTION_DECL
6475 || TREE_CODE (var) == LABEL_DECL);
6479 /* Subroutine of expand_expr. Expand the two operands of a binary
6480 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6481 The value may be stored in TARGET if TARGET is nonzero. The
6482 MODIFIER argument is as documented by expand_expr. */
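/* Minimal sketch of the two paths below (hypothetical operands): for
   x + x, operand_equal_p spots the repetition, so the subexpression is
   expanded once and *OP1 is just copy_rtx (*OP0); for x + f() with
   flag_evaluation_order set and f() having side effects, EXP0 is wrapped
   in a save_expr first so that expanding EXP1 cannot clobber it.  */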
6484 static void
6485 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6486 enum expand_modifier modifier)
6488 if (! safe_from_p (target, exp1, 1))
6489 target = 0;
6490 if (operand_equal_p (exp0, exp1, 0))
6492 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6493 *op1 = copy_rtx (*op0);
6495 else
6497 /* If we need to preserve evaluation order, copy exp0 into its own
6498 temporary variable so that it can't be clobbered by exp1. */
6499 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6500 exp0 = save_expr (exp0);
6501 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6502 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6507 /* Return a MEM that contains constant EXP. DEFER is as for
6508 output_constant_def and MODIFIER is as for expand_expr. */
6510 static rtx
6511 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6513 rtx mem;
6515 mem = output_constant_def (exp, defer);
6516 if (modifier != EXPAND_INITIALIZER)
6517 mem = use_anchored_address (mem);
6518 return mem;
6521 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6522 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6524 static rtx
6525 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6526 enum expand_modifier modifier)
6528 rtx result, subtarget;
6529 tree inner, offset;
6530 HOST_WIDE_INT bitsize, bitpos;
6531 int volatilep, unsignedp;
6532 enum machine_mode mode1;
6534 /* If we are taking the address of a constant and are at the top level,
6535 we have to use output_constant_def since we can't call force_const_mem
6536 at top level. */
6537 /* ??? This should be considered a front-end bug. We should not be
6538 generating ADDR_EXPR of something that isn't an LVALUE. The only
6539 exception here is STRING_CST. */
6540 if (TREE_CODE (exp) == CONSTRUCTOR
6541 || CONSTANT_CLASS_P (exp))
6542 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6544 /* Everything must be something allowed by is_gimple_addressable. */
6545 switch (TREE_CODE (exp))
6547 case INDIRECT_REF:
6548 /* This case will happen via recursion for &a->b. */
6549 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6551 case CONST_DECL:
6552 /* Recurse and make the output_constant_def clause above handle this. */
6553 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6554 tmode, modifier);
6556 case REALPART_EXPR:
6557 /* The real part of the complex number is always first, therefore
6558 the address is the same as the address of the parent object. */
6559 offset = 0;
6560 bitpos = 0;
6561 inner = TREE_OPERAND (exp, 0);
6562 break;
6564 case IMAGPART_EXPR:
6565 /* The imaginary part of the complex number is always second.
6566 The expression is therefore always offset by the size of the
6567 scalar type. */
6568 offset = 0;
6569 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6570 inner = TREE_OPERAND (exp, 0);
6571 break;
6573 case VAR_DECL:
6574 /* TLS emulation hook - replace __thread VAR's &VAR with
6575 __emutls_get_address (&_emutls.VAR). */
6576 if (! targetm.have_tls
6577 && TREE_CODE (exp) == VAR_DECL
6578 && DECL_THREAD_LOCAL_P (exp))
6580 exp = emutls_var_address (exp);
6581 return expand_expr (exp, target, tmode, modifier);
6583 /* Fall through. */
6585 default:
6586 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6587 expand_expr, as that can have various side effects; LABEL_DECLs for
6588 example, may not have their DECL_RTL set yet. Assume language
6589 specific tree nodes can be expanded in some interesting way. */
6590 if (DECL_P (exp)
6591 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6593 result = expand_expr (exp, target, tmode,
6594 modifier == EXPAND_INITIALIZER
6595 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6597 /* If the DECL isn't in memory, then the DECL wasn't properly
6598 marked TREE_ADDRESSABLE, which will be either a front-end
6599 or a tree optimizer bug. */
6600 gcc_assert (MEM_P (result));
6601 result = XEXP (result, 0);
6603 /* ??? Is this needed anymore? */
6604 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6606 assemble_external (exp);
6607 TREE_USED (exp) = 1;
6610 if (modifier != EXPAND_INITIALIZER
6611 && modifier != EXPAND_CONST_ADDRESS)
6612 result = force_operand (result, target);
6613 return result;
6616 /* Pass FALSE as the last argument to get_inner_reference although
6617 we are expanding to RTL. The rationale is that we know how to
6618 handle "aligning nodes" here: we can just bypass them because
6619 they won't change the final object whose address will be returned
6620 (they actually exist only for that purpose). */
6621 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6622 &mode1, &unsignedp, &volatilep, false);
6623 break;
6626 /* We must have made progress. */
6627 gcc_assert (inner != exp);
6629 subtarget = offset || bitpos ? NULL_RTX : target;
6630 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6632 if (offset)
6634 rtx tmp;
6636 if (modifier != EXPAND_NORMAL)
6637 result = force_operand (result, NULL);
6638 tmp = expand_expr (offset, NULL_RTX, tmode, EXPAND_NORMAL);
6640 result = convert_memory_address (tmode, result);
6641 tmp = convert_memory_address (tmode, tmp);
6643 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6644 result = gen_rtx_PLUS (tmode, result, tmp);
6645 else
6647 subtarget = bitpos ? NULL_RTX : target;
6648 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6649 1, OPTAB_LIB_WIDEN);
6653 if (bitpos)
6655 /* Someone beforehand should have rejected taking the address
6656 of such an object. */
6657 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6659 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6660 if (modifier < EXPAND_SUM)
6661 result = force_operand (result, target);
6664 return result;
6667 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6668 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6670 static rtx
6671 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6672 enum expand_modifier modifier)
6674 enum machine_mode rmode;
6675 rtx result;
6677 /* Target mode of VOIDmode says "whatever's natural". */
6678 if (tmode == VOIDmode)
6679 tmode = TYPE_MODE (TREE_TYPE (exp));
6681 /* We can get called with some Weird Things if the user does silliness
6682 like "(short) &a". In that case, convert_memory_address won't do
6683 the right thing, so ignore the given target mode. */
6684 if (tmode != Pmode && tmode != ptr_mode)
6685 tmode = Pmode;
6687 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6688 tmode, modifier);
6690 /* Despite expand_expr's claims about ignoring TMODE when not
6691 strictly convenient, stuff breaks if we don't honor it. Note
6692 that combined with the above, we only do this for pointer modes. */
6693 rmode = GET_MODE (result);
6694 if (rmode == VOIDmode)
6695 rmode = tmode;
6696 if (rmode != tmode)
6697 result = convert_memory_address (tmode, result);
6699 return result;
6703 /* expand_expr: generate code for computing expression EXP.
6704 An rtx for the computed value is returned. The value is never null.
6705 In the case of a void EXP, const0_rtx is returned.
6707 The value may be stored in TARGET if TARGET is nonzero.
6708 TARGET is just a suggestion; callers must assume that
6709 the rtx returned may not be the same as TARGET.
6711 If TARGET is CONST0_RTX, it means that the value will be ignored.
6713 If TMODE is not VOIDmode, it suggests generating the
6714 result in mode TMODE. But this is done only when convenient.
6715 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6716 TMODE is just a suggestion; callers must assume that
6717 the rtx returned may not have mode TMODE.
6719 Note that TARGET may have neither TMODE nor MODE. In that case, it
6720 probably will not be used.
6722 If MODIFIER is EXPAND_SUM then when EXP is an addition
6723 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6724 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6725 products as above, or REG or MEM, or constant.
6726 Ordinarily in such cases we would output mul or add instructions
6727 and then return a pseudo reg containing the sum.
6729 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6730 it also marks a label as absolutely required (it can't be dead).
6731 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6732 This is used for outputting expressions used in initializers.
6734 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6735 with a constant address even if that address is not normally legitimate.
6736 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6738 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6739 a call parameter. Such targets require special care as we haven't yet
6740 marked TARGET so that it's safe from being trashed by libcalls. We
6741 don't want to use TARGET for anything but the final result;
6742 Intermediate values must go elsewhere. Additionally, calls to
6743 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6745 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6746 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6747 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6748 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6749 recursively. */
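/* Illustration of the EXPAND_SUM contract above (hypothetical rtl):
   expanding an address computation such as p + i*4 with EXPAND_SUM may
   legitimately return

       (plus:SI (reg:SI 60) (mult:SI (reg:SI 61) (const_int 4)))

   rather than a single pseudo; a caller that needs a plain operand or a
   valid memory address is expected to pass such a result through
   force_operand, as expand_expr_addr_expr_1 above does for the modifiers
   that require one.  */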
6751 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6752 enum expand_modifier, rtx *);
6755 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6756 enum expand_modifier modifier, rtx *alt_rtl)
6758 int rn = -1;
6759 rtx ret, last = NULL;
6761 /* Handle ERROR_MARK before anybody tries to access its type. */
6762 if (TREE_CODE (exp) == ERROR_MARK
6763 || (!GIMPLE_TUPLE_P (exp) && TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
6765 ret = CONST0_RTX (tmode);
6766 return ret ? ret : const0_rtx;
6769 if (flag_non_call_exceptions)
6771 rn = lookup_stmt_eh_region (exp);
6772 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6773 if (rn >= 0)
6774 last = get_last_insn ();
6777 /* If this is an expression of some kind and it has an associated line
6778 number, then emit the line number before expanding the expression.
6780 We need to save and restore the file and line information so that
6781 errors discovered during expansion are emitted with the right
6782 information. It would be better if the diagnostic routines
6783 used the file/line information embedded in the tree nodes rather
6784 than globals. */
6785 if (cfun && EXPR_HAS_LOCATION (exp))
6787 location_t saved_location = input_location;
6788 input_location = EXPR_LOCATION (exp);
6789 set_curr_insn_source_location (input_location);
6791 /* Record where the insns produced belong. */
6792 set_curr_insn_block (TREE_BLOCK (exp));
6794 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6796 input_location = saved_location;
6798 else
6800 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6803 /* If using non-call exceptions, mark all insns that may trap.
6804 expand_call() will mark CALL_INSNs before we get to this code,
6805 but it doesn't handle libcalls, and these may trap. */
6806 if (rn >= 0)
6808 rtx insn;
6809 for (insn = next_real_insn (last); insn;
6810 insn = next_real_insn (insn))
6812 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6813 /* If we want exceptions for non-call insns, any
6814 may_trap_p instruction may throw. */
6815 && GET_CODE (PATTERN (insn)) != CLOBBER
6816 && GET_CODE (PATTERN (insn)) != USE
6817 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6819 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6820 REG_NOTES (insn));
6825 return ret;
6828 static rtx
6829 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6830 enum expand_modifier modifier, rtx *alt_rtl)
6832 rtx op0, op1, op2, temp, decl_rtl;
6833 tree type;
6834 int unsignedp;
6835 enum machine_mode mode;
6836 enum tree_code code = TREE_CODE (exp);
6837 optab this_optab;
6838 rtx subtarget, original_target;
6839 int ignore;
6840 tree context, subexp0, subexp1;
6841 bool reduce_bit_field = false;
6842 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6843 ? reduce_to_bit_field_precision ((expr), \
6844 target, \
6845 type) \
6846 : (expr))
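/* Example of the reduction below (hypothetical type, and assuming the
   language hook reduce_bit_field_operations is set): for an arithmetic
   result whose type is a 3-bit bit-field type with QImode as TYPE_MODE,
   GET_MODE_PRECISION (8) exceeds TYPE_PRECISION (3), so reduce_bit_field
   becomes true and REDUCE_BIT_FIELD narrows the expanded result back to
   3 significant bits via reduce_to_bit_field_precision.  */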
6848 if (GIMPLE_STMT_P (exp))
6850 type = void_type_node;
6851 mode = VOIDmode;
6852 unsignedp = 0;
6854 else
6856 type = TREE_TYPE (exp);
6857 mode = TYPE_MODE (type);
6858 unsignedp = TYPE_UNSIGNED (type);
6860 if (lang_hooks.reduce_bit_field_operations
6861 && TREE_CODE (type) == INTEGER_TYPE
6862 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6864 /* An operation in what may be a bit-field type needs the
6865 result to be reduced to the precision of the bit-field type,
6866 which is narrower than that of the type's mode. */
6867 reduce_bit_field = true;
6868 if (modifier == EXPAND_STACK_PARM)
6869 target = 0;
6872 /* Use subtarget as the target for operand 0 of a binary operation. */
6873 subtarget = get_subtarget (target);
6874 original_target = target;
6875 ignore = (target == const0_rtx
6876 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6877 || code == CONVERT_EXPR || code == COND_EXPR
6878 || code == VIEW_CONVERT_EXPR)
6879 && TREE_CODE (type) == VOID_TYPE));
6881 /* If we are going to ignore this result, we need only do something
6882 if there is a side-effect somewhere in the expression. If there
6883 is, short-circuit the most common cases here. Note that we must
6884 not call expand_expr with anything but const0_rtx in case this
6885 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6887 if (ignore)
6889 if (! TREE_SIDE_EFFECTS (exp))
6890 return const0_rtx;
6892 /* Ensure we reference a volatile object even if value is ignored, but
6893 don't do this if all we are doing is taking its address. */
6894 if (TREE_THIS_VOLATILE (exp)
6895 && TREE_CODE (exp) != FUNCTION_DECL
6896 && mode != VOIDmode && mode != BLKmode
6897 && modifier != EXPAND_CONST_ADDRESS)
6899 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6900 if (MEM_P (temp))
6901 temp = copy_to_reg (temp);
6902 return const0_rtx;
6905 if (TREE_CODE_CLASS (code) == tcc_unary
6906 || code == COMPONENT_REF || code == INDIRECT_REF)
6907 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6908 modifier);
6910 else if (TREE_CODE_CLASS (code) == tcc_binary
6911 || TREE_CODE_CLASS (code) == tcc_comparison
6912 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6914 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6915 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6916 return const0_rtx;
6918 else if (code == BIT_FIELD_REF)
6920 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6921 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6922 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6923 return const0_rtx;
6926 target = 0;
6930 switch (code)
6932 case LABEL_DECL:
6934 tree function = decl_function_context (exp);
6936 temp = label_rtx (exp);
6937 temp = gen_rtx_LABEL_REF (Pmode, temp);
6939 if (function != current_function_decl
6940 && function != 0)
6941 LABEL_REF_NONLOCAL_P (temp) = 1;
6943 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6944 return temp;
6947 case SSA_NAME:
6948 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6949 NULL);
6951 case PARM_DECL:
6952 case VAR_DECL:
6953 /* If a static var's type was incomplete when the decl was written,
6954 but the type is complete now, lay out the decl now. */
6955 if (DECL_SIZE (exp) == 0
6956 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6957 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6958 layout_decl (exp, 0);
6960 /* TLS emulation hook - replace __thread vars with
6961 *__emutls_get_address (&_emutls.var). */
6962 if (! targetm.have_tls
6963 && TREE_CODE (exp) == VAR_DECL
6964 && DECL_THREAD_LOCAL_P (exp))
6966 exp = build_fold_indirect_ref (emutls_var_address (exp));
6967 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
6970 /* ... fall through ... */
6972 case FUNCTION_DECL:
6973 case RESULT_DECL:
6974 decl_rtl = DECL_RTL (exp);
6975 gcc_assert (decl_rtl);
6976 decl_rtl = copy_rtx (decl_rtl);
6978 /* Ensure the variable is marked as used even if it doesn't go through
6979 a parser. If it hasn't been used yet, write out an external
6980 definition. */
6981 if (! TREE_USED (exp))
6983 assemble_external (exp);
6984 TREE_USED (exp) = 1;
6987 /* Show we haven't gotten RTL for this yet. */
6988 temp = 0;
6990 /* Variables inherited from containing functions should have
6991 been lowered by this point. */
6992 context = decl_function_context (exp);
6993 gcc_assert (!context
6994 || context == current_function_decl
6995 || TREE_STATIC (exp)
6996 /* ??? C++ creates functions that are not TREE_STATIC. */
6997 || TREE_CODE (exp) == FUNCTION_DECL);
6999 /* This is the case of an array whose size is to be determined
7000 from its initializer, while the initializer is still being parsed.
7001 See expand_decl. */
7003 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
7004 temp = validize_mem (decl_rtl);
7006 /* If DECL_RTL is memory, we are in the normal case and either
7007 the address is not valid or it is not a register and -fforce-addr
7008 is specified, get the address into a register. */
7010 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
7012 if (alt_rtl)
7013 *alt_rtl = decl_rtl;
7014 decl_rtl = use_anchored_address (decl_rtl);
7015 if (modifier != EXPAND_CONST_ADDRESS
7016 && modifier != EXPAND_SUM
7017 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
7018 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
7019 temp = replace_equiv_address (decl_rtl,
7020 copy_rtx (XEXP (decl_rtl, 0)));
7023 /* If we got something, return it. But first, set the alignment
7024 if the address is a register. */
7025 if (temp != 0)
7027 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
7028 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
7030 return temp;
7033 /* If the mode of DECL_RTL does not match that of the decl, it
7034 must be a promoted value. We return a SUBREG of the wanted mode,
7035 but mark it so that we know that it was already extended. */
7037 if (REG_P (decl_rtl)
7038 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7040 enum machine_mode pmode;
7042 /* Get the signedness used for this variable. Ensure we get the
7043 same mode we got when the variable was declared. */
7044 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7045 (TREE_CODE (exp) == RESULT_DECL
7046 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7047 gcc_assert (GET_MODE (decl_rtl) == pmode);
7049 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7050 SUBREG_PROMOTED_VAR_P (temp) = 1;
7051 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7052 return temp;
7055 return decl_rtl;
7057 case INTEGER_CST:
7058 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7059 TREE_INT_CST_HIGH (exp), mode);
7061 /* ??? If overflow is set, fold will have done an incomplete job,
7062 which can result in (plus xx (const_int 0)), which can get
7063 simplified by validate_replace_rtx during virtual register
7064 instantiation, which can result in unrecognizable insns.
7065 Avoid this by forcing all overflows into registers. */
7066 if (TREE_OVERFLOW (exp)
7067 && modifier != EXPAND_INITIALIZER)
7068 temp = force_reg (mode, temp);
7070 return temp;
7072 case VECTOR_CST:
7074 tree tmp = NULL_TREE;
7075 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7076 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
7077 return const_vector_from_tree (exp);
7078 if (GET_MODE_CLASS (mode) == MODE_INT)
7080 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7081 if (type_for_mode)
7082 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7084 if (!tmp)
7085 tmp = build_constructor_from_list (type,
7086 TREE_VECTOR_CST_ELTS (exp));
7087 return expand_expr (tmp, ignore ? const0_rtx : target,
7088 tmode, modifier);
7091 case CONST_DECL:
7092 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7094 case REAL_CST:
7095 /* If optimized, generate immediate CONST_DOUBLE
7096 which will be turned into memory by reload if necessary.
7098 We used to force a register so that loop.c could see it. But
7099 this does not allow gen_* patterns to perform optimizations with
7100 the constants. It also produces two insns in cases like "x = 1.0;".
7101 On most machines, floating-point constants are not permitted in
7102 many insns, so we'd end up copying it to a register in any case.
7104 Now, we do the copying in expand_binop, if appropriate. */
7105 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7106 TYPE_MODE (TREE_TYPE (exp)));
7108 case COMPLEX_CST:
7109 /* Handle evaluating a complex constant in a CONCAT target. */
7110 if (original_target && GET_CODE (original_target) == CONCAT)
7112 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7113 rtx rtarg, itarg;
7115 rtarg = XEXP (original_target, 0);
7116 itarg = XEXP (original_target, 1);
7118 /* Move the real and imaginary parts separately. */
7119 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
7120 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
7122 if (op0 != rtarg)
7123 emit_move_insn (rtarg, op0);
7124 if (op1 != itarg)
7125 emit_move_insn (itarg, op1);
7127 return original_target;
7130 /* ... fall through ... */
7132 case STRING_CST:
7133 temp = expand_expr_constant (exp, 1, modifier);
7135 /* temp contains a constant address.
7136 On RISC machines where a constant address isn't valid,
7137 make some insns to get that address into a register. */
7138 if (modifier != EXPAND_CONST_ADDRESS
7139 && modifier != EXPAND_INITIALIZER
7140 && modifier != EXPAND_SUM
7141 && (! memory_address_p (mode, XEXP (temp, 0))
7142 || flag_force_addr))
7143 return replace_equiv_address (temp,
7144 copy_rtx (XEXP (temp, 0)));
7145 return temp;
7147 case SAVE_EXPR:
7149 tree val = TREE_OPERAND (exp, 0);
7150 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7152 if (!SAVE_EXPR_RESOLVED_P (exp))
7154 /* We can indeed still hit this case, typically via builtin
7155 expanders calling save_expr immediately before expanding
7156 something. Assume this means that we only have to deal
7157 with non-BLKmode values. */
7158 gcc_assert (GET_MODE (ret) != BLKmode);
7160 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7161 DECL_ARTIFICIAL (val) = 1;
7162 DECL_IGNORED_P (val) = 1;
7163 TREE_OPERAND (exp, 0) = val;
7164 SAVE_EXPR_RESOLVED_P (exp) = 1;
7166 if (!CONSTANT_P (ret))
7167 ret = copy_to_reg (ret);
7168 SET_DECL_RTL (val, ret);
7171 return ret;
7174 case GOTO_EXPR:
7175 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7176 expand_goto (TREE_OPERAND (exp, 0));
7177 else
7178 expand_computed_goto (TREE_OPERAND (exp, 0));
7179 return const0_rtx;
7181 case CONSTRUCTOR:
7182 /* If we don't need the result, just ensure we evaluate any
7183 subexpressions. */
7184 if (ignore)
7186 unsigned HOST_WIDE_INT idx;
7187 tree value;
7189 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7190 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
7192 return const0_rtx;
7195 /* Try to avoid creating a temporary at all. This is possible
7196 if all of the initializer is zero.
7197 FIXME: try to handle all [0..255] initializers we can handle
7198 with memset. */
7199 else if (TREE_STATIC (exp)
7200 && !TREE_ADDRESSABLE (exp)
7201 && target != 0 && mode == BLKmode
7202 && all_zeros_p (exp))
7204 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7205 return target;
7208 /* All elts simple constants => refer to a constant in memory. But
7209 if this is a non-BLKmode mode, let it store a field at a time
7210 since that should make a CONST_INT or CONST_DOUBLE when we
7211 fold. Likewise, if we have a target we can use, it is best to
7212 store directly into the target unless the type is large enough
7213 that memcpy will be used. If we are making an initializer and
7214 all operands are constant, put it in memory as well.
7216 FIXME: Avoid trying to fill vector constructors piece-meal.
7217 Output them with output_constant_def below unless we're sure
7218 they're zeros. This should go away when vector initializers
7219 are treated like VECTOR_CST instead of arrays.
7221 else if ((TREE_STATIC (exp)
7222 && ((mode == BLKmode
7223 && ! (target != 0 && safe_from_p (target, exp, 1)))
7224 || TREE_ADDRESSABLE (exp)
7225 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7226 && (! MOVE_BY_PIECES_P
7227 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7228 TYPE_ALIGN (type)))
7229 && ! mostly_zeros_p (exp))))
7230 || ((modifier == EXPAND_INITIALIZER
7231 || modifier == EXPAND_CONST_ADDRESS)
7232 && TREE_CONSTANT (exp)))
7234 rtx constructor = expand_expr_constant (exp, 1, modifier);
7236 if (modifier != EXPAND_CONST_ADDRESS
7237 && modifier != EXPAND_INITIALIZER
7238 && modifier != EXPAND_SUM)
7239 constructor = validize_mem (constructor);
7241 return constructor;
7243 else
7245 /* Handle calls that pass values in multiple non-contiguous
7246 locations. The Irix 6 ABI has examples of this. */
7247 if (target == 0 || ! safe_from_p (target, exp, 1)
7248 || GET_CODE (target) == PARALLEL
7249 || modifier == EXPAND_STACK_PARM)
7250 target
7251 = assign_temp (build_qualified_type (type,
7252 (TYPE_QUALS (type)
7253 | (TREE_READONLY (exp)
7254 * TYPE_QUAL_CONST))),
7255 0, TREE_ADDRESSABLE (exp), 1);
7257 store_constructor (exp, target, 0, int_expr_size (exp));
7258 return target;
7261 case MISALIGNED_INDIRECT_REF:
7262 case ALIGN_INDIRECT_REF:
7263 case INDIRECT_REF:
7265 tree exp1 = TREE_OPERAND (exp, 0);
7267 if (modifier != EXPAND_WRITE)
7269 tree t;
7271 t = fold_read_from_constant_string (exp);
7272 if (t)
7273 return expand_expr (t, target, tmode, modifier);
7276 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7277 op0 = memory_address (mode, op0);
7279 if (code == ALIGN_INDIRECT_REF)
7281 int align = TYPE_ALIGN_UNIT (type);
7282 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7283 op0 = memory_address (mode, op0);
7286 temp = gen_rtx_MEM (mode, op0);
7288 set_mem_attributes (temp, exp, 0);
7290 /* Resolve the misalignment now, so that we don't have to remember
7291 to resolve it later. Of course, this only works for reads. */
7292 /* ??? When we get around to supporting writes, we'll have to handle
7293 this in store_expr directly. The vectorizer isn't generating
7294 those yet, however. */
7295 if (code == MISALIGNED_INDIRECT_REF)
7297 int icode;
7298 rtx reg, insn;
7300 gcc_assert (modifier == EXPAND_NORMAL
7301 || modifier == EXPAND_STACK_PARM);
7303 /* The vectorizer should have already checked the mode. */
7304 icode = movmisalign_optab->handlers[mode].insn_code;
7305 gcc_assert (icode != CODE_FOR_nothing);
7307 /* We've already validated the memory, and we're creating a
7308 new pseudo destination. The predicates really can't fail. */
7309 reg = gen_reg_rtx (mode);
7311 /* Nor can the insn generator. */
7312 insn = GEN_FCN (icode) (reg, temp);
7313 emit_insn (insn);
7315 return reg;
7318 return temp;
7321 case TARGET_MEM_REF:
7323 struct mem_address addr;
7325 get_address_description (exp, &addr);
7326 op0 = addr_for_mem_ref (&addr, true);
7327 op0 = memory_address (mode, op0);
7328 temp = gen_rtx_MEM (mode, op0);
7329 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7331 return temp;
7333 case ARRAY_REF:
7336 tree array = TREE_OPERAND (exp, 0);
7337 tree index = TREE_OPERAND (exp, 1);
7339 /* Fold an expression like: "foo"[2].
7340 This is not done in fold so it won't happen inside &.
7341 Don't fold if this is for wide characters since it's too
7342 difficult to do correctly and this is a very rare case. */
7344 if (modifier != EXPAND_CONST_ADDRESS
7345 && modifier != EXPAND_INITIALIZER
7346 && modifier != EXPAND_MEMORY)
7348 tree t = fold_read_from_constant_string (exp);
7350 if (t)
7351 return expand_expr (t, target, tmode, modifier);
7354 /* If this is a constant index into a constant array,
7355 just get the value from the array. Handle both the cases when
7356 we have an explicit constructor and when our operand is a variable
7357 that was declared const. */
7359 if (modifier != EXPAND_CONST_ADDRESS
7360 && modifier != EXPAND_INITIALIZER
7361 && modifier != EXPAND_MEMORY
7362 && TREE_CODE (array) == CONSTRUCTOR
7363 && ! TREE_SIDE_EFFECTS (array)
7364 && TREE_CODE (index) == INTEGER_CST)
7366 unsigned HOST_WIDE_INT ix;
7367 tree field, value;
7369 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7370 field, value)
7371 if (tree_int_cst_equal (field, index))
7373 if (!TREE_SIDE_EFFECTS (value))
7374 return expand_expr (fold (value), target, tmode, modifier);
7375 break;
7379 else if (optimize >= 1
7380 && modifier != EXPAND_CONST_ADDRESS
7381 && modifier != EXPAND_INITIALIZER
7382 && modifier != EXPAND_MEMORY
7383 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7384 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7385 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7386 && targetm.binds_local_p (array))
7388 if (TREE_CODE (index) == INTEGER_CST)
7390 tree init = DECL_INITIAL (array);
7392 if (TREE_CODE (init) == CONSTRUCTOR)
7394 unsigned HOST_WIDE_INT ix;
7395 tree field, value;
7397 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7398 field, value)
7399 if (tree_int_cst_equal (field, index))
7401 if (!TREE_SIDE_EFFECTS (value))
7402 return expand_expr (fold (value), target, tmode,
7403 modifier);
7404 break;
7407 else if (TREE_CODE (init) == STRING_CST
7409 tree index1 = index;
7410 tree low_bound = array_ref_low_bound (exp);
7411 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7413 /* Optimize the special-case of a zero lower bound.
7415 We convert the low_bound to sizetype to avoid some problems
7416 with constant folding. (E.g. suppose the lower bound is 1,
7417 and its mode is QI. Without the conversion, (ARRAY
7418 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7419 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7421 if (! integer_zerop (low_bound))
7422 index1 = size_diffop (index1, fold_convert (sizetype,
7423 low_bound));
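/* If the constant index lies within the string, return the
   character directly, provided the element type is a single-byte
   integer mode.  */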
7425 if (0 > compare_tree_int (index1,
7426 TREE_STRING_LENGTH (init)))
7428 tree type = TREE_TYPE (TREE_TYPE (init));
7429 enum machine_mode mode = TYPE_MODE (type);
7431 if (GET_MODE_CLASS (mode) == MODE_INT
7432 && GET_MODE_SIZE (mode) == 1)
7433 return gen_int_mode (TREE_STRING_POINTER (init)
7434 [TREE_INT_CST_LOW (index1)],
7435 mode);
7441 goto normal_inner_ref;
7443 case COMPONENT_REF:
7444 /* If the operand is a CONSTRUCTOR, we can just extract the
7445 appropriate field if it is present. */
7446 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7448 unsigned HOST_WIDE_INT idx;
7449 tree field, value;
7451 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7452 idx, field, value)
7453 if (field == TREE_OPERAND (exp, 1)
7454 /* We can normally use the value of the field in the
7455 CONSTRUCTOR. However, if this is a bitfield in
7456 an integral mode that we can fit in a HOST_WIDE_INT,
7457 we must mask only the number of bits in the bitfield,
7458 since this is done implicitly by the constructor. If
7459 the bitfield does not meet either of those conditions,
7460 we can't do this optimization. */
7461 && (! DECL_BIT_FIELD (field)
7462 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7463 && (GET_MODE_BITSIZE (DECL_MODE (field))
7464 <= HOST_BITS_PER_WIDE_INT))))
7466 if (DECL_BIT_FIELD (field)
7467 && modifier == EXPAND_STACK_PARM)
7468 target = 0;
7469 op0 = expand_expr (value, target, tmode, modifier);
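/* The value from the CONSTRUCTOR may occupy the whole mode; reduce
   it to BITSIZE bits: mask for an unsigned field, or shift up and
   back down to sign-extend for a signed one.  */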
7470 if (DECL_BIT_FIELD (field))
7472 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7473 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7475 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7477 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7478 op0 = expand_and (imode, op0, op1, target);
7480 else
7482 tree count
7483 = build_int_cst (NULL_TREE,
7484 GET_MODE_BITSIZE (imode) - bitsize);
7486 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7487 target, 0);
7488 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7489 target, 0);
7493 return op0;
7496 goto normal_inner_ref;
7498 case BIT_FIELD_REF:
7499 case ARRAY_RANGE_REF:
7500 normal_inner_ref:
7502 enum machine_mode mode1;
7503 HOST_WIDE_INT bitsize, bitpos;
7504 tree offset;
7505 int volatilep = 0;
7506 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7507 &mode1, &unsignedp, &volatilep, true);
7508 rtx orig_op0;
7510 /* If we got back the original object, something is wrong. Perhaps
7511 we are evaluating an expression too early. In any event, don't
7512 infinitely recurse. */
7513 gcc_assert (tem != exp);
7515 /* If TEM's type is a union of variable size, pass TARGET to the inner
7516 computation, since it will need a temporary and TARGET is known
7517 to suffice. This occurs in unchecked conversion in Ada. */
7519 orig_op0 = op0
7520 = expand_expr (tem,
7521 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7522 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7523 != INTEGER_CST)
7524 && modifier != EXPAND_STACK_PARM
7525 ? target : NULL_RTX),
7526 VOIDmode,
7527 (modifier == EXPAND_INITIALIZER
7528 || modifier == EXPAND_CONST_ADDRESS
7529 || modifier == EXPAND_STACK_PARM)
7530 ? modifier : EXPAND_NORMAL);
7532 /* If this is a constant, put it into a register if it is a legitimate
7533 constant, OFFSET is 0, and we won't try to extract outside the
7534 register (in case we were passed a partially uninitialized object
7535 or a view_conversion to a larger size). Force the constant to
7536 memory otherwise. */
7537 if (CONSTANT_P (op0))
7539 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7540 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7541 && offset == 0
7542 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7543 op0 = force_reg (mode, op0);
7544 else
7545 op0 = validize_mem (force_const_mem (mode, op0));
7548 /* Otherwise, if this object is not in memory and we either have an
7549 offset, a BLKmode result, or a reference outside the object, put it
7550 there. Such cases can occur in Ada if we have unchecked conversion
7551 of an expression from a scalar type to an array or record type or
7552 for an ARRAY_RANGE_REF whose type is BLKmode. */
7553 else if (!MEM_P (op0)
7554 && (offset != 0
7555 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7556 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7558 tree nt = build_qualified_type (TREE_TYPE (tem),
7559 (TYPE_QUALS (TREE_TYPE (tem))
7560 | TYPE_QUAL_CONST));
7561 rtx memloc = assign_temp (nt, 1, 1, 1);
7563 emit_move_insn (memloc, op0);
7564 op0 = memloc;
7567 if (offset != 0)
7569 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7570 EXPAND_SUM);
7572 gcc_assert (MEM_P (op0));
7574 #ifdef POINTERS_EXTEND_UNSIGNED
7575 if (GET_MODE (offset_rtx) != Pmode)
7576 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7577 #else
7578 if (GET_MODE (offset_rtx) != ptr_mode)
7579 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7580 #endif
7582 if (GET_MODE (op0) == BLKmode
7583 /* A constant address in OP0 can have VOIDmode, we must
7584 not try to call force_reg in that case. */
7585 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7586 && bitsize != 0
7587 && (bitpos % bitsize) == 0
7588 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7589 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7591 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7592 bitpos = 0;
7595 op0 = offset_address (op0, offset_rtx,
7596 highest_pow2_factor (offset));
7599 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7600 record its alignment as BIGGEST_ALIGNMENT. */
7601 if (MEM_P (op0) && bitpos == 0 && offset != 0
7602 && is_aligning_offset (offset, tem))
7603 set_mem_align (op0, BIGGEST_ALIGNMENT);
7605 /* Don't forget about volatility even if this is a bitfield. */
7606 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7608 if (op0 == orig_op0)
7609 op0 = copy_rtx (op0);
7611 MEM_VOLATILE_P (op0) = 1;
7614 /* The following code doesn't handle CONCAT.
7615 Assume only bitpos == 0 can be used for CONCAT, due to
7616 one-element arrays having the same mode as their element. */
7617 if (GET_CODE (op0) == CONCAT)
7619 gcc_assert (bitpos == 0
7620 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7621 return op0;
7624 /* In cases where an aligned union has an unaligned object
7625 as a field, we might be extracting a BLKmode value from
7626 an integer-mode (e.g., SImode) object. Handle this case
7627 by doing the extract into an object as wide as the field
7628 (which we know to be the width of a basic mode), then
7629 storing into memory, and changing the mode to BLKmode. */
7630 if (mode1 == VOIDmode
7631 || REG_P (op0) || GET_CODE (op0) == SUBREG
7632 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7633 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7634 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7635 && modifier != EXPAND_CONST_ADDRESS
7636 && modifier != EXPAND_INITIALIZER)
7637 /* If the field isn't aligned enough to fetch as a memref,
7638 fetch it as a bit field. */
7639 || (mode1 != BLKmode
7640 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7641 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7642 || (MEM_P (op0)
7643 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7644 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7645 && ((modifier == EXPAND_CONST_ADDRESS
7646 || modifier == EXPAND_INITIALIZER)
7647 ? STRICT_ALIGNMENT
7648 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7649 || (bitpos % BITS_PER_UNIT != 0)))
7650 /* If the type and the field are a constant size and the
7651 size of the type isn't the same size as the bitfield,
7652 we must use bitfield operations. */
7653 || (bitsize >= 0
7654 && TYPE_SIZE (TREE_TYPE (exp))
7655 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7656 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7657 bitsize)))
7659 enum machine_mode ext_mode = mode;
7661 if (ext_mode == BLKmode
7662 && ! (target != 0 && MEM_P (op0)
7663 && MEM_P (target)
7664 && bitpos % BITS_PER_UNIT == 0))
7665 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7667 if (ext_mode == BLKmode)
7669 if (target == 0)
7670 target = assign_temp (type, 0, 1, 1);
7672 if (bitsize == 0)
7673 return target;
7675 /* In this case, BITPOS must start at a byte boundary and
7676 TARGET, if specified, must be a MEM. */
7677 gcc_assert (MEM_P (op0)
7678 && (!target || MEM_P (target))
7679 && !(bitpos % BITS_PER_UNIT));
7681 emit_block_move (target,
7682 adjust_address (op0, VOIDmode,
7683 bitpos / BITS_PER_UNIT),
7684 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7685 / BITS_PER_UNIT),
7686 (modifier == EXPAND_STACK_PARM
7687 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7689 return target;
7692 op0 = validize_mem (op0);
7694 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7695 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7697 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7698 (modifier == EXPAND_STACK_PARM
7699 ? NULL_RTX : target),
7700 ext_mode, ext_mode);
7702 /* If the result is a record type and BITSIZE is narrower than
7703 the mode of OP0, an integral mode, and this is a big endian
7704 machine, we must put the field into the high-order bits. */
7705 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7706 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7707 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7708 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7709 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7710 - bitsize),
7711 op0, 1);
7713 /* If the result type is BLKmode, store the data into a temporary
7714 of the appropriate type, but with the mode corresponding to the
7715 mode for the data we have (op0's mode). It's tempting to make
7716 this a constant type, since we know it's only being stored once,
7717 but that can cause problems if we are taking the address of this
7718 COMPONENT_REF because the MEM of any reference via that address
7719 will have flags corresponding to the type, which will not
7720 necessarily be constant. */
7721 if (mode == BLKmode)
7723 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
7724 rtx new;
7726 /* If the reference doesn't use the alias set of its type,
7727 we cannot create the temporary using that type. */
7728 if (component_uses_parent_alias_set (exp))
7730 new = assign_stack_local (ext_mode, size, 0);
7731 set_mem_alias_set (new, get_alias_set (exp));
7733 else
7734 new = assign_stack_temp_for_type (ext_mode, size, 0, type);
7736 emit_move_insn (new, op0);
7737 op0 = copy_rtx (new);
7738 PUT_MODE (op0, BLKmode);
7739 set_mem_attributes (op0, exp, 1);
7742 return op0;
7745 /* If the result is BLKmode, use that to access the object
7746 now as well. */
7747 if (mode == BLKmode)
7748 mode1 = BLKmode;
7750 /* Get a reference to just this component. */
7751 if (modifier == EXPAND_CONST_ADDRESS
7752 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7753 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7754 else
7755 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7757 if (op0 == orig_op0)
7758 op0 = copy_rtx (op0);
7760 set_mem_attributes (op0, exp, 0);
7761 if (REG_P (XEXP (op0, 0)))
7762 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7764 MEM_VOLATILE_P (op0) |= volatilep;
7765 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7766 || modifier == EXPAND_CONST_ADDRESS
7767 || modifier == EXPAND_INITIALIZER)
7768 return op0;
7769 else if (target == 0)
7770 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7772 convert_move (target, op0, unsignedp);
7773 return target;
7776 case OBJ_TYPE_REF:
7777 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7779 case CALL_EXPR:
7780 /* Check for a built-in function. */
7781 if (TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
7782 && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
7783 == FUNCTION_DECL)
7784 && DECL_BUILT_IN (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
7786 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
7787 == BUILT_IN_FRONTEND)
7788 return lang_hooks.expand_expr (exp, original_target,
7789 tmode, modifier,
7790 alt_rtl);
7791 else
7792 return expand_builtin (exp, target, subtarget, tmode, ignore);
7795 return expand_call (exp, target, ignore);
7797 case NON_LVALUE_EXPR:
7798 case NOP_EXPR:
7799 case CONVERT_EXPR:
7800 if (TREE_OPERAND (exp, 0) == error_mark_node)
7801 return const0_rtx;
7803 if (TREE_CODE (type) == UNION_TYPE)
7805 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7807 /* If both input and output are BLKmode, this conversion isn't doing
7808 anything except possibly changing memory attribute. */
7809 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7811 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7812 modifier);
7814 result = copy_rtx (result);
7815 set_mem_attributes (result, exp, 0);
7816 return result;
7819 if (target == 0)
7821 if (TYPE_MODE (type) != BLKmode)
7822 target = gen_reg_rtx (TYPE_MODE (type));
7823 else
7824 target = assign_temp (type, 0, 1, 1);
7827 if (MEM_P (target))
7828 /* Store data into beginning of memory target. */
7829 store_expr (TREE_OPERAND (exp, 0),
7830 adjust_address (target, TYPE_MODE (valtype), 0),
7831 modifier == EXPAND_STACK_PARM);
7833 else
7835 gcc_assert (REG_P (target));
7837 /* Store this field into a union of the proper type. */
7838 store_field (target,
7839 MIN ((int_size_in_bytes (TREE_TYPE
7840 (TREE_OPERAND (exp, 0)))
7841 * BITS_PER_UNIT),
7842 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7843 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7844 type, 0);
7847 /* Return the entire union. */
7848 return target;
7851 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7853 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7854 modifier);
7856 /* If the signedness of the conversion differs and OP0 is
7857 a promoted SUBREG, clear that indication since we now
7858 have to do the proper extension. */
7859 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7860 && GET_CODE (op0) == SUBREG)
7861 SUBREG_PROMOTED_VAR_P (op0) = 0;
7863 return REDUCE_BIT_FIELD (op0);
7866 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
7867 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7868 if (GET_MODE (op0) == mode)
7871 /* If OP0 is a constant, just convert it into the proper mode. */
7872 else if (CONSTANT_P (op0))
7874 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7875 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7877 if (modifier == EXPAND_INITIALIZER)
7878 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7879 subreg_lowpart_offset (mode,
7880 inner_mode));
7881 else
7882 op0 = convert_modes (mode, inner_mode, op0,
7883 TYPE_UNSIGNED (inner_type));
7886 else if (modifier == EXPAND_INITIALIZER)
7887 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7889 else if (target == 0)
7890 op0 = convert_to_mode (mode, op0,
7891 TYPE_UNSIGNED (TREE_TYPE
7892 (TREE_OPERAND (exp, 0))));
7893 else
7895 convert_move (target, op0,
7896 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7897 op0 = target;
7900 return REDUCE_BIT_FIELD (op0);
7902 case VIEW_CONVERT_EXPR:
7903 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7905 /* If the input and output modes are both the same, we are done. */
7906 if (TYPE_MODE (type) == GET_MODE (op0))
7908 /* If neither mode is BLKmode, and both modes are the same size
7909 then we can use gen_lowpart. */
7910 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7911 && GET_MODE_SIZE (TYPE_MODE (type))
7912 == GET_MODE_SIZE (GET_MODE (op0)))
7914 if (GET_CODE (op0) == SUBREG)
7915 op0 = force_reg (GET_MODE (op0), op0);
7916 op0 = gen_lowpart (TYPE_MODE (type), op0);
7918 /* If both modes are integral, then we can convert from one to the
7919 other. */
7920 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7921 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7922 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7923 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7924 /* As a last resort, spill op0 to memory, and reload it in a
7925 different mode. */
7926 else if (!MEM_P (op0))
7928 /* If the operand is not a MEM, force it into memory. Since we
7929 are going to be changing the mode of the MEM, don't call
7930 force_const_mem for constants because we don't allow pool
7931 constants to change mode. */
7932 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7934 gcc_assert (!TREE_ADDRESSABLE (exp));
7936 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7937 target
7938 = assign_stack_temp_for_type
7939 (TYPE_MODE (inner_type),
7940 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7942 emit_move_insn (target, op0);
7943 op0 = target;
7946 /* At this point, OP0 is in the correct mode. If the output type is such
7947 that the operand is known to be aligned, indicate that it is.
7948 Otherwise, we need only be concerned about alignment for non-BLKmode
7949 results. */
7950 if (MEM_P (op0))
7952 op0 = copy_rtx (op0);
7954 if (TYPE_ALIGN_OK (type))
7955 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7956 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7957 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7959 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7960 HOST_WIDE_INT temp_size
7961 = MAX (int_size_in_bytes (inner_type),
7962 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7963 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7964 temp_size, 0, type);
7965 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7967 gcc_assert (!TREE_ADDRESSABLE (exp));
7969 if (GET_MODE (op0) == BLKmode)
7970 emit_block_move (new_with_op0_mode, op0,
7971 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7972 (modifier == EXPAND_STACK_PARM
7973 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7974 else
7975 emit_move_insn (new_with_op0_mode, op0);
7977 op0 = new;
7980 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7983 return op0;
7985 case PLUS_EXPR:
7986 /* Check if this is a case for multiplication and addition. */
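/* That is, look for (N)a * (N)b + c with both multiplication
   operands narrowed from the same width and signedness, and use the
   target's widening multiply-accumulate optab when it exists for
   this mode.  */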
7987 if (TREE_CODE (type) == INTEGER_TYPE
7988 && TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR)
7990 tree subsubexp0, subsubexp1;
7991 enum tree_code code0, code1;
7993 subexp0 = TREE_OPERAND (exp, 0);
7994 subsubexp0 = TREE_OPERAND (subexp0, 0);
7995 subsubexp1 = TREE_OPERAND (subexp0, 1);
7996 code0 = TREE_CODE (subsubexp0);
7997 code1 = TREE_CODE (subsubexp1);
7998 if (code0 == NOP_EXPR && code1 == NOP_EXPR
7999 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8000 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8001 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8002 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8003 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8004 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8006 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8007 enum machine_mode innermode = TYPE_MODE (op0type);
8008 bool zextend_p = TYPE_UNSIGNED (op0type);
8009 this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
8010 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8011 && (this_optab->handlers[(int) mode].insn_code
8012 != CODE_FOR_nothing))
8014 expand_operands (TREE_OPERAND (subsubexp0, 0),
8015 TREE_OPERAND (subsubexp1, 0),
8016 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8017 op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget,
8018 VOIDmode, EXPAND_NORMAL);
8019 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8020 target, unsignedp);
8021 gcc_assert (temp);
8022 return REDUCE_BIT_FIELD (temp);
8027 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8028 something else, make sure we add the register to the constant and
8029 then to the other thing. This case can occur during strength
8030 reduction and doing it this way will produce better code if the
8031 frame pointer or argument pointer is eliminated.
8033 fold-const.c will ensure that the constant is always in the inner
8034 PLUS_EXPR, so the only case we need to do anything about is if
8035 sp, ap, or fp is our second argument, in which case we must swap
8036 the innermost first argument and our second argument. */
8038 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8039 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8040 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
8041 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8042 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8043 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8045 tree t = TREE_OPERAND (exp, 1);
8047 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8048 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8051 /* If the result is to be ptr_mode and we are adding an integer to
8052 something, we might be forming a constant. So try to use
8053 plus_constant. If it produces a sum and we can't accept it,
8054 use force_operand. This allows P = &ARR[const] to generate
8055 efficient code on machines where a SYMBOL_REF is not a valid
8056 address.
8058 If this is an EXPAND_SUM call, always return the sum. */
8059 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8060 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8062 if (modifier == EXPAND_STACK_PARM)
8063 target = 0;
8064 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8065 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8066 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8068 rtx constant_part;
8070 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8071 EXPAND_SUM);
8072 /* Use immed_double_const to ensure that the constant is
8073 truncated according to the mode of OP1, then sign extended
8074 to a HOST_WIDE_INT. Using the constant directly can result
8075 in non-canonical RTL in a 64x32 cross compile. */
8076 constant_part
8077 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8078 (HOST_WIDE_INT) 0,
8079 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8080 op1 = plus_constant (op1, INTVAL (constant_part));
8081 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8082 op1 = force_operand (op1, target);
8083 return REDUCE_BIT_FIELD (op1);
8086 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8087 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8088 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8090 rtx constant_part;
8092 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8093 (modifier == EXPAND_INITIALIZER
8094 ? EXPAND_INITIALIZER : EXPAND_SUM));
8095 if (! CONSTANT_P (op0))
8097 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8098 VOIDmode, modifier);
8099 /* Return a PLUS if modifier says it's OK. */
8100 if (modifier == EXPAND_SUM
8101 || modifier == EXPAND_INITIALIZER)
8102 return simplify_gen_binary (PLUS, mode, op0, op1);
8103 goto binop2;
8105 /* Use immed_double_const to ensure that the constant is
8106 truncated according to the mode of OP1, then sign extended
8107 to a HOST_WIDE_INT. Using the constant directly can result
8108 in non-canonical RTL in a 64x32 cross compile. */
8109 constant_part
8110 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8111 (HOST_WIDE_INT) 0,
8112 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8113 op0 = plus_constant (op0, INTVAL (constant_part));
8114 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8115 op0 = force_operand (op0, target);
8116 return REDUCE_BIT_FIELD (op0);
8120 /* No sense saving up arithmetic to be done
8121 if it's all in the wrong mode to form part of an address.
8122 And force_operand won't know whether to sign-extend or
8123 zero-extend. */
8124 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8125 || mode != ptr_mode)
8127 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8128 subtarget, &op0, &op1, 0);
8129 if (op0 == const0_rtx)
8130 return op1;
8131 if (op1 == const0_rtx)
8132 return op0;
8133 goto binop2;
8136 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8137 subtarget, &op0, &op1, modifier);
8138 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8140 case MINUS_EXPR:
8141 /* Check if this is a case for multiplication and subtraction. */
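/* As for PLUS_EXPR above, but matching c - (N)a * (N)b and using
   the widening multiply-subtract optabs.  */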
8142 if (TREE_CODE (type) == INTEGER_TYPE
8143 && TREE_CODE (TREE_OPERAND (exp, 1)) == MULT_EXPR)
8145 tree subsubexp0, subsubexp1;
8146 enum tree_code code0, code1;
8148 subexp1 = TREE_OPERAND (exp, 1);
8149 subsubexp0 = TREE_OPERAND (subexp1, 0);
8150 subsubexp1 = TREE_OPERAND (subexp1, 1);
8151 code0 = TREE_CODE (subsubexp0);
8152 code1 = TREE_CODE (subsubexp1);
8153 if (code0 == NOP_EXPR && code1 == NOP_EXPR
8154 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8155 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8156 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8157 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8158 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8159 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8161 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8162 enum machine_mode innermode = TYPE_MODE (op0type);
8163 bool zextend_p = TYPE_UNSIGNED (op0type);
8164 this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
8165 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8166 && (this_optab->handlers[(int) mode].insn_code
8167 != CODE_FOR_nothing))
8169 expand_operands (TREE_OPERAND (subsubexp0, 0),
8170 TREE_OPERAND (subsubexp1, 0),
8171 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8172 op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8173 VOIDmode, EXPAND_NORMAL);
8174 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8175 target, unsignedp);
8176 gcc_assert (temp);
8177 return REDUCE_BIT_FIELD (temp);
8182 /* For initializers, we are allowed to return a MINUS of two
8183 symbolic constants; handle the case where both operands are
8184 constant here. */
8187 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8188 && really_constant_p (TREE_OPERAND (exp, 0))
8189 && really_constant_p (TREE_OPERAND (exp, 1)))
8191 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8192 NULL_RTX, &op0, &op1, modifier);
8194 /* If the last operand is a CONST_INT, use plus_constant of
8195 the negated constant. Else make the MINUS. */
8196 if (GET_CODE (op1) == CONST_INT)
8197 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8198 else
8199 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8202 /* No sense saving up arithmetic to be done
8203 if it's all in the wrong mode to form part of an address.
8204 And force_operand won't know whether to sign-extend or
8205 zero-extend. */
8206 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8207 || mode != ptr_mode)
8208 goto binop;
8210 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8211 subtarget, &op0, &op1, modifier);
8213 /* Convert A - const to A + (-const). */
8214 if (GET_CODE (op1) == CONST_INT)
8216 op1 = negate_rtx (mode, op1);
8217 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8220 goto binop2;
8222 case MULT_EXPR:
8223 /* If first operand is constant, swap them.
8224 Thus the following special case checks need only
8225 check the second operand. */
8226 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8228 tree t1 = TREE_OPERAND (exp, 0);
8229 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8230 TREE_OPERAND (exp, 1) = t1;
8233 /* Attempt to return something suitable for generating an
8234 indexed address, for machines that support that. */
8236 if (modifier == EXPAND_SUM && mode == ptr_mode
8237 && host_integerp (TREE_OPERAND (exp, 1), 0))
8239 tree exp1 = TREE_OPERAND (exp, 1);
8241 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8242 EXPAND_SUM);
8244 if (!REG_P (op0))
8245 op0 = force_operand (op0, NULL_RTX);
8246 if (!REG_P (op0))
8247 op0 = copy_to_mode_reg (mode, op0);
8249 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8250 gen_int_mode (tree_low_cst (exp1, 0),
8251 TYPE_MODE (TREE_TYPE (exp1)))));
8254 if (modifier == EXPAND_STACK_PARM)
8255 target = 0;
8257 /* Check for multiplying things that have been extended
8258 from a narrower type. If this machine supports multiplying
8259 in that narrower type with a result in the desired type,
8260 do it that way, and avoid the explicit type-conversion. */
8262 subexp0 = TREE_OPERAND (exp, 0);
8263 subexp1 = TREE_OPERAND (exp, 1);
8264 /* First, check if we have a multiplication of one signed and one
8265 unsigned operand. */
8266 if (TREE_CODE (subexp0) == NOP_EXPR
8267 && TREE_CODE (subexp1) == NOP_EXPR
8268 && TREE_CODE (type) == INTEGER_TYPE
8269 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8270 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8271 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8272 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8273 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8274 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8276 enum machine_mode innermode
8277 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8278 this_optab = usmul_widen_optab;
8279 if (mode == GET_MODE_WIDER_MODE (innermode))
8281 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8283 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8284 expand_operands (TREE_OPERAND (subexp0, 0),
8285 TREE_OPERAND (subexp1, 0),
8286 NULL_RTX, &op0, &op1, 0);
8287 else
8288 expand_operands (TREE_OPERAND (subexp0, 0),
8289 TREE_OPERAND (subexp1, 0),
8290 NULL_RTX, &op1, &op0, 0);
8292 goto binop3;
8296 /* Check for a multiplication with matching signedness. */
8297 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8298 && TREE_CODE (type) == INTEGER_TYPE
8299 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8300 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8301 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8302 && int_fits_type_p (TREE_OPERAND (exp, 1),
8303 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8304 /* Don't use a widening multiply if a shift will do. */
8305 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8306 > HOST_BITS_PER_WIDE_INT)
8307 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8308 ||
8309 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8310 && (TYPE_PRECISION (TREE_TYPE
8311 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8312 == TYPE_PRECISION (TREE_TYPE
8313 (TREE_OPERAND
8314 (TREE_OPERAND (exp, 0), 0))))
8315 /* If both operands are extended, they must either both
8316 be zero-extended or both be sign-extended. */
8317 && (TYPE_UNSIGNED (TREE_TYPE
8318 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8319 == TYPE_UNSIGNED (TREE_TYPE
8320 (TREE_OPERAND
8321 (TREE_OPERAND (exp, 0), 0)))))))
8323 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8324 enum machine_mode innermode = TYPE_MODE (op0type);
8325 bool zextend_p = TYPE_UNSIGNED (op0type);
8326 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8327 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8329 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8331 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8333 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8334 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8335 TREE_OPERAND (exp, 1),
8336 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8337 else
8338 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8339 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8340 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8341 goto binop3;
8343 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8344 && innermode == word_mode)
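/* No widening multiply of the required signedness is available;
   use the other one and fix up the high part of the result with
   expand_mult_highpart_adjust.  */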
8346 rtx htem, hipart;
8347 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8348 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8349 op1 = convert_modes (innermode, mode,
8350 expand_normal (TREE_OPERAND (exp, 1)),
8351 unsignedp);
8352 else
8353 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8354 temp = expand_binop (mode, other_optab, op0, op1, target,
8355 unsignedp, OPTAB_LIB_WIDEN);
8356 hipart = gen_highpart (innermode, temp);
8357 htem = expand_mult_highpart_adjust (innermode, hipart,
8358 op0, op1, hipart,
8359 zextend_p);
8360 if (htem != hipart)
8361 emit_move_insn (hipart, htem);
8362 return REDUCE_BIT_FIELD (temp);
8366 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8367 subtarget, &op0, &op1, 0);
8368 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8370 case TRUNC_DIV_EXPR:
8371 case FLOOR_DIV_EXPR:
8372 case CEIL_DIV_EXPR:
8373 case ROUND_DIV_EXPR:
8374 case EXACT_DIV_EXPR:
8375 if (modifier == EXPAND_STACK_PARM)
8376 target = 0;
8377 /* Possible optimization: compute the dividend with EXPAND_SUM
8378 then if the divisor is constant can optimize the case
8379 where some terms of the dividend have coeffs divisible by it. */
8380 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8381 subtarget, &op0, &op1, 0);
8382 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8384 case RDIV_EXPR:
8385 goto binop;
8387 case TRUNC_MOD_EXPR:
8388 case FLOOR_MOD_EXPR:
8389 case CEIL_MOD_EXPR:
8390 case ROUND_MOD_EXPR:
8391 if (modifier == EXPAND_STACK_PARM)
8392 target = 0;
8393 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8394 subtarget, &op0, &op1, 0);
8395 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8397 case FIX_TRUNC_EXPR:
8398 op0 = expand_normal (TREE_OPERAND (exp, 0));
8399 if (target == 0 || modifier == EXPAND_STACK_PARM)
8400 target = gen_reg_rtx (mode);
8401 expand_fix (target, op0, unsignedp);
8402 return target;
8404 case FLOAT_EXPR:
8405 op0 = expand_normal (TREE_OPERAND (exp, 0));
8406 if (target == 0 || modifier == EXPAND_STACK_PARM)
8407 target = gen_reg_rtx (mode);
8408 /* expand_float can't figure out what to do if FROM has VOIDmode.
8409 So give it the correct mode. With -O, cse will optimize this. */
8410 if (GET_MODE (op0) == VOIDmode)
8411 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8412 op0);
8413 expand_float (target, op0,
8414 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8415 return target;
8417 case NEGATE_EXPR:
8418 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8419 VOIDmode, EXPAND_NORMAL);
8420 if (modifier == EXPAND_STACK_PARM)
8421 target = 0;
8422 temp = expand_unop (mode,
8423 optab_for_tree_code (NEGATE_EXPR, type),
8424 op0, target, 0);
8425 gcc_assert (temp);
8426 return REDUCE_BIT_FIELD (temp);
8428 case ABS_EXPR:
8429 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8430 VOIDmode, EXPAND_NORMAL);
8431 if (modifier == EXPAND_STACK_PARM)
8432 target = 0;
8434 /* ABS_EXPR is not valid for complex arguments. */
8435 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8436 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8438 /* Unsigned abs is simply the operand. Testing here means we don't
8439 risk generating incorrect code below. */
8440 if (TYPE_UNSIGNED (type))
8441 return op0;
8443 return expand_abs (mode, op0, target, unsignedp,
8444 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8446 case MAX_EXPR:
8447 case MIN_EXPR:
8448 target = original_target;
8449 if (target == 0
8450 || modifier == EXPAND_STACK_PARM
8451 || (MEM_P (target) && MEM_VOLATILE_P (target))
8452 || GET_MODE (target) != mode
8453 || (REG_P (target)
8454 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8455 target = gen_reg_rtx (mode);
8456 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8457 target, &op0, &op1, 0);
8459 /* First try to do it with a special MIN or MAX instruction.
8460 If that does not win, use a conditional jump to select the proper
8461 value. */
8462 this_optab = optab_for_tree_code (code, type);
8463 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8464 OPTAB_WIDEN);
8465 if (temp != 0)
8466 return temp;
8468 /* At this point, a MEM target is no longer useful; we will get better
8469 code without it. */
8471 if (! REG_P (target))
8472 target = gen_reg_rtx (mode);
8474 /* If op1 was placed in target, swap op0 and op1. */
8475 if (target != op0 && target == op1)
8477 temp = op0;
8478 op0 = op1;
8479 op1 = temp;
8482 /* We generate better code and avoid problems with op1 mentioning
8483 target by forcing op1 into a pseudo if it isn't a constant. */
8484 if (! CONSTANT_P (op1))
8485 op1 = force_reg (mode, op1);
8488 enum rtx_code comparison_code;
8489 rtx cmpop1 = op1;
8491 if (code == MAX_EXPR)
8492 comparison_code = unsignedp ? GEU : GE;
8493 else
8494 comparison_code = unsignedp ? LEU : LE;
8496 /* Canonicalize to comparisons against 0. */
8497 if (op1 == const1_rtx)
8499 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8500 or (a != 0 ? a : 1) for unsigned.
8501 For MIN we are safe converting (a <= 1 ? a : 1)
8502 into (a <= 0 ? a : 1) */
8503 cmpop1 = const0_rtx;
8504 if (code == MAX_EXPR)
8505 comparison_code = unsignedp ? NE : GT;
8507 if (op1 == constm1_rtx && !unsignedp)
8509 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8510 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8511 cmpop1 = const0_rtx;
8512 if (code == MIN_EXPR)
8513 comparison_code = LT;
8515 #ifdef HAVE_conditional_move
8516 /* Use a conditional move if possible. */
8517 if (can_conditionally_move_p (mode))
8519 rtx insn;
8521 /* ??? Same problem as in expmed.c: emit_conditional_move
8522 forces a stack adjustment via compare_from_rtx, and we
8523 lose the stack adjustment if the sequence we are about
8524 to create is discarded. */
8525 do_pending_stack_adjust ();
8527 start_sequence ();
8529 /* Try to emit the conditional move. */
8530 insn = emit_conditional_move (target, comparison_code,
8531 op0, cmpop1, mode,
8532 op0, op1, mode,
8533 unsignedp);
8535 /* If we could do the conditional move, emit the sequence,
8536 and return. */
8537 if (insn)
8539 rtx seq = get_insns ();
8540 end_sequence ();
8541 emit_insn (seq);
8542 return target;
8545 /* Otherwise discard the sequence and fall back to code with
8546 branches. */
8547 end_sequence ();
8549 #endif
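/* Compare-and-branch sequence: copy op0 into TARGET, then
   overwrite it with op1 unless the comparison already holds.  */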
8550 if (target != op0)
8551 emit_move_insn (target, op0);
8553 temp = gen_label_rtx ();
8554 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8555 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8557 emit_move_insn (target, op1);
8558 emit_label (temp);
8559 return target;
8561 case BIT_NOT_EXPR:
8562 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8563 VOIDmode, EXPAND_NORMAL);
8564 if (modifier == EXPAND_STACK_PARM)
8565 target = 0;
8566 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8567 gcc_assert (temp);
8568 return temp;
8570 /* ??? Can optimize bitwise operations with one arg constant.
8571 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8572 and (a bitwise1 b) bitwise2 b (etc)
8573 but that is probably not worthwhile. */
8575 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8576 boolean values when we want in all cases to compute both of them. In
8577 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8578 as actual zero-or-1 values and then bitwise anding. In cases where
8579 there cannot be any side effects, better code would be made by
8580 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8581 how to recognize those cases. */
8583 case TRUTH_AND_EXPR:
8584 code = BIT_AND_EXPR;
8585 case BIT_AND_EXPR:
8586 goto binop;
8588 case TRUTH_OR_EXPR:
8589 code = BIT_IOR_EXPR;
8590 case BIT_IOR_EXPR:
8591 goto binop;
8593 case TRUTH_XOR_EXPR:
8594 code = BIT_XOR_EXPR;
8595 case BIT_XOR_EXPR:
8596 goto binop;
8598 case LSHIFT_EXPR:
8599 case RSHIFT_EXPR:
8600 case LROTATE_EXPR:
8601 case RROTATE_EXPR:
8602 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8603 subtarget = 0;
8604 if (modifier == EXPAND_STACK_PARM)
8605 target = 0;
8606 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8607 VOIDmode, EXPAND_NORMAL);
8608 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8609 unsignedp);
8611 /* Could determine the answer when only additive constants differ. Also,
8612 the addition of one can be handled by changing the condition. */
8613 case LT_EXPR:
8614 case LE_EXPR:
8615 case GT_EXPR:
8616 case GE_EXPR:
8617 case EQ_EXPR:
8618 case NE_EXPR:
8619 case UNORDERED_EXPR:
8620 case ORDERED_EXPR:
8621 case UNLT_EXPR:
8622 case UNLE_EXPR:
8623 case UNGT_EXPR:
8624 case UNGE_EXPR:
8625 case UNEQ_EXPR:
8626 case LTGT_EXPR:
8627 temp = do_store_flag (exp,
8628 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8629 tmode != VOIDmode ? tmode : mode, 0);
8630 if (temp != 0)
8631 return temp;
8633 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8634 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8635 && original_target
8636 && REG_P (original_target)
8637 && (GET_MODE (original_target)
8638 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8640 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8641 VOIDmode, EXPAND_NORMAL);
8643 /* If temp is constant, we can just compute the result. */
8644 if (GET_CODE (temp) == CONST_INT)
8646 if (INTVAL (temp) != 0)
8647 emit_move_insn (target, const1_rtx);
8648 else
8649 emit_move_insn (target, const0_rtx);
8651 return target;
8654 if (temp != original_target)
8656 enum machine_mode mode1 = GET_MODE (temp);
8657 if (mode1 == VOIDmode)
8658 mode1 = tmode != VOIDmode ? tmode : mode;
8660 temp = copy_to_mode_reg (mode1, temp);
8663 op1 = gen_label_rtx ();
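/* Jump over the move below when TEMP is zero, so a nonzero TEMP
   becomes 1 and zero stays zero.  */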
8664 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8665 GET_MODE (temp), unsignedp, op1);
8666 emit_move_insn (temp, const1_rtx);
8667 emit_label (op1);
8668 return temp;
8671 /* If no set-flag instruction, must generate a conditional store
8672 into a temporary variable. Drop through and handle this
8673 like && and ||. */
8675 if (! ignore
8676 && (target == 0
8677 || modifier == EXPAND_STACK_PARM
8678 || ! safe_from_p (target, exp, 1)
8679 /* Make sure we don't have a hard reg (such as function's return
8680 value) live across basic blocks, if not optimizing. */
8681 || (!optimize && REG_P (target)
8682 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8683 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8685 if (target)
8686 emit_move_insn (target, const0_rtx);
8688 op1 = gen_label_rtx ();
8689 jumpifnot (exp, op1);
8691 if (target)
8692 emit_move_insn (target, const1_rtx);
8694 emit_label (op1);
8695 return ignore ? const0_rtx : target;
8697 case TRUTH_NOT_EXPR:
8698 if (modifier == EXPAND_STACK_PARM)
8699 target = 0;
8700 op0 = expand_expr (TREE_OPERAND (exp, 0), target,
8701 VOIDmode, EXPAND_NORMAL);
8702 /* The parser is careful to generate TRUTH_NOT_EXPR
8703 only with operands that are always zero or one. */
8704 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8705 target, 1, OPTAB_LIB_WIDEN);
8706 gcc_assert (temp);
8707 return temp;
8709 case STATEMENT_LIST:
8711 tree_stmt_iterator iter;
8713 gcc_assert (ignore);
8715 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8716 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8718 return const0_rtx;
8720 case COND_EXPR:
8721 /* A COND_EXPR with its type being VOID_TYPE represents a
8722 conditional jump and is handled in
8723 expand_gimple_cond_expr. */
8724 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8726 /* Note that COND_EXPRs whose type is a structure or union
8727 are required to be constructed to contain assignments of
8728 a temporary variable, so that we can evaluate them here
8729 for side effect only. If type is void, we must do likewise. */
8731 gcc_assert (!TREE_ADDRESSABLE (type)
8732 && !ignore
8733 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8734 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8736 /* If we are not to produce a result, we have no target. Otherwise,
8737 if a target was specified use it; it will not be used as an
8738 intermediate target unless it is safe. If no target, use a
8739 temporary. */
8741 if (modifier != EXPAND_STACK_PARM
8742 && original_target
8743 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8744 && GET_MODE (original_target) == mode
8745 #ifdef HAVE_conditional_move
8746 && (! can_conditionally_move_p (mode)
8747 || REG_P (original_target))
8748 #endif
8749 && !MEM_P (original_target))
8750 temp = original_target;
8751 else
8752 temp = assign_temp (type, 0, 0, 1);
8754 do_pending_stack_adjust ();
8755 NO_DEFER_POP;
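/* Both arms store into TEMP; jump around the store of the THEN arm
   when the condition is false.  */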
8756 op0 = gen_label_rtx ();
8757 op1 = gen_label_rtx ();
8758 jumpifnot (TREE_OPERAND (exp, 0), op0);
8759 store_expr (TREE_OPERAND (exp, 1), temp,
8760 modifier == EXPAND_STACK_PARM);
8762 emit_jump_insn (gen_jump (op1));
8763 emit_barrier ();
8764 emit_label (op0);
8765 store_expr (TREE_OPERAND (exp, 2), temp,
8766 modifier == EXPAND_STACK_PARM);
8768 emit_label (op1);
8769 OK_DEFER_POP;
8770 return temp;
8772 case VEC_COND_EXPR:
8773 target = expand_vec_cond_expr (exp, target);
8774 return target;
8776 case MODIFY_EXPR:
8778 tree lhs = TREE_OPERAND (exp, 0);
8779 tree rhs = TREE_OPERAND (exp, 1);
8780 gcc_assert (ignore);
8781 expand_assignment (lhs, rhs);
8782 return const0_rtx;
8785 case GIMPLE_MODIFY_STMT:
8787 tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
8788 tree rhs = GIMPLE_STMT_OPERAND (exp, 1);
8790 gcc_assert (ignore);
8792 /* Check for |= or &= of a bitfield of size one into another bitfield
8793 of size 1. In this case, (unless we need the result of the
8794 assignment) we can do this more efficiently with a
8795 test followed by an assignment, if necessary.
8797 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8798 things change so we do, this code should be enhanced to
8799 support it. */
8800 if (TREE_CODE (lhs) == COMPONENT_REF
8801 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8802 || TREE_CODE (rhs) == BIT_AND_EXPR)
8803 && TREE_OPERAND (rhs, 0) == lhs
8804 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8805 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8806 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8808 rtx label = gen_label_rtx ();
8809 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
8810 do_jump (TREE_OPERAND (rhs, 1),
8811 value ? label : 0,
8812 value ? 0 : label);
8813 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value));
8814 do_pending_stack_adjust ();
8815 emit_label (label);
8816 return const0_rtx;
8819 expand_assignment (lhs, rhs);
8820 return const0_rtx;
8823 case RETURN_EXPR:
8824 if (!TREE_OPERAND (exp, 0))
8825 expand_null_return ();
8826 else
8827 expand_return (TREE_OPERAND (exp, 0));
8828 return const0_rtx;
8830 case ADDR_EXPR:
8831 return expand_expr_addr_expr (exp, target, tmode, modifier);
8833 case COMPLEX_EXPR:
8834 /* Get the rtx code of the operands. */
8835 op0 = expand_normal (TREE_OPERAND (exp, 0));
8836 op1 = expand_normal (TREE_OPERAND (exp, 1));
8838 if (!target)
8839 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8841 /* Move the real (op0) and imaginary (op1) parts to their location. */
8842 write_complex_part (target, op0, false);
8843 write_complex_part (target, op1, true);
8845 return target;
8847 case REALPART_EXPR:
8848 op0 = expand_normal (TREE_OPERAND (exp, 0));
8849 return read_complex_part (op0, false);
8851 case IMAGPART_EXPR:
8852 op0 = expand_normal (TREE_OPERAND (exp, 0));
8853 return read_complex_part (op0, true);
8855 case RESX_EXPR:
8856 expand_resx_expr (exp);
8857 return const0_rtx;
8859 case TRY_CATCH_EXPR:
8860 case CATCH_EXPR:
8861 case EH_FILTER_EXPR:
8862 case TRY_FINALLY_EXPR:
8863 /* Lowered by tree-eh.c. */
8864 gcc_unreachable ();
8866 case WITH_CLEANUP_EXPR:
8867 case CLEANUP_POINT_EXPR:
8868 case TARGET_EXPR:
8869 case CASE_LABEL_EXPR:
8870 case VA_ARG_EXPR:
8871 case BIND_EXPR:
8872 case INIT_EXPR:
8873 case CONJ_EXPR:
8874 case COMPOUND_EXPR:
8875 case PREINCREMENT_EXPR:
8876 case PREDECREMENT_EXPR:
8877 case POSTINCREMENT_EXPR:
8878 case POSTDECREMENT_EXPR:
8879 case LOOP_EXPR:
8880 case EXIT_EXPR:
8881 case TRUTH_ANDIF_EXPR:
8882 case TRUTH_ORIF_EXPR:
8883 /* Lowered by gimplify.c. */
8884 gcc_unreachable ();
8886 case EXC_PTR_EXPR:
8887 return get_exception_pointer (cfun);
8889 case FILTER_EXPR:
8890 return get_exception_filter (cfun);
8892 case FDESC_EXPR:
8893 /* Function descriptors are not valid except for as
8894 initialization constants, and should not be expanded. */
8895 gcc_unreachable ();
8897 case SWITCH_EXPR:
8898 expand_case (exp);
8899 return const0_rtx;
8901 case LABEL_EXPR:
8902 expand_label (TREE_OPERAND (exp, 0));
8903 return const0_rtx;
8905 case ASM_EXPR:
8906 expand_asm_expr (exp);
8907 return const0_rtx;
8909 case WITH_SIZE_EXPR:
8910 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8911 have pulled out the size to use in whatever context it needed. */
8912 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8913 modifier, alt_rtl);
8915 case REALIGN_LOAD_EXPR:
8917 tree oprnd0 = TREE_OPERAND (exp, 0);
8918 tree oprnd1 = TREE_OPERAND (exp, 1);
8919 tree oprnd2 = TREE_OPERAND (exp, 2);
8920 rtx op2;
8922 this_optab = optab_for_tree_code (code, type);
8923 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8924 op2 = expand_normal (oprnd2);
8925 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8926 target, unsignedp);
8927 gcc_assert (temp);
8928 return temp;
8931 case DOT_PROD_EXPR:
8933 tree oprnd0 = TREE_OPERAND (exp, 0);
8934 tree oprnd1 = TREE_OPERAND (exp, 1);
8935 tree oprnd2 = TREE_OPERAND (exp, 2);
8936 rtx op2;
8938 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8939 op2 = expand_normal (oprnd2);
8940 target = expand_widen_pattern_expr (exp, op0, op1, op2,
8941 target, unsignedp);
8942 return target;
8945 case WIDEN_SUM_EXPR:
8947 tree oprnd0 = TREE_OPERAND (exp, 0);
8948 tree oprnd1 = TREE_OPERAND (exp, 1);
8950 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8951 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
8952 target, unsignedp);
8953 return target;
8956 case REDUC_MAX_EXPR:
8957 case REDUC_MIN_EXPR:
8958 case REDUC_PLUS_EXPR:
8960 op0 = expand_normal (TREE_OPERAND (exp, 0));
8961 this_optab = optab_for_tree_code (code, type);
8962 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8963 gcc_assert (temp);
8964 return temp;
8967 case VEC_EXTRACT_EVEN_EXPR:
8968 case VEC_EXTRACT_ODD_EXPR:
8970 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8971 NULL_RTX, &op0, &op1, 0);
8972 this_optab = optab_for_tree_code (code, type);
8973 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8974 OPTAB_WIDEN);
8975 gcc_assert (temp);
8976 return temp;
8979 case VEC_INTERLEAVE_HIGH_EXPR:
8980 case VEC_INTERLEAVE_LOW_EXPR:
8982 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8983 NULL_RTX, &op0, &op1, 0);
8984 this_optab = optab_for_tree_code (code, type);
8985 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8986 OPTAB_WIDEN);
8987 gcc_assert (temp);
8988 return temp;
8991 case VEC_LSHIFT_EXPR:
8992 case VEC_RSHIFT_EXPR:
8994 target = expand_vec_shift_expr (exp, target);
8995 return target;
8998 case VEC_UNPACK_HI_EXPR:
8999 case VEC_UNPACK_LO_EXPR:
9001 op0 = expand_normal (TREE_OPERAND (exp, 0));
9002 this_optab = optab_for_tree_code (code, type);
9003 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
9004 target, unsignedp);
9005 gcc_assert (temp);
9006 return temp;
9009 case VEC_UNPACK_FLOAT_HI_EXPR:
9010 case VEC_UNPACK_FLOAT_LO_EXPR:
9012 op0 = expand_normal (TREE_OPERAND (exp, 0));
9013 /* The signedness is determined from the input operand. */
9014 this_optab = optab_for_tree_code (code,
9015 TREE_TYPE (TREE_OPERAND (exp, 0)));
9016 temp = expand_widen_pattern_expr
9017 (exp, op0, NULL_RTX, NULL_RTX,
9018 target, TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
9020 gcc_assert (temp);
9021 return temp;
9024 case VEC_WIDEN_MULT_HI_EXPR:
9025 case VEC_WIDEN_MULT_LO_EXPR:
9027 tree oprnd0 = TREE_OPERAND (exp, 0);
9028 tree oprnd1 = TREE_OPERAND (exp, 1);
9030 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9031 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
9032 target, unsignedp);
9033 gcc_assert (target);
9034 return target;
9037 case VEC_PACK_TRUNC_EXPR:
9038 case VEC_PACK_SAT_EXPR:
9039 case VEC_PACK_FIX_TRUNC_EXPR:
9041 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9042 goto binop;
9045 default:
9046 return lang_hooks.expand_expr (exp, original_target, tmode,
9047 modifier, alt_rtl);
9050 /* Here to do an ordinary binary operator. */
9051 binop:
9052 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9053 subtarget, &op0, &op1, 0);
9054 binop2:
9055 this_optab = optab_for_tree_code (code, type);
9056 binop3:
9057 if (modifier == EXPAND_STACK_PARM)
9058 target = 0;
9059 temp = expand_binop (mode, this_optab, op0, op1, target,
9060 unsignedp, OPTAB_LIB_WIDEN);
9061 gcc_assert (temp);
9062 return REDUCE_BIT_FIELD (temp);
9064 #undef REDUCE_BIT_FIELD
9066 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9067 signedness of TYPE), possibly returning the result in TARGET. */
9068 static rtx
9069 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9071 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9072 if (target && GET_MODE (target) != GET_MODE (exp))
9073 target = 0;
9074 /* For constant values, reduce using build_int_cst_type. */
9075 if (GET_CODE (exp) == CONST_INT)
9077 HOST_WIDE_INT value = INTVAL (exp);
9078 tree t = build_int_cst_type (type, value);
9079 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9081 else if (TYPE_UNSIGNED (type))
9083 rtx mask;
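/* Build a mask of the PREC low-order bits (e.g. PREC == 3 gives 7),
   possibly spanning two HOST_WIDE_INTs.  */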
9084 if (prec < HOST_BITS_PER_WIDE_INT)
9085 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
9086 GET_MODE (exp));
9087 else
9088 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
9089 ((unsigned HOST_WIDE_INT) 1
9090 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
9091 GET_MODE (exp));
9092 return expand_and (GET_MODE (exp), exp, mask, target);
9094 else
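/* For a signed type, sign-extend from bit PREC - 1 by shifting
   left and then arithmetically back right.  */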
9096 tree count = build_int_cst (NULL_TREE,
9097 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9098 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9099 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9103 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9104 when applied to the address of EXP produces an address known to be
9105 aligned more than BIGGEST_ALIGNMENT. */
9107 static int
9108 is_aligning_offset (tree offset, tree exp)
9110 /* Strip off any conversions. */
9111 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9112 || TREE_CODE (offset) == NOP_EXPR
9113 || TREE_CODE (offset) == CONVERT_EXPR)
9114 offset = TREE_OPERAND (offset, 0);
9116 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9117 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9118 if (TREE_CODE (offset) != BIT_AND_EXPR
9119 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9120 || compare_tree_int (TREE_OPERAND (offset, 1),
9121 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9122 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9123 return 0;
9125 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9126 It must be NEGATE_EXPR. Then strip any more conversions. */
9127 offset = TREE_OPERAND (offset, 0);
9128 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9129 || TREE_CODE (offset) == NOP_EXPR
9130 || TREE_CODE (offset) == CONVERT_EXPR)
9131 offset = TREE_OPERAND (offset, 0);
9133 if (TREE_CODE (offset) != NEGATE_EXPR)
9134 return 0;
9136 offset = TREE_OPERAND (offset, 0);
9137 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9138 || TREE_CODE (offset) == NOP_EXPR
9139 || TREE_CODE (offset) == CONVERT_EXPR)
9140 offset = TREE_OPERAND (offset, 0);
9142 /* This must now be the address of EXP. */
9143 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
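/* Illustrative sketch (not part of the build): the tree shape recognized
   above corresponds to source-level rounding code of the form below, where
   adding ((-address) & (ALIGN - 1)) to an address rounds it up to the next
   multiple of ALIGN.  EXAMPLE_ALIGN and the other names are invented for
   illustration, and the alignment must exceed the target's
   BIGGEST_ALIGNMENT in bytes for the match above to succeed.  */
#if 0
#include <stdint.h>

#define EXAMPLE_ALIGN 64

static unsigned char example_buffer[2 * EXAMPLE_ALIGN];

static unsigned char *
example_aligned_start (void)
{
  /* The BIT_AND_EXPR of a NEGATE_EXPR of an ADDR_EXPR with a constant of
     the form 2**N - 1 that is_aligning_offset looks for.  */
  uintptr_t offset = (- (uintptr_t) example_buffer) & (EXAMPLE_ALIGN - 1);
  return example_buffer + offset;
}
#endif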
9146 /* Return the tree node if ARG corresponds to a string constant, or zero
9147 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9148 in bytes within the string that ARG is accessing. The type of the
9149 offset will be `sizetype'. */
9151 tree
9152 string_constant (tree arg, tree *ptr_offset)
9154 tree array, offset, lower_bound;
9155 STRIP_NOPS (arg);
9157 if (TREE_CODE (arg) == ADDR_EXPR)
9159 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9161 *ptr_offset = size_zero_node;
9162 return TREE_OPERAND (arg, 0);
9164 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9166 array = TREE_OPERAND (arg, 0);
9167 offset = size_zero_node;
9169 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9171 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9172 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9173 if (TREE_CODE (array) != STRING_CST
9174 && TREE_CODE (array) != VAR_DECL)
9175 return 0;
9177 /* Check if the array has a nonzero lower bound. */
9178 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9179 if (!integer_zerop (lower_bound))
9181 /* If the offset and base aren't both constants, return 0. */
9182 if (TREE_CODE (lower_bound) != INTEGER_CST)
9183 return 0;
9184 if (TREE_CODE (offset) != INTEGER_CST)
9185 return 0;
9186 /* Adjust offset by the lower bound. */
9187 offset = size_diffop (fold_convert (sizetype, offset),
9188 fold_convert (sizetype, lower_bound));
9191 else
9192 return 0;
9194 else if (TREE_CODE (arg) == PLUS_EXPR)
9196 tree arg0 = TREE_OPERAND (arg, 0);
9197 tree arg1 = TREE_OPERAND (arg, 1);
9199 STRIP_NOPS (arg0);
9200 STRIP_NOPS (arg1);
9202 if (TREE_CODE (arg0) == ADDR_EXPR
9203 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9204 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9206 array = TREE_OPERAND (arg0, 0);
9207 offset = arg1;
9209 else if (TREE_CODE (arg1) == ADDR_EXPR
9210 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9211 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9213 array = TREE_OPERAND (arg1, 0);
9214 offset = arg0;
9216 else
9217 return 0;
9219 else
9220 return 0;
9222 if (TREE_CODE (array) == STRING_CST)
9224 *ptr_offset = fold_convert (sizetype, offset);
9225 return array;
9227 else if (TREE_CODE (array) == VAR_DECL)
9229 int length;
9231 /* Variables initialized to string literals can be handled too. */
9232 if (DECL_INITIAL (array) == NULL_TREE
9233 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9234 return 0;
9236 /* The array must be read-only and non-volatile, and must bind locally. */
9237 if (! TREE_READONLY (array)
9238 || TREE_SIDE_EFFECTS (array)
9239 || ! targetm.binds_local_p (array))
9240 return 0;
9242 /* Avoid const char foo[4] = "abcde"; */
9243 if (DECL_SIZE_UNIT (array) == NULL_TREE
9244 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9245 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9246 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9247 return 0;
9249 /* If the variable is bigger than the string literal, OFFSET must be constant
9250 and within the bounds of the string literal. */
9251 offset = fold_convert (sizetype, offset);
9252 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9253 && (! host_integerp (offset, 1)
9254 || compare_tree_int (offset, length) >= 0))
9255 return 0;
9257 *ptr_offset = offset;
9258 return DECL_INITIAL (array);
9261 return 0;
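/* Illustrative sketch (not part of the build): forms of ARG that the
   function above can resolve to a string constant plus a byte offset.
   The declarations below are hypothetical and for illustration only.  */
#if 0
static const char example_greeting[] = "hello, world";

static const char *
example_string_args (void)
{
  const char *a = "abc" + 1;              /* ADDR_EXPR of a STRING_CST   */
  const char *b = &example_greeting[7];   /* ADDR_EXPR of an ARRAY_REF   */
  const char *c = example_greeting + 7;   /* PLUS_EXPR of an ADDR_EXPR
                                             and a constant offset       */
  return a[0] ? b : c;
}
#endif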
9264 /* Generate code to calculate EXP using a store-flag instruction
9265 and return an rtx for the result. EXP is either a comparison
9266 or a TRUTH_NOT_EXPR whose operand is a comparison.
9268 If TARGET is nonzero, store the result there if convenient.
9270 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9271 cheap.
9273 Return zero if there is no suitable set-flag instruction
9274 available on this machine.
9276 Once expand_expr has been called on the arguments of the comparison,
9277 we are committed to doing the store flag, since it is not safe to
9278 re-evaluate the expression. We emit the store-flag insn by calling
9279 emit_store_flag, but only expand the arguments if we have a reason
9280 to believe that emit_store_flag will be successful. If we think that
9281 it will, but it isn't, we have to simulate the store-flag with a
9282 set/jump/set sequence. */
9284 static rtx
9285 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9287 enum rtx_code code;
9288 tree arg0, arg1, type;
9289 tree tem;
9290 enum machine_mode operand_mode;
9291 int invert = 0;
9292 int unsignedp;
9293 rtx op0, op1;
9294 enum insn_code icode;
9295 rtx subtarget = target;
9296 rtx result, label;
9298 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9299 result at the end. We can't simply invert the test since it would
9300 have already been inverted if it were valid. This case occurs for
9301 some floating-point comparisons. */
9303 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9304 invert = 1, exp = TREE_OPERAND (exp, 0);
9306 arg0 = TREE_OPERAND (exp, 0);
9307 arg1 = TREE_OPERAND (exp, 1);
9309 /* Don't crash if the comparison was erroneous. */
9310 if (arg0 == error_mark_node || arg1 == error_mark_node)
9311 return const0_rtx;
9313 type = TREE_TYPE (arg0);
9314 operand_mode = TYPE_MODE (type);
9315 unsignedp = TYPE_UNSIGNED (type);
9317 /* We won't bother with BLKmode store-flag operations because it would mean
9318 passing a lot of information to emit_store_flag. */
9319 if (operand_mode == BLKmode)
9320 return 0;
9322 /* We won't bother with store-flag operations involving function pointers
9323 when function pointers must be canonicalized before comparisons. */
9324 #ifdef HAVE_canonicalize_funcptr_for_compare
9325 if (HAVE_canonicalize_funcptr_for_compare
9326 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9327 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9328 == FUNCTION_TYPE))
9329 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9330 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9331 == FUNCTION_TYPE))))
9332 return 0;
9333 #endif
9335 STRIP_NOPS (arg0);
9336 STRIP_NOPS (arg1);
9338 /* Get the rtx comparison code to use. We know that EXP is a comparison
9339 operation of some type. Some comparisons against 1 and -1 can be
9340 converted to comparisons with zero. Do so here so that the tests
9341 below will be aware that we have a comparison with zero. These
9342 tests will not catch constants in the first operand, but constants
9343 are rarely passed as the first operand. */
9345 switch (TREE_CODE (exp))
9347 case EQ_EXPR:
9348 code = EQ;
9349 break;
9350 case NE_EXPR:
9351 code = NE;
9352 break;
9353 case LT_EXPR:
9354 if (integer_onep (arg1))
9355 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9356 else
9357 code = unsignedp ? LTU : LT;
9358 break;
9359 case LE_EXPR:
9360 if (! unsignedp && integer_all_onesp (arg1))
9361 arg1 = integer_zero_node, code = LT;
9362 else
9363 code = unsignedp ? LEU : LE;
9364 break;
9365 case GT_EXPR:
9366 if (! unsignedp && integer_all_onesp (arg1))
9367 arg1 = integer_zero_node, code = GE;
9368 else
9369 code = unsignedp ? GTU : GT;
9370 break;
9371 case GE_EXPR:
9372 if (integer_onep (arg1))
9373 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9374 else
9375 code = unsignedp ? GEU : GE;
9376 break;
9378 case UNORDERED_EXPR:
9379 code = UNORDERED;
9380 break;
9381 case ORDERED_EXPR:
9382 code = ORDERED;
9383 break;
9384 case UNLT_EXPR:
9385 code = UNLT;
9386 break;
9387 case UNLE_EXPR:
9388 code = UNLE;
9389 break;
9390 case UNGT_EXPR:
9391 code = UNGT;
9392 break;
9393 case UNGE_EXPR:
9394 code = UNGE;
9395 break;
9396 case UNEQ_EXPR:
9397 code = UNEQ;
9398 break;
9399 case LTGT_EXPR:
9400 code = LTGT;
9401 break;
9403 default:
9404 gcc_unreachable ();
9407 /* Put a constant second. */
9408 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9410 tem = arg0; arg0 = arg1; arg1 = tem;
9411 code = swap_condition (code);
9414 /* If this is an equality or inequality test of a single bit, we can
9415 do this by shifting the bit being tested to the low-order bit and
9416 masking the result with the constant 1. If the condition was EQ,
9417 we xor it with 1. This does not require an scc insn and is faster
9418 than an scc insn even if we have it.
9420 The code to make this transformation was moved into fold_single_bit_test,
9421 so we just call into the folder and expand its result. */
9423 if ((code == NE || code == EQ)
9424 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9425 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9427 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9428 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9429 arg0, arg1, type),
9430 target, VOIDmode, EXPAND_NORMAL);
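/* Illustrative aside (example values only): for (x & 8) != 0 the folder
   produces the equivalent of ((x >> 3) & 1), and for (x & 8) == 0 the
   equivalent of (((x >> 3) & 1) ^ 1), so no scc instruction is needed.  */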
9433 /* Now see if we are likely to be able to do this. Return if not. */
9434 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9435 return 0;
9437 icode = setcc_gen_code[(int) code];
9439 if (icode == CODE_FOR_nothing)
9441 enum machine_mode wmode;
9443 for (wmode = operand_mode;
9444 icode == CODE_FOR_nothing && wmode != VOIDmode;
9445 wmode = GET_MODE_WIDER_MODE (wmode))
9446 icode = cstore_optab->handlers[(int) wmode].insn_code;
9449 if (icode == CODE_FOR_nothing
9450 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9452 /* We can only do this if it is one of the special cases that
9453 can be handled without an scc insn. */
9454 if ((code == LT && integer_zerop (arg1))
9455 || (! only_cheap && code == GE && integer_zerop (arg1)))
9457 else if (! only_cheap && (code == NE || code == EQ)
9458 && TREE_CODE (type) != REAL_TYPE
9459 && ((abs_optab->handlers[(int) operand_mode].insn_code
9460 != CODE_FOR_nothing)
9461 || (ffs_optab->handlers[(int) operand_mode].insn_code
9462 != CODE_FOR_nothing)))
9464 else
9465 return 0;
9468 if (! get_subtarget (target)
9469 || GET_MODE (subtarget) != operand_mode)
9470 subtarget = 0;
9472 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9474 if (target == 0)
9475 target = gen_reg_rtx (mode);
9477 result = emit_store_flag (target, code, op0, op1,
9478 operand_mode, unsignedp, 1);
9480 if (result)
9482 if (invert)
9483 result = expand_binop (mode, xor_optab, result, const1_rtx,
9484 result, 0, OPTAB_LIB_WIDEN);
9485 return result;
9488 /* If this failed, we have to do this with set/compare/jump/set code. */
9489 if (!REG_P (target)
9490 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9491 target = gen_reg_rtx (GET_MODE (target));
9493 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9494 label = gen_label_rtx ();
9495 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9496 NULL_RTX, label);
9498 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9499 emit_label (label);
9501 return target;
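/* Illustrative sketch (not part of the build): when emit_store_flag fails,
   the code above falls back on the set/jump/set sequence.  The hypothetical
   helper below shows, at the C level, what that sequence computes for a
   signed "<" comparison; the names are invented for illustration.  */
#if 0
static int
example_store_flag_fallback (int a, int b)
{
  int result = 1;        /* emit_move_insn (target, const1_rtx)            */
  if (a < b)             /* do_compare_rtx_and_jump branches to the label  */
    goto done;           /* when the condition holds, skipping the store   */
  result = 0;            /* emit_move_insn (target, const0_rtx)            */
 done:
  return result;
}
#endif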
9505 /* Stubs in case we haven't got a casesi insn. */
9506 #ifndef HAVE_casesi
9507 # define HAVE_casesi 0
9508 # define gen_casesi(a, b, c, d, e) (0)
9509 # define CODE_FOR_casesi CODE_FOR_nothing
9510 #endif
9512 /* If the machine does not have a case insn that compares the bounds,
9513 this means extra overhead for dispatch tables, which raises the
9514 threshold for using them. */
9515 #ifndef CASE_VALUES_THRESHOLD
9516 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9517 #endif /* CASE_VALUES_THRESHOLD */
9519 unsigned int
9520 case_values_threshold (void)
9522 return CASE_VALUES_THRESHOLD;
9525 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9526 0 otherwise (i.e. if there is no casesi instruction). */
9527 int
9528 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9529 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9531 enum machine_mode index_mode = SImode;
9532 int index_bits = GET_MODE_BITSIZE (index_mode);
9533 rtx op1, op2, index;
9534 enum machine_mode op_mode;
9536 if (! HAVE_casesi)
9537 return 0;
9539 /* Convert the index to SImode. */
9540 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9542 enum machine_mode omode = TYPE_MODE (index_type);
9543 rtx rangertx = expand_normal (range);
9545 /* We must handle the endpoints in the original mode. */
9546 index_expr = build2 (MINUS_EXPR, index_type,
9547 index_expr, minval);
9548 minval = integer_zero_node;
9549 index = expand_normal (index_expr);
9550 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9551 omode, 1, default_label);
9552 /* Now we can safely truncate. */
9553 index = convert_to_mode (index_mode, index, 0);
9555 else
9557 if (TYPE_MODE (index_type) != index_mode)
9559 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9560 index_expr = fold_convert (index_type, index_expr);
9563 index = expand_normal (index_expr);
9566 do_pending_stack_adjust ();
9568 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9569 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9570 (index, op_mode))
9571 index = copy_to_mode_reg (op_mode, index);
9573 op1 = expand_normal (minval);
9575 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9576 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9577 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9578 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9579 (op1, op_mode))
9580 op1 = copy_to_mode_reg (op_mode, op1);
9582 op2 = expand_normal (range);
9584 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9585 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9586 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9587 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9588 (op2, op_mode))
9589 op2 = copy_to_mode_reg (op_mode, op2);
9591 emit_jump_insn (gen_casesi (index, op1, op2,
9592 table_label, default_label));
9593 return 1;
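/* Illustrative sketch (not part of the build): when the index type is wider
   than SImode, the code above subtracts MINVAL in the wide mode, sends any
   value above RANGE to the default label, and only then truncates, so the
   truncation cannot change which case is dispatched.  The hypothetical
   helper below shows the same idea with 64-bit and 32-bit integers.  */
#if 0
#include <stdint.h>

static int32_t
example_narrow_index (int64_t index, int64_t minval, uint64_t range,
                      int *use_default)
{
  /* Subtract the lower bound in the wide type first.  */
  uint64_t rel = (uint64_t) index - (uint64_t) minval;

  /* Anything above RANGE goes to the default label and never reaches
     the casesi pattern.  */
  *use_default = rel > range;

  /* Truncation is now safe for every value that is actually dispatched.  */
  return (int32_t) rel;
}
#endif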
9596 /* Attempt to generate a tablejump instruction; same concept. */
9597 #ifndef HAVE_tablejump
9598 #define HAVE_tablejump 0
9599 #define gen_tablejump(x, y) (0)
9600 #endif
9602 /* Subroutine of the next function.
9604 INDEX is the value being switched on, with the lowest value
9605 in the table already subtracted.
9606 MODE is its expected mode (needed if INDEX is constant).
9607 RANGE is the length of the jump table.
9608 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9610 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9611 index value is out of range. */
9613 static void
9614 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9615 rtx default_label)
9617 rtx temp, vector;
9619 if (INTVAL (range) > cfun->max_jumptable_ents)
9620 cfun->max_jumptable_ents = INTVAL (range);
9622 /* Do an unsigned comparison (in the proper mode) between the index
9623 expression and the value which represents the length of the range.
9624 Since we just finished subtracting the lower bound of the range
9625 from the index expression, this comparison allows us to simultaneously
9626 check that the original index expression value is both greater than
9627 or equal to the minimum value of the range and less than or equal to
9628 the maximum value of the range. */
9630 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9631 default_label);
9633 /* If index is in range, it must fit in Pmode.
9634 Convert to Pmode so we can index with it. */
9635 if (mode != Pmode)
9636 index = convert_to_mode (Pmode, index, 1);
9638 /* Don't let a MEM slip through, because then INDEX that comes
9639 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9640 and break_out_memory_refs will go to work on it and mess it up. */
9641 #ifdef PIC_CASE_VECTOR_ADDRESS
9642 if (flag_pic && !REG_P (index))
9643 index = copy_to_mode_reg (Pmode, index);
9644 #endif
9646 /* If flag_force_addr were to affect this address
9647 it could interfere with the tricky assumptions made
9648 about addresses that contain label-refs,
9649 which may be valid only very near the tablejump itself. */
9650 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9651 GET_MODE_SIZE, because this indicates how large insns are. The other
9652 uses should all be Pmode, because they are addresses. This code
9653 could fail if addresses and insns are not the same size. */
9654 index = gen_rtx_PLUS (Pmode,
9655 gen_rtx_MULT (Pmode, index,
9656 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9657 gen_rtx_LABEL_REF (Pmode, table_label));
9658 #ifdef PIC_CASE_VECTOR_ADDRESS
9659 if (flag_pic)
9660 index = PIC_CASE_VECTOR_ADDRESS (index);
9661 else
9662 #endif
9663 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9664 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9665 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9666 convert_move (temp, vector, 0);
9668 emit_jump_insn (gen_tablejump (temp, table_label));
9670 /* If we are generating PIC code or if the table is PC-relative, the
9671 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9672 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9673 emit_barrier ();
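/* Illustrative sketch (not part of the build): the single unsigned GTU test
   above replaces two signed bound checks, because once the lower bound has
   been subtracted, any value below it wraps around to a large unsigned
   number.  The bounds 10 and 17 below are made up for illustration.  */
#if 0
static int
example_in_case_range (int x)
{
  /* Equivalent to (x >= 10 && x <= 17) using one comparison.  */
  return (unsigned int) x - 10u <= 17u - 10u;
}
#endif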
9676 int
9677 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9678 rtx table_label, rtx default_label)
9680 rtx index;
9682 if (! HAVE_tablejump)
9683 return 0;
9685 index_expr = fold_build2 (MINUS_EXPR, index_type,
9686 fold_convert (index_type, index_expr),
9687 fold_convert (index_type, minval));
9688 index = expand_normal (index_expr);
9689 do_pending_stack_adjust ();
9691 do_tablejump (index, TYPE_MODE (index_type),
9692 convert_modes (TYPE_MODE (index_type),
9693 TYPE_MODE (TREE_TYPE (range)),
9694 expand_normal (range),
9695 TYPE_UNSIGNED (TREE_TYPE (range))),
9696 table_label, default_label);
9697 return 1;
9700 /* Nonzero if the mode is a valid vector mode for this architecture.
9701 This returns nonzero even if there is no hardware support for the
9702 vector mode, but we can emulate with narrower modes. */
9704 int
9705 vector_mode_valid_p (enum machine_mode mode)
9707 enum mode_class class = GET_MODE_CLASS (mode);
9708 enum machine_mode innermode;
9710 /* Doh! What's going on? */
9711 if (class != MODE_VECTOR_INT
9712 && class != MODE_VECTOR_FLOAT)
9713 return 0;
9715 /* Hardware support. Woo hoo! */
9716 if (targetm.vector_mode_supported_p (mode))
9717 return 1;
9719 innermode = GET_MODE_INNER (mode);
9721 /* We should probably return 1 if requesting V4DI when we have no DI
9722 but do have V2DI, but this case is probably very unlikely. */
9724 /* If we have support for the inner mode, we can safely emulate it.
9725 We may not have V2DI, but we can emulate with a pair of DIs. */
9726 return targetm.scalar_mode_supported_p (innermode);
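/* Illustrative sketch (not part of the build): "emulate with narrower
   modes" above means that, say, a V2DImode operation can be opened up into
   two DImode operations when there is no vector hardware, roughly as the
   hypothetical helper below does.  The type and names are invented for
   illustration only.  */
#if 0
#include <stdint.h>

typedef struct { int64_t e[2]; } example_v2di;

static example_v2di
example_v2di_add (example_v2di a, example_v2di b)
{
  /* One scalar DImode addition per vector element.  */
  example_v2di r;
  r.e[0] = a.e[0] + b.e[0];
  r.e[1] = a.e[1] + b.e[1];
  return r;
}
#endif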
9729 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9730 static rtx
9731 const_vector_from_tree (tree exp)
9733 rtvec v;
9734 int units, i;
9735 tree link, elt;
9736 enum machine_mode inner, mode;
9738 mode = TYPE_MODE (TREE_TYPE (exp));
9740 if (initializer_zerop (exp))
9741 return CONST0_RTX (mode);
9743 units = GET_MODE_NUNITS (mode);
9744 inner = GET_MODE_INNER (mode);
9746 v = rtvec_alloc (units);
9748 link = TREE_VECTOR_CST_ELTS (exp);
9749 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9751 elt = TREE_VALUE (link);
9753 if (TREE_CODE (elt) == REAL_CST)
9754 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9755 inner);
9756 else
9757 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9758 TREE_INT_CST_HIGH (elt),
9759 inner);
9762 /* Initialize remaining elements to 0. */
9763 for (; i < units; ++i)
9764 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9766 return gen_rtx_CONST_VECTOR (mode, v);
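/* Illustrative aside (example values only): for a four-element vector type,
   a VECTOR_CST that lists only { 1, 2 } yields the CONST_VECTOR
   { 1, 2, 0, 0 }; elements the constant does not mention are filled with
   zero by the loop above.  */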
9768 #include "gt-expr.h"