1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "machmode.h"
28 #include "real.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "reload.h"
44 #include "output.h"
45 #include "typeclass.h"
46 #include "toplev.h"
47 #include "ggc.h"
48 #include "langhooks.h"
49 #include "intl.h"
50 #include "tm_p.h"
51 #include "tree-iterator.h"
52 #include "tree-pass.h"
53 #include "tree-flow.h"
54 #include "target.h"
55 #include "timevar.h"
57 /* Decide whether a function's arguments should be processed
58 from first to last or from last to first.
60 They should if the stack and args grow in opposite directions, but
61 only if we have push insns. */
63 #ifdef PUSH_ROUNDING
65 #ifndef PUSH_ARGS_REVERSED
66 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
67 #define PUSH_ARGS_REVERSED /* If it's last to first. */
68 #endif
69 #endif
71 #endif
73 #ifndef STACK_PUSH_CODE
74 #ifdef STACK_GROWS_DOWNWARD
75 #define STACK_PUSH_CODE PRE_DEC
76 #else
77 #define STACK_PUSH_CODE PRE_INC
78 #endif
79 #endif
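/* Illustrative sketch (added note, not part of the original source): with
   the PRE_DEC default above, pushing an SImode register on a
   STACK_GROWS_DOWNWARD target yields RTL of roughly the form
       (set (mem:SI (pre_dec:P (reg sp))) (reg:SI n))
   while a PRE_INC target addresses the slot above the stack pointer
   instead.  The exact form also depends on PUSH_ROUNDING for the
   mode's size.  */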
82 /* If this is nonzero, we do not bother generating VOLATILE
83 around volatile memory references, and we are willing to
84 output indirect addresses. If cse is to follow, we reject
85 indirect addresses so a useful potential cse is generated;
86 if it is used only once, instruction combination will produce
87 the same indirect address eventually. */
88 int cse_not_expected;
90 /* This structure is used by move_by_pieces to describe the move to
91 be performed. */
92 struct move_by_pieces
94 rtx to;
95 rtx to_addr;
96 int autinc_to;
97 int explicit_inc_to;
98 rtx from;
99 rtx from_addr;
100 int autinc_from;
101 int explicit_inc_from;
102 unsigned HOST_WIDE_INT len;
103 HOST_WIDE_INT offset;
104 int reverse;
107 /* This structure is used by store_by_pieces to describe the clear to
108 be performed. */
110 struct store_by_pieces
112 rtx to;
113 rtx to_addr;
114 int autinc_to;
115 int explicit_inc_to;
116 unsigned HOST_WIDE_INT len;
117 HOST_WIDE_INT offset;
118 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
119 void *constfundata;
120 int reverse;
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124 unsigned int,
125 unsigned int);
126 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *);
128 static bool block_move_libcall_safe_for_call_parm (void);
129 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static tree clear_storage_libcall_fn (int);
138 static rtx compress_float_constant (rtx, rtx);
139 static rtx get_subtarget (rtx);
140 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
141 HOST_WIDE_INT, enum machine_mode,
142 tree, tree, int, int);
143 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
144 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
145 tree, tree, int);
147 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
149 static int is_aligning_offset (tree, tree);
150 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
151 enum expand_modifier);
152 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
153 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
154 #ifdef PUSH_ROUNDING
155 static void emit_single_push_insn (enum machine_mode, rtx, tree);
156 #endif
157 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
158 static rtx const_vector_from_tree (tree);
159 static void write_complex_part (rtx, rtx, bool);
161 /* Record for each mode whether we can move a register directly to or
162 from an object of that mode in memory. If we can't, we won't try
163 to use that mode directly when accessing a field of that mode. */
165 static char direct_load[NUM_MACHINE_MODES];
166 static char direct_store[NUM_MACHINE_MODES];
168 /* Record for each mode whether we can float-extend from memory. */
170 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
172 /* This macro is used to determine whether move_by_pieces should be called
173 to perform a structure copy. */
174 #ifndef MOVE_BY_PIECES_P
175 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
176 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
177 < (unsigned int) MOVE_RATIO)
178 #endif
180 /* This macro is used to determine whether clear_by_pieces should be
181 called to clear storage. */
182 #ifndef CLEAR_BY_PIECES_P
183 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
185 < (unsigned int) CLEAR_RATIO)
186 #endif
188 /* This macro is used to determine whether store_by_pieces should be
189 called to "memset" storage with byte values other than zero, or
190 to "memcpy" storage when the source is a constant string. */
191 #ifndef STORE_BY_PIECES_P
192 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
193 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
194 < (unsigned int) MOVE_RATIO)
195 #endif
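/* Added worked example (target parameters assumed for illustration:
   a 32-bit target with MOVE_MAX_PIECES == 4 and MOVE_RATIO == 5):
   copying 16 well-aligned bytes needs 16/4 == 4 SImode moves, and
   4 < 5, so MOVE_BY_PIECES_P is true and the copy is expanded inline
   rather than through memcpy.  CLEAR_BY_PIECES_P and STORE_BY_PIECES_P
   apply the same insn-count estimate against CLEAR_RATIO and
   MOVE_RATIO respectively.  */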
197 /* This array records the insn_code of insns to perform block moves. */
198 enum insn_code movmem_optab[NUM_MACHINE_MODES];
200 /* This array records the insn_code of insns to perform block sets. */
201 enum insn_code setmem_optab[NUM_MACHINE_MODES];
203 /* These arrays record the insn_code of three different kinds of insns
204 to perform block compares. */
205 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
206 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
207 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
209 /* Synchronization primitives. */
210 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
211 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
212 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
213 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
214 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
215 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
216 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
217 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
229 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
230 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
231 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
233 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
235 #ifndef SLOW_UNALIGNED_ACCESS
236 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
237 #endif
239 /* This is run once per compilation to set up which modes can be used
240 directly in memory and to initialize the block move optab. */
242 void
243 init_expr_once (void)
245 rtx insn, pat;
246 enum machine_mode mode;
247 int num_clobbers;
248 rtx mem, mem1;
249 rtx reg;
251 /* Try indexing by frame ptr and try by stack ptr.
252 It is known that on the Convex the stack ptr isn't a valid index.
253 With luck, one or the other is valid on any machine. */
254 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
255 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
257 /* A scratch register we can modify in-place below to avoid
258 useless RTL allocations. */
259 reg = gen_rtx_REG (VOIDmode, -1);
261 insn = rtx_alloc (INSN);
262 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
263 PATTERN (insn) = pat;
265 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
266 mode = (enum machine_mode) ((int) mode + 1))
268 int regno;
270 direct_load[(int) mode] = direct_store[(int) mode] = 0;
271 PUT_MODE (mem, mode);
272 PUT_MODE (mem1, mode);
273 PUT_MODE (reg, mode);
275 /* See if there is some register that can be used in this mode and
276 directly loaded or stored from memory. */
278 if (mode != VOIDmode && mode != BLKmode)
279 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
280 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
281 regno++)
283 if (! HARD_REGNO_MODE_OK (regno, mode))
284 continue;
286 REGNO (reg) = regno;
288 SET_SRC (pat) = mem;
289 SET_DEST (pat) = reg;
290 if (recog (pat, insn, &num_clobbers) >= 0)
291 direct_load[(int) mode] = 1;
293 SET_SRC (pat) = mem1;
294 SET_DEST (pat) = reg;
295 if (recog (pat, insn, &num_clobbers) >= 0)
296 direct_load[(int) mode] = 1;
298 SET_SRC (pat) = reg;
299 SET_DEST (pat) = mem;
300 if (recog (pat, insn, &num_clobbers) >= 0)
301 direct_store[(int) mode] = 1;
303 SET_SRC (pat) = reg;
304 SET_DEST (pat) = mem1;
305 if (recog (pat, insn, &num_clobbers) >= 0)
306 direct_store[(int) mode] = 1;
310 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
312 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
313 mode = GET_MODE_WIDER_MODE (mode))
315 enum machine_mode srcmode;
316 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
317 srcmode = GET_MODE_WIDER_MODE (srcmode))
319 enum insn_code ic;
321 ic = can_extend_p (mode, srcmode, 0);
322 if (ic == CODE_FOR_nothing)
323 continue;
325 PUT_MODE (mem, srcmode);
327 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
328 float_extend_from_mem[mode][srcmode] = true;
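/* Added summary note (illustrative, not part of the original file):
   after this initialization, direct_load[(int) SImode] being nonzero
   means the target recognizes a plain (set (reg:SI hard-reg) (mem:SI ...))
   insn, so SImode fields can be accessed in memory directly; likewise
   float_extend_from_mem[DFmode][SFmode] records whether an SFmode memory
   operand can feed the target's SFmode-to-DFmode extension insn without
   an intermediate register load.  */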
333 /* This is run at the start of compiling a function. */
335 void
336 init_expr (void)
338 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
341 /* Copy data from FROM to TO, where the machine modes are not the same.
342 Both modes may be integer, or both may be floating.
343 UNSIGNEDP should be nonzero if FROM is an unsigned type.
344 This causes zero-extension instead of sign-extension. */
346 void
347 convert_move (rtx to, rtx from, int unsignedp)
349 enum machine_mode to_mode = GET_MODE (to);
350 enum machine_mode from_mode = GET_MODE (from);
351 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
352 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
353 enum insn_code code;
354 rtx libcall;
356 /* rtx code for making an equivalent value. */
357 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
358 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
361 gcc_assert (to_real == from_real);
363 /* If the source and destination are already the same, then there's
364 nothing to do. */
365 if (to == from)
366 return;
368 /* If FROM is a SUBREG that indicates that we have already done at least
369 the required extension, strip it. We don't handle such SUBREGs as
370 TO here. */
372 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
373 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
374 >= GET_MODE_SIZE (to_mode))
375 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
376 from = gen_lowpart (to_mode, from), from_mode = to_mode;
378 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
380 if (to_mode == from_mode
381 || (from_mode == VOIDmode && CONSTANT_P (from)))
383 emit_move_insn (to, from);
384 return;
387 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
389 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
391 if (VECTOR_MODE_P (to_mode))
392 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
393 else
394 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
396 emit_move_insn (to, from);
397 return;
400 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
402 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
403 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
404 return;
407 if (to_real)
409 rtx value, insns;
410 convert_optab tab;
412 gcc_assert ((GET_MODE_PRECISION (from_mode)
413 != GET_MODE_PRECISION (to_mode))
414 || (DECIMAL_FLOAT_MODE_P (from_mode)
415 != DECIMAL_FLOAT_MODE_P (to_mode)));
417 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
418 /* Conversion between decimal float and binary float, same size. */
419 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
420 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
421 tab = sext_optab;
422 else
423 tab = trunc_optab;
425 /* Try converting directly if the insn is supported. */
427 code = tab->handlers[to_mode][from_mode].insn_code;
428 if (code != CODE_FOR_nothing)
430 emit_unop_insn (code, to, from,
431 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
432 return;
435 /* Otherwise use a libcall. */
436 libcall = tab->handlers[to_mode][from_mode].libfunc;
438 /* Is this conversion implemented yet? */
439 gcc_assert (libcall);
441 start_sequence ();
442 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
443 1, from, from_mode);
444 insns = get_insns ();
445 end_sequence ();
446 emit_libcall_block (insns, to, value,
447 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
448 from)
449 : gen_rtx_FLOAT_EXTEND (to_mode, from));
450 return;
453 /* Handle pointer conversion. */ /* SPEE 900220. */
454 /* Targets are expected to provide conversion insns between PxImode and
455 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
456 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
458 enum machine_mode full_mode
459 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
461 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
462 != CODE_FOR_nothing);
464 if (full_mode != from_mode)
465 from = convert_to_mode (full_mode, from, unsignedp);
466 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
467 to, from, UNKNOWN);
468 return;
470 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
472 rtx new_from;
473 enum machine_mode full_mode
474 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
476 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
477 != CODE_FOR_nothing);
479 if (to_mode == full_mode)
481 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
482 to, from, UNKNOWN);
483 return;
486 new_from = gen_reg_rtx (full_mode);
487 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
488 new_from, from, UNKNOWN);
490 /* else proceed to integer conversions below. */
491 from_mode = full_mode;
492 from = new_from;
495 /* Now both modes are integers. */
497 /* Handle expanding beyond a word. */
498 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
499 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
501 rtx insns;
502 rtx lowpart;
503 rtx fill_value;
504 rtx lowfrom;
505 int i;
506 enum machine_mode lowpart_mode;
507 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
509 /* Try converting directly if the insn is supported. */
510 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
511 != CODE_FOR_nothing)
513 /* If FROM is a SUBREG, put it into a register. Do this
514 so that we always generate the same set of insns for
515 better cse'ing; if an intermediate assignment occurred,
516 we won't be doing the operation directly on the SUBREG. */
517 if (optimize > 0 && GET_CODE (from) == SUBREG)
518 from = force_reg (from_mode, from);
519 emit_unop_insn (code, to, from, equiv_code);
520 return;
522 /* Next, try converting via full word. */
523 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
524 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
525 != CODE_FOR_nothing))
527 if (REG_P (to))
529 if (reg_overlap_mentioned_p (to, from))
530 from = force_reg (from_mode, from);
531 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
533 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
534 emit_unop_insn (code, to,
535 gen_lowpart (word_mode, to), equiv_code);
536 return;
539 /* No special multiword conversion insn; do it by hand. */
540 start_sequence ();
542 /* Since we will turn this into a no conflict block, we must ensure
543 that the source does not overlap the target. */
545 if (reg_overlap_mentioned_p (to, from))
546 from = force_reg (from_mode, from);
548 /* Get a copy of FROM widened to a word, if necessary. */
549 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
550 lowpart_mode = word_mode;
551 else
552 lowpart_mode = from_mode;
554 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
556 lowpart = gen_lowpart (lowpart_mode, to);
557 emit_move_insn (lowpart, lowfrom);
559 /* Compute the value to put in each remaining word. */
560 if (unsignedp)
561 fill_value = const0_rtx;
562 else
564 #ifdef HAVE_slt
565 if (HAVE_slt
566 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
567 && STORE_FLAG_VALUE == -1)
569 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
570 lowpart_mode, 0);
571 fill_value = gen_reg_rtx (word_mode);
572 emit_insn (gen_slt (fill_value));
574 else
575 #endif
577 fill_value
578 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
579 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
580 NULL_RTX, 0);
581 fill_value = convert_to_mode (word_mode, fill_value, 1);
585 /* Fill the remaining words. */
586 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
588 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
589 rtx subword = operand_subword (to, index, 1, to_mode);
591 gcc_assert (subword);
593 if (fill_value != subword)
594 emit_move_insn (subword, fill_value);
597 insns = get_insns ();
598 end_sequence ();
600 emit_no_conflict_block (insns, to, from, NULL_RTX,
601 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
602 return;
605 /* Truncating multi-word to a word or less. */
606 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
607 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
609 if (!((MEM_P (from)
610 && ! MEM_VOLATILE_P (from)
611 && direct_load[(int) to_mode]
612 && ! mode_dependent_address_p (XEXP (from, 0)))
613 || REG_P (from)
614 || GET_CODE (from) == SUBREG))
615 from = force_reg (from_mode, from);
616 convert_move (to, gen_lowpart (word_mode, from), 0);
617 return;
620 /* Now follow all the conversions between integers
621 no more than a word long. */
623 /* For truncation, usually we can just refer to FROM in a narrower mode. */
624 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
625 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
626 GET_MODE_BITSIZE (from_mode)))
628 if (!((MEM_P (from)
629 && ! MEM_VOLATILE_P (from)
630 && direct_load[(int) to_mode]
631 && ! mode_dependent_address_p (XEXP (from, 0)))
632 || REG_P (from)
633 || GET_CODE (from) == SUBREG))
634 from = force_reg (from_mode, from);
635 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
636 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
637 from = copy_to_reg (from);
638 emit_move_insn (to, gen_lowpart (to_mode, from));
639 return;
642 /* Handle extension. */
643 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
645 /* Convert directly if that works. */
646 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
647 != CODE_FOR_nothing)
649 emit_unop_insn (code, to, from, equiv_code);
650 return;
652 else
654 enum machine_mode intermediate;
655 rtx tmp;
656 tree shift_amount;
658 /* Search for a mode to convert via. */
659 for (intermediate = from_mode; intermediate != VOIDmode;
660 intermediate = GET_MODE_WIDER_MODE (intermediate))
661 if (((can_extend_p (to_mode, intermediate, unsignedp)
662 != CODE_FOR_nothing)
663 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
664 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
665 GET_MODE_BITSIZE (intermediate))))
666 && (can_extend_p (intermediate, from_mode, unsignedp)
667 != CODE_FOR_nothing))
669 convert_move (to, convert_to_mode (intermediate, from,
670 unsignedp), unsignedp);
671 return;
674 /* No suitable intermediate mode.
675 Generate what we need with shifts. */
676 shift_amount = build_int_cst (NULL_TREE,
677 GET_MODE_BITSIZE (to_mode)
678 - GET_MODE_BITSIZE (from_mode));
679 from = gen_lowpart (to_mode, force_reg (from_mode, from));
680 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
681 to, unsignedp);
682 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
683 to, unsignedp);
684 if (tmp != to)
685 emit_move_insn (to, tmp);
686 return;
690 /* Support special truncate insns for certain modes. */
691 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
693 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
694 to, from, UNKNOWN);
695 return;
698 /* Handle truncation of volatile memrefs, and so on;
699 the things that couldn't be truncated directly,
700 and for which there was no special instruction.
702 ??? Code above formerly short-circuited this, for most integer
703 mode pairs, with a force_reg in from_mode followed by a recursive
704 call to this routine. Appears always to have been wrong. */
705 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
707 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
708 emit_move_insn (to, temp);
709 return;
712 /* Mode combination is not recognized. */
713 gcc_unreachable ();
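/* Added usage sketch for convert_move (names below are placeholders,
   not identifiers from this file):
       rtx to = gen_reg_rtx (SImode);
       convert_move (to, qimode_reg, 1);
   emits a zero-extension from QImode to SImode because UNSIGNEDP is
   nonzero; passing 0 would sign-extend instead.  The modes are taken
   from the operands themselves, so both must carry nonvoid modes
   (or FROM must be a constant).  */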
716 /* Return an rtx for a value that would result
717 from converting X to mode MODE.
718 Both X and MODE may be floating, or both integer.
719 UNSIGNEDP is nonzero if X is an unsigned value.
720 This can be done by referring to a part of X in place
721 or by copying to a new temporary with conversion. */
724 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
726 return convert_modes (mode, VOIDmode, x, unsignedp);
729 /* Return an rtx for a value that would result
730 from converting X from mode OLDMODE to mode MODE.
731 Both modes may be floating, or both integer.
732 UNSIGNEDP is nonzero if X is an unsigned value.
734 This can be done by referring to a part of X in place
735 or by copying to a new temporary with conversion.
737 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
740 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
742 rtx temp;
744 /* If FROM is a SUBREG that indicates that we have already done at least
745 the required extension, strip it. */
747 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
748 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
749 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
750 x = gen_lowpart (mode, x);
752 if (GET_MODE (x) != VOIDmode)
753 oldmode = GET_MODE (x);
755 if (mode == oldmode)
756 return x;
758 /* There is one case that we must handle specially: If we are converting
759 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
760 we are to interpret the constant as unsigned, gen_lowpart will do
761 the wrong thing if the constant appears negative. What we want to do is
762 make the high-order word of the constant zero, not all ones. */
764 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
765 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
766 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
768 HOST_WIDE_INT val = INTVAL (x);
770 if (oldmode != VOIDmode
771 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
773 int width = GET_MODE_BITSIZE (oldmode);
775 /* We need to zero extend VAL. */
776 val &= ((HOST_WIDE_INT) 1 << width) - 1;
779 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
782 /* We can do this with a gen_lowpart if both desired and current modes
783 are integer, and this is either a constant integer, a register, or a
784 non-volatile MEM. Except for the constant case where MODE is no
785 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
787 if ((GET_CODE (x) == CONST_INT
788 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
789 || (GET_MODE_CLASS (mode) == MODE_INT
790 && GET_MODE_CLASS (oldmode) == MODE_INT
791 && (GET_CODE (x) == CONST_DOUBLE
792 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
793 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
794 && direct_load[(int) mode])
795 || (REG_P (x)
796 && (! HARD_REGISTER_P (x)
797 || HARD_REGNO_MODE_OK (REGNO (x), mode))
798 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
799 GET_MODE_BITSIZE (GET_MODE (x)))))))))
801 /* ?? If we don't know OLDMODE, we have to assume here that
802 X does not need sign- or zero-extension. This may not be
803 the case, but it's the best we can do. */
804 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
805 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
807 HOST_WIDE_INT val = INTVAL (x);
808 int width = GET_MODE_BITSIZE (oldmode);
810 /* We must sign or zero-extend in this case. Start by
811 zero-extending, then sign extend if we need to. */
812 val &= ((HOST_WIDE_INT) 1 << width) - 1;
813 if (! unsignedp
814 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
815 val |= (HOST_WIDE_INT) (-1) << width;
817 return gen_int_mode (val, mode);
820 return gen_lowpart (mode, x);
823 /* Converting from integer constant into mode is always equivalent to a
824 subreg operation. */
825 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
827 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
828 return simplify_gen_subreg (mode, x, oldmode, 0);
831 temp = gen_reg_rtx (mode);
832 convert_move (temp, x, unsignedp);
833 return temp;
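/* Added usage sketch: unlike convert_move, convert_to_mode/convert_modes
   return a value instead of storing into an existing target, e.g.
       rtx wide = convert_to_mode (SImode, qimode_reg, 0);
   For a narrowing conversion this may just return a lowpart of the
   operand in place; otherwise it copies into a fresh pseudo via
   convert_move.  CONST_INTs that fit in a HOST_WIDE_INT come back
   through gen_int_mode in the requested mode.  (qimode_reg is a
   placeholder pseudo, not a name from this file.)  */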
836 /* STORE_MAX_PIECES is the number of bytes at a time that we can
837 store efficiently. Due to internal GCC limitations, this is
838 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
839 for an immediate constant. */
841 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
843 /* Determine whether the LEN bytes can be moved by using several move
844 instructions. Return nonzero if a call to move_by_pieces should
845 succeed. */
848 can_move_by_pieces (unsigned HOST_WIDE_INT len,
849 unsigned int align ATTRIBUTE_UNUSED)
851 return MOVE_BY_PIECES_P (len, align);
854 /* Generate several move instructions to copy LEN bytes from block FROM to
855 block TO. (These are MEM rtx's with BLKmode).
857 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
858 used to push FROM to the stack.
860 ALIGN is maximum stack alignment we can assume.
862 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
863 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
864 stpcpy. */
867 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
868 unsigned int align, int endp)
870 struct move_by_pieces data;
871 rtx to_addr, from_addr = XEXP (from, 0);
872 unsigned int max_size = MOVE_MAX_PIECES + 1;
873 enum machine_mode mode = VOIDmode, tmode;
874 enum insn_code icode;
876 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
878 data.offset = 0;
879 data.from_addr = from_addr;
880 if (to)
882 to_addr = XEXP (to, 0);
883 data.to = to;
884 data.autinc_to
885 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
886 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
887 data.reverse
888 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
890 else
892 to_addr = NULL_RTX;
893 data.to = NULL_RTX;
894 data.autinc_to = 1;
895 #ifdef STACK_GROWS_DOWNWARD
896 data.reverse = 1;
897 #else
898 data.reverse = 0;
899 #endif
901 data.to_addr = to_addr;
902 data.from = from;
903 data.autinc_from
904 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
905 || GET_CODE (from_addr) == POST_INC
906 || GET_CODE (from_addr) == POST_DEC);
908 data.explicit_inc_from = 0;
909 data.explicit_inc_to = 0;
910 if (data.reverse) data.offset = len;
911 data.len = len;
913 /* If copying requires more than two move insns,
914 copy addresses to registers (to make displacements shorter)
915 and use post-increment if available. */
916 if (!(data.autinc_from && data.autinc_to)
917 && move_by_pieces_ninsns (len, align, max_size) > 2)
919 /* Find the mode of the largest move... */
920 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
921 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
922 if (GET_MODE_SIZE (tmode) < max_size)
923 mode = tmode;
925 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
927 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
928 data.autinc_from = 1;
929 data.explicit_inc_from = -1;
931 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
933 data.from_addr = copy_addr_to_reg (from_addr);
934 data.autinc_from = 1;
935 data.explicit_inc_from = 1;
937 if (!data.autinc_from && CONSTANT_P (from_addr))
938 data.from_addr = copy_addr_to_reg (from_addr);
939 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
941 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
942 data.autinc_to = 1;
943 data.explicit_inc_to = -1;
945 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
947 data.to_addr = copy_addr_to_reg (to_addr);
948 data.autinc_to = 1;
949 data.explicit_inc_to = 1;
951 if (!data.autinc_to && CONSTANT_P (to_addr))
952 data.to_addr = copy_addr_to_reg (to_addr);
955 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
956 if (align >= GET_MODE_ALIGNMENT (tmode))
957 align = GET_MODE_ALIGNMENT (tmode);
958 else
960 enum machine_mode xmode;
962 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
963 tmode != VOIDmode;
964 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
965 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
966 || SLOW_UNALIGNED_ACCESS (tmode, align))
967 break;
969 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
972 /* First move what we can in the largest integer mode, then go to
973 successively smaller modes. */
975 while (max_size > 1)
977 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
978 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
979 if (GET_MODE_SIZE (tmode) < max_size)
980 mode = tmode;
982 if (mode == VOIDmode)
983 break;
985 icode = mov_optab->handlers[(int) mode].insn_code;
986 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
987 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
989 max_size = GET_MODE_SIZE (mode);
992 /* The code above should have handled everything. */
993 gcc_assert (!data.len);
995 if (endp)
997 rtx to1;
999 gcc_assert (!data.reverse);
1000 if (data.autinc_to)
1002 if (endp == 2)
1004 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1005 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1006 else
1007 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1008 -1));
1010 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1011 data.offset);
1013 else
1015 if (endp == 2)
1016 --data.offset;
1017 to1 = adjust_address (data.to, QImode, data.offset);
1019 return to1;
1021 else
1022 return data.to;
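/* Added usage sketch: move_by_pieces (to, from, 16, align, 0) simply
   performs the copy and returns TO; ENDP == 1 instead returns a MEM
   addressing the first byte past the copy (mempcpy semantics) and
   ENDP == 2 the byte before that (stpcpy semantics), which lets the
   string builtins reuse the final address computation when expanding
   mempcpy/stpcpy.  */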
1025 /* Return number of insns required to move L bytes by pieces.
1026 ALIGN (in bits) is maximum alignment we can assume. */
1028 static unsigned HOST_WIDE_INT
1029 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1030 unsigned int max_size)
1032 unsigned HOST_WIDE_INT n_insns = 0;
1033 enum machine_mode tmode;
1035 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1036 if (align >= GET_MODE_ALIGNMENT (tmode))
1037 align = GET_MODE_ALIGNMENT (tmode);
1038 else
1040 enum machine_mode tmode, xmode;
1042 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1043 tmode != VOIDmode;
1044 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1045 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1046 || SLOW_UNALIGNED_ACCESS (tmode, align))
1047 break;
1049 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1052 while (max_size > 1)
1054 enum machine_mode mode = VOIDmode;
1055 enum insn_code icode;
1057 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1058 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1059 if (GET_MODE_SIZE (tmode) < max_size)
1060 mode = tmode;
1062 if (mode == VOIDmode)
1063 break;
1065 icode = mov_optab->handlers[(int) mode].insn_code;
1066 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1067 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1069 max_size = GET_MODE_SIZE (mode);
1072 gcc_assert (!l);
1073 return n_insns;
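/* Added worked example (assuming MOVE_MAX_PIECES == 4 and full
   alignment): move_by_pieces_ninsns (10, align, 5) picks SImode first,
   the widest MODE_INT mode narrower than 5 bytes: 10/4 == 2 insns with
   2 bytes left, then HImode adds 1 insn and QImode adds 0, for a total
   of 3.  An alignment too small for SImode (on a strict-alignment
   target) forces HImode or QImode pieces and a larger count.  */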
1076 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1077 with move instructions for mode MODE. GENFUN is the gen_... function
1078 to make a move insn for that mode. DATA has all the other info. */
1080 static void
1081 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1082 struct move_by_pieces *data)
1084 unsigned int size = GET_MODE_SIZE (mode);
1085 rtx to1 = NULL_RTX, from1;
1087 while (data->len >= size)
1089 if (data->reverse)
1090 data->offset -= size;
1092 if (data->to)
1094 if (data->autinc_to)
1095 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1096 data->offset);
1097 else
1098 to1 = adjust_address (data->to, mode, data->offset);
1101 if (data->autinc_from)
1102 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1103 data->offset);
1104 else
1105 from1 = adjust_address (data->from, mode, data->offset);
1107 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1108 emit_insn (gen_add2_insn (data->to_addr,
1109 GEN_INT (-(HOST_WIDE_INT)size)));
1110 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1111 emit_insn (gen_add2_insn (data->from_addr,
1112 GEN_INT (-(HOST_WIDE_INT)size)));
1114 if (data->to)
1115 emit_insn ((*genfun) (to1, from1));
1116 else
1118 #ifdef PUSH_ROUNDING
1119 emit_single_push_insn (mode, from1, NULL);
1120 #else
1121 gcc_unreachable ();
1122 #endif
1125 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1126 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1127 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1128 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1130 if (! data->reverse)
1131 data->offset += size;
1133 data->len -= size;
1137 /* Emit code to move a block Y to a block X. This may be done with
1138 string-move instructions, with multiple scalar move instructions,
1139 or with a library call.
1141 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1142 SIZE is an rtx that says how long they are.
1143 ALIGN is the maximum alignment we can assume they have.
1144 METHOD describes what kind of copy this is, and what mechanisms may be used.
1146 Return the address of the new block, if memcpy is called and returns it,
1147 0 otherwise. */
1150 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1151 unsigned int expected_align, HOST_WIDE_INT expected_size)
1153 bool may_use_call;
1154 rtx retval = 0;
1155 unsigned int align;
1157 switch (method)
1159 case BLOCK_OP_NORMAL:
1160 case BLOCK_OP_TAILCALL:
1161 may_use_call = true;
1162 break;
1164 case BLOCK_OP_CALL_PARM:
1165 may_use_call = block_move_libcall_safe_for_call_parm ();
1167 /* Make inhibit_defer_pop nonzero around the library call
1168 to force it to pop the arguments right away. */
1169 NO_DEFER_POP;
1170 break;
1172 case BLOCK_OP_NO_LIBCALL:
1173 may_use_call = false;
1174 break;
1176 default:
1177 gcc_unreachable ();
1180 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1182 gcc_assert (MEM_P (x));
1183 gcc_assert (MEM_P (y));
1184 gcc_assert (size);
1186 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1187 block copy is more efficient for other large modes, e.g. DCmode. */
1188 x = adjust_address (x, BLKmode, 0);
1189 y = adjust_address (y, BLKmode, 0);
1191 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1192 can be incorrect is coming from __builtin_memcpy. */
1193 if (GET_CODE (size) == CONST_INT)
1195 if (INTVAL (size) == 0)
1196 return 0;
1198 x = shallow_copy_rtx (x);
1199 y = shallow_copy_rtx (y);
1200 set_mem_size (x, size);
1201 set_mem_size (y, size);
1204 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1205 move_by_pieces (x, y, INTVAL (size), align, 0);
1206 else if (emit_block_move_via_movmem (x, y, size, align,
1207 expected_align, expected_size))
1209 else if (may_use_call)
1210 retval = emit_block_move_via_libcall (x, y, size,
1211 method == BLOCK_OP_TAILCALL);
1212 else
1213 emit_block_move_via_loop (x, y, size, align);
1215 if (method == BLOCK_OP_CALL_PARM)
1216 OK_DEFER_POP;
1218 return retval;
1222 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1224 return emit_block_move_hints (x, y, size, method, 0, -1);
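/* Added usage sketch: a typical caller does
       emit_block_move (dest_mem, src_mem, GEN_INT (nbytes),
                        BLOCK_OP_NORMAL);
   and only needs the return value when the copy may have gone through
   the memcpy libcall (it is 0 otherwise).  BLOCK_OP_CALL_PARM is the
   variant used while precomputing call arguments, where the libcall
   must not clobber outgoing argument slots.  */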
1227 /* A subroutine of emit_block_move. Returns true if calling the
1228 block move libcall will not clobber any parameters which may have
1229 already been placed on the stack. */
1231 static bool
1232 block_move_libcall_safe_for_call_parm (void)
1234 /* If arguments are pushed on the stack, then they're safe. */
1235 if (PUSH_ARGS)
1236 return true;
1238 /* If registers go on the stack anyway, any argument is sure to clobber
1239 an outgoing argument. */
1240 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1242 tree fn = emit_block_move_libcall_fn (false);
1243 (void) fn;
1244 if (REG_PARM_STACK_SPACE (fn) != 0)
1245 return false;
1247 #endif
1249 /* If any argument goes in memory, then it might clobber an outgoing
1250 argument. */
1252 CUMULATIVE_ARGS args_so_far;
1253 tree fn, arg;
1255 fn = emit_block_move_libcall_fn (false);
1256 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1258 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1259 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1261 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1262 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1263 if (!tmp || !REG_P (tmp))
1264 return false;
1265 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1266 return false;
1267 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1270 return true;
1273 /* A subroutine of emit_block_move. Expand a movmem pattern;
1274 return true if successful. */
1276 static bool
1277 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1278 unsigned int expected_align, HOST_WIDE_INT expected_size)
1280 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1281 int save_volatile_ok = volatile_ok;
1282 enum machine_mode mode;
1284 if (expected_align < align)
1285 expected_align = align;
1287 /* Since this is a move insn, we don't care about volatility. */
1288 volatile_ok = 1;
1290 /* Try the most limited insn first, because there's no point
1291 including more than one in the machine description unless
1292 the more limited one has some advantage. */
1294 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1295 mode = GET_MODE_WIDER_MODE (mode))
1297 enum insn_code code = movmem_optab[(int) mode];
1298 insn_operand_predicate_fn pred;
1300 if (code != CODE_FOR_nothing
1301 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1302 here because if SIZE is less than the mode mask, as it is
1303 returned by the macro, it will definitely be less than the
1304 actual mode mask. */
1305 && ((GET_CODE (size) == CONST_INT
1306 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1307 <= (GET_MODE_MASK (mode) >> 1)))
1308 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1309 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1310 || (*pred) (x, BLKmode))
1311 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1312 || (*pred) (y, BLKmode))
1313 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1314 || (*pred) (opalign, VOIDmode)))
1316 rtx op2;
1317 rtx last = get_last_insn ();
1318 rtx pat;
1320 op2 = convert_to_mode (mode, size, 1);
1321 pred = insn_data[(int) code].operand[2].predicate;
1322 if (pred != 0 && ! (*pred) (op2, mode))
1323 op2 = copy_to_mode_reg (mode, op2);
1325 /* ??? When called via emit_block_move_for_call, it'd be
1326 nice if there were some way to inform the backend, so
1327 that it doesn't fail the expansion because it thinks
1328 emitting the libcall would be more efficient. */
1330 if (insn_data[(int) code].n_operands == 4)
1331 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1332 else
1333 pat = GEN_FCN ((int) code) (x, y, op2, opalign,
1334 GEN_INT (expected_align),
1335 GEN_INT (expected_size));
1336 if (pat)
1338 emit_insn (pat);
1339 volatile_ok = save_volatile_ok;
1340 return true;
1342 else
1343 delete_insns_since (last);
1347 volatile_ok = save_volatile_ok;
1348 return false;
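/* Added note: the movmemM patterns probed above take their operands in
   the order (dest mem, source mem, length, alignment), with two extra
   operands carrying the expected-alignment and expected-size hints when
   the pattern declares six operands; this matches the n_operands check
   in the loop above.  */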
1351 /* A subroutine of emit_block_move. Expand a call to memcpy.
1352 Return the return value from memcpy, 0 otherwise. */
1355 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1357 rtx dst_addr, src_addr;
1358 tree call_expr, fn, src_tree, dst_tree, size_tree;
1359 enum machine_mode size_mode;
1360 rtx retval;
1362 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1363 pseudos. We can then place those new pseudos into a VAR_DECL and
1364 use them later. */
1366 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1367 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1369 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1370 src_addr = convert_memory_address (ptr_mode, src_addr);
1372 dst_tree = make_tree (ptr_type_node, dst_addr);
1373 src_tree = make_tree (ptr_type_node, src_addr);
1375 size_mode = TYPE_MODE (sizetype);
1377 size = convert_to_mode (size_mode, size, 1);
1378 size = copy_to_mode_reg (size_mode, size);
1380 /* It is incorrect to use the libcall calling conventions to call
1381 memcpy in this context. This could be a user call to memcpy and
1382 the user may wish to examine the return value from memcpy. For
1383 targets where libcalls and normal calls have different conventions
1384 for returning pointers, we could end up generating incorrect code. */
1386 size_tree = make_tree (sizetype, size);
1388 fn = emit_block_move_libcall_fn (true);
1389 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1390 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1392 retval = expand_normal (call_expr);
1394 return retval;
1397 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1398 for the function we use for block copies. The first time FOR_CALL
1399 is true, we call assemble_external. */
1401 static GTY(()) tree block_move_fn;
1403 void
1404 init_block_move_fn (const char *asmspec)
1406 if (!block_move_fn)
1408 tree args, fn;
1410 fn = get_identifier ("memcpy");
1411 args = build_function_type_list (ptr_type_node, ptr_type_node,
1412 const_ptr_type_node, sizetype,
1413 NULL_TREE);
1415 fn = build_decl (FUNCTION_DECL, fn, args);
1416 DECL_EXTERNAL (fn) = 1;
1417 TREE_PUBLIC (fn) = 1;
1418 DECL_ARTIFICIAL (fn) = 1;
1419 TREE_NOTHROW (fn) = 1;
1420 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1421 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1423 block_move_fn = fn;
1426 if (asmspec)
1427 set_user_assembler_name (block_move_fn, asmspec);
1430 static tree
1431 emit_block_move_libcall_fn (int for_call)
1433 static bool emitted_extern;
1435 if (!block_move_fn)
1436 init_block_move_fn (NULL);
1438 if (for_call && !emitted_extern)
1440 emitted_extern = true;
1441 make_decl_rtl (block_move_fn);
1442 assemble_external (block_move_fn);
1445 return block_move_fn;
1448 /* A subroutine of emit_block_move. Copy the data via an explicit
1449 loop. This is used only when libcalls are forbidden. */
1450 /* ??? It'd be nice to copy in hunks larger than QImode. */
1452 static void
1453 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1454 unsigned int align ATTRIBUTE_UNUSED)
1456 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1457 enum machine_mode iter_mode;
1459 iter_mode = GET_MODE (size);
1460 if (iter_mode == VOIDmode)
1461 iter_mode = word_mode;
1463 top_label = gen_label_rtx ();
1464 cmp_label = gen_label_rtx ();
1465 iter = gen_reg_rtx (iter_mode);
1467 emit_move_insn (iter, const0_rtx);
1469 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1470 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1471 do_pending_stack_adjust ();
1473 emit_jump (cmp_label);
1474 emit_label (top_label);
1476 tmp = convert_modes (Pmode, iter_mode, iter, true);
1477 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1478 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1479 x = change_address (x, QImode, x_addr);
1480 y = change_address (y, QImode, y_addr);
1482 emit_move_insn (x, y);
1484 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1485 true, OPTAB_LIB_WIDEN);
1486 if (tmp != iter)
1487 emit_move_insn (iter, tmp);
1489 emit_label (cmp_label);
1491 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1492 true, top_label);
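/* Added sketch of the loop generated above, in C-like form:
       iter = 0;
       goto cmp;
     top:
       ((char *) x_addr)[iter] = ((char *) y_addr)[iter];
       iter++;
     cmp:
       if (iter < size)
         goto top;
   i.e. a byte-at-a-time copy, as the ??? comment above notes.  */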
1495 /* Copy all or part of a value X into registers starting at REGNO.
1496 The number of registers to be filled is NREGS. */
1498 void
1499 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1501 int i;
1502 #ifdef HAVE_load_multiple
1503 rtx pat;
1504 rtx last;
1505 #endif
1507 if (nregs == 0)
1508 return;
1510 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1511 x = validize_mem (force_const_mem (mode, x));
1513 /* See if the machine can do this with a load multiple insn. */
1514 #ifdef HAVE_load_multiple
1515 if (HAVE_load_multiple)
1517 last = get_last_insn ();
1518 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1519 GEN_INT (nregs));
1520 if (pat)
1522 emit_insn (pat);
1523 return;
1525 else
1526 delete_insns_since (last);
1528 #endif
1530 for (i = 0; i < nregs; i++)
1531 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1532 operand_subword_force (x, i, mode));
1535 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1536 The number of registers to be filled is NREGS. */
1538 void
1539 move_block_from_reg (int regno, rtx x, int nregs)
1541 int i;
1543 if (nregs == 0)
1544 return;
1546 /* See if the machine can do this with a store multiple insn. */
1547 #ifdef HAVE_store_multiple
1548 if (HAVE_store_multiple)
1550 rtx last = get_last_insn ();
1551 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1552 GEN_INT (nregs));
1553 if (pat)
1555 emit_insn (pat);
1556 return;
1558 else
1559 delete_insns_since (last);
1561 #endif
1563 for (i = 0; i < nregs; i++)
1565 rtx tem = operand_subword (x, i, 1, BLKmode);
1567 gcc_assert (tem);
1569 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1573 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1574 ORIG, where ORIG is a non-consecutive group of registers represented by
1575 a PARALLEL. The clone is identical to the original except in that the
1576 original set of registers is replaced by a new set of pseudo registers.
1577 The new set has the same modes as the original set. */
1580 gen_group_rtx (rtx orig)
1582 int i, length;
1583 rtx *tmps;
1585 gcc_assert (GET_CODE (orig) == PARALLEL);
1587 length = XVECLEN (orig, 0);
1588 tmps = alloca (sizeof (rtx) * length);
1590 /* Skip a NULL entry in first slot. */
1591 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1593 if (i)
1594 tmps[0] = 0;
1596 for (; i < length; i++)
1598 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1599 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1601 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1604 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
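/* Added note on the PARALLEL format used by gen_group_rtx above and the
   emit_group_* routines below: each element is an EXPR_LIST of the form
       (expr_list (reg:MODE r) (const_int byte-offset))
   giving a register and its byte position within the value, and element
   0 may have a NULL register to indicate that the value lives partly on
   the stack as well as in registers (see the "Check for a NULL entry"
   tests below).  */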
1607 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1608 except that values are placed in TMPS[i], and must later be moved
1609 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1611 static void
1612 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1614 rtx src;
1615 int start, i;
1616 enum machine_mode m = GET_MODE (orig_src);
1618 gcc_assert (GET_CODE (dst) == PARALLEL);
1620 if (m != VOIDmode
1621 && !SCALAR_INT_MODE_P (m)
1622 && !MEM_P (orig_src)
1623 && GET_CODE (orig_src) != CONCAT)
1625 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1626 if (imode == BLKmode)
1627 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1628 else
1629 src = gen_reg_rtx (imode);
1630 if (imode != BLKmode)
1631 src = gen_lowpart (GET_MODE (orig_src), src);
1632 emit_move_insn (src, orig_src);
1633 /* ...and back again. */
1634 if (imode != BLKmode)
1635 src = gen_lowpart (imode, src);
1636 emit_group_load_1 (tmps, dst, src, type, ssize);
1637 return;
1640 /* Check for a NULL entry, used to indicate that the parameter goes
1641 both on the stack and in registers. */
1642 if (XEXP (XVECEXP (dst, 0, 0), 0))
1643 start = 0;
1644 else
1645 start = 1;
1647 /* Process the pieces. */
1648 for (i = start; i < XVECLEN (dst, 0); i++)
1650 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1651 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1652 unsigned int bytelen = GET_MODE_SIZE (mode);
1653 int shift = 0;
1655 /* Handle trailing fragments that run over the size of the struct. */
1656 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1658 /* Arrange to shift the fragment to where it belongs.
1659 extract_bit_field loads to the lsb of the reg. */
1660 if (
1661 #ifdef BLOCK_REG_PADDING
1662 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1663 == (BYTES_BIG_ENDIAN ? upward : downward)
1664 #else
1665 BYTES_BIG_ENDIAN
1666 #endif
1668 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1669 bytelen = ssize - bytepos;
1670 gcc_assert (bytelen > 0);
1673 /* If we won't be loading directly from memory, protect the real source
1674 from strange tricks we might play; but make sure that the source can
1675 be loaded directly into the destination. */
1676 src = orig_src;
1677 if (!MEM_P (orig_src)
1678 && (!CONSTANT_P (orig_src)
1679 || (GET_MODE (orig_src) != mode
1680 && GET_MODE (orig_src) != VOIDmode)))
1682 if (GET_MODE (orig_src) == VOIDmode)
1683 src = gen_reg_rtx (mode);
1684 else
1685 src = gen_reg_rtx (GET_MODE (orig_src));
1687 emit_move_insn (src, orig_src);
1690 /* Optimize the access just a bit. */
1691 if (MEM_P (src)
1692 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1693 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1694 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1695 && bytelen == GET_MODE_SIZE (mode))
1697 tmps[i] = gen_reg_rtx (mode);
1698 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1700 else if (COMPLEX_MODE_P (mode)
1701 && GET_MODE (src) == mode
1702 && bytelen == GET_MODE_SIZE (mode))
1703 /* Let emit_move_complex do the bulk of the work. */
1704 tmps[i] = src;
1705 else if (GET_CODE (src) == CONCAT)
1707 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1708 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1710 if ((bytepos == 0 && bytelen == slen0)
1711 || (bytepos != 0 && bytepos + bytelen <= slen))
1713 /* The following assumes that the concatenated objects all
1714 have the same size. In this case, a simple calculation
1715 can be used to determine the object and the bit field
1716 to be extracted. */
1717 tmps[i] = XEXP (src, bytepos / slen0);
1718 if (! CONSTANT_P (tmps[i])
1719 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1720 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1721 (bytepos % slen0) * BITS_PER_UNIT,
1722 1, NULL_RTX, mode, mode);
1724 else
1726 rtx mem;
1728 gcc_assert (!bytepos);
1729 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1730 emit_move_insn (mem, src);
1731 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1732 0, 1, NULL_RTX, mode, mode);
1735 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1736 SIMD register, which is currently broken. While we get GCC
1737 to emit proper RTL for these cases, let's dump to memory. */
1738 else if (VECTOR_MODE_P (GET_MODE (dst))
1739 && REG_P (src))
1741 int slen = GET_MODE_SIZE (GET_MODE (src));
1742 rtx mem;
1744 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1745 emit_move_insn (mem, src);
1746 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1748 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1749 && XVECLEN (dst, 0) > 1)
1750 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1751 else if (CONSTANT_P (src)
1752 || (REG_P (src) && GET_MODE (src) == mode))
1753 tmps[i] = src;
1754 else
1755 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1756 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1757 mode, mode);
1759 if (shift)
1760 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1761 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1765 /* Emit code to move a block SRC of type TYPE to a block DST,
1766 where DST is non-consecutive registers represented by a PARALLEL.
1767 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1768 if not known. */
1770 void
1771 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1773 rtx *tmps;
1774 int i;
1776 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1777 emit_group_load_1 (tmps, dst, src, type, ssize);
1779 /* Copy the extracted pieces into the proper (probable) hard regs. */
1780 for (i = 0; i < XVECLEN (dst, 0); i++)
1782 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1783 if (d == NULL)
1784 continue;
1785 emit_move_insn (d, tmps[i]);
1789 /* Similar, but load SRC into new pseudos in a format that looks like
1790 PARALLEL. This can later be fed to emit_group_move to get things
1791 in the right place. */
1794 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1796 rtvec vec;
1797 int i;
1799 vec = rtvec_alloc (XVECLEN (parallel, 0));
1800 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1802 /* Convert the vector to look just like the original PARALLEL, except
1803 with the computed values. */
1804 for (i = 0; i < XVECLEN (parallel, 0); i++)
1806 rtx e = XVECEXP (parallel, 0, i);
1807 rtx d = XEXP (e, 0);
1809 if (d)
1811 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1812 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1814 RTVEC_ELT (vec, i) = e;
1817 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1820 /* Emit code to move a block SRC to block DST, where SRC and DST are
1821 non-consecutive groups of registers, each represented by a PARALLEL. */
1823 void
1824 emit_group_move (rtx dst, rtx src)
1826 int i;
1828 gcc_assert (GET_CODE (src) == PARALLEL
1829 && GET_CODE (dst) == PARALLEL
1830 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1832 /* Skip first entry if NULL. */
1833 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1834 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1835 XEXP (XVECEXP (src, 0, i), 0));
1838 /* Move a group of registers represented by a PARALLEL into pseudos. */
1841 emit_group_move_into_temps (rtx src)
1843 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1844 int i;
1846 for (i = 0; i < XVECLEN (src, 0); i++)
1848 rtx e = XVECEXP (src, 0, i);
1849 rtx d = XEXP (e, 0);
1851 if (d)
1852 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1853 RTVEC_ELT (vec, i) = e;
1856 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1859 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1860 where SRC is non-consecutive registers represented by a PARALLEL.
1861 SSIZE represents the total size of block ORIG_DST, or -1 if not
1862 known. */
1864 void
1865 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1867 rtx *tmps, dst;
1868 int start, finish, i;
1869 enum machine_mode m = GET_MODE (orig_dst);
1871 gcc_assert (GET_CODE (src) == PARALLEL);
1873 if (!SCALAR_INT_MODE_P (m)
1874 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1876 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1877 if (imode == BLKmode)
1878 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1879 else
1880 dst = gen_reg_rtx (imode);
1881 emit_group_store (dst, src, type, ssize);
1882 if (imode != BLKmode)
1883 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1884 emit_move_insn (orig_dst, dst);
1885 return;
1888 /* Check for a NULL entry, used to indicate that the parameter goes
1889 both on the stack and in registers. */
1890 if (XEXP (XVECEXP (src, 0, 0), 0))
1891 start = 0;
1892 else
1893 start = 1;
1894 finish = XVECLEN (src, 0);
1896 tmps = alloca (sizeof (rtx) * finish);
1898 /* Copy the (probable) hard regs into pseudos. */
1899 for (i = start; i < finish; i++)
1901 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1902 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1904 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1905 emit_move_insn (tmps[i], reg);
1907 else
1908 tmps[i] = reg;
1911 /* If we won't be storing directly into memory, protect the real destination
1912 from strange tricks we might play. */
1913 dst = orig_dst;
1914 if (GET_CODE (dst) == PARALLEL)
1916 rtx temp;
1918 /* We can get a PARALLEL dst if there is a conditional expression in
1919 a return statement. In that case, the dst and src are the same,
1920 so no action is necessary. */
1921 if (rtx_equal_p (dst, src))
1922 return;
1924 /* It is unclear if we can ever reach here, but we may as well handle
1925 it. Allocate a temporary, and split this into a store/load to/from
1926 the temporary. */
1928 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1929 emit_group_store (temp, src, type, ssize);
1930 emit_group_load (dst, temp, type, ssize);
1931 return;
1933 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1935 enum machine_mode outer = GET_MODE (dst);
1936 enum machine_mode inner;
1937 HOST_WIDE_INT bytepos;
1938 bool done = false;
1939 rtx temp;
1941 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1942 dst = gen_reg_rtx (outer);
1944 /* Make life a bit easier for combine. */
1945 /* If the first element of the vector is the low part
1946 of the destination mode, use a paradoxical subreg to
1947 initialize the destination. */
1948 if (start < finish)
1950 inner = GET_MODE (tmps[start]);
1951 bytepos = subreg_lowpart_offset (inner, outer);
1952 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1954 temp = simplify_gen_subreg (outer, tmps[start],
1955 inner, 0);
1956 if (temp)
1958 emit_move_insn (dst, temp);
1959 done = true;
1960 start++;
1965 /* If the first element wasn't the low part, try the last. */
1966 if (!done
1967 && start < finish - 1)
1969 inner = GET_MODE (tmps[finish - 1]);
1970 bytepos = subreg_lowpart_offset (inner, outer);
1971 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1973 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1974 inner, 0);
1975 if (temp)
1977 emit_move_insn (dst, temp);
1978 done = true;
1979 finish--;
1984 /* Otherwise, simply initialize the result to zero. */
1985 if (!done)
1986 emit_move_insn (dst, CONST0_RTX (outer));
1989 /* Process the pieces. */
1990 for (i = start; i < finish; i++)
1992 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1993 enum machine_mode mode = GET_MODE (tmps[i]);
1994 unsigned int bytelen = GET_MODE_SIZE (mode);
1995 rtx dest = dst;
1997 /* Handle trailing fragments that run over the size of the struct. */
1998 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2000 /* store_bit_field always takes its value from the lsb.
2001 Move the fragment to the lsb if it's not already there. */
2002 if (
2003 #ifdef BLOCK_REG_PADDING
2004 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2005 == (BYTES_BIG_ENDIAN ? upward : downward)
2006 #else
2007 BYTES_BIG_ENDIAN
2008 #endif
2011 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2012 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2013 build_int_cst (NULL_TREE, shift),
2014 tmps[i], 0);
2016 bytelen = ssize - bytepos;
2019 if (GET_CODE (dst) == CONCAT)
2021 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2022 dest = XEXP (dst, 0);
2023 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2025 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2026 dest = XEXP (dst, 1);
2028 else
2030 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2031 dest = assign_stack_temp (GET_MODE (dest),
2032 GET_MODE_SIZE (GET_MODE (dest)), 0);
2033 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2034 tmps[i]);
2035 dst = dest;
2036 break;
2040 /* Optimize the access just a bit. */
2041 if (MEM_P (dest)
2042 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2043 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2044 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2045 && bytelen == GET_MODE_SIZE (mode))
2046 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2047 else
2048 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2049 mode, tmps[i]);
2052 /* Copy from the pseudo into the (probable) hard reg. */
2053 if (orig_dst != dst)
2054 emit_move_insn (orig_dst, dst);
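/* A worked example of the trailing-fragment handling above, with
   hypothetical numbers: if SSIZE is 6 and the final piece is a DImode
   register (BYTELEN == 8) at BYTEPOS 0 on a big-endian target, the
   fragment overruns the struct by 2 bytes, so it is shifted right by
   (8 - 6) * BITS_PER_UNIT == 16 bits to move the value to the lsb and
   BYTELEN is trimmed to 6 before the store.  */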
2057 /* Generate code to copy a BLKmode object of TYPE out of a
2058 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2059 is null, a stack temporary is created. TGTBLK is returned.
2061 The purpose of this routine is to handle functions that return
2062 BLKmode structures in registers. Some machines (the PA for example)
2063 want to return all small structures in registers regardless of the
2064 structure's alignment. */
2067 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2069 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2070 rtx src = NULL, dst = NULL;
2071 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2072 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2074 if (tgtblk == 0)
2076 tgtblk = assign_temp (build_qualified_type (type,
2077 (TYPE_QUALS (type)
2078 | TYPE_QUAL_CONST)),
2079 0, 1, 1);
2080 preserve_temp_slots (tgtblk);
2083 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2084 into a new pseudo which is a full word. */
2086 if (GET_MODE (srcreg) != BLKmode
2087 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2088 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2090 /* If the structure doesn't take up a whole number of words, see whether
2091 SRCREG is padded on the left or on the right. If it's on the left,
2092 set PADDING_CORRECTION to the number of bits to skip.
2094 In most ABIs, the structure will be returned at the least significant end of
2095 the register, which translates to right padding on little-endian
2096 targets and left padding on big-endian targets. The opposite
2097 holds if the structure is returned at the most significant
2098 end of the register. */
2099 if (bytes % UNITS_PER_WORD != 0
2100 && (targetm.calls.return_in_msb (type)
2101 ? !BYTES_BIG_ENDIAN
2102 : BYTES_BIG_ENDIAN))
2103 padding_correction
2104 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2106 /* Copy the structure BITSIZE bits at a time.
2108 We could probably emit more efficient code for machines which do not use
2109 strict alignment, but it doesn't seem worth the effort at the current
2110 time. */
2111 for (bitpos = 0, xbitpos = padding_correction;
2112 bitpos < bytes * BITS_PER_UNIT;
2113 bitpos += bitsize, xbitpos += bitsize)
2115 /* We need a new source operand each time xbitpos is on a
2116 word boundary and when xbitpos == padding_correction
2117 (the first time through). */
2118 if (xbitpos % BITS_PER_WORD == 0
2119 || xbitpos == padding_correction)
2120 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2121 GET_MODE (srcreg));
2123 /* We need a new destination operand each time bitpos is on
2124 a word boundary. */
2125 if (bitpos % BITS_PER_WORD == 0)
2126 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2128 /* Use xbitpos for the source extraction (right justified) and
2129 bitpos for the destination store (left justified). */
2130 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2131 extract_bit_field (src, bitsize,
2132 xbitpos % BITS_PER_WORD, 1,
2133 NULL_RTX, word_mode, word_mode));
2136 return tgtblk;
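/* A worked example of the padding correction above, with hypothetical
   numbers: for a 6-byte structure on a 32-bit big-endian target
   (UNITS_PER_WORD == 4, BITS_PER_WORD == 32) that returns the value at
   the least significant end of the registers, PADDING_CORRECTION is
   32 - (6 % 4) * 8 == 16, so XBITPOS starts at bit 16 and the 16
   padding bits at the start of SRCREG are never copied.  */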
2139 /* Add a USE expression for REG to the (possibly empty) list pointed
2140 to by CALL_FUSAGE. REG must denote a hard register. */
2142 void
2143 use_reg (rtx *call_fusage, rtx reg)
2145 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2147 *call_fusage
2148 = gen_rtx_EXPR_LIST (VOIDmode,
2149 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2152 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2153 starting at REGNO. All of these registers must be hard registers. */
2155 void
2156 use_regs (rtx *call_fusage, int regno, int nregs)
2158 int i;
2160 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2162 for (i = 0; i < nregs; i++)
2163 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2166 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2167 PARALLEL REGS. This is for calls that pass values in multiple
2168 non-contiguous locations. The Irix 6 ABI has examples of this. */
2170 void
2171 use_group_regs (rtx *call_fusage, rtx regs)
2173 int i;
2175 for (i = 0; i < XVECLEN (regs, 0); i++)
2177 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2179 /* A NULL entry means the parameter goes both on the stack and in
2180 registers. This can also be a MEM for targets that pass values
2181 partially on the stack and partially in registers. */
2182 if (reg != 0 && REG_P (reg))
2183 use_reg (call_fusage, reg);
2188 /* Determine whether the LEN bytes generated by CONSTFUN can be
2189 stored to memory using several move instructions. CONSTFUNDATA is
2190 a pointer which will be passed as argument in every CONSTFUN call.
2191 ALIGN is maximum alignment we can assume. Return nonzero if a
2192 call to store_by_pieces should succeed. */
2195 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2196 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2197 void *constfundata, unsigned int align)
2199 unsigned HOST_WIDE_INT l;
2200 unsigned int max_size;
2201 HOST_WIDE_INT offset = 0;
2202 enum machine_mode mode, tmode;
2203 enum insn_code icode;
2204 int reverse;
2205 rtx cst;
2207 if (len == 0)
2208 return 1;
2210 if (! STORE_BY_PIECES_P (len, align))
2211 return 0;
2213 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2214 if (align >= GET_MODE_ALIGNMENT (tmode))
2215 align = GET_MODE_ALIGNMENT (tmode);
2216 else
2218 enum machine_mode xmode;
2220 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2221 tmode != VOIDmode;
2222 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2223 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2224 || SLOW_UNALIGNED_ACCESS (tmode, align))
2225 break;
2227 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2230 /* We would first store what we can in the largest integer mode, then go to
2231 successively smaller modes. */
2233 for (reverse = 0;
2234 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2235 reverse++)
2237 l = len;
2238 mode = VOIDmode;
2239 max_size = STORE_MAX_PIECES + 1;
2240 while (max_size > 1)
2242 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2243 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2244 if (GET_MODE_SIZE (tmode) < max_size)
2245 mode = tmode;
2247 if (mode == VOIDmode)
2248 break;
2250 icode = mov_optab->handlers[(int) mode].insn_code;
2251 if (icode != CODE_FOR_nothing
2252 && align >= GET_MODE_ALIGNMENT (mode))
2254 unsigned int size = GET_MODE_SIZE (mode);
2256 while (l >= size)
2258 if (reverse)
2259 offset -= size;
2261 cst = (*constfun) (constfundata, offset, mode);
2262 if (!LEGITIMATE_CONSTANT_P (cst))
2263 return 0;
2265 if (!reverse)
2266 offset += size;
2268 l -= size;
2272 max_size = GET_MODE_SIZE (mode);
2275 /* The code above should have handled everything. */
2276 gcc_assert (!l);
2279 return 1;
2282 /* Generate several move instructions to store LEN bytes generated by
2283 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2284 pointer which will be passed as argument in every CONSTFUN call.
2285 ALIGN is maximum alignment we can assume.
2286 If ENDP is 0, return TO; if ENDP is 1, return the memory at the end, a la
2287 mempcpy; and if ENDP is 2, return the memory at the end minus one byte, a la
2288 stpcpy. */
2291 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2292 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2293 void *constfundata, unsigned int align, int endp)
2295 struct store_by_pieces data;
2297 if (len == 0)
2299 gcc_assert (endp != 2);
2300 return to;
2303 gcc_assert (STORE_BY_PIECES_P (len, align));
2304 data.constfun = constfun;
2305 data.constfundata = constfundata;
2306 data.len = len;
2307 data.to = to;
2308 store_by_pieces_1 (&data, align);
2309 if (endp)
2311 rtx to1;
2313 gcc_assert (!data.reverse);
2314 if (data.autinc_to)
2316 if (endp == 2)
2318 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2319 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2320 else
2321 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2322 -1));
2324 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2325 data.offset);
2327 else
2329 if (endp == 2)
2330 --data.offset;
2331 to1 = adjust_address (data.to, QImode, data.offset);
2333 return to1;
2335 else
2336 return data.to;
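/* An illustrative sketch of a store_by_pieces caller.  TO is assumed
   to be a suitably aligned BLKmode MEM of at least 32 bytes and ALIGN
   its alignment in bits; clear_by_pieces_1 (defined below) serves as
   the constant generator, so the net effect is to store 32 zero bytes.
   A real caller, such as a builtin string expander, would supply a
   callback that synthesizes a constant of MODE from its own
   CONSTFUNDATA.  */
#if 0
  if (can_store_by_pieces (32, clear_by_pieces_1, NULL, align))
    store_by_pieces (to, 32, clear_by_pieces_1, NULL, align, 0);
  else
    clear_storage (to, GEN_INT (32), BLOCK_OP_NORMAL);
#endif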
2339 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2340 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2342 static void
2343 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2345 struct store_by_pieces data;
2347 if (len == 0)
2348 return;
2350 data.constfun = clear_by_pieces_1;
2351 data.constfundata = NULL;
2352 data.len = len;
2353 data.to = to;
2354 store_by_pieces_1 (&data, align);
2357 /* Callback routine for clear_by_pieces.
2358 Return const0_rtx unconditionally. */
2360 static rtx
2361 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2362 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2363 enum machine_mode mode ATTRIBUTE_UNUSED)
2365 return const0_rtx;
2368 /* Subroutine of clear_by_pieces and store_by_pieces.
2369 Generate several move instructions to store LEN bytes of block TO. (A MEM
2370 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2372 static void
2373 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2374 unsigned int align ATTRIBUTE_UNUSED)
2376 rtx to_addr = XEXP (data->to, 0);
2377 unsigned int max_size = STORE_MAX_PIECES + 1;
2378 enum machine_mode mode = VOIDmode, tmode;
2379 enum insn_code icode;
2381 data->offset = 0;
2382 data->to_addr = to_addr;
2383 data->autinc_to
2384 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2385 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2387 data->explicit_inc_to = 0;
2388 data->reverse
2389 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2390 if (data->reverse)
2391 data->offset = data->len;
2393 /* If storing requires more than two move insns,
2394 copy addresses to registers (to make displacements shorter)
2395 and use post-increment if available. */
2396 if (!data->autinc_to
2397 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2399 /* Determine the main mode we'll be using. */
2400 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2401 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2402 if (GET_MODE_SIZE (tmode) < max_size)
2403 mode = tmode;
2405 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2407 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2408 data->autinc_to = 1;
2409 data->explicit_inc_to = -1;
2412 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2413 && ! data->autinc_to)
2415 data->to_addr = copy_addr_to_reg (to_addr);
2416 data->autinc_to = 1;
2417 data->explicit_inc_to = 1;
2420 if (!data->autinc_to && CONSTANT_P (to_addr))
2421 data->to_addr = copy_addr_to_reg (to_addr);
2424 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2425 if (align >= GET_MODE_ALIGNMENT (tmode))
2426 align = GET_MODE_ALIGNMENT (tmode);
2427 else
2429 enum machine_mode xmode;
2431 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2432 tmode != VOIDmode;
2433 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2434 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2435 || SLOW_UNALIGNED_ACCESS (tmode, align))
2436 break;
2438 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2441 /* First store what we can in the largest integer mode, then go to
2442 successively smaller modes. */
2444 while (max_size > 1)
2446 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2447 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2448 if (GET_MODE_SIZE (tmode) < max_size)
2449 mode = tmode;
2451 if (mode == VOIDmode)
2452 break;
2454 icode = mov_optab->handlers[(int) mode].insn_code;
2455 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2456 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2458 max_size = GET_MODE_SIZE (mode);
2461 /* The code above should have handled everything. */
2462 gcc_assert (!data->len);
2465 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2466 with move instructions for mode MODE. GENFUN is the gen_... function
2467 to make a move insn for that mode. DATA has all the other info. */
2469 static void
2470 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2471 struct store_by_pieces *data)
2473 unsigned int size = GET_MODE_SIZE (mode);
2474 rtx to1, cst;
2476 while (data->len >= size)
2478 if (data->reverse)
2479 data->offset -= size;
2481 if (data->autinc_to)
2482 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2483 data->offset);
2484 else
2485 to1 = adjust_address (data->to, mode, data->offset);
2487 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2488 emit_insn (gen_add2_insn (data->to_addr,
2489 GEN_INT (-(HOST_WIDE_INT) size)));
2491 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2492 emit_insn ((*genfun) (to1, cst));
2494 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2495 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2497 if (! data->reverse)
2498 data->offset += size;
2500 data->len -= size;
2504 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2505 its length in bytes. */
2508 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2509 unsigned int expected_align, HOST_WIDE_INT expected_size)
2511 enum machine_mode mode = GET_MODE (object);
2512 unsigned int align;
2514 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2516 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2517 just move a zero. Otherwise, do this a piece at a time. */
2518 if (mode != BLKmode
2519 && GET_CODE (size) == CONST_INT
2520 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2522 rtx zero = CONST0_RTX (mode);
2523 if (zero != NULL)
2525 emit_move_insn (object, zero);
2526 return NULL;
2529 if (COMPLEX_MODE_P (mode))
2531 zero = CONST0_RTX (GET_MODE_INNER (mode));
2532 if (zero != NULL)
2534 write_complex_part (object, zero, 0);
2535 write_complex_part (object, zero, 1);
2536 return NULL;
2541 if (size == const0_rtx)
2542 return NULL;
2544 align = MEM_ALIGN (object);
2546 if (GET_CODE (size) == CONST_INT
2547 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2548 clear_by_pieces (object, INTVAL (size), align);
2549 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2550 expected_align, expected_size))
2552 else
2553 return set_storage_via_libcall (object, size, const0_rtx,
2554 method == BLOCK_OP_TAILCALL);
2556 return NULL;
2560 clear_storage (rtx object, rtx size, enum block_op_methods method)
2562 return clear_storage_hints (object, size, method, 0, -1);
2566 /* A subroutine of clear_storage. Expand a call to memset.
2567 Return the return value of memset, 0 otherwise. */
2570 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2572 tree call_expr, fn, object_tree, size_tree, val_tree;
2573 enum machine_mode size_mode;
2574 rtx retval;
2576 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2577 place those new pseudos into a VAR_DECL and use them later. */
2579 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2581 size_mode = TYPE_MODE (sizetype);
2582 size = convert_to_mode (size_mode, size, 1);
2583 size = copy_to_mode_reg (size_mode, size);
2585 /* It is incorrect to use the libcall calling conventions to call
2586 memset in this context. This could be a user call to memset and
2587 the user may wish to examine the return value from memset. For
2588 targets where libcalls and normal calls have different conventions
2589 for returning pointers, we could end up generating incorrect code. */
2591 object_tree = make_tree (ptr_type_node, object);
2592 if (GET_CODE (val) != CONST_INT)
2593 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2594 size_tree = make_tree (sizetype, size);
2595 val_tree = make_tree (integer_type_node, val);
2597 fn = clear_storage_libcall_fn (true);
2598 call_expr = build_call_expr (fn, 3,
2599 object_tree, val_tree, size_tree);
2600 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2602 retval = expand_normal (call_expr);
2604 return retval;
2607 /* A subroutine of set_storage_via_libcall. Create the tree node
2608 for the function we use for block clears. The first time FOR_CALL
2609 is true, we call assemble_external. */
2611 static GTY(()) tree block_clear_fn;
2613 void
2614 init_block_clear_fn (const char *asmspec)
2616 if (!block_clear_fn)
2618 tree fn, args;
2620 fn = get_identifier ("memset");
2621 args = build_function_type_list (ptr_type_node, ptr_type_node,
2622 integer_type_node, sizetype,
2623 NULL_TREE);
2625 fn = build_decl (FUNCTION_DECL, fn, args);
2626 DECL_EXTERNAL (fn) = 1;
2627 TREE_PUBLIC (fn) = 1;
2628 DECL_ARTIFICIAL (fn) = 1;
2629 TREE_NOTHROW (fn) = 1;
2630 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2631 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2633 block_clear_fn = fn;
2636 if (asmspec)
2637 set_user_assembler_name (block_clear_fn, asmspec);
2640 static tree
2641 clear_storage_libcall_fn (int for_call)
2643 static bool emitted_extern;
2645 if (!block_clear_fn)
2646 init_block_clear_fn (NULL);
2648 if (for_call && !emitted_extern)
2650 emitted_extern = true;
2651 make_decl_rtl (block_clear_fn);
2652 assemble_external (block_clear_fn);
2655 return block_clear_fn;
2658 /* Expand a setmem pattern; return true if successful. */
2660 bool
2661 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2662 unsigned int expected_align, HOST_WIDE_INT expected_size)
2664 /* Try the most limited insn first, because there's no point
2665 including more than one in the machine description unless
2666 the more limited one has some advantage. */
2668 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2669 enum machine_mode mode;
2671 if (expected_align < align)
2672 expected_align = align;
2674 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2675 mode = GET_MODE_WIDER_MODE (mode))
2677 enum insn_code code = setmem_optab[(int) mode];
2678 insn_operand_predicate_fn pred;
2680 if (code != CODE_FOR_nothing
2681 /* We don't need MODE to be narrower than
2682 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2683 the mode mask, as it is returned by the macro, it will
2684 definitely be less than the actual mode mask. */
2685 && ((GET_CODE (size) == CONST_INT
2686 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2687 <= (GET_MODE_MASK (mode) >> 1)))
2688 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2689 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2690 || (*pred) (object, BLKmode))
2691 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2692 || (*pred) (opalign, VOIDmode)))
2694 rtx opsize, opchar;
2695 enum machine_mode char_mode;
2696 rtx last = get_last_insn ();
2697 rtx pat;
2699 opsize = convert_to_mode (mode, size, 1);
2700 pred = insn_data[(int) code].operand[1].predicate;
2701 if (pred != 0 && ! (*pred) (opsize, mode))
2702 opsize = copy_to_mode_reg (mode, opsize);
2704 opchar = val;
2705 char_mode = insn_data[(int) code].operand[2].mode;
2706 if (char_mode != VOIDmode)
2708 opchar = convert_to_mode (char_mode, opchar, 1);
2709 pred = insn_data[(int) code].operand[2].predicate;
2710 if (pred != 0 && ! (*pred) (opchar, char_mode))
2711 opchar = copy_to_mode_reg (char_mode, opchar);
2714 if (insn_data[(int) code].n_operands == 4)
2715 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2716 else
2717 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2718 GEN_INT (expected_align),
2719 GEN_INT (expected_size));
2720 if (pat)
2722 emit_insn (pat);
2723 return true;
2725 else
2726 delete_insns_since (last);
2730 return false;
2734 /* Write to one of the components of the complex value CPLX. Write VAL to
2735 the real part if IMAG_P is false, and the imaginary part if it's true. */
2737 static void
2738 write_complex_part (rtx cplx, rtx val, bool imag_p)
2740 enum machine_mode cmode;
2741 enum machine_mode imode;
2742 unsigned ibitsize;
2744 if (GET_CODE (cplx) == CONCAT)
2746 emit_move_insn (XEXP (cplx, imag_p), val);
2747 return;
2750 cmode = GET_MODE (cplx);
2751 imode = GET_MODE_INNER (cmode);
2752 ibitsize = GET_MODE_BITSIZE (imode);
2754 /* For MEMs simplify_gen_subreg may generate an invalid new address
2755 because, e.g., the original address is considered mode-dependent
2756 by the target, which restricts simplify_subreg from invoking
2757 adjust_address_nv. Instead of preparing fallback support for an
2758 invalid address, we call adjust_address_nv directly. */
2759 if (MEM_P (cplx))
2761 emit_move_insn (adjust_address_nv (cplx, imode,
2762 imag_p ? GET_MODE_SIZE (imode) : 0),
2763 val);
2764 return;
2767 /* If the sub-object is at least word sized, then we know that subregging
2768 will work. This special case is important, since store_bit_field
2769 wants to operate on integer modes, and there's rarely an OImode to
2770 correspond to TCmode. */
2771 if (ibitsize >= BITS_PER_WORD
2772 /* For hard regs we have exact predicates. Assume we can split
2773 the original object if it spans an even number of hard regs.
2774 This special case is important for SCmode on 64-bit platforms
2775 where the natural size of floating-point regs is 32-bit. */
2776 || (REG_P (cplx)
2777 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2778 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2780 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2781 imag_p ? GET_MODE_SIZE (imode) : 0);
2782 if (part)
2784 emit_move_insn (part, val);
2785 return;
2787 else
2788 /* simplify_gen_subreg may fail for sub-word MEMs. */
2789 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2792 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2795 /* Extract one of the components of the complex value CPLX. Extract the
2796 real part if IMAG_P is false, and the imaginary part if it's true. */
2798 static rtx
2799 read_complex_part (rtx cplx, bool imag_p)
2801 enum machine_mode cmode, imode;
2802 unsigned ibitsize;
2804 if (GET_CODE (cplx) == CONCAT)
2805 return XEXP (cplx, imag_p);
2807 cmode = GET_MODE (cplx);
2808 imode = GET_MODE_INNER (cmode);
2809 ibitsize = GET_MODE_BITSIZE (imode);
2811 /* Special case reads from complex constants that got spilled to memory. */
2812 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2814 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2815 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2817 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2818 if (CONSTANT_CLASS_P (part))
2819 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2823 /* For MEMs simplify_gen_subreg may generate an invalid new address
2824 because, e.g., the original address is considered mode-dependent
2825 by the target, which restricts simplify_subreg from invoking
2826 adjust_address_nv. Instead of preparing fallback support for an
2827 invalid address, we call adjust_address_nv directly. */
2828 if (MEM_P (cplx))
2829 return adjust_address_nv (cplx, imode,
2830 imag_p ? GET_MODE_SIZE (imode) : 0);
2832 /* If the sub-object is at least word sized, then we know that subregging
2833 will work. This special case is important, since extract_bit_field
2834 wants to operate on integer modes, and there's rarely an OImode to
2835 correspond to TCmode. */
2836 if (ibitsize >= BITS_PER_WORD
2837 /* For hard regs we have exact predicates. Assume we can split
2838 the original object if it spans an even number of hard regs.
2839 This special case is important for SCmode on 64-bit platforms
2840 where the natural size of floating-point regs is 32-bit. */
2841 || (REG_P (cplx)
2842 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2843 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2845 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2846 imag_p ? GET_MODE_SIZE (imode) : 0);
2847 if (ret)
2848 return ret;
2849 else
2850 /* simplify_gen_subreg may fail for sub-word MEMs. */
2851 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2854 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2855 true, NULL_RTX, imode, imode);
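/* An illustrative sketch using the two helpers above: for an SCmode
   value the inner mode is SFmode, so the imaginary half lives
   GET_MODE_SIZE (SFmode) == 4 bytes after the real half.  CPLX is
   assumed to be an existing SCmode operand; the sketch swaps its two
   halves.  */
#if 0
  rtx re = copy_to_reg (read_complex_part (cplx, false));
  rtx im = read_complex_part (cplx, true);

  write_complex_part (cplx, im, false);
  write_complex_part (cplx, re, true);
#endif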
2858 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2859 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2860 represented in NEW_MODE. If FORCE is true, this will never happen, as
2861 we'll force-create a SUBREG if needed. */
2863 static rtx
2864 emit_move_change_mode (enum machine_mode new_mode,
2865 enum machine_mode old_mode, rtx x, bool force)
2867 rtx ret;
2869 if (MEM_P (x))
2871 /* We don't have to worry about changing the address since the
2872 size in bytes is supposed to be the same. */
2873 if (reload_in_progress)
2875 /* Copy the MEM to change the mode and move any
2876 substitutions from the old MEM to the new one. */
2877 ret = adjust_address_nv (x, new_mode, 0);
2878 copy_replacements (x, ret);
2880 else
2881 ret = adjust_address (x, new_mode, 0);
2883 else
2885 /* Note that we do want simplify_subreg's behavior of validating
2886 that the new mode is ok for a hard register. If we were to use
2887 simplify_gen_subreg, we would create the subreg, but would
2888 probably run into the target not being able to implement it. */
2889 /* Except, of course, when FORCE is true, when this is exactly what
2890 we want. Which is needed for CCmodes on some targets. */
2891 if (force)
2892 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2893 else
2894 ret = simplify_subreg (new_mode, x, old_mode, 0);
2897 return ret;
2900 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2901 an integer mode of the same size as MODE. Returns the instruction
2902 emitted, or NULL if such a move could not be generated. */
2904 static rtx
2905 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2907 enum machine_mode imode;
2908 enum insn_code code;
2910 /* There must exist a mode of the exact size we require. */
2911 imode = int_mode_for_mode (mode);
2912 if (imode == BLKmode)
2913 return NULL_RTX;
2915 /* The target must support moves in this mode. */
2916 code = mov_optab->handlers[imode].insn_code;
2917 if (code == CODE_FOR_nothing)
2918 return NULL_RTX;
2920 x = emit_move_change_mode (imode, mode, x, force);
2921 if (x == NULL_RTX)
2922 return NULL_RTX;
2923 y = emit_move_change_mode (imode, mode, y, force);
2924 if (y == NULL_RTX)
2925 return NULL_RTX;
2926 return emit_insn (GEN_FCN (code) (x, y));
2929 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2930 Return an equivalent MEM that does not use an auto-increment. */
2932 static rtx
2933 emit_move_resolve_push (enum machine_mode mode, rtx x)
2935 enum rtx_code code = GET_CODE (XEXP (x, 0));
2936 HOST_WIDE_INT adjust;
2937 rtx temp;
2939 adjust = GET_MODE_SIZE (mode);
2940 #ifdef PUSH_ROUNDING
2941 adjust = PUSH_ROUNDING (adjust);
2942 #endif
2943 if (code == PRE_DEC || code == POST_DEC)
2944 adjust = -adjust;
2945 else if (code == PRE_MODIFY || code == POST_MODIFY)
2947 rtx expr = XEXP (XEXP (x, 0), 1);
2948 HOST_WIDE_INT val;
2950 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2951 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2952 val = INTVAL (XEXP (expr, 1));
2953 if (GET_CODE (expr) == MINUS)
2954 val = -val;
2955 gcc_assert (adjust == val || adjust == -val);
2956 adjust = val;
2959 /* Do not use anti_adjust_stack, since we don't want to update
2960 stack_pointer_delta. */
2961 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2962 GEN_INT (adjust), stack_pointer_rtx,
2963 0, OPTAB_LIB_WIDEN);
2964 if (temp != stack_pointer_rtx)
2965 emit_move_insn (stack_pointer_rtx, temp);
2967 switch (code)
2969 case PRE_INC:
2970 case PRE_DEC:
2971 case PRE_MODIFY:
2972 temp = stack_pointer_rtx;
2973 break;
2974 case POST_INC:
2975 case POST_DEC:
2976 case POST_MODIFY:
2977 temp = plus_constant (stack_pointer_rtx, -adjust);
2978 break;
2979 default:
2980 gcc_unreachable ();
2983 return replace_equiv_address (x, temp);
2986 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2987 X is known to satisfy push_operand, and MODE is known to be complex.
2988 Returns the last instruction emitted. */
2990 static rtx
2991 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2993 enum machine_mode submode = GET_MODE_INNER (mode);
2994 bool imag_first;
2996 #ifdef PUSH_ROUNDING
2997 unsigned int submodesize = GET_MODE_SIZE (submode);
2999 /* If we would push to the stack but the submode's size is not one the
3000 machine can push exactly (PUSH_ROUNDING pads it), use move instructions. */
3001 if (PUSH_ROUNDING (submodesize) != submodesize)
3003 x = emit_move_resolve_push (mode, x);
3004 return emit_move_insn (x, y);
3006 #endif
3008 /* Note that the real part always precedes the imag part in memory
3009 regardless of machine's endianness. */
3010 switch (GET_CODE (XEXP (x, 0)))
3012 case PRE_DEC:
3013 case POST_DEC:
3014 imag_first = true;
3015 break;
3016 case PRE_INC:
3017 case POST_INC:
3018 imag_first = false;
3019 break;
3020 default:
3021 gcc_unreachable ();
3024 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3025 read_complex_part (y, imag_first));
3026 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3027 read_complex_part (y, !imag_first));
3030 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3031 MODE is known to be complex. Returns the last instruction emitted. */
3033 static rtx
3034 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3036 bool try_int;
3038 /* Need to take special care for pushes, to maintain proper ordering
3039 of the data, and possibly extra padding. */
3040 if (push_operand (x, mode))
3041 return emit_move_complex_push (mode, x, y);
3043 /* See if we can coerce the target into moving both values at once. */
3045 /* Move floating point as parts. */
3046 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3047 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
3048 try_int = false;
3049 /* Not possible if the values are inherently not adjacent. */
3050 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3051 try_int = false;
3052 /* Is possible if both are registers (or subregs of registers). */
3053 else if (register_operand (x, mode) && register_operand (y, mode))
3054 try_int = true;
3055 /* If one of the operands is a memory, and alignment constraints
3056 are friendly enough, we may be able to do combined memory operations.
3057 We do not attempt this if Y is a constant because that combination is
3058 usually better with the by-parts thing below. */
3059 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3060 && (!STRICT_ALIGNMENT
3061 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3062 try_int = true;
3063 else
3064 try_int = false;
3066 if (try_int)
3068 rtx ret;
3070 /* For memory to memory moves, optimal behavior can be had with the
3071 existing block move logic. */
3072 if (MEM_P (x) && MEM_P (y))
3074 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3075 BLOCK_OP_NO_LIBCALL);
3076 return get_last_insn ();
3079 ret = emit_move_via_integer (mode, x, y, true);
3080 if (ret)
3081 return ret;
3084 /* Show the output dies here. This is necessary for SUBREGs
3085 of pseudos since we cannot track their lifetimes correctly;
3086 hard regs shouldn't appear here except as return values. */
3087 if (!reload_completed && !reload_in_progress
3088 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3089 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3091 write_complex_part (x, read_complex_part (y, false), false);
3092 write_complex_part (x, read_complex_part (y, true), true);
3093 return get_last_insn ();
3096 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3097 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3099 static rtx
3100 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3102 rtx ret;
3104 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3105 if (mode != CCmode)
3107 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3108 if (code != CODE_FOR_nothing)
3110 x = emit_move_change_mode (CCmode, mode, x, true);
3111 y = emit_move_change_mode (CCmode, mode, y, true);
3112 return emit_insn (GEN_FCN (code) (x, y));
3116 /* Otherwise, find the MODE_INT mode of the same width. */
3117 ret = emit_move_via_integer (mode, x, y, false);
3118 gcc_assert (ret != NULL);
3119 return ret;
3122 /* Return true if word I of OP lies entirely in the
3123 undefined bits of a paradoxical subreg. */
3125 static bool
3126 undefined_operand_subword_p (rtx op, int i)
3128 enum machine_mode innermode, innermostmode;
3129 int offset;
3130 if (GET_CODE (op) != SUBREG)
3131 return false;
3132 innermode = GET_MODE (op);
3133 innermostmode = GET_MODE (SUBREG_REG (op));
3134 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3135 /* The SUBREG_BYTE represents offset, as if the value were stored in
3136 memory, except for a paradoxical subreg where we define
3137 SUBREG_BYTE to be 0; undo this exception as in
3138 simplify_subreg. */
3139 if (SUBREG_BYTE (op) == 0
3140 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3142 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3143 if (WORDS_BIG_ENDIAN)
3144 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3145 if (BYTES_BIG_ENDIAN)
3146 offset += difference % UNITS_PER_WORD;
3148 if (offset >= GET_MODE_SIZE (innermostmode)
3149 || offset <= -GET_MODE_SIZE (word_mode))
3150 return true;
3151 return false;
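/* A worked example: for the paradoxical (subreg:TI (reg:DI R) 0) on a
   hypothetical 64-bit little-endian target, word 0 overlaps the DImode
   register (offset 0 < 8) while word 1 starts at offset 8, which is
   already past GET_MODE_SIZE (DImode), so the predicate returns true
   for I == 1 and emit_move_multi_word below emits no move for that
   word.  */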
3154 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3155 MODE is any multi-word or full-word mode that lacks a move_insn
3156 pattern. Note that you will get better code if you define such
3157 patterns, even if they must turn into multiple assembler instructions. */
3159 static rtx
3160 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3162 rtx last_insn = 0;
3163 rtx seq, inner;
3164 bool need_clobber;
3165 int i;
3167 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3169 /* If X is a push on the stack, do the push now and replace
3170 X with a reference to the stack pointer. */
3171 if (push_operand (x, mode))
3172 x = emit_move_resolve_push (mode, x);
3174 /* If we are in reload, see if either operand is a MEM whose address
3175 is scheduled for replacement. */
3176 if (reload_in_progress && MEM_P (x)
3177 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3178 x = replace_equiv_address_nv (x, inner);
3179 if (reload_in_progress && MEM_P (y)
3180 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3181 y = replace_equiv_address_nv (y, inner);
3183 start_sequence ();
3185 need_clobber = false;
3186 for (i = 0;
3187 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3188 i++)
3190 rtx xpart = operand_subword (x, i, 1, mode);
3191 rtx ypart;
3193 /* Do not generate code for a move if it would come entirely
3194 from the undefined bits of a paradoxical subreg. */
3195 if (undefined_operand_subword_p (y, i))
3196 continue;
3198 ypart = operand_subword (y, i, 1, mode);
3200 /* If we can't get a part of Y, put Y into memory if it is a
3201 constant. Otherwise, force it into a register. Then we must
3202 be able to get a part of Y. */
3203 if (ypart == 0 && CONSTANT_P (y))
3205 y = use_anchored_address (force_const_mem (mode, y));
3206 ypart = operand_subword (y, i, 1, mode);
3208 else if (ypart == 0)
3209 ypart = operand_subword_force (y, i, mode);
3211 gcc_assert (xpart && ypart);
3213 need_clobber |= (GET_CODE (xpart) == SUBREG);
3215 last_insn = emit_move_insn (xpart, ypart);
3218 seq = get_insns ();
3219 end_sequence ();
3221 /* Show the output dies here. This is necessary for SUBREGs
3222 of pseudos since we cannot track their lifetimes correctly;
3223 hard regs shouldn't appear here except as return values.
3224 We never want to emit such a clobber after reload. */
3225 if (x != y
3226 && ! (reload_in_progress || reload_completed)
3227 && need_clobber != 0)
3228 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3230 emit_insn (seq);
3232 return last_insn;
3235 /* Low level part of emit_move_insn.
3236 Called just like emit_move_insn, but assumes X and Y
3237 are basically valid. */
3240 emit_move_insn_1 (rtx x, rtx y)
3242 enum machine_mode mode = GET_MODE (x);
3243 enum insn_code code;
3245 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3247 code = mov_optab->handlers[mode].insn_code;
3248 if (code != CODE_FOR_nothing)
3249 return emit_insn (GEN_FCN (code) (x, y));
3251 /* Expand complex moves by moving real part and imag part. */
3252 if (COMPLEX_MODE_P (mode))
3253 return emit_move_complex (mode, x, y);
3255 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3257 rtx result = emit_move_via_integer (mode, x, y, true);
3259 /* If we can't find an integer mode, use multi words. */
3260 if (result)
3261 return result;
3262 else
3263 return emit_move_multi_word (mode, x, y);
3266 if (GET_MODE_CLASS (mode) == MODE_CC)
3267 return emit_move_ccmode (mode, x, y);
3269 /* Try using a move pattern for the corresponding integer mode. This is
3270 only safe when simplify_subreg can convert MODE constants into integer
3271 constants. At present, it can only do this reliably if the value
3272 fits within a HOST_WIDE_INT. */
3273 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3275 rtx ret = emit_move_via_integer (mode, x, y, false);
3276 if (ret)
3277 return ret;
3280 return emit_move_multi_word (mode, x, y);
3283 /* Generate code to copy Y into X.
3284 Both Y and X must have the same mode, except that
3285 Y can be a constant with VOIDmode.
3286 This mode cannot be BLKmode; use emit_block_move for that.
3288 Return the last instruction emitted. */
3291 emit_move_insn (rtx x, rtx y)
3293 enum machine_mode mode = GET_MODE (x);
3294 rtx y_cst = NULL_RTX;
3295 rtx last_insn, set;
3297 gcc_assert (mode != BLKmode
3298 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3300 if (CONSTANT_P (y))
3302 if (optimize
3303 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3304 && (last_insn = compress_float_constant (x, y)))
3305 return last_insn;
3307 y_cst = y;
3309 if (!LEGITIMATE_CONSTANT_P (y))
3311 y = force_const_mem (mode, y);
3313 /* If the target's cannot_force_const_mem prevented the spill,
3314 assume that the target's move expanders will also take care
3315 of the non-legitimate constant. */
3316 if (!y)
3317 y = y_cst;
3318 else
3319 y = use_anchored_address (y);
3323 /* If X or Y are memory references, verify that their addresses are valid
3324 for the machine. */
3325 if (MEM_P (x)
3326 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3327 && ! push_operand (x, GET_MODE (x)))
3328 || (flag_force_addr
3329 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3330 x = validize_mem (x);
3332 if (MEM_P (y)
3333 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3334 || (flag_force_addr
3335 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3336 y = validize_mem (y);
3338 gcc_assert (mode != BLKmode);
3340 last_insn = emit_move_insn_1 (x, y);
3342 if (y_cst && REG_P (x)
3343 && (set = single_set (last_insn)) != NULL_RTX
3344 && SET_DEST (set) == x
3345 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3346 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3348 return last_insn;
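/* An illustrative use of emit_move_insn: load the constant 42 into a
   fresh SImode pseudo and then spill it to a stack temporary of the
   same mode.  Both moves go through emit_move_insn_1 above once the
   operands have been validated.  */
#if 0
  rtx reg = gen_reg_rtx (SImode);
  rtx slot = assign_stack_temp (SImode, GET_MODE_SIZE (SImode), 0);

  emit_move_insn (reg, GEN_INT (42));
  emit_move_insn (slot, reg);
#endif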
3351 /* If Y is representable exactly in a narrower mode, and the target can
3352 perform the extension directly from constant or memory, then emit the
3353 move as an extension. */
3355 static rtx
3356 compress_float_constant (rtx x, rtx y)
3358 enum machine_mode dstmode = GET_MODE (x);
3359 enum machine_mode orig_srcmode = GET_MODE (y);
3360 enum machine_mode srcmode;
3361 REAL_VALUE_TYPE r;
3362 int oldcost, newcost;
3364 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3366 if (LEGITIMATE_CONSTANT_P (y))
3367 oldcost = rtx_cost (y, SET);
3368 else
3369 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3371 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3372 srcmode != orig_srcmode;
3373 srcmode = GET_MODE_WIDER_MODE (srcmode))
3375 enum insn_code ic;
3376 rtx trunc_y, last_insn;
3378 /* Skip if the target can't extend this way. */
3379 ic = can_extend_p (dstmode, srcmode, 0);
3380 if (ic == CODE_FOR_nothing)
3381 continue;
3383 /* Skip if the narrowed value isn't exact. */
3384 if (! exact_real_truncate (srcmode, &r))
3385 continue;
3387 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3389 if (LEGITIMATE_CONSTANT_P (trunc_y))
3391 /* Skip if the target needs extra instructions to perform
3392 the extension. */
3393 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3394 continue;
3395 /* This is valid, but may not be cheaper than the original. */
3396 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3397 if (oldcost < newcost)
3398 continue;
3400 else if (float_extend_from_mem[dstmode][srcmode])
3402 trunc_y = force_const_mem (srcmode, trunc_y);
3403 /* This is valid, but may not be cheaper than the original. */
3404 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3405 if (oldcost < newcost)
3406 continue;
3407 trunc_y = validize_mem (trunc_y);
3409 else
3410 continue;
3412 /* For CSE's benefit, force the compressed constant pool entry
3413 into a new pseudo. This constant may be used in different modes,
3414 and if not, combine will put things back together for us. */
3415 trunc_y = force_reg (srcmode, trunc_y);
3416 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3417 last_insn = get_last_insn ();
3419 if (REG_P (x))
3420 set_unique_reg_note (last_insn, REG_EQUAL, y);
3422 return last_insn;
3425 return NULL_RTX;
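/* A worked example of the exactness test above: the DFmode constant
   1.5 truncates to SFmode and back without loss, so it can be emitted
   as an SFmode constant plus a FLOAT_EXTEND when that is cheaper,
   whereas 0.1 is not exactly representable in SFmode and is skipped.
   The host-arithmetic analogue of exact_real_truncate, illustrative
   only:  */
#if 0
  double a = 1.5, b = 0.1;
  int a_exact = ((double) (float) a == a);   /* 1: extension form usable.  */
  int b_exact = ((double) (float) b == b);   /* 0: keep the wide constant.  */
#endif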
3428 /* Pushing data onto the stack. */
3430 /* Push a block of length SIZE (perhaps variable)
3431 and return an rtx to address the beginning of the block.
3432 The value may be virtual_outgoing_args_rtx.
3434 EXTRA is the number of bytes of padding to push in addition to SIZE.
3435 BELOW nonzero means this padding comes at low addresses;
3436 otherwise, the padding comes at high addresses. */
3439 push_block (rtx size, int extra, int below)
3441 rtx temp;
3443 size = convert_modes (Pmode, ptr_mode, size, 1);
3444 if (CONSTANT_P (size))
3445 anti_adjust_stack (plus_constant (size, extra));
3446 else if (REG_P (size) && extra == 0)
3447 anti_adjust_stack (size);
3448 else
3450 temp = copy_to_mode_reg (Pmode, size);
3451 if (extra != 0)
3452 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3453 temp, 0, OPTAB_LIB_WIDEN);
3454 anti_adjust_stack (temp);
3457 #ifndef STACK_GROWS_DOWNWARD
3458 if (0)
3459 #else
3460 if (1)
3461 #endif
3463 temp = virtual_outgoing_args_rtx;
3464 if (extra != 0 && below)
3465 temp = plus_constant (temp, extra);
3467 else
3469 if (GET_CODE (size) == CONST_INT)
3470 temp = plus_constant (virtual_outgoing_args_rtx,
3471 -INTVAL (size) - (below ? 0 : extra));
3472 else if (extra != 0 && !below)
3473 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3474 negate_rtx (Pmode, plus_constant (size, extra)));
3475 else
3476 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3477 negate_rtx (Pmode, size));
3480 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3483 #ifdef PUSH_ROUNDING
3485 /* Emit single push insn. */
3487 static void
3488 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3490 rtx dest_addr;
3491 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3492 rtx dest;
3493 enum insn_code icode;
3494 insn_operand_predicate_fn pred;
3496 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3497 /* If there is a push pattern, use it. Otherwise fall back to the old way:
3498 hand a MEM representing the push operation to the move expander. */
3499 icode = push_optab->handlers[(int) mode].insn_code;
3500 if (icode != CODE_FOR_nothing)
3502 if (((pred = insn_data[(int) icode].operand[0].predicate)
3503 && !((*pred) (x, mode))))
3504 x = force_reg (mode, x);
3505 emit_insn (GEN_FCN (icode) (x));
3506 return;
3508 if (GET_MODE_SIZE (mode) == rounded_size)
3509 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3510 /* If we are to pad downward, adjust the stack pointer first and
3511 then store X into the stack location using an offset. This is
3512 because emit_move_insn does not know how to pad; it does not have
3513 access to type. */
3514 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3516 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3517 HOST_WIDE_INT offset;
3519 emit_move_insn (stack_pointer_rtx,
3520 expand_binop (Pmode,
3521 #ifdef STACK_GROWS_DOWNWARD
3522 sub_optab,
3523 #else
3524 add_optab,
3525 #endif
3526 stack_pointer_rtx,
3527 GEN_INT (rounded_size),
3528 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3530 offset = (HOST_WIDE_INT) padding_size;
3531 #ifdef STACK_GROWS_DOWNWARD
3532 if (STACK_PUSH_CODE == POST_DEC)
3533 /* We have already decremented the stack pointer, so get the
3534 previous value. */
3535 offset += (HOST_WIDE_INT) rounded_size;
3536 #else
3537 if (STACK_PUSH_CODE == POST_INC)
3538 /* We have already incremented the stack pointer, so get the
3539 previous value. */
3540 offset -= (HOST_WIDE_INT) rounded_size;
3541 #endif
3542 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3544 else
3546 #ifdef STACK_GROWS_DOWNWARD
3547 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3548 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3549 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3550 #else
3551 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3552 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3553 GEN_INT (rounded_size));
3554 #endif
3555 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3558 dest = gen_rtx_MEM (mode, dest_addr);
3560 if (type != 0)
3562 set_mem_attributes (dest, type, 1);
3564 if (flag_optimize_sibling_calls)
3565 /* Function incoming arguments may overlap with sibling call
3566 outgoing arguments and we cannot allow reordering of reads
3567 from function arguments with stores to outgoing arguments
3568 of sibling calls. */
3569 set_mem_alias_set (dest, 0);
3571 emit_move_insn (dest, x);
3573 #endif
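/* A worked example of the downward-padding path above, with
   hypothetical numbers: on a STACK_GROWS_DOWNWARD target where
   STACK_PUSH_CODE is PRE_DEC and PUSH_ROUNDING rounds every push up to
   4 bytes, pushing a QImode argument padded downward gives
   rounded_size == 4 and padding_size == 3; the stack pointer is
   decremented by 4 explicitly and the byte is stored at sp + 3, so the
   three padding bytes sit below the data.  */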
3575 /* Generate code to push X onto the stack, assuming it has mode MODE and
3576 type TYPE.
3577 MODE is redundant except when X is a CONST_INT (since they don't
3578 carry mode info).
3579 SIZE is an rtx for the size of data to be copied (in bytes),
3580 needed only if X is BLKmode.
3582 ALIGN (in bits) is maximum alignment we can assume.
3584 If PARTIAL and REG are both nonzero, then copy that many of the first
3585 bytes of X into registers starting with REG, and push the rest of X.
3586 The amount of space pushed is decreased by PARTIAL bytes.
3587 REG must be a hard register in this case.
3588 If REG is zero but PARTIAL is not, take all other actions for an
3589 argument partially in registers, but do not actually load any
3590 registers.
3592 EXTRA is the amount in bytes of extra space to leave next to this arg.
3593 This is ignored if an argument block has already been allocated.
3595 On a machine that lacks real push insns, ARGS_ADDR is the address of
3596 the bottom of the argument block for this call. We use indexing off there
3597 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3598 argument block has not been preallocated.
3600 ARGS_SO_FAR is the size of args previously pushed for this call.
3602 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3603 for arguments passed in registers. If nonzero, it will be the number
3604 of bytes required. */
3606 void
3607 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3608 unsigned int align, int partial, rtx reg, int extra,
3609 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3610 rtx alignment_pad)
3612 rtx xinner;
3613 enum direction stack_direction
3614 #ifdef STACK_GROWS_DOWNWARD
3615 = downward;
3616 #else
3617 = upward;
3618 #endif
3620 /* Decide where to pad the argument: `downward' for below,
3621 `upward' for above, or `none' for don't pad it.
3622 Default is below for small data on big-endian machines; else above. */
3623 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3625 /* Invert direction if stack is post-decrement.
3626 FIXME: why? */
3627 if (STACK_PUSH_CODE == POST_DEC)
3628 if (where_pad != none)
3629 where_pad = (where_pad == downward ? upward : downward);
3631 xinner = x;
3633 if (mode == BLKmode
3634 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3636 /* Copy a block into the stack, entirely or partially. */
3638 rtx temp;
3639 int used;
3640 int offset;
3641 int skip;
3643 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3644 used = partial - offset;
3646 if (mode != BLKmode)
3648 /* A value is to be stored in an insufficiently aligned
3649 stack slot; copy via a suitably aligned slot if
3650 necessary. */
3651 size = GEN_INT (GET_MODE_SIZE (mode));
3652 if (!MEM_P (xinner))
3654 temp = assign_temp (type, 0, 1, 1);
3655 emit_move_insn (temp, xinner);
3656 xinner = temp;
3660 gcc_assert (size);
3662 /* USED is now the # of bytes we need not copy to the stack
3663 because registers will take care of them. */
3665 if (partial != 0)
3666 xinner = adjust_address (xinner, BLKmode, used);
3668 /* If the partial register-part of the arg counts in its stack size,
3669 skip the part of stack space corresponding to the registers.
3670 Otherwise, start copying to the beginning of the stack space,
3671 by setting SKIP to 0. */
3672 skip = (reg_parm_stack_space == 0) ? 0 : used;
3674 #ifdef PUSH_ROUNDING
3675 /* Do it with several push insns if that doesn't take lots of insns
3676 and if there is no difficulty with push insns that skip bytes
3677 on the stack for alignment purposes. */
3678 if (args_addr == 0
3679 && PUSH_ARGS
3680 && GET_CODE (size) == CONST_INT
3681 && skip == 0
3682 && MEM_ALIGN (xinner) >= align
3683 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3684 /* Here we avoid the case of a structure whose weak alignment
3685 forces many pushes of a small amount of data,
3686 and such small pushes do rounding that causes trouble. */
3687 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3688 || align >= BIGGEST_ALIGNMENT
3689 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3690 == (align / BITS_PER_UNIT)))
3691 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3693 /* Push padding now if padding above and stack grows down,
3694 or if padding below and stack grows up.
3695 But if space already allocated, this has already been done. */
3696 if (extra && args_addr == 0
3697 && where_pad != none && where_pad != stack_direction)
3698 anti_adjust_stack (GEN_INT (extra));
3700 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3702 else
3703 #endif /* PUSH_ROUNDING */
3705 rtx target;
3707 /* Otherwise make space on the stack and copy the data
3708 to the address of that space. */
3710 /* Deduct words put into registers from the size we must copy. */
3711 if (partial != 0)
3713 if (GET_CODE (size) == CONST_INT)
3714 size = GEN_INT (INTVAL (size) - used);
3715 else
3716 size = expand_binop (GET_MODE (size), sub_optab, size,
3717 GEN_INT (used), NULL_RTX, 0,
3718 OPTAB_LIB_WIDEN);
3721 /* Get the address of the stack space.
3722 In this case, we do not deal with EXTRA separately.
3723 A single stack adjust will do. */
3724 if (! args_addr)
3726 temp = push_block (size, extra, where_pad == downward);
3727 extra = 0;
3729 else if (GET_CODE (args_so_far) == CONST_INT)
3730 temp = memory_address (BLKmode,
3731 plus_constant (args_addr,
3732 skip + INTVAL (args_so_far)));
3733 else
3734 temp = memory_address (BLKmode,
3735 plus_constant (gen_rtx_PLUS (Pmode,
3736 args_addr,
3737 args_so_far),
3738 skip));
3740 if (!ACCUMULATE_OUTGOING_ARGS)
3742 /* If the source is referenced relative to the stack pointer,
3743 copy it to another register to stabilize it. We do not need
3744 to do this if we know that we won't be changing sp. */
3746 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3747 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3748 temp = copy_to_reg (temp);
3751 target = gen_rtx_MEM (BLKmode, temp);
3753 /* We do *not* set_mem_attributes here, because incoming arguments
3754 may overlap with sibling call outgoing arguments and we cannot
3755 allow reordering of reads from function arguments with stores
3756 to outgoing arguments of sibling calls. We do, however, want
3757 to record the alignment of the stack slot. */
3758 /* ALIGN may well be better aligned than TYPE, e.g. due to
3759 PARM_BOUNDARY. Assume the caller isn't lying. */
3760 set_mem_align (target, align);
3762 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3765 else if (partial > 0)
3767 /* Scalar partly in registers. */
3769 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3770 int i;
3771 int not_stack;
3772 /* # bytes of start of argument
3773 that we must make space for but need not store. */
3774 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3775 int args_offset = INTVAL (args_so_far);
3776 int skip;
3778 /* Push padding now if padding above and stack grows down,
3779 or if padding below and stack grows up.
3780 But if space already allocated, this has already been done. */
3781 if (extra && args_addr == 0
3782 && where_pad != none && where_pad != stack_direction)
3783 anti_adjust_stack (GEN_INT (extra));
3785 /* If we make space by pushing it, we might as well push
3786 the real data. Otherwise, we can leave OFFSET nonzero
3787 and leave the space uninitialized. */
3788 if (args_addr == 0)
3789 offset = 0;
3791 /* Now NOT_STACK gets the number of words that we don't need to
3792 allocate on the stack. Convert OFFSET to words too. */
3793 not_stack = (partial - offset) / UNITS_PER_WORD;
3794 offset /= UNITS_PER_WORD;
3796 /* If the partial register-part of the arg counts in its stack size,
3797 skip the part of stack space corresponding to the registers.
3798 Otherwise, start copying to the beginning of the stack space,
3799 by setting SKIP to 0. */
3800 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3802 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3803 x = validize_mem (force_const_mem (mode, x));
3805 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3806 SUBREGs of such registers are not allowed. */
3807 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3808 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3809 x = copy_to_reg (x);
3811 /* Loop over all the words allocated on the stack for this arg. */
3812 /* We can do it by words, because any scalar bigger than a word
3813 has a size a multiple of a word. */
3814 #ifndef PUSH_ARGS_REVERSED
3815 for (i = not_stack; i < size; i++)
3816 #else
3817 for (i = size - 1; i >= not_stack; i--)
3818 #endif
3819 if (i >= not_stack + offset)
3820 emit_push_insn (operand_subword_force (x, i, mode),
3821 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3822 0, args_addr,
3823 GEN_INT (args_offset + ((i - not_stack + skip)
3824 * UNITS_PER_WORD)),
3825 reg_parm_stack_space, alignment_pad);
3827 else
3829 rtx addr;
3830 rtx dest;
3832 /* Push padding now if padding above and stack grows down,
3833 or if padding below and stack grows up.
3834 But if space already allocated, this has already been done. */
3835 if (extra && args_addr == 0
3836 && where_pad != none && where_pad != stack_direction)
3837 anti_adjust_stack (GEN_INT (extra));
3839 #ifdef PUSH_ROUNDING
3840 if (args_addr == 0 && PUSH_ARGS)
3841 emit_single_push_insn (mode, x, type);
3842 else
3843 #endif
3845 if (GET_CODE (args_so_far) == CONST_INT)
3846 addr
3847 = memory_address (mode,
3848 plus_constant (args_addr,
3849 INTVAL (args_so_far)));
3850 else
3851 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3852 args_so_far));
3853 dest = gen_rtx_MEM (mode, addr);
3855 /* We do *not* set_mem_attributes here, because incoming arguments
3856 may overlap with sibling call outgoing arguments and we cannot
3857 allow reordering of reads from function arguments with stores
3858 to outgoing arguments of sibling calls. We do, however, want
3859 to record the alignment of the stack slot. */
3860 /* ALIGN may well be better aligned than TYPE, e.g. due to
3861 PARM_BOUNDARY. Assume the caller isn't lying. */
3862 set_mem_align (dest, align);
3864 emit_move_insn (dest, x);
3868 /* If part should go in registers, copy that part
3869 into the appropriate registers. Do this now, at the end,
3870 since mem-to-mem copies above may do function calls. */
3871 if (partial > 0 && reg != 0)
3873 /* Handle calls that pass values in multiple non-contiguous locations.
3874 The Irix 6 ABI has examples of this. */
3875 if (GET_CODE (reg) == PARALLEL)
3876 emit_group_load (reg, x, type, -1);
3877 else
3879 gcc_assert (partial % UNITS_PER_WORD == 0);
3880 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3884 if (extra && args_addr == 0 && where_pad == stack_direction)
3885 anti_adjust_stack (GEN_INT (extra));
3887 if (alignment_pad && args_addr == 0)
3888 anti_adjust_stack (alignment_pad);
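/* Illustrative sketch, not part of expr.c: the word-by-word loop in
   emit_push_insn above walks the scalar in one of two directions depending
   on PUSH_ARGS_REVERSED, skipping the leading words that registers already
   cover.  The names NWORDS, push_word and push_scalar_by_words below are
   invented for this example.  */

extern void push_word (unsigned long);   /* stands in for one emit_push_insn */

#define NWORDS 4                         /* pretend the scalar is four words */

void
push_scalar_by_words (const unsigned long *words, int not_stack, int reversed)
{
  int i;

  if (!reversed)                       /* !PUSH_ARGS_REVERSED: first to last */
    for (i = not_stack; i < NWORDS; i++)
      push_word (words[i]);
  else                                 /* PUSH_ARGS_REVERSED: last to first */
    for (i = NWORDS - 1; i >= not_stack; i--)
      push_word (words[i]);
}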
3891 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3892 operations. */
3894 static rtx
3895 get_subtarget (rtx x)
3897 return (optimize
3898 || x == 0
3899 /* Only registers can be subtargets. */
3900 || !REG_P (x)
3901 /* Don't use hard regs to avoid extending their life. */
3902 || REGNO (x) < FIRST_PSEUDO_REGISTER
3903 ? 0 : x);
3906 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3907 FIELD is a bitfield. Returns true if the optimization was successful,
3908 and there's nothing else to do. */
3910 static bool
3911 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3912 unsigned HOST_WIDE_INT bitpos,
3913 enum machine_mode mode1, rtx str_rtx,
3914 tree to, tree src)
3916 enum machine_mode str_mode = GET_MODE (str_rtx);
3917 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3918 tree op0, op1;
3919 rtx value, result;
3920 optab binop;
3922 if (mode1 != VOIDmode
3923 || bitsize >= BITS_PER_WORD
3924 || str_bitsize > BITS_PER_WORD
3925 || TREE_SIDE_EFFECTS (to)
3926 || TREE_THIS_VOLATILE (to))
3927 return false;
3929 STRIP_NOPS (src);
3930 if (!BINARY_CLASS_P (src)
3931 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3932 return false;
3934 op0 = TREE_OPERAND (src, 0);
3935 op1 = TREE_OPERAND (src, 1);
3936 STRIP_NOPS (op0);
3938 if (!operand_equal_p (to, op0, 0))
3939 return false;
3941 if (MEM_P (str_rtx))
3943 unsigned HOST_WIDE_INT offset1;
3945 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3946 str_mode = word_mode;
3947 str_mode = get_best_mode (bitsize, bitpos,
3948 MEM_ALIGN (str_rtx), str_mode, 0);
3949 if (str_mode == VOIDmode)
3950 return false;
3951 str_bitsize = GET_MODE_BITSIZE (str_mode);
3953 offset1 = bitpos;
3954 bitpos %= str_bitsize;
3955 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3956 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3958 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3959 return false;
3961 /* If the bit field covers the whole REG/MEM, store_field
3962 will likely generate better code. */
3963 if (bitsize >= str_bitsize)
3964 return false;
3966 /* We can't handle fields split across multiple entities. */
3967 if (bitpos + bitsize > str_bitsize)
3968 return false;
3970 if (BYTES_BIG_ENDIAN)
3971 bitpos = str_bitsize - bitpos - bitsize;
3973 switch (TREE_CODE (src))
3975 case PLUS_EXPR:
3976 case MINUS_EXPR:
3977 /* For now, just optimize the case of the topmost bitfield
3978 where we don't need to do any masking and also
3979 1-bit bitfields where xor can be used.
3980 We might win by one instruction for the other bitfields
3981 too if insv/extv instructions aren't used, so that
3982 can be added later. */
3983 if (bitpos + bitsize != str_bitsize
3984 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3985 break;
3987 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3988 value = convert_modes (str_mode,
3989 TYPE_MODE (TREE_TYPE (op1)), value,
3990 TYPE_UNSIGNED (TREE_TYPE (op1)));
3992 /* We may be accessing data outside the field, which means
3993 we can alias adjacent data. */
3994 if (MEM_P (str_rtx))
3996 str_rtx = shallow_copy_rtx (str_rtx);
3997 set_mem_alias_set (str_rtx, 0);
3998 set_mem_expr (str_rtx, 0);
4001 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4002 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4004 value = expand_and (str_mode, value, const1_rtx, NULL);
4005 binop = xor_optab;
4007 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4008 build_int_cst (NULL_TREE, bitpos),
4009 NULL_RTX, 1);
4010 result = expand_binop (str_mode, binop, str_rtx,
4011 value, str_rtx, 1, OPTAB_WIDEN);
4012 if (result != str_rtx)
4013 emit_move_insn (str_rtx, result);
4014 return true;
4016 case BIT_IOR_EXPR:
4017 case BIT_XOR_EXPR:
4018 if (TREE_CODE (op1) != INTEGER_CST)
4019 break;
4020 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
4021 value = convert_modes (GET_MODE (str_rtx),
4022 TYPE_MODE (TREE_TYPE (op1)), value,
4023 TYPE_UNSIGNED (TREE_TYPE (op1)));
4025 /* We may be accessing data outside the field, which means
4026 we can alias adjacent data. */
4027 if (MEM_P (str_rtx))
4029 str_rtx = shallow_copy_rtx (str_rtx);
4030 set_mem_alias_set (str_rtx, 0);
4031 set_mem_expr (str_rtx, 0);
4034 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4035 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4037 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4038 - 1);
4039 value = expand_and (GET_MODE (str_rtx), value, mask,
4040 NULL_RTX);
4042 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4043 build_int_cst (NULL_TREE, bitpos),
4044 NULL_RTX, 1);
4045 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4046 value, str_rtx, 1, OPTAB_WIDEN);
4047 if (result != str_rtx)
4048 emit_move_insn (str_rtx, result);
4049 return true;
4051 default:
4052 break;
4055 return false;
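/* Illustrative example, not part of expr.c: the kinds of source-level
   bitfield updates the routine above is designed to catch without a full
   read-modify-write of the field, assuming a typical little-endian layout
   where COUNT ends at the top of the storage unit.  The struct and function
   names are invented for this example.  */

struct bf_flags
{
  unsigned int mode : 3;      /* low bits                                 */
  unsigned int dirty : 1;     /* single-bit field: the xor trick applies  */
  unsigned int count : 28;    /* topmost field: no masking is needed      */
};

void
bf_touch (struct bf_flags *f)
{
  f->count += 1;              /* PLUS_EXPR on the topmost bitfield        */
  f->dirty ^= 1;              /* 1-bit field updated with a single xor    */
  f->mode |= 4;               /* BIT_IOR_EXPR with a constant operand     */
}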
4059 /* Expand an assignment that stores the value of FROM into TO. */
4061 void
4062 expand_assignment (tree to, tree from)
4064 rtx to_rtx = 0;
4065 rtx result;
4067 /* Don't crash if the lhs of the assignment was erroneous. */
4068 if (TREE_CODE (to) == ERROR_MARK)
4070 result = expand_normal (from);
4071 return;
4074 /* Optimize away no-op moves without side-effects. */
4075 if (operand_equal_p (to, from, 0))
4076 return;
4078 /* Assignment of a structure component needs special treatment
4079 if the structure component's rtx is not simply a MEM.
4080 Assignment of an array element at a constant index, and assignment of
4081 an array element in an unaligned packed structure field, has the same
4082 problem. */
4083 if (handled_component_p (to)
4084 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4086 enum machine_mode mode1;
4087 HOST_WIDE_INT bitsize, bitpos;
4088 tree offset;
4089 int unsignedp;
4090 int volatilep = 0;
4091 tree tem;
4093 push_temp_slots ();
4094 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4095 &unsignedp, &volatilep, true);
4097 /* If we are going to use store_bit_field and extract_bit_field,
4098 make sure to_rtx will be safe for multiple use. */
4100 to_rtx = expand_normal (tem);
4102 if (offset != 0)
4104 rtx offset_rtx;
4106 if (!MEM_P (to_rtx))
4108 /* We can get constant negative offsets into arrays with broken
4109 user code. Translate this to a trap instead of ICEing. */
4110 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4111 expand_builtin_trap ();
4112 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4115 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4116 #ifdef POINTERS_EXTEND_UNSIGNED
4117 if (GET_MODE (offset_rtx) != Pmode)
4118 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4119 #else
4120 if (GET_MODE (offset_rtx) != ptr_mode)
4121 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4122 #endif
4124 /* A constant address in TO_RTX can have VOIDmode, we must not try
4125 to call force_reg for that case. Avoid that case. */
4126 if (MEM_P (to_rtx)
4127 && GET_MODE (to_rtx) == BLKmode
4128 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4129 && bitsize > 0
4130 && (bitpos % bitsize) == 0
4131 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4132 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4134 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4135 bitpos = 0;
4138 to_rtx = offset_address (to_rtx, offset_rtx,
4139 highest_pow2_factor_for_target (to,
4140 offset));
4143 /* Handle expand_expr of a complex value returning a CONCAT. */
4144 if (GET_CODE (to_rtx) == CONCAT)
4146 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4148 gcc_assert (bitpos == 0);
4149 result = store_expr (from, to_rtx, false);
4151 else
4153 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4154 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
4157 else
4159 if (MEM_P (to_rtx))
4161 /* If the field is at offset zero, we could have been given the
4162 DECL_RTX of the parent struct. Don't munge it. */
4163 to_rtx = shallow_copy_rtx (to_rtx);
4165 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4167 /* Deal with volatile and readonly fields. The former is only
4168 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4169 if (volatilep)
4170 MEM_VOLATILE_P (to_rtx) = 1;
4171 if (component_uses_parent_alias_set (to))
4172 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4175 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4176 to_rtx, to, from))
4177 result = NULL;
4178 else
4179 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4180 TREE_TYPE (tem), get_alias_set (to));
4183 if (result)
4184 preserve_temp_slots (result);
4185 free_temp_slots ();
4186 pop_temp_slots ();
4187 return;
4190 /* If the rhs is a function call and its value is not an aggregate,
4191 call the function before we start to compute the lhs.
4192 This is needed for correct code for cases such as
4193 val = setjmp (buf) on machines where reference to val
4194 requires loading up part of an address in a separate insn.
4196 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4197 since it might be a promoted variable where the zero- or sign- extension
4198 needs to be done. Handling this in the normal way is safe because no
4199 computation is done before the call. */
4200 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4201 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4202 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4203 && REG_P (DECL_RTL (to))))
4205 rtx value;
4207 push_temp_slots ();
4208 value = expand_normal (from);
4209 if (to_rtx == 0)
4210 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4212 /* Handle calls that return values in multiple non-contiguous locations.
4213 The Irix 6 ABI has examples of this. */
4214 if (GET_CODE (to_rtx) == PARALLEL)
4215 emit_group_load (to_rtx, value, TREE_TYPE (from),
4216 int_size_in_bytes (TREE_TYPE (from)));
4217 else if (GET_MODE (to_rtx) == BLKmode)
4218 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4219 else
4221 if (POINTER_TYPE_P (TREE_TYPE (to)))
4222 value = convert_memory_address (GET_MODE (to_rtx), value);
4223 emit_move_insn (to_rtx, value);
4225 preserve_temp_slots (to_rtx);
4226 free_temp_slots ();
4227 pop_temp_slots ();
4228 return;
4231 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4232 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4234 if (to_rtx == 0)
4235 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4237 /* Don't move directly into a return register. */
4238 if (TREE_CODE (to) == RESULT_DECL
4239 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4241 rtx temp;
4243 push_temp_slots ();
4244 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4246 if (GET_CODE (to_rtx) == PARALLEL)
4247 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4248 int_size_in_bytes (TREE_TYPE (from)));
4249 else
4250 emit_move_insn (to_rtx, temp);
4252 preserve_temp_slots (to_rtx);
4253 free_temp_slots ();
4254 pop_temp_slots ();
4255 return;
4258 /* In case we are returning the contents of an object which overlaps
4259 the place the value is being stored, use a safe function when copying
4260 a value through a pointer into a structure value return block. */
4261 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4262 && current_function_returns_struct
4263 && !current_function_returns_pcc_struct)
4265 rtx from_rtx, size;
4267 push_temp_slots ();
4268 size = expr_size (from);
4269 from_rtx = expand_normal (from);
4271 emit_library_call (memmove_libfunc, LCT_NORMAL,
4272 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4273 XEXP (from_rtx, 0), Pmode,
4274 convert_to_mode (TYPE_MODE (sizetype),
4275 size, TYPE_UNSIGNED (sizetype)),
4276 TYPE_MODE (sizetype));
4278 preserve_temp_slots (to_rtx);
4279 free_temp_slots ();
4280 pop_temp_slots ();
4281 return;
4284 /* Compute FROM and store the value in the rtx we got. */
4286 push_temp_slots ();
4287 result = store_expr (from, to_rtx, 0);
4288 preserve_temp_slots (result);
4289 free_temp_slots ();
4290 pop_temp_slots ();
4291 return;
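/* Illustrative example, not part of expr.c: the memmove path above covers
   returns of the form below, where the pointed-to object may overlap the
   structure-value return block (for instance when the caller passes the
   address of the very variable receiving the result).  The names are
   invented for this example.  */

struct big_result
{
  int v[32];
};

struct big_result
return_through_pointer (struct big_result *p)
{
  return *p;   /* RESULT_DECL = INDIRECT_REF: copied with memmove, not memcpy */
}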
4294 /* Generate code for computing expression EXP,
4295 and storing the value into TARGET.
4297 If the mode is BLKmode then we may return TARGET itself.
4298 It turns out that in BLKmode it doesn't cause a problem,
4299 because C has no operators that could combine two different
4300 assignments into the same BLKmode object with different values
4301 with no sequence point. Will other languages need this to
4302 be more thorough?
4304 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4305 stack, and block moves may need to be treated specially. */
4307 rtx
4308 store_expr (tree exp, rtx target, int call_param_p)
4310 rtx temp;
4311 rtx alt_rtl = NULL_RTX;
4312 int dont_return_target = 0;
4314 if (VOID_TYPE_P (TREE_TYPE (exp)))
4316 /* C++ can generate ?: expressions with a throw expression in one
4317 branch and an rvalue in the other. Here, we resolve attempts to
4318 store the throw expression's nonexistent result. */
4319 gcc_assert (!call_param_p);
4320 expand_expr (exp, const0_rtx, VOIDmode, 0);
4321 return NULL_RTX;
4323 if (TREE_CODE (exp) == COMPOUND_EXPR)
4325 /* Perform first part of compound expression, then assign from second
4326 part. */
4327 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4328 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4329 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4331 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4333 /* For conditional expression, get safe form of the target. Then
4334 test the condition, doing the appropriate assignment on either
4335 side. This avoids the creation of unnecessary temporaries.
4336 For non-BLKmode, it is more efficient not to do this. */
4338 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4340 do_pending_stack_adjust ();
4341 NO_DEFER_POP;
4342 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4343 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4344 emit_jump_insn (gen_jump (lab2));
4345 emit_barrier ();
4346 emit_label (lab1);
4347 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4348 emit_label (lab2);
4349 OK_DEFER_POP;
4351 return NULL_RTX;
4353 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4354 /* If this is a scalar in a register that is stored in a wider mode
4355 than the declared mode, compute the result into its declared mode
4356 and then convert to the wider mode. Our value is the computed
4357 expression. */
4359 rtx inner_target = 0;
4361 /* We can do the conversion inside EXP, which will often result
4362 in some optimizations. Do the conversion in two steps: first
4363 change the signedness, if needed, then the extend. But don't
4364 do this if the type of EXP is a subtype of something else
4365 since then the conversion might involve more than just
4366 converting modes. */
4367 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4368 && TREE_TYPE (TREE_TYPE (exp)) == 0
4369 && (!lang_hooks.reduce_bit_field_operations
4370 || (GET_MODE_PRECISION (GET_MODE (target))
4371 == TYPE_PRECISION (TREE_TYPE (exp)))))
4373 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4374 != SUBREG_PROMOTED_UNSIGNED_P (target))
4375 exp = fold_convert
4376 (lang_hooks.types.signed_or_unsigned_type
4377 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4379 exp = fold_convert (lang_hooks.types.type_for_mode
4380 (GET_MODE (SUBREG_REG (target)),
4381 SUBREG_PROMOTED_UNSIGNED_P (target)),
4382 exp);
4384 inner_target = SUBREG_REG (target);
4387 temp = expand_expr (exp, inner_target, VOIDmode,
4388 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4390 /* If TEMP is a VOIDmode constant, use convert_modes to make
4391 sure that we properly convert it. */
4392 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4394 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4395 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4396 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4397 GET_MODE (target), temp,
4398 SUBREG_PROMOTED_UNSIGNED_P (target));
4401 convert_move (SUBREG_REG (target), temp,
4402 SUBREG_PROMOTED_UNSIGNED_P (target));
4404 return NULL_RTX;
4406 else
4408 temp = expand_expr_real (exp, target, GET_MODE (target),
4409 (call_param_p
4410 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4411 &alt_rtl);
4412 /* Return TARGET if it's a specified hardware register.
4413 If TARGET is a volatile mem ref, either return TARGET
4414 or return a reg copied *from* TARGET; ANSI requires this.
4416 Otherwise, if TEMP is not TARGET, return TEMP
4417 if it is constant (for efficiency),
4418 or if we really want the correct value. */
4419 if (!(target && REG_P (target)
4420 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4421 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4422 && ! rtx_equal_p (temp, target)
4423 && CONSTANT_P (temp))
4424 dont_return_target = 1;
4427 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4428 the same as that of TARGET, adjust the constant. This is needed, for
4429 example, in case it is a CONST_DOUBLE and we want only a word-sized
4430 value. */
4431 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4432 && TREE_CODE (exp) != ERROR_MARK
4433 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4434 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4435 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4437 /* If value was not generated in the target, store it there.
4438 Convert the value to TARGET's type first if necessary and emit the
4439 pending incrementations that have been queued when expanding EXP.
4440 Note that we cannot emit the whole queue blindly because this will
4441 effectively disable the POST_INC optimization later.
4443 If TEMP and TARGET compare equal according to rtx_equal_p, but
4444 one or both of them are volatile memory refs, we have to distinguish
4445 two cases:
4446 - expand_expr has used TARGET. In this case, we must not generate
4447 another copy. This can be detected by TARGET being equal according
4448 to == .
4449 - expand_expr has not used TARGET - that means that the source just
4450 happens to have the same RTX form. Since temp will have been created
4451 by expand_expr, it will compare unequal according to == .
4452 We must generate a copy in this case, to reach the correct number
4453 of volatile memory references. */
4455 if ((! rtx_equal_p (temp, target)
4456 || (temp != target && (side_effects_p (temp)
4457 || side_effects_p (target))))
4458 && TREE_CODE (exp) != ERROR_MARK
4459 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4460 but TARGET is not valid memory reference, TEMP will differ
4461 from TARGET although it is really the same location. */
4462 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4463 /* If there's nothing to copy, don't bother. Don't call
4464 expr_size unless necessary, because some front-ends (C++)
4465 expr_size-hook must not be given objects that are not
4466 supposed to be bit-copied or bit-initialized. */
4467 && expr_size (exp) != const0_rtx)
4469 if (GET_MODE (temp) != GET_MODE (target)
4470 && GET_MODE (temp) != VOIDmode)
4472 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4473 if (dont_return_target)
4475 /* In this case, we will return TEMP,
4476 so make sure it has the proper mode.
4477 But don't forget to store the value into TARGET. */
4478 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4479 emit_move_insn (target, temp);
4481 else
4482 convert_move (target, temp, unsignedp);
4485 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4487 /* Handle copying a string constant into an array. The string
4488 constant may be shorter than the array. So copy just the string's
4489 actual length, and clear the rest. First get the size of the data
4490 type of the string, which is actually the size of the target. */
4491 rtx size = expr_size (exp);
4493 if (GET_CODE (size) == CONST_INT
4494 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4495 emit_block_move (target, temp, size,
4496 (call_param_p
4497 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4498 else
4500 /* Compute the size of the data to copy from the string. */
4501 tree copy_size
4502 = size_binop (MIN_EXPR,
4503 make_tree (sizetype, size),
4504 size_int (TREE_STRING_LENGTH (exp)));
4505 rtx copy_size_rtx
4506 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4507 (call_param_p
4508 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4509 rtx label = 0;
4511 /* Copy that much. */
4512 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4513 TYPE_UNSIGNED (sizetype));
4514 emit_block_move (target, temp, copy_size_rtx,
4515 (call_param_p
4516 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4518 /* Figure out how much is left in TARGET that we have to clear.
4519 Do all calculations in ptr_mode. */
4520 if (GET_CODE (copy_size_rtx) == CONST_INT)
4522 size = plus_constant (size, -INTVAL (copy_size_rtx));
4523 target = adjust_address (target, BLKmode,
4524 INTVAL (copy_size_rtx));
4526 else
4528 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4529 copy_size_rtx, NULL_RTX, 0,
4530 OPTAB_LIB_WIDEN);
4532 #ifdef POINTERS_EXTEND_UNSIGNED
4533 if (GET_MODE (copy_size_rtx) != Pmode)
4534 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4535 TYPE_UNSIGNED (sizetype));
4536 #endif
4538 target = offset_address (target, copy_size_rtx,
4539 highest_pow2_factor (copy_size));
4540 label = gen_label_rtx ();
4541 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4542 GET_MODE (size), 0, label);
4545 if (size != const0_rtx)
4546 clear_storage (target, size, BLOCK_OP_NORMAL);
4548 if (label)
4549 emit_label (label);
4552 /* Handle calls that return values in multiple non-contiguous locations.
4553 The Irix 6 ABI has examples of this. */
4554 else if (GET_CODE (target) == PARALLEL)
4555 emit_group_load (target, temp, TREE_TYPE (exp),
4556 int_size_in_bytes (TREE_TYPE (exp)));
4557 else if (GET_MODE (temp) == BLKmode)
4558 emit_block_move (target, temp, expr_size (exp),
4559 (call_param_p
4560 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4561 else
4563 temp = force_operand (temp, target);
4564 if (temp != target)
4565 emit_move_insn (target, temp);
4569 return NULL_RTX;
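/* Illustrative example, not part of expr.c: the STRING_CST branch above
   handles initializations like the one below, copying only the string's own
   bytes and clearing the tail of the array.  The names are invented for this
   example.  */

extern void use_buffer (char *);

void
string_into_array (void)
{
  char buf[16] = "abcd";   /* the 5 string bytes (including the NUL) are
                              copied; the remaining 11 bytes are cleared */
  use_buffer (buf);
}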
4572 /* Helper for categorize_ctor_elements. Identical interface. */
4574 static bool
4575 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4576 HOST_WIDE_INT *p_elt_count,
4577 bool *p_must_clear)
4579 unsigned HOST_WIDE_INT idx;
4580 HOST_WIDE_INT nz_elts, elt_count;
4581 tree value, purpose;
4583 /* Whether CTOR is a valid constant initializer, in accordance with what
4584 initializer_constant_valid_p does. If inferred from the constructor
4585 elements, true until proven otherwise. */
4586 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4587 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4589 nz_elts = 0;
4590 elt_count = 0;
4592 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4594 HOST_WIDE_INT mult;
4596 mult = 1;
4597 if (TREE_CODE (purpose) == RANGE_EXPR)
4599 tree lo_index = TREE_OPERAND (purpose, 0);
4600 tree hi_index = TREE_OPERAND (purpose, 1);
4602 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4603 mult = (tree_low_cst (hi_index, 1)
4604 - tree_low_cst (lo_index, 1) + 1);
4607 switch (TREE_CODE (value))
4609 case CONSTRUCTOR:
4611 HOST_WIDE_INT nz = 0, ic = 0;
4613 bool const_elt_p
4614 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4616 nz_elts += mult * nz;
4617 elt_count += mult * ic;
4619 if (const_from_elts_p && const_p)
4620 const_p = const_elt_p;
4622 break;
4624 case INTEGER_CST:
4625 case REAL_CST:
4626 if (!initializer_zerop (value))
4627 nz_elts += mult;
4628 elt_count += mult;
4629 break;
4631 case STRING_CST:
4632 nz_elts += mult * TREE_STRING_LENGTH (value);
4633 elt_count += mult * TREE_STRING_LENGTH (value);
4634 break;
4636 case COMPLEX_CST:
4637 if (!initializer_zerop (TREE_REALPART (value)))
4638 nz_elts += mult;
4639 if (!initializer_zerop (TREE_IMAGPART (value)))
4640 nz_elts += mult;
4641 elt_count += mult;
4642 break;
4644 case VECTOR_CST:
4646 tree v;
4647 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4649 if (!initializer_zerop (TREE_VALUE (v)))
4650 nz_elts += mult;
4651 elt_count += mult;
4654 break;
4656 default:
4657 nz_elts += mult;
4658 elt_count += mult;
4660 if (const_from_elts_p && const_p)
4661 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4662 != NULL_TREE;
4663 break;
4667 if (!*p_must_clear
4668 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4669 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4671 tree init_sub_type;
4672 bool clear_this = true;
4674 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4676 /* We don't expect more than one element of the union to be
4677 initialized. Not sure what we should do otherwise... */
4678 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4679 == 1);
4681 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4682 CONSTRUCTOR_ELTS (ctor),
4683 0)->value);
4685 /* ??? We could look at each element of the union, and find the
4686 largest element. Which would avoid comparing the size of the
4687 initialized element against any tail padding in the union.
4688 Doesn't seem worth the effort... */
4689 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4690 TYPE_SIZE (init_sub_type)) == 1)
4692 /* And now we have to find out if the element itself is fully
4693 constructed. E.g. for union { struct { int a, b; } s; } u
4694 = { .s = { .a = 1 } }. */
4695 if (elt_count == count_type_elements (init_sub_type, false))
4696 clear_this = false;
4700 *p_must_clear = clear_this;
4703 *p_nz_elts += nz_elts;
4704 *p_elt_count += elt_count;
4706 return const_p;
4709 /* Examine CTOR to discover:
4710 * how many scalar fields are set to nonzero values,
4711 and place it in *P_NZ_ELTS;
4712 * how many scalar fields in total are in CTOR,
4713 and place it in *P_ELT_COUNT.
4714 * if a type is a union, and the initializer from the constructor
4715 is not the largest element in the union, then set *p_must_clear.
4717 Return whether or not CTOR is a valid static constant initializer, the same
4718 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4720 bool
4721 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4722 HOST_WIDE_INT *p_elt_count,
4723 bool *p_must_clear)
4725 *p_nz_elts = 0;
4726 *p_elt_count = 0;
4727 *p_must_clear = false;
4729 return
4730 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
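/* Illustrative example, not part of expr.c: for the constructor below,
   categorize_ctor_elements would report an element count of 4, two nonzero
   elements, and a valid static constant initializer.  The variable name is
   invented for this example.  */

int ctor_example[4] = { 0, 3, 0, 7 };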
4733 /* Count the number of scalars in TYPE. Return -1 on overflow or if
4734 TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a flexible
4735 array member at the end of the structure. */
4737 HOST_WIDE_INT
4738 count_type_elements (tree type, bool allow_flexarr)
4740 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4741 switch (TREE_CODE (type))
4743 case ARRAY_TYPE:
4745 tree telts = array_type_nelts (type);
4746 if (telts && host_integerp (telts, 1))
4748 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4749 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4750 if (n == 0)
4751 return 0;
4752 else if (max / n > m)
4753 return n * m;
4755 return -1;
4758 case RECORD_TYPE:
4760 HOST_WIDE_INT n = 0, t;
4761 tree f;
4763 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4764 if (TREE_CODE (f) == FIELD_DECL)
4766 t = count_type_elements (TREE_TYPE (f), false);
4767 if (t < 0)
4769 /* Check for structures with flexible array member. */
4770 tree tf = TREE_TYPE (f);
4771 if (allow_flexarr
4772 && TREE_CHAIN (f) == NULL
4773 && TREE_CODE (tf) == ARRAY_TYPE
4774 && TYPE_DOMAIN (tf)
4775 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4776 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4777 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4778 && int_size_in_bytes (type) >= 0)
4779 break;
4781 return -1;
4783 n += t;
4786 return n;
4789 case UNION_TYPE:
4790 case QUAL_UNION_TYPE:
4792 /* Ho hum. How in the world do we guess here? Clearly it isn't
4793 right to count the fields. Guess based on the number of words. */
4794 HOST_WIDE_INT n = int_size_in_bytes (type);
4795 if (n < 0)
4796 return -1;
4797 return n / UNITS_PER_WORD;
4800 case COMPLEX_TYPE:
4801 return 2;
4803 case VECTOR_TYPE:
4804 return TYPE_VECTOR_SUBPARTS (type);
4806 case INTEGER_TYPE:
4807 case REAL_TYPE:
4808 case ENUMERAL_TYPE:
4809 case BOOLEAN_TYPE:
4810 case POINTER_TYPE:
4811 case OFFSET_TYPE:
4812 case REFERENCE_TYPE:
4813 return 1;
4815 case VOID_TYPE:
4816 case METHOD_TYPE:
4817 case FUNCTION_TYPE:
4818 case LANG_TYPE:
4819 default:
4820 gcc_unreachable ();
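/* Illustrative example, not part of expr.c: count_type_elements would count
   six scalars in the type below, one for the int, three for the array
   elements, and two for the complex member.  The type name is invented for
   this example.  */

struct scalar_count_example
{
  int a;              /* 1 scalar                  */
  double b[3];        /* 3 scalars                 */
  _Complex float c;   /* COMPLEX_TYPE counts as 2  */
};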
4824 /* Return 1 if EXP contains mostly (3/4) zeros. */
4826 static int
4827 mostly_zeros_p (tree exp)
4829 if (TREE_CODE (exp) == CONSTRUCTOR)
4832 HOST_WIDE_INT nz_elts, count, elts;
4833 bool must_clear;
4835 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4836 if (must_clear)
4837 return 1;
4839 elts = count_type_elements (TREE_TYPE (exp), false);
4841 return nz_elts < elts / 4;
4844 return initializer_zerop (exp);
4847 /* Return 1 if EXP contains all zeros. */
4849 static int
4850 all_zeros_p (tree exp)
4852 if (TREE_CODE (exp) == CONSTRUCTOR)
4855 HOST_WIDE_INT nz_elts, count;
4856 bool must_clear;
4858 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4859 return nz_elts == 0;
4862 return initializer_zerop (exp);
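/* Illustrative example, not part of expr.c: with the constructors below,
   mostly_zeros_p holds for both (one nonzero element out of eight is under
   the 3/4-zero threshold), while all_zeros_p holds only for the second.
   The variable names are invented; the [7] designator is C99/GNU syntax.  */

int mostly_zero[8] = { [7] = 1 };
int all_zero[8]    = { 0 };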
4865 /* Helper function for store_constructor.
4866 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4867 TYPE is the type of the CONSTRUCTOR, not the element type.
4868 CLEARED is as for store_constructor.
4869 ALIAS_SET is the alias set to use for any stores.
4871 This provides a recursive shortcut back to store_constructor when it isn't
4872 necessary to go through store_field. This is so that we can pass through
4873 the cleared field to let store_constructor know that we may not have to
4874 clear a substructure if the outer structure has already been cleared. */
4876 static void
4877 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4878 HOST_WIDE_INT bitpos, enum machine_mode mode,
4879 tree exp, tree type, int cleared, int alias_set)
4881 if (TREE_CODE (exp) == CONSTRUCTOR
4882 /* We can only call store_constructor recursively if the size and
4883 bit position are on a byte boundary. */
4884 && bitpos % BITS_PER_UNIT == 0
4885 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4886 /* If we have a nonzero bitpos for a register target, then we just
4887 let store_field do the bitfield handling. This is unlikely to
4888 generate unnecessary clear instructions anyways. */
4889 && (bitpos == 0 || MEM_P (target)))
4891 if (MEM_P (target))
4892 target
4893 = adjust_address (target,
4894 GET_MODE (target) == BLKmode
4895 || 0 != (bitpos
4896 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4897 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4900 /* Update the alias set, if required. */
4901 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4902 && MEM_ALIAS_SET (target) != 0)
4904 target = copy_rtx (target);
4905 set_mem_alias_set (target, alias_set);
4908 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4910 else
4911 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4914 /* Store the value of constructor EXP into the rtx TARGET.
4915 TARGET is either a REG or a MEM; we know it cannot conflict, since
4916 safe_from_p has been called.
4917 CLEARED is true if TARGET is known to have been zero'd.
4918 SIZE is the number of bytes of TARGET we are allowed to modify: this
4919 may not be the same as the size of EXP if we are assigning to a field
4920 which has been packed to exclude padding bits. */
4922 static void
4923 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4925 tree type = TREE_TYPE (exp);
4926 #ifdef WORD_REGISTER_OPERATIONS
4927 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4928 #endif
4930 switch (TREE_CODE (type))
4932 case RECORD_TYPE:
4933 case UNION_TYPE:
4934 case QUAL_UNION_TYPE:
4936 unsigned HOST_WIDE_INT idx;
4937 tree field, value;
4939 /* If size is zero or the target is already cleared, do nothing. */
4940 if (size == 0 || cleared)
4941 cleared = 1;
4942 /* We either clear the aggregate or indicate the value is dead. */
4943 else if ((TREE_CODE (type) == UNION_TYPE
4944 || TREE_CODE (type) == QUAL_UNION_TYPE)
4945 && ! CONSTRUCTOR_ELTS (exp))
4946 /* If the constructor is empty, clear the union. */
4948 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4949 cleared = 1;
4952 /* If we are building a static constructor into a register,
4953 set the initial value as zero so we can fold the value into
4954 a constant. But if more than one register is involved,
4955 this probably loses. */
4956 else if (REG_P (target) && TREE_STATIC (exp)
4957 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4959 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4960 cleared = 1;
4963 /* If the constructor has fewer fields than the structure or
4964 if we are initializing the structure to mostly zeros, clear
4965 the whole structure first. Don't do this if TARGET is a
4966 register whose mode size isn't equal to SIZE since
4967 clear_storage can't handle this case. */
4968 else if (size > 0
4969 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4970 != fields_length (type))
4971 || mostly_zeros_p (exp))
4972 && (!REG_P (target)
4973 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4974 == size)))
4976 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4977 cleared = 1;
4980 if (! cleared)
4981 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4983 /* Store each element of the constructor into the
4984 corresponding field of TARGET. */
4985 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4987 enum machine_mode mode;
4988 HOST_WIDE_INT bitsize;
4989 HOST_WIDE_INT bitpos = 0;
4990 tree offset;
4991 rtx to_rtx = target;
4993 /* Just ignore missing fields. We cleared the whole
4994 structure, above, if any fields are missing. */
4995 if (field == 0)
4996 continue;
4998 if (cleared && initializer_zerop (value))
4999 continue;
5001 if (host_integerp (DECL_SIZE (field), 1))
5002 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5003 else
5004 bitsize = -1;
5006 mode = DECL_MODE (field);
5007 if (DECL_BIT_FIELD (field))
5008 mode = VOIDmode;
5010 offset = DECL_FIELD_OFFSET (field);
5011 if (host_integerp (offset, 0)
5012 && host_integerp (bit_position (field), 0))
5014 bitpos = int_bit_position (field);
5015 offset = 0;
5017 else
5018 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5020 if (offset)
5022 rtx offset_rtx;
5024 offset
5025 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5026 make_tree (TREE_TYPE (exp),
5027 target));
5029 offset_rtx = expand_normal (offset);
5030 gcc_assert (MEM_P (to_rtx));
5032 #ifdef POINTERS_EXTEND_UNSIGNED
5033 if (GET_MODE (offset_rtx) != Pmode)
5034 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5035 #else
5036 if (GET_MODE (offset_rtx) != ptr_mode)
5037 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5038 #endif
5040 to_rtx = offset_address (to_rtx, offset_rtx,
5041 highest_pow2_factor (offset));
5044 #ifdef WORD_REGISTER_OPERATIONS
5045 /* If this initializes a field that is smaller than a
5046 word, at the start of a word, try to widen it to a full
5047 word. This special case allows us to output C++ member
5048 function initializations in a form that the optimizers
5049 can understand. */
5050 if (REG_P (target)
5051 && bitsize < BITS_PER_WORD
5052 && bitpos % BITS_PER_WORD == 0
5053 && GET_MODE_CLASS (mode) == MODE_INT
5054 && TREE_CODE (value) == INTEGER_CST
5055 && exp_size >= 0
5056 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5058 tree type = TREE_TYPE (value);
5060 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5062 type = lang_hooks.types.type_for_size
5063 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5064 value = fold_convert (type, value);
5067 if (BYTES_BIG_ENDIAN)
5068 value
5069 = fold_build2 (LSHIFT_EXPR, type, value,
5070 build_int_cst (type,
5071 BITS_PER_WORD - bitsize));
5072 bitsize = BITS_PER_WORD;
5073 mode = word_mode;
5075 #endif
5077 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5078 && DECL_NONADDRESSABLE_P (field))
5080 to_rtx = copy_rtx (to_rtx);
5081 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5084 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5085 value, type, cleared,
5086 get_alias_set (TREE_TYPE (field)));
5088 break;
5090 case ARRAY_TYPE:
5092 tree value, index;
5093 unsigned HOST_WIDE_INT i;
5094 int need_to_clear;
5095 tree domain;
5096 tree elttype = TREE_TYPE (type);
5097 int const_bounds_p;
5098 HOST_WIDE_INT minelt = 0;
5099 HOST_WIDE_INT maxelt = 0;
5101 domain = TYPE_DOMAIN (type);
5102 const_bounds_p = (TYPE_MIN_VALUE (domain)
5103 && TYPE_MAX_VALUE (domain)
5104 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5105 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5107 /* If we have constant bounds for the range of the type, get them. */
5108 if (const_bounds_p)
5110 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5111 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5114 /* If the constructor has fewer elements than the array, clear
5115 the whole array first. Similarly if this is static
5116 constructor of a non-BLKmode object. */
5117 if (cleared)
5118 need_to_clear = 0;
5119 else if (REG_P (target) && TREE_STATIC (exp))
5120 need_to_clear = 1;
5121 else
5123 unsigned HOST_WIDE_INT idx;
5124 tree index, value;
5125 HOST_WIDE_INT count = 0, zero_count = 0;
5126 need_to_clear = ! const_bounds_p;
5128 /* This loop is a more accurate version of the loop in
5129 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5130 is also needed to check for missing elements. */
5131 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5133 HOST_WIDE_INT this_node_count;
5135 if (need_to_clear)
5136 break;
5138 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5140 tree lo_index = TREE_OPERAND (index, 0);
5141 tree hi_index = TREE_OPERAND (index, 1);
5143 if (! host_integerp (lo_index, 1)
5144 || ! host_integerp (hi_index, 1))
5146 need_to_clear = 1;
5147 break;
5150 this_node_count = (tree_low_cst (hi_index, 1)
5151 - tree_low_cst (lo_index, 1) + 1);
5153 else
5154 this_node_count = 1;
5156 count += this_node_count;
5157 if (mostly_zeros_p (value))
5158 zero_count += this_node_count;
5161 /* Clear the entire array first if there are any missing
5162 elements, or if the incidence of zero elements is >=
5163 75%. */
5164 if (! need_to_clear
5165 && (count < maxelt - minelt + 1
5166 || 4 * zero_count >= 3 * count))
5167 need_to_clear = 1;
5170 if (need_to_clear && size > 0)
5172 if (REG_P (target))
5173 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5174 else
5175 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5176 cleared = 1;
5179 if (!cleared && REG_P (target))
5180 /* Inform later passes that the old value is dead. */
5181 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5183 /* Store each element of the constructor into the
5184 corresponding element of TARGET, determined by counting the
5185 elements. */
5186 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5188 enum machine_mode mode;
5189 HOST_WIDE_INT bitsize;
5190 HOST_WIDE_INT bitpos;
5191 int unsignedp;
5192 rtx xtarget = target;
5194 if (cleared && initializer_zerop (value))
5195 continue;
5197 unsignedp = TYPE_UNSIGNED (elttype);
5198 mode = TYPE_MODE (elttype);
5199 if (mode == BLKmode)
5200 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5201 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5202 : -1);
5203 else
5204 bitsize = GET_MODE_BITSIZE (mode);
5206 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5208 tree lo_index = TREE_OPERAND (index, 0);
5209 tree hi_index = TREE_OPERAND (index, 1);
5210 rtx index_r, pos_rtx;
5211 HOST_WIDE_INT lo, hi, count;
5212 tree position;
5214 /* If the range is constant and "small", unroll the loop. */
5215 if (const_bounds_p
5216 && host_integerp (lo_index, 0)
5217 && host_integerp (hi_index, 0)
5218 && (lo = tree_low_cst (lo_index, 0),
5219 hi = tree_low_cst (hi_index, 0),
5220 count = hi - lo + 1,
5221 (!MEM_P (target)
5222 || count <= 2
5223 || (host_integerp (TYPE_SIZE (elttype), 1)
5224 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5225 <= 40 * 8)))))
5227 lo -= minelt; hi -= minelt;
5228 for (; lo <= hi; lo++)
5230 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5232 if (MEM_P (target)
5233 && !MEM_KEEP_ALIAS_SET_P (target)
5234 && TREE_CODE (type) == ARRAY_TYPE
5235 && TYPE_NONALIASED_COMPONENT (type))
5237 target = copy_rtx (target);
5238 MEM_KEEP_ALIAS_SET_P (target) = 1;
5241 store_constructor_field
5242 (target, bitsize, bitpos, mode, value, type, cleared,
5243 get_alias_set (elttype));
5246 else
5248 rtx loop_start = gen_label_rtx ();
5249 rtx loop_end = gen_label_rtx ();
5250 tree exit_cond;
5252 expand_normal (hi_index);
5253 unsignedp = TYPE_UNSIGNED (domain);
5255 index = build_decl (VAR_DECL, NULL_TREE, domain);
5257 index_r
5258 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5259 &unsignedp, 0));
5260 SET_DECL_RTL (index, index_r);
5261 store_expr (lo_index, index_r, 0);
5263 /* Build the head of the loop. */
5264 do_pending_stack_adjust ();
5265 emit_label (loop_start);
5267 /* Assign value to element index. */
5268 position =
5269 fold_convert (ssizetype,
5270 fold_build2 (MINUS_EXPR,
5271 TREE_TYPE (index),
5272 index,
5273 TYPE_MIN_VALUE (domain)));
5275 position =
5276 size_binop (MULT_EXPR, position,
5277 fold_convert (ssizetype,
5278 TYPE_SIZE_UNIT (elttype)));
5280 pos_rtx = expand_normal (position);
5281 xtarget = offset_address (target, pos_rtx,
5282 highest_pow2_factor (position));
5283 xtarget = adjust_address (xtarget, mode, 0);
5284 if (TREE_CODE (value) == CONSTRUCTOR)
5285 store_constructor (value, xtarget, cleared,
5286 bitsize / BITS_PER_UNIT);
5287 else
5288 store_expr (value, xtarget, 0);
5290 /* Generate a conditional jump to exit the loop. */
5291 exit_cond = build2 (LT_EXPR, integer_type_node,
5292 index, hi_index);
5293 jumpif (exit_cond, loop_end);
5295 /* Update the loop counter, and jump to the head of
5296 the loop. */
5297 expand_assignment (index,
5298 build2 (PLUS_EXPR, TREE_TYPE (index),
5299 index, integer_one_node));
5301 emit_jump (loop_start);
5303 /* Build the end of the loop. */
5304 emit_label (loop_end);
5307 else if ((index != 0 && ! host_integerp (index, 0))
5308 || ! host_integerp (TYPE_SIZE (elttype), 1))
5310 tree position;
5312 if (index == 0)
5313 index = ssize_int (1);
5315 if (minelt)
5316 index = fold_convert (ssizetype,
5317 fold_build2 (MINUS_EXPR,
5318 TREE_TYPE (index),
5319 index,
5320 TYPE_MIN_VALUE (domain)));
5322 position =
5323 size_binop (MULT_EXPR, index,
5324 fold_convert (ssizetype,
5325 TYPE_SIZE_UNIT (elttype)));
5326 xtarget = offset_address (target,
5327 expand_normal (position),
5328 highest_pow2_factor (position));
5329 xtarget = adjust_address (xtarget, mode, 0);
5330 store_expr (value, xtarget, 0);
5332 else
5334 if (index != 0)
5335 bitpos = ((tree_low_cst (index, 0) - minelt)
5336 * tree_low_cst (TYPE_SIZE (elttype), 1));
5337 else
5338 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5340 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5341 && TREE_CODE (type) == ARRAY_TYPE
5342 && TYPE_NONALIASED_COMPONENT (type))
5344 target = copy_rtx (target);
5345 MEM_KEEP_ALIAS_SET_P (target) = 1;
5347 store_constructor_field (target, bitsize, bitpos, mode, value,
5348 type, cleared, get_alias_set (elttype));
5351 break;
5354 case VECTOR_TYPE:
5356 unsigned HOST_WIDE_INT idx;
5357 constructor_elt *ce;
5358 int i;
5359 int need_to_clear;
5360 int icode = 0;
5361 tree elttype = TREE_TYPE (type);
5362 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5363 enum machine_mode eltmode = TYPE_MODE (elttype);
5364 HOST_WIDE_INT bitsize;
5365 HOST_WIDE_INT bitpos;
5366 rtvec vector = NULL;
5367 unsigned n_elts;
5369 gcc_assert (eltmode != BLKmode);
5371 n_elts = TYPE_VECTOR_SUBPARTS (type);
5372 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5374 enum machine_mode mode = GET_MODE (target);
5376 icode = (int) vec_init_optab->handlers[mode].insn_code;
5377 if (icode != CODE_FOR_nothing)
5379 unsigned int i;
5381 vector = rtvec_alloc (n_elts);
5382 for (i = 0; i < n_elts; i++)
5383 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5387 /* If the constructor has fewer elements than the vector,
5388 clear the whole array first. Similarly if this is static
5389 constructor of a non-BLKmode object. */
5390 if (cleared)
5391 need_to_clear = 0;
5392 else if (REG_P (target) && TREE_STATIC (exp))
5393 need_to_clear = 1;
5394 else
5396 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5397 tree value;
5399 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5401 int n_elts_here = tree_low_cst
5402 (int_const_binop (TRUNC_DIV_EXPR,
5403 TYPE_SIZE (TREE_TYPE (value)),
5404 TYPE_SIZE (elttype), 0), 1);
5406 count += n_elts_here;
5407 if (mostly_zeros_p (value))
5408 zero_count += n_elts_here;
5411 /* Clear the entire vector first if there are any missing elements,
5412 or if the incidence of zero elements is >= 75%. */
5413 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5416 if (need_to_clear && size > 0 && !vector)
5418 if (REG_P (target))
5419 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5420 else
5421 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5422 cleared = 1;
5425 /* Inform later passes that the old value is dead. */
5426 if (!cleared && !vector && REG_P (target))
5427 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5429 /* Store each element of the constructor into the corresponding
5430 element of TARGET, determined by counting the elements. */
5431 for (idx = 0, i = 0;
5432 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5433 idx++, i += bitsize / elt_size)
5435 HOST_WIDE_INT eltpos;
5436 tree value = ce->value;
5438 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5439 if (cleared && initializer_zerop (value))
5440 continue;
5442 if (ce->index)
5443 eltpos = tree_low_cst (ce->index, 1);
5444 else
5445 eltpos = i;
5447 if (vector)
5449 /* Vector CONSTRUCTORs should only be built from smaller
5450 vectors in the case of BLKmode vectors. */
5451 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5452 RTVEC_ELT (vector, eltpos)
5453 = expand_normal (value);
5455 else
5457 enum machine_mode value_mode =
5458 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5459 ? TYPE_MODE (TREE_TYPE (value))
5460 : eltmode;
5461 bitpos = eltpos * elt_size;
5462 store_constructor_field (target, bitsize, bitpos,
5463 value_mode, value, type,
5464 cleared, get_alias_set (elttype));
5468 if (vector)
5469 emit_insn (GEN_FCN (icode)
5470 (target,
5471 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5472 break;
5475 default:
5476 gcc_unreachable ();
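/* Illustrative example, not part of expr.c: automatic-array constructors of
   the kinds the code above is written to handle (whether this exact source
   reaches store_constructor depends on how the gimplifier hands it over).
   The first has fewer elements than the array, so the whole array is cleared
   before the individual stores; the second uses the GNU range designator
   that corresponds to the RANGE_EXPR cases above.  Names are invented.  */

extern void use_arrays (int *, int *);

void
constructor_example (void)
{
  int sparse[100] = { [2] = 5, [40] = 7 };
  int ranged[32] = { [0 ... 7] = 1 };

  use_arrays (sparse, ranged);
}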
5480 /* Store the value of EXP (an expression tree)
5481 into a subfield of TARGET which has mode MODE and occupies
5482 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5483 If MODE is VOIDmode, it means that we are storing into a bit-field.
5485 Always return const0_rtx unless we have something particular to
5486 return.
5488 TYPE is the type of the underlying object.
5490 ALIAS_SET is the alias set for the destination. This value will
5491 (in general) be different from that for TARGET, since TARGET is a
5492 reference to the containing structure. */
5494 static rtx
5495 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5496 enum machine_mode mode, tree exp, tree type, int alias_set)
5498 HOST_WIDE_INT width_mask = 0;
5500 if (TREE_CODE (exp) == ERROR_MARK)
5501 return const0_rtx;
5503 /* If we have nothing to store, do nothing unless the expression has
5504 side-effects. */
5505 if (bitsize == 0)
5506 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5507 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5508 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5510 /* If we are storing into an unaligned field of an aligned union that is
5511 in a register, we may have the mode of TARGET being an integer mode but
5512 MODE == BLKmode. In that case, get an aligned object whose size and
5513 alignment are the same as TARGET and store TARGET into it (we can avoid
5514 the store if the field being stored is the entire width of TARGET). Then
5515 call ourselves recursively to store the field into a BLKmode version of
5516 that object. Finally, load from the object into TARGET. This is not
5517 very efficient in general, but should only be slightly more expensive
5518 than the otherwise-required unaligned accesses. Perhaps this can be
5519 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5520 twice, once with emit_move_insn and once via store_field. */
5522 if (mode == BLKmode
5523 && (REG_P (target) || GET_CODE (target) == SUBREG))
5525 rtx object = assign_temp (type, 0, 1, 1);
5526 rtx blk_object = adjust_address (object, BLKmode, 0);
5528 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5529 emit_move_insn (object, target);
5531 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5533 emit_move_insn (target, object);
5535 /* We want to return the BLKmode version of the data. */
5536 return blk_object;
5539 if (GET_CODE (target) == CONCAT)
5541 /* We're storing into a struct containing a single __complex. */
5543 gcc_assert (!bitpos);
5544 return store_expr (exp, target, 0);
5547 /* If the structure is in a register or if the component
5548 is a bit field, we cannot use addressing to access it.
5549 Use bit-field techniques or SUBREG to store in it. */
5551 if (mode == VOIDmode
5552 || (mode != BLKmode && ! direct_store[(int) mode]
5553 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5554 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5555 || REG_P (target)
5556 || GET_CODE (target) == SUBREG
5557 /* If the field isn't aligned enough to store as an ordinary memref,
5558 store it as a bit field. */
5559 || (mode != BLKmode
5560 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5561 || bitpos % GET_MODE_ALIGNMENT (mode))
5562 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5563 || (bitpos % BITS_PER_UNIT != 0)))
5564 /* If the RHS and field are a constant size and the size of the
5565 RHS isn't the same size as the bitfield, we must use bitfield
5566 operations. */
5567 || (bitsize >= 0
5568 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5569 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5571 rtx temp;
5573 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5574 implies a mask operation. If the precision is the same size as
5575 the field we're storing into, that mask is redundant. This is
5576 particularly common with bit field assignments generated by the
5577 C front end. */
5578 if (TREE_CODE (exp) == NOP_EXPR)
5580 tree type = TREE_TYPE (exp);
5581 if (INTEGRAL_TYPE_P (type)
5582 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5583 && bitsize == TYPE_PRECISION (type))
5585 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5586 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5587 exp = TREE_OPERAND (exp, 0);
5591 temp = expand_normal (exp);
5593 /* If BITSIZE is narrower than the size of the type of EXP
5594 we will be narrowing TEMP. Normally, what's wanted are the
5595 low-order bits. However, if EXP's type is a record and this is
5596 a big-endian machine, we want the upper BITSIZE bits. */
5597 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5598 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5599 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5600 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5601 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5602 - bitsize),
5603 NULL_RTX, 1);
5605 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5606 MODE. */
5607 if (mode != VOIDmode && mode != BLKmode
5608 && mode != TYPE_MODE (TREE_TYPE (exp)))
5609 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5611 /* If the modes of TARGET and TEMP are both BLKmode, both
5612 must be in memory and BITPOS must be aligned on a byte
5613 boundary. If so, we simply do a block copy. */
5614 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5616 gcc_assert (MEM_P (target) && MEM_P (temp)
5617 && !(bitpos % BITS_PER_UNIT));
5619 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5620 emit_block_move (target, temp,
5621 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5622 / BITS_PER_UNIT),
5623 BLOCK_OP_NORMAL);
5625 return const0_rtx;
5628 /* Store the value in the bitfield. */
5629 store_bit_field (target, bitsize, bitpos, mode, temp);
5631 return const0_rtx;
5633 else
5635 /* Now build a reference to just the desired component. */
5636 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5638 if (to_rtx == target)
5639 to_rtx = copy_rtx (to_rtx);
5641 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5642 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5643 set_mem_alias_set (to_rtx, alias_set);
5645 return store_expr (exp, to_rtx, 0);
5649 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5650 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5651 codes and find the ultimate containing object, which we return.
5653 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5654 bit position, and *PUNSIGNEDP to the signedness of the field.
5655 If the position of the field is variable, we store a tree
5656 giving the variable offset (in units) in *POFFSET.
5657 This offset is in addition to the bit position.
5658 If the position is not variable, we store 0 in *POFFSET.
5660 If any of the extraction expressions is volatile,
5661 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5663 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5664 is a mode that can be used to access the field. In that case, *PBITSIZE
5665 is redundant.
5667 If the field describes a variable-sized object, *PMODE is set to
5668 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5669 this case, but the address of the object can be found.
5671 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5672 look through nodes that serve as markers of a greater alignment than
5673 the one that can be deduced from the expression. These nodes make it
5674 possible for front-ends to prevent temporaries from being created by
5675 the middle-end on alignment considerations. For that purpose, the
5676 normal operating mode at high-level is to always pass FALSE so that
5677 the ultimate containing object is really returned; moreover, the
5678 associated predicate handled_component_p will always return TRUE
5679 on these nodes, thus indicating that they are essentially handled
5680 by get_inner_reference. TRUE should only be passed when the caller
5681 is scanning the expression in order to build another representation
5682 and specifically knows how to handle these nodes; as such, this is
5683 the normal operating mode in the RTL expanders. */
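/* As a rough example, on a typical target with 32-bit int and 16-bit
   short,

       struct S { int a; short b; } s;

   applied to the COMPONENT_REF s.b this returns the reference to S
   with *PBITSIZE == 16, *PBITPOS == 32, *POFFSET == 0 and
   *PMODE == HImode; for an ARRAY_REF with a variable index, the
   variable part of the displacement is returned in *POFFSET
   instead.  */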
5685 tree
5686 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5687 HOST_WIDE_INT *pbitpos, tree *poffset,
5688 enum machine_mode *pmode, int *punsignedp,
5689 int *pvolatilep, bool keep_aligning)
5691 tree size_tree = 0;
5692 enum machine_mode mode = VOIDmode;
5693 tree offset = size_zero_node;
5694 tree bit_offset = bitsize_zero_node;
5695 tree tem;
5697 /* First get the mode, signedness, and size. We do this from just the
5698 outermost expression. */
5699 if (TREE_CODE (exp) == COMPONENT_REF)
5701 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5702 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5703 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5705 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5707 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5709 size_tree = TREE_OPERAND (exp, 1);
5710 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5712 /* For vector types, with the correct size of access, use the mode of
5713 the inner type. */
5714 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5715 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5716 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5717 mode = TYPE_MODE (TREE_TYPE (exp));
5719 else
5721 mode = TYPE_MODE (TREE_TYPE (exp));
5722 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5724 if (mode == BLKmode)
5725 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5726 else
5727 *pbitsize = GET_MODE_BITSIZE (mode);
5730 if (size_tree != 0)
5732 if (! host_integerp (size_tree, 1))
5733 mode = BLKmode, *pbitsize = -1;
5734 else
5735 *pbitsize = tree_low_cst (size_tree, 1);
5738 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5739 and find the ultimate containing object. */
5740 while (1)
5742 switch (TREE_CODE (exp))
5744 case BIT_FIELD_REF:
5745 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5746 TREE_OPERAND (exp, 2));
5747 break;
5749 case COMPONENT_REF:
5751 tree field = TREE_OPERAND (exp, 1);
5752 tree this_offset = component_ref_field_offset (exp);
5754 /* If this field hasn't been filled in yet, don't go past it.
5755 This should only happen when folding expressions made during
5756 type construction. */
5757 if (this_offset == 0)
5758 break;
5760 offset = size_binop (PLUS_EXPR, offset, this_offset);
5761 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5762 DECL_FIELD_BIT_OFFSET (field));
5764 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5766 break;
5768 case ARRAY_REF:
5769 case ARRAY_RANGE_REF:
5771 tree index = TREE_OPERAND (exp, 1);
5772 tree low_bound = array_ref_low_bound (exp);
5773 tree unit_size = array_ref_element_size (exp);
5775 /* We assume all arrays have sizes that are a multiple of a byte.
5776 First subtract the lower bound, if any, in the type of the
5777 index, then convert to sizetype and multiply by the size of
5778 the array element. */
5779 if (! integer_zerop (low_bound))
5780 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5781 index, low_bound);
5783 offset = size_binop (PLUS_EXPR, offset,
5784 size_binop (MULT_EXPR,
5785 fold_convert (sizetype, index),
5786 unit_size));
5788 break;
5790 case REALPART_EXPR:
5791 break;
5793 case IMAGPART_EXPR:
5794 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5795 bitsize_int (*pbitsize));
5796 break;
5798 case VIEW_CONVERT_EXPR:
5799 if (keep_aligning && STRICT_ALIGNMENT
5800 && (TYPE_ALIGN (TREE_TYPE (exp))
5801 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5802 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5803 < BIGGEST_ALIGNMENT)
5804 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5805 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5806 goto done;
5807 break;
5809 default:
5810 goto done;
5813 /* If any reference in the chain is volatile, the effect is volatile. */
5814 if (TREE_THIS_VOLATILE (exp))
5815 *pvolatilep = 1;
5817 exp = TREE_OPERAND (exp, 0);
5819 done:
5821 /* If OFFSET is constant, see if we can return the whole thing as a
5822 constant bit position. Otherwise, split it up. */
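/* E.g. a byte OFFSET of 6 together with a BIT_OFFSET of 4 folds into a
   single constant *PBITPOS of 6 * BITS_PER_UNIT + 4 (52 with 8-bit
   units), leaving *POFFSET zero.  */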
5823 if (host_integerp (offset, 0)
5824 && 0 != (tem = size_binop (MULT_EXPR,
5825 fold_convert (bitsizetype, offset),
5826 bitsize_unit_node))
5827 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5828 && host_integerp (tem, 0))
5829 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5830 else
5831 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5833 *pmode = mode;
5834 return exp;
5837 /* Return a tree of sizetype representing the size, in bytes, of the element
5838 of EXP, an ARRAY_REF. */
5840 tree
5841 array_ref_element_size (tree exp)
5843 tree aligned_size = TREE_OPERAND (exp, 3);
5844 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5846 /* If a size was specified in the ARRAY_REF, it's the size measured
5847 in alignment units of the element type. So multiply by that value. */
5848 if (aligned_size)
5850 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5851 sizetype from another type of the same width and signedness. */
5852 if (TREE_TYPE (aligned_size) != sizetype)
5853 aligned_size = fold_convert (sizetype, aligned_size);
5854 return size_binop (MULT_EXPR, aligned_size,
5855 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5858 /* Otherwise, take the size from that of the element type. Substitute
5859 any PLACEHOLDER_EXPR that we have. */
5860 else
5861 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
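/* For the aligned-size case above, e.g. an ARRAY_REF recording an
   aligned size of 3 for an element type with TYPE_ALIGN_UNIT == 4
   yields an element size of 3 * 4 == 12 bytes.  */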
5864 /* Return a tree representing the lower bound of the array mentioned in
5865 EXP, an ARRAY_REF. */
5867 tree
5868 array_ref_low_bound (tree exp)
5870 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5872 /* If a lower bound is specified in EXP, use it. */
5873 if (TREE_OPERAND (exp, 2))
5874 return TREE_OPERAND (exp, 2);
5876 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5877 substituting for a PLACEHOLDER_EXPR as needed. */
5878 if (domain_type && TYPE_MIN_VALUE (domain_type))
5879 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5881 /* Otherwise, return a zero of the appropriate type. */
5882 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5885 /* Return a tree representing the upper bound of the array mentioned in
5886 EXP, an ARRAY_REF. */
5888 tree
5889 array_ref_up_bound (tree exp)
5891 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5893 /* If there is a domain type and it has an upper bound, use it, substituting
5894 for a PLACEHOLDER_EXPR as needed. */
5895 if (domain_type && TYPE_MAX_VALUE (domain_type))
5896 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5898 /* Otherwise fail. */
5899 return NULL_TREE;
5902 /* Return a tree representing the offset, in bytes, of the field referenced
5903 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5905 tree
5906 component_ref_field_offset (tree exp)
5908 tree aligned_offset = TREE_OPERAND (exp, 2);
5909 tree field = TREE_OPERAND (exp, 1);
5911 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5912 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5913 value. */
5914 if (aligned_offset)
5916 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5917 sizetype from another type of the same width and signedness. */
5918 if (TREE_TYPE (aligned_offset) != sizetype)
5919 aligned_offset = fold_convert (sizetype, aligned_offset);
5920 return size_binop (MULT_EXPR, aligned_offset,
5921 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5924 /* Otherwise, take the offset from that of the field. Substitute
5925 any PLACEHOLDER_EXPR that we have. */
5926 else
5927 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5930 /* Return 1 if T is an expression that get_inner_reference handles. */
5932 int
5933 handled_component_p (tree t)
5935 switch (TREE_CODE (t))
5937 case BIT_FIELD_REF:
5938 case COMPONENT_REF:
5939 case ARRAY_REF:
5940 case ARRAY_RANGE_REF:
5941 case VIEW_CONVERT_EXPR:
5942 case REALPART_EXPR:
5943 case IMAGPART_EXPR:
5944 return 1;
5946 default:
5947 return 0;
5951 /* Given an rtx VALUE that may contain additions and multiplications, return
5952 an equivalent value that just refers to a register, memory, or constant.
5953 This is done by generating instructions to perform the arithmetic and
5954 returning a pseudo-register containing the value.
5956 The returned value may be a REG, SUBREG, MEM or constant. */
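/* For instance, handed something like
   (plus (reg 100) (mult (reg 101) (const_int 4))), this emits the
   multiply and the add and returns the pseudo holding the result, or
   TARGET itself when that is usable.  */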
5958 rtx
5959 force_operand (rtx value, rtx target)
5961 rtx op1, op2;
5962 /* Use subtarget as the target for operand 0 of a binary operation. */
5963 rtx subtarget = get_subtarget (target);
5964 enum rtx_code code = GET_CODE (value);
5966 /* Check for subreg applied to an expression produced by loop optimizer. */
5967 if (code == SUBREG
5968 && !REG_P (SUBREG_REG (value))
5969 && !MEM_P (SUBREG_REG (value)))
5971 value = simplify_gen_subreg (GET_MODE (value),
5972 force_reg (GET_MODE (SUBREG_REG (value)),
5973 force_operand (SUBREG_REG (value),
5974 NULL_RTX)),
5975 GET_MODE (SUBREG_REG (value)),
5976 SUBREG_BYTE (value));
5977 code = GET_CODE (value);
5980 /* Check for a PIC address load. */
5981 if ((code == PLUS || code == MINUS)
5982 && XEXP (value, 0) == pic_offset_table_rtx
5983 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5984 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5985 || GET_CODE (XEXP (value, 1)) == CONST))
5987 if (!subtarget)
5988 subtarget = gen_reg_rtx (GET_MODE (value));
5989 emit_move_insn (subtarget, value);
5990 return subtarget;
5993 if (ARITHMETIC_P (value))
5995 op2 = XEXP (value, 1);
5996 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5997 subtarget = 0;
5998 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6000 code = PLUS;
6001 op2 = negate_rtx (GET_MODE (value), op2);
6004 /* Check for an addition with OP2 a constant integer and our first
6005 operand a PLUS of a virtual register and something else. In that
6006 case, we want to emit the sum of the virtual register and the
6007 constant first and then add the other value. This allows virtual
6008 register instantiation to simply modify the constant rather than
6009 creating another one around this addition. */
6010 if (code == PLUS && GET_CODE (op2) == CONST_INT
6011 && GET_CODE (XEXP (value, 0)) == PLUS
6012 && REG_P (XEXP (XEXP (value, 0), 0))
6013 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6014 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6016 rtx temp = expand_simple_binop (GET_MODE (value), code,
6017 XEXP (XEXP (value, 0), 0), op2,
6018 subtarget, 0, OPTAB_LIB_WIDEN);
6019 return expand_simple_binop (GET_MODE (value), code, temp,
6020 force_operand (XEXP (XEXP (value,
6021 0), 1), 0),
6022 target, 0, OPTAB_LIB_WIDEN);
6025 op1 = force_operand (XEXP (value, 0), subtarget);
6026 op2 = force_operand (op2, NULL_RTX);
6027 switch (code)
6029 case MULT:
6030 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6031 case DIV:
6032 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6033 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6034 target, 1, OPTAB_LIB_WIDEN);
6035 else
6036 return expand_divmod (0,
6037 FLOAT_MODE_P (GET_MODE (value))
6038 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6039 GET_MODE (value), op1, op2, target, 0);
6040 break;
6041 case MOD:
6042 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6043 target, 0);
6044 break;
6045 case UDIV:
6046 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6047 target, 1);
6048 break;
6049 case UMOD:
6050 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6051 target, 1);
6052 break;
6053 case ASHIFTRT:
6054 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6055 target, 0, OPTAB_LIB_WIDEN);
6056 break;
6057 default:
6058 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6059 target, 1, OPTAB_LIB_WIDEN);
6062 if (UNARY_P (value))
6064 if (!target)
6065 target = gen_reg_rtx (GET_MODE (value));
6066 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6067 switch (code)
6069 case ZERO_EXTEND:
6070 case SIGN_EXTEND:
6071 case TRUNCATE:
6072 case FLOAT_EXTEND:
6073 case FLOAT_TRUNCATE:
6074 convert_move (target, op1, code == ZERO_EXTEND);
6075 return target;
6077 case FIX:
6078 case UNSIGNED_FIX:
6079 expand_fix (target, op1, code == UNSIGNED_FIX);
6080 return target;
6082 case FLOAT:
6083 case UNSIGNED_FLOAT:
6084 expand_float (target, op1, code == UNSIGNED_FLOAT);
6085 return target;
6087 default:
6088 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6092 #ifdef INSN_SCHEDULING
6093 /* On machines that have insn scheduling, we want all memory references to be
6094 explicit, so we need to deal with such paradoxical SUBREGs. */
6095 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6096 && (GET_MODE_SIZE (GET_MODE (value))
6097 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6098 value
6099 = simplify_gen_subreg (GET_MODE (value),
6100 force_reg (GET_MODE (SUBREG_REG (value)),
6101 force_operand (SUBREG_REG (value),
6102 NULL_RTX)),
6103 GET_MODE (SUBREG_REG (value)),
6104 SUBREG_BYTE (value));
6105 #endif
6107 return value;
6110 /* Subroutine of expand_expr: return nonzero iff there is no way that
6111 EXP can reference X, which is being modified. TOP_P is nonzero if this
6112 call is going to be used to determine whether we need a temporary
6113 for EXP, as opposed to a recursive call to this function.
6115 It is always safe for this routine to return zero since it merely
6116 searches for optimization opportunities. */
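/* For instance, a caller that has already expanded one operand into
   pseudo X asks safe_from_p (X, <other operand>, 1) before reusing X
   as a target; a CALL_EXPR in the other operand makes X unsafe
   whenever X is a hard register or memory, since the call may clobber
   either.  */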
6118 static int
6119 safe_from_p (rtx x, tree exp, int top_p)
6121 rtx exp_rtl = 0;
6122 int i, nops;
6124 if (x == 0
6125 /* If EXP has varying size, we MUST use a target since we currently
6126 have no way of allocating temporaries of variable size
6127 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6128 So we assume here that something at a higher level has prevented a
6129 clash. This is somewhat bogus, but the best we can do. Only
6130 do this when X is BLKmode and when we are at the top level. */
6131 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6132 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6133 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6134 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6135 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6136 != INTEGER_CST)
6137 && GET_MODE (x) == BLKmode)
6138 /* If X is in the outgoing argument area, it is always safe. */
6139 || (MEM_P (x)
6140 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6141 || (GET_CODE (XEXP (x, 0)) == PLUS
6142 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6143 return 1;
6145 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6146 find the underlying pseudo. */
6147 if (GET_CODE (x) == SUBREG)
6149 x = SUBREG_REG (x);
6150 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6151 return 0;
6154 /* Now look at our tree code and possibly recurse. */
6155 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6157 case tcc_declaration:
6158 exp_rtl = DECL_RTL_IF_SET (exp);
6159 break;
6161 case tcc_constant:
6162 return 1;
6164 case tcc_exceptional:
6165 if (TREE_CODE (exp) == TREE_LIST)
6167 while (1)
6169 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6170 return 0;
6171 exp = TREE_CHAIN (exp);
6172 if (!exp)
6173 return 1;
6174 if (TREE_CODE (exp) != TREE_LIST)
6175 return safe_from_p (x, exp, 0);
6178 else if (TREE_CODE (exp) == CONSTRUCTOR)
6180 constructor_elt *ce;
6181 unsigned HOST_WIDE_INT idx;
6183 for (idx = 0;
6184 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6185 idx++)
6186 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6187 || !safe_from_p (x, ce->value, 0))
6188 return 0;
6189 return 1;
6191 else if (TREE_CODE (exp) == ERROR_MARK)
6192 return 1; /* An already-visited SAVE_EXPR? */
6193 else
6194 return 0;
6196 case tcc_statement:
6197 /* The only case we look at here is the DECL_INITIAL inside a
6198 DECL_EXPR. */
6199 return (TREE_CODE (exp) != DECL_EXPR
6200 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6201 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6202 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6204 case tcc_binary:
6205 case tcc_comparison:
6206 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6207 return 0;
6208 /* Fall through. */
6210 case tcc_unary:
6211 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6213 case tcc_expression:
6214 case tcc_reference:
6215 case tcc_vl_exp:
6216 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6217 the expression. If it is set, we conflict iff we are that rtx or
6218 both are in memory. Otherwise, we check all operands of the
6219 expression recursively. */
6221 switch (TREE_CODE (exp))
6223 case ADDR_EXPR:
6224 /* If the operand is static or we are static, we can't conflict.
6225 Likewise if we don't conflict with the operand at all. */
6226 if (staticp (TREE_OPERAND (exp, 0))
6227 || TREE_STATIC (exp)
6228 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6229 return 1;
6231 /* Otherwise, the only way this can conflict is if we are taking
6232 the address of a DECL whose address is part of X, which is
6233 very rare. */
6234 exp = TREE_OPERAND (exp, 0);
6235 if (DECL_P (exp))
6237 if (!DECL_RTL_SET_P (exp)
6238 || !MEM_P (DECL_RTL (exp)))
6239 return 0;
6240 else
6241 exp_rtl = XEXP (DECL_RTL (exp), 0);
6243 break;
6245 case MISALIGNED_INDIRECT_REF:
6246 case ALIGN_INDIRECT_REF:
6247 case INDIRECT_REF:
6248 if (MEM_P (x)
6249 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6250 get_alias_set (exp)))
6251 return 0;
6252 break;
6254 case CALL_EXPR:
6255 /* Assume that the call will clobber all hard registers and
6256 all of memory. */
6257 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6258 || MEM_P (x))
6259 return 0;
6260 break;
6262 case WITH_CLEANUP_EXPR:
6263 case CLEANUP_POINT_EXPR:
6264 /* Lowered by gimplify.c. */
6265 gcc_unreachable ();
6267 case SAVE_EXPR:
6268 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6270 default:
6271 break;
6274 /* If we have an rtx, we do not need to scan our operands. */
6275 if (exp_rtl)
6276 break;
6278 nops = TREE_OPERAND_LENGTH (exp);
6279 for (i = 0; i < nops; i++)
6280 if (TREE_OPERAND (exp, i) != 0
6281 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6282 return 0;
6284 /* If this is a language-specific tree code, it may require
6285 special handling. */
6286 if ((unsigned int) TREE_CODE (exp)
6287 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6288 && !lang_hooks.safe_from_p (x, exp))
6289 return 0;
6290 break;
6292 case tcc_type:
6293 /* Should never get a type here. */
6294 gcc_unreachable ();
6296 case tcc_gimple_stmt:
6297 gcc_unreachable ();
6300 /* If we have an rtl, find any enclosed object. Then see if we conflict
6301 with it. */
6302 if (exp_rtl)
6304 if (GET_CODE (exp_rtl) == SUBREG)
6306 exp_rtl = SUBREG_REG (exp_rtl);
6307 if (REG_P (exp_rtl)
6308 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6309 return 0;
6312 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6313 are memory and they conflict. */
6314 return ! (rtx_equal_p (x, exp_rtl)
6315 || (MEM_P (x) && MEM_P (exp_rtl)
6316 && true_dependence (exp_rtl, VOIDmode, x,
6317 rtx_addr_varies_p)));
6320 /* If we reach here, it is safe. */
6321 return 1;
6325 /* Return the highest power of two that EXP is known to be a multiple of.
6326 This is used in updating alignment of MEMs in array references. */
6328 unsigned HOST_WIDE_INT
6329 highest_pow2_factor (tree exp)
6331 unsigned HOST_WIDE_INT c0, c1;
6333 switch (TREE_CODE (exp))
6335 case INTEGER_CST:
6336 /* We can find the lowest bit that's a one. If the low
6337 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6338 We need to handle this case since we can find it in a COND_EXPR,
6339 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6340 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6341 later ICE. */
6342 if (TREE_OVERFLOW (exp))
6343 return BIGGEST_ALIGNMENT;
6344 else
6346 /* Note: tree_low_cst is intentionally not used here;
6347 we don't care about the upper bits. */
6348 c0 = TREE_INT_CST_LOW (exp);
6349 c0 &= -c0;
6350 return c0 ? c0 : BIGGEST_ALIGNMENT;
6352 break;
6354 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6355 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6356 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6357 return MIN (c0, c1);
6359 case MULT_EXPR:
6360 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6361 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6362 return c0 * c1;
6364 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6365 case CEIL_DIV_EXPR:
6366 if (integer_pow2p (TREE_OPERAND (exp, 1))
6367 && host_integerp (TREE_OPERAND (exp, 1), 1))
6369 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6370 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6371 return MAX (1, c0 / c1);
6373 break;
6375 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6376 case SAVE_EXPR:
6377 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6379 case COMPOUND_EXPR:
6380 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6382 case COND_EXPR:
6383 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6384 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6385 return MIN (c0, c1);
6387 default:
6388 break;
6391 return 1;
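/* A minimal standalone sketch of the INTEGER_CST case above, under a
   hypothetical helper name: the lowest set bit of C is the largest
   power of two dividing C, and zero is treated as maximally
   aligned.  */

static unsigned HOST_WIDE_INT
pow2_factor_of_constant_sketch (unsigned HOST_WIDE_INT c)
{
  /* C & -C isolates the lowest set bit of C.  */
  c &= -c;
  return c ? c : BIGGEST_ALIGNMENT;
}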
6394 /* Similar, except that the alignment requirements of TARGET are
6395 taken into account. Assume it is at least as aligned as its
6396 type, unless it is a COMPONENT_REF in which case the layout of
6397 the structure gives the alignment. */
6399 static unsigned HOST_WIDE_INT
6400 highest_pow2_factor_for_target (tree target, tree exp)
6402 unsigned HOST_WIDE_INT target_align, factor;
6404 factor = highest_pow2_factor (exp);
6405 if (TREE_CODE (target) == COMPONENT_REF)
6406 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6407 else
6408 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6409 return MAX (factor, target_align);
6412 /* Return &VAR expression for emulated thread local VAR. */
6414 static tree
6415 emutls_var_address (tree var)
6417 tree emuvar = emutls_decl (var);
6418 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6419 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6420 tree arglist = build_tree_list (NULL_TREE, arg);
6421 tree call = build_function_call_expr (fn, arglist);
6422 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
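/* So a source-level address-of such as

       __thread int t;
       ... &t ...

   is rewritten here, roughly, as

       (int *) __emutls_get_address (&<control variable for t>)

   where the control variable is the artificial object returned by
   emutls_decl.  */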
6425 /* Expands variable VAR. */
6427 void
6428 expand_var (tree var)
6430 if (DECL_EXTERNAL (var))
6431 return;
6433 if (TREE_STATIC (var))
6434 /* If this is an inlined copy of a static local variable,
6435 look up the original decl. */
6436 var = DECL_ORIGIN (var);
6438 if (TREE_STATIC (var)
6439 ? !TREE_ASM_WRITTEN (var)
6440 : !DECL_RTL_SET_P (var))
6442 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6443 /* Should be ignored. */;
6444 else if (lang_hooks.expand_decl (var))
6445 /* OK. */;
6446 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6447 expand_decl (var);
6448 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6449 rest_of_decl_compilation (var, 0, 0);
6450 else
6451 /* No expansion needed. */
6452 gcc_assert (TREE_CODE (var) == TYPE_DECL
6453 || TREE_CODE (var) == CONST_DECL
6454 || TREE_CODE (var) == FUNCTION_DECL
6455 || TREE_CODE (var) == LABEL_DECL);
6459 /* Subroutine of expand_expr. Expand the two operands of a binary
6460 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6461 The value may be stored in TARGET if TARGET is nonzero. The
6462 MODIFIER argument is as documented by expand_expr. */
6464 static void
6465 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6466 enum expand_modifier modifier)
6468 if (! safe_from_p (target, exp1, 1))
6469 target = 0;
6470 if (operand_equal_p (exp0, exp1, 0))
6472 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6473 *op1 = copy_rtx (*op0);
6475 else
6477 /* If we need to preserve evaluation order, copy exp0 into its own
6478 temporary variable so that it can't be clobbered by exp1. */
6479 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6480 exp0 = save_expr (exp0);
6481 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6482 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6487 /* Return a MEM that contains constant EXP. DEFER is as for
6488 output_constant_def and MODIFIER is as for expand_expr. */
6490 static rtx
6491 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6493 rtx mem;
6495 mem = output_constant_def (exp, defer);
6496 if (modifier != EXPAND_INITIALIZER)
6497 mem = use_anchored_address (mem);
6498 return mem;
6501 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6502 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6504 static rtx
6505 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6506 enum expand_modifier modifier)
6508 rtx result, subtarget;
6509 tree inner, offset;
6510 HOST_WIDE_INT bitsize, bitpos;
6511 int volatilep, unsignedp;
6512 enum machine_mode mode1;
6514 /* If we are taking the address of a constant and are at the top level,
6515 we have to use output_constant_def since we can't call force_const_mem
6516 at top level. */
6517 /* ??? This should be considered a front-end bug. We should not be
6518 generating ADDR_EXPR of something that isn't an LVALUE. The only
6519 exception here is STRING_CST. */
6520 if (TREE_CODE (exp) == CONSTRUCTOR
6521 || CONSTANT_CLASS_P (exp))
6522 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6524 /* Everything must be something allowed by is_gimple_addressable. */
6525 switch (TREE_CODE (exp))
6527 case INDIRECT_REF:
6528 /* This case will happen via recursion for &a->b. */
6529 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6531 case CONST_DECL:
6532 /* Recurse and make the output_constant_def clause above handle this. */
6533 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6534 tmode, modifier);
6536 case REALPART_EXPR:
6537 /* The real part of the complex number is always first, therefore
6538 the address is the same as the address of the parent object. */
6539 offset = 0;
6540 bitpos = 0;
6541 inner = TREE_OPERAND (exp, 0);
6542 break;
6544 case IMAGPART_EXPR:
6545 /* The imaginary part of the complex number is always second.
6546 The expression is therefore always offset by the size of the
6547 scalar type. */
6548 offset = 0;
6549 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6550 inner = TREE_OPERAND (exp, 0);
6551 break;
6553 case VAR_DECL:
6554 /* TLS emulation hook - replace __thread VAR's &VAR with
6555 __emutls_get_address (&_emutls.VAR). */
6556 if (! targetm.have_tls
6557 && TREE_CODE (exp) == VAR_DECL
6558 && DECL_THREAD_LOCAL_P (exp))
6560 exp = emutls_var_address (exp);
6561 return expand_expr (exp, target, tmode, modifier);
6563 /* Fall through. */
6565 default:
6566 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6567 expand_expr, as that can have various side effects; LABEL_DECLs for
6568 example, may not have their DECL_RTL set yet. Assume language
6569 specific tree nodes can be expanded in some interesting way. */
6570 if (DECL_P (exp)
6571 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6573 result = expand_expr (exp, target, tmode,
6574 modifier == EXPAND_INITIALIZER
6575 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6577 /* If the DECL isn't in memory, then the DECL wasn't properly
6578 marked TREE_ADDRESSABLE, which will be either a front-end
6579 or a tree optimizer bug. */
6580 gcc_assert (MEM_P (result));
6581 result = XEXP (result, 0);
6583 /* ??? Is this needed anymore? */
6584 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6586 assemble_external (exp);
6587 TREE_USED (exp) = 1;
6590 if (modifier != EXPAND_INITIALIZER
6591 && modifier != EXPAND_CONST_ADDRESS)
6592 result = force_operand (result, target);
6593 return result;
6596 /* Pass FALSE as the last argument to get_inner_reference although
6597 we are expanding to RTL. The rationale is that we know how to
6598 handle "aligning nodes" here: we can just bypass them because
6599 they won't change the final object whose address will be returned
6600 (they actually exist only for that purpose). */
6601 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6602 &mode1, &unsignedp, &volatilep, false);
6603 break;
6606 /* We must have made progress. */
6607 gcc_assert (inner != exp);
6609 subtarget = offset || bitpos ? NULL_RTX : target;
6610 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6612 if (offset)
6614 rtx tmp;
6616 if (modifier != EXPAND_NORMAL)
6617 result = force_operand (result, NULL);
6618 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6620 result = convert_memory_address (tmode, result);
6621 tmp = convert_memory_address (tmode, tmp);
6623 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6624 result = gen_rtx_PLUS (tmode, result, tmp);
6625 else
6627 subtarget = bitpos ? NULL_RTX : target;
6628 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6629 1, OPTAB_LIB_WIDEN);
6633 if (bitpos)
6635 /* Someone beforehand should have rejected taking the address
6636 of such an object. */
6637 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6639 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6640 if (modifier < EXPAND_SUM)
6641 result = force_operand (result, target);
6644 return result;
6647 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6648 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6650 static rtx
6651 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6652 enum expand_modifier modifier)
6654 enum machine_mode rmode;
6655 rtx result;
6657 /* Target mode of VOIDmode says "whatever's natural". */
6658 if (tmode == VOIDmode)
6659 tmode = TYPE_MODE (TREE_TYPE (exp));
6661 /* We can get called with some Weird Things if the user does silliness
6662 like "(short) &a". In that case, convert_memory_address won't do
6663 the right thing, so ignore the given target mode. */
6664 if (tmode != Pmode && tmode != ptr_mode)
6665 tmode = Pmode;
6667 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6668 tmode, modifier);
6670 /* Despite expand_expr's claims about ignoring TMODE when not
6671 strictly convenient, stuff breaks if we don't honor it. Note
6672 that combined with the above, we only do this for pointer modes. */
6673 rmode = GET_MODE (result);
6674 if (rmode == VOIDmode)
6675 rmode = tmode;
6676 if (rmode != tmode)
6677 result = convert_memory_address (tmode, result);
6679 return result;
6683 /* expand_expr: generate code for computing expression EXP.
6684 An rtx for the computed value is returned. The value is never null.
6685 In the case of a void EXP, const0_rtx is returned.
6687 The value may be stored in TARGET if TARGET is nonzero.
6688 TARGET is just a suggestion; callers must assume that
6689 the rtx returned may not be the same as TARGET.
6691 If TARGET is CONST0_RTX, it means that the value will be ignored.
6693 If TMODE is not VOIDmode, it suggests generating the
6694 result in mode TMODE. But this is done only when convenient.
6695 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6696 TMODE is just a suggestion; callers must assume that
6697 the rtx returned may not have mode TMODE.
6699 Note that TARGET may have neither TMODE nor MODE. In that case, it
6700 probably will not be used.
6702 If MODIFIER is EXPAND_SUM then when EXP is an addition
6703 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6704 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6705 products as above, or REG or MEM, or constant.
6706 Ordinarily in such cases we would output mul or add instructions
6707 and then return a pseudo reg containing the sum.
6709 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6710 it also marks a label as absolutely required (it can't be dead).
6711 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6712 This is used for outputting expressions used in initializers.
6714 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6715 with a constant address even if that address is not normally legitimate.
6716 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6718 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6719 a call parameter. Such targets require special care as we haven't yet
6720 marked TARGET so that it's safe from being trashed by libcalls. We
6721 don't want to use TARGET for anything but the final result;
6722 Intermediate values must go elsewhere. Additionally, calls to
6723 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6725 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6726 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6727 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6728 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6729 recursively. */
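/* A typical internal use therefore looks like

       op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
                          VOIDmode, EXPAND_NORMAL);

   followed by code prepared for OP0 being neither SUBTARGET nor of the
   suggested mode, since both are only hints.  */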
6731 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6732 enum expand_modifier, rtx *);
6734 rtx
6735 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6736 enum expand_modifier modifier, rtx *alt_rtl)
6738 int rn = -1;
6739 rtx ret, last = NULL;
6741 /* Handle ERROR_MARK before anybody tries to access its type. */
6742 if (TREE_CODE (exp) == ERROR_MARK
6743 || (!GIMPLE_TUPLE_P (exp) && TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
6745 ret = CONST0_RTX (tmode);
6746 return ret ? ret : const0_rtx;
6749 if (flag_non_call_exceptions)
6751 rn = lookup_stmt_eh_region (exp);
6752 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6753 if (rn >= 0)
6754 last = get_last_insn ();
6757 /* If this is an expression of some kind and it has an associated line
6758 number, then emit the line number before expanding the expression.
6760 We need to save and restore the file and line information so that
6761 errors discovered during expansion are emitted with the right
6762 information. It would be better if the diagnostic routines
6763 used the file/line information embedded in the tree nodes rather
6764 than globals. */
6765 if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6767 location_t saved_location = input_location;
6768 input_location = EXPR_LOCATION (exp);
6769 emit_line_note (input_location);
6771 /* Record where the insns produced belong. */
6772 record_block_change (TREE_BLOCK (exp));
6774 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6776 input_location = saved_location;
6778 else
6780 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6783 /* If using non-call exceptions, mark all insns that may trap.
6784 expand_call() will mark CALL_INSNs before we get to this code,
6785 but it doesn't handle libcalls, and these may trap. */
6786 if (rn >= 0)
6788 rtx insn;
6789 for (insn = next_real_insn (last); insn;
6790 insn = next_real_insn (insn))
6792 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6793 /* If we want exceptions for non-call insns, any
6794 may_trap_p instruction may throw. */
6795 && GET_CODE (PATTERN (insn)) != CLOBBER
6796 && GET_CODE (PATTERN (insn)) != USE
6797 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6799 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6800 REG_NOTES (insn));
6805 return ret;
6808 static rtx
6809 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6810 enum expand_modifier modifier, rtx *alt_rtl)
6812 rtx op0, op1, temp, decl_rtl;
6813 tree type;
6814 int unsignedp;
6815 enum machine_mode mode;
6816 enum tree_code code = TREE_CODE (exp);
6817 optab this_optab;
6818 rtx subtarget, original_target;
6819 int ignore;
6820 tree context, subexp0, subexp1;
6821 bool reduce_bit_field = false;
6822 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6823 ? reduce_to_bit_field_precision ((expr), \
6824 target, \
6825 type) \
6826 : (expr))
6828 if (GIMPLE_STMT_P (exp))
6830 type = void_type_node;
6831 mode = VOIDmode;
6832 unsignedp = 0;
6834 else
6836 type = TREE_TYPE (exp);
6837 mode = TYPE_MODE (type);
6838 unsignedp = TYPE_UNSIGNED (type);
6840 if (lang_hooks.reduce_bit_field_operations
6841 && TREE_CODE (type) == INTEGER_TYPE
6842 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6844 /* An operation in what may be a bit-field type needs the
6845 result to be reduced to the precision of the bit-field type,
6846 which is narrower than that of the type's mode. */
6847 reduce_bit_field = true;
6848 if (modifier == EXPAND_STACK_PARM)
6849 target = 0;
6852 /* Use subtarget as the target for operand 0 of a binary operation. */
6853 subtarget = get_subtarget (target);
6854 original_target = target;
6855 ignore = (target == const0_rtx
6856 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6857 || code == CONVERT_EXPR || code == COND_EXPR
6858 || code == VIEW_CONVERT_EXPR)
6859 && TREE_CODE (type) == VOID_TYPE));
6861 /* If we are going to ignore this result, we need only do something
6862 if there is a side-effect somewhere in the expression. If there
6863 is, short-circuit the most common cases here. Note that we must
6864 not call expand_expr with anything but const0_rtx in case this
6865 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6867 if (ignore)
6869 if (! TREE_SIDE_EFFECTS (exp))
6870 return const0_rtx;
6872 /* Ensure we reference a volatile object even if value is ignored, but
6873 don't do this if all we are doing is taking its address. */
6874 if (TREE_THIS_VOLATILE (exp)
6875 && TREE_CODE (exp) != FUNCTION_DECL
6876 && mode != VOIDmode && mode != BLKmode
6877 && modifier != EXPAND_CONST_ADDRESS)
6879 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6880 if (MEM_P (temp))
6881 temp = copy_to_reg (temp);
6882 return const0_rtx;
6885 if (TREE_CODE_CLASS (code) == tcc_unary
6886 || code == COMPONENT_REF || code == INDIRECT_REF)
6887 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6888 modifier);
6890 else if (TREE_CODE_CLASS (code) == tcc_binary
6891 || TREE_CODE_CLASS (code) == tcc_comparison
6892 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6894 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6895 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6896 return const0_rtx;
6898 else if (code == BIT_FIELD_REF)
6900 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6901 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6902 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6903 return const0_rtx;
6906 target = 0;
6910 switch (code)
6912 case LABEL_DECL:
6914 tree function = decl_function_context (exp);
6916 temp = label_rtx (exp);
6917 temp = gen_rtx_LABEL_REF (Pmode, temp);
6919 if (function != current_function_decl
6920 && function != 0)
6921 LABEL_REF_NONLOCAL_P (temp) = 1;
6923 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6924 return temp;
6927 case SSA_NAME:
6928 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6929 NULL);
6931 case PARM_DECL:
6932 case VAR_DECL:
6933 /* If a static var's type was incomplete when the decl was written,
6934 but the type is complete now, lay out the decl now. */
6935 if (DECL_SIZE (exp) == 0
6936 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6937 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6938 layout_decl (exp, 0);
6940 /* TLS emulation hook - replace __thread vars with
6941 *__emutls_get_address (&_emutls.var). */
6942 if (! targetm.have_tls
6943 && TREE_CODE (exp) == VAR_DECL
6944 && DECL_THREAD_LOCAL_P (exp))
6946 exp = build_fold_indirect_ref (emutls_var_address (exp));
6947 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
6950 /* ... fall through ... */
6952 case FUNCTION_DECL:
6953 case RESULT_DECL:
6954 decl_rtl = DECL_RTL (exp);
6955 gcc_assert (decl_rtl);
6957 /* Ensure the variable is marked as used even if it doesn't go through
6958 a parser. If it hasn't been used yet, write out an external
6959 definition. */
6960 if (! TREE_USED (exp))
6962 assemble_external (exp);
6963 TREE_USED (exp) = 1;
6966 /* Show we haven't gotten RTL for this yet. */
6967 temp = 0;
6969 /* Variables inherited from containing functions should have
6970 been lowered by this point. */
6971 context = decl_function_context (exp);
6972 gcc_assert (!context
6973 || context == current_function_decl
6974 || TREE_STATIC (exp)
6975 /* ??? C++ creates functions that are not TREE_STATIC. */
6976 || TREE_CODE (exp) == FUNCTION_DECL);
6978 /* This is the case of an array whose size is to be determined
6979 from its initializer, while the initializer is still being parsed.
6980 See expand_decl. */
6982 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
6983 temp = validize_mem (decl_rtl);
6985 /* If DECL_RTL is memory, we are in the normal case; if either the
6986 address is not valid, or it is not a register and -fforce-addr
6987 is specified, get the address into a register. */
6989 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
6991 if (alt_rtl)
6992 *alt_rtl = decl_rtl;
6993 decl_rtl = use_anchored_address (decl_rtl);
6994 if (modifier != EXPAND_CONST_ADDRESS
6995 && modifier != EXPAND_SUM
6996 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
6997 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
6998 temp = replace_equiv_address (decl_rtl,
6999 copy_rtx (XEXP (decl_rtl, 0)));
7002 /* If we got something, return it. But first, set the alignment
7003 if the address is a register. */
7004 if (temp != 0)
7006 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
7007 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
7009 return temp;
7012 /* If the mode of DECL_RTL does not match that of the decl, it
7013 must be a promoted value. We return a SUBREG of the wanted mode,
7014 but mark it so that we know that it was already extended. */
7016 if (REG_P (decl_rtl)
7017 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7019 enum machine_mode pmode;
7021 /* Get the signedness used for this variable. Ensure we get the
7022 same mode we got when the variable was declared. */
7023 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7024 (TREE_CODE (exp) == RESULT_DECL
7025 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7026 gcc_assert (GET_MODE (decl_rtl) == pmode);
7028 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7029 SUBREG_PROMOTED_VAR_P (temp) = 1;
7030 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7031 return temp;
7034 return decl_rtl;
7036 case INTEGER_CST:
7037 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7038 TREE_INT_CST_HIGH (exp), mode);
7040 /* ??? If overflow is set, fold will have done an incomplete job,
7041 which can result in (plus xx (const_int 0)), which can get
7042 simplified by validate_replace_rtx during virtual register
7043 instantiation, which can result in unrecognizable insns.
7044 Avoid this by forcing all overflows into registers. */
7045 if (TREE_OVERFLOW (exp)
7046 && modifier != EXPAND_INITIALIZER)
7047 temp = force_reg (mode, temp);
7049 return temp;
7051 case VECTOR_CST:
7053 tree tmp = NULL_TREE;
7054 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7055 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
7056 return const_vector_from_tree (exp);
7057 if (GET_MODE_CLASS (mode) == MODE_INT)
7059 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7060 if (type_for_mode)
7061 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7063 if (!tmp)
7064 tmp = build_constructor_from_list (type,
7065 TREE_VECTOR_CST_ELTS (exp));
7066 return expand_expr (tmp, ignore ? const0_rtx : target,
7067 tmode, modifier);
7070 case CONST_DECL:
7071 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7073 case REAL_CST:
7074 /* If optimized, generate immediate CONST_DOUBLE
7075 which will be turned into memory by reload if necessary.
7077 We used to force a register so that loop.c could see it. But
7078 this does not allow gen_* patterns to perform optimizations with
7079 the constants. It also produces two insns in cases like "x = 1.0;".
7080 On most machines, floating-point constants are not permitted in
7081 many insns, so we'd end up copying it to a register in any case.
7083 Now, we do the copying in expand_binop, if appropriate. */
7084 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7085 TYPE_MODE (TREE_TYPE (exp)));
7087 case COMPLEX_CST:
7088 /* Handle evaluating a complex constant in a CONCAT target. */
7089 if (original_target && GET_CODE (original_target) == CONCAT)
7091 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7092 rtx rtarg, itarg;
7094 rtarg = XEXP (original_target, 0);
7095 itarg = XEXP (original_target, 1);
7097 /* Move the real and imaginary parts separately. */
7098 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
7099 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
7101 if (op0 != rtarg)
7102 emit_move_insn (rtarg, op0);
7103 if (op1 != itarg)
7104 emit_move_insn (itarg, op1);
7106 return original_target;
7109 /* ... fall through ... */
7111 case STRING_CST:
7112 temp = expand_expr_constant (exp, 1, modifier);
7114 /* temp contains a constant address.
7115 On RISC machines where a constant address isn't valid,
7116 make some insns to get that address into a register. */
7117 if (modifier != EXPAND_CONST_ADDRESS
7118 && modifier != EXPAND_INITIALIZER
7119 && modifier != EXPAND_SUM
7120 && (! memory_address_p (mode, XEXP (temp, 0))
7121 || flag_force_addr))
7122 return replace_equiv_address (temp,
7123 copy_rtx (XEXP (temp, 0)));
7124 return temp;
7126 case SAVE_EXPR:
7128 tree val = TREE_OPERAND (exp, 0);
7129 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7131 if (!SAVE_EXPR_RESOLVED_P (exp))
7133 /* We can indeed still hit this case, typically via builtin
7134 expanders calling save_expr immediately before expanding
7135 something. Assume this means that we only have to deal
7136 with non-BLKmode values. */
7137 gcc_assert (GET_MODE (ret) != BLKmode);
7139 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7140 DECL_ARTIFICIAL (val) = 1;
7141 DECL_IGNORED_P (val) = 1;
7142 TREE_OPERAND (exp, 0) = val;
7143 SAVE_EXPR_RESOLVED_P (exp) = 1;
7145 if (!CONSTANT_P (ret))
7146 ret = copy_to_reg (ret);
7147 SET_DECL_RTL (val, ret);
7150 return ret;
7153 case GOTO_EXPR:
7154 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7155 expand_goto (TREE_OPERAND (exp, 0));
7156 else
7157 expand_computed_goto (TREE_OPERAND (exp, 0));
7158 return const0_rtx;
7160 case CONSTRUCTOR:
7161 /* If we don't need the result, just ensure we evaluate any
7162 subexpressions. */
7163 if (ignore)
7165 unsigned HOST_WIDE_INT idx;
7166 tree value;
7168 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7169 expand_expr (value, const0_rtx, VOIDmode, 0);
7171 return const0_rtx;
7174 /* Try to avoid creating a temporary at all. This is possible
7175 if all of the initializer is zero.
7176 FIXME: try to handle all [0..255] initializers we can handle
7177 with memset. */
7178 else if (TREE_STATIC (exp)
7179 && !TREE_ADDRESSABLE (exp)
7180 && target != 0 && mode == BLKmode
7181 && all_zeros_p (exp))
7183 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7184 return target;
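/* E.g. an aggregate initialization whose CONSTRUCTOR is entirely zero,
   such as "struct S s = { 0 };" for a BLKmode S, becomes one
   clear_storage call instead of per-field stores.  */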
7187 /* All elts simple constants => refer to a constant in memory. But
7188 if this is a non-BLKmode mode, let it store a field at a time
7189 since that should make a CONST_INT or CONST_DOUBLE when we
7190 fold. Likewise, if we have a target we can use, it is best to
7191 store directly into the target unless the type is large enough
7192 that memcpy will be used. If we are making an initializer and
7193 all operands are constant, put it in memory as well.
7195 FIXME: Avoid trying to fill vector constructors piece-meal.
7196 Output them with output_constant_def below unless we're sure
7197 they're zeros. This should go away when vector initializers
7198 are treated like VECTOR_CST instead of arrays. */
7200 else if ((TREE_STATIC (exp)
7201 && ((mode == BLKmode
7202 && ! (target != 0 && safe_from_p (target, exp, 1)))
7203 || TREE_ADDRESSABLE (exp)
7204 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7205 && (! MOVE_BY_PIECES_P
7206 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7207 TYPE_ALIGN (type)))
7208 && ! mostly_zeros_p (exp))))
7209 || ((modifier == EXPAND_INITIALIZER
7210 || modifier == EXPAND_CONST_ADDRESS)
7211 && TREE_CONSTANT (exp)))
7213 rtx constructor = expand_expr_constant (exp, 1, modifier);
7215 if (modifier != EXPAND_CONST_ADDRESS
7216 && modifier != EXPAND_INITIALIZER
7217 && modifier != EXPAND_SUM)
7218 constructor = validize_mem (constructor);
7220 return constructor;
7222 else
7224 /* Handle calls that pass values in multiple non-contiguous
7225 locations. The Irix 6 ABI has examples of this. */
7226 if (target == 0 || ! safe_from_p (target, exp, 1)
7227 || GET_CODE (target) == PARALLEL
7228 || modifier == EXPAND_STACK_PARM)
7229 target
7230 = assign_temp (build_qualified_type (type,
7231 (TYPE_QUALS (type)
7232 | (TREE_READONLY (exp)
7233 * TYPE_QUAL_CONST))),
7234 0, TREE_ADDRESSABLE (exp), 1);
7236 store_constructor (exp, target, 0, int_expr_size (exp));
7237 return target;
7240 case MISALIGNED_INDIRECT_REF:
7241 case ALIGN_INDIRECT_REF:
7242 case INDIRECT_REF:
7244 tree exp1 = TREE_OPERAND (exp, 0);
7246 if (modifier != EXPAND_WRITE)
7248 tree t;
7250 t = fold_read_from_constant_string (exp);
7251 if (t)
7252 return expand_expr (t, target, tmode, modifier);
7255 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7256 op0 = memory_address (mode, op0);
7258 if (code == ALIGN_INDIRECT_REF)
7260 int align = TYPE_ALIGN_UNIT (type);
7261 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7262 op0 = memory_address (mode, op0);
7265 temp = gen_rtx_MEM (mode, op0);
7267 set_mem_attributes (temp, exp, 0);
7269 /* Resolve the misalignment now, so that we don't have to remember
7270 to resolve it later. Of course, this only works for reads. */
7271 /* ??? When we get around to supporting writes, we'll have to handle
7272 this in store_expr directly. The vectorizer isn't generating
7273 those yet, however. */
7274 if (code == MISALIGNED_INDIRECT_REF)
7276 int icode;
7277 rtx reg, insn;
7279 gcc_assert (modifier == EXPAND_NORMAL
7280 || modifier == EXPAND_STACK_PARM);
7282 /* The vectorizer should have already checked the mode. */
7283 icode = movmisalign_optab->handlers[mode].insn_code;
7284 gcc_assert (icode != CODE_FOR_nothing);
7286 /* We've already validated the memory, and we're creating a
7287 new pseudo destination. The predicates really can't fail. */
7288 reg = gen_reg_rtx (mode);
7290 /* Nor can the insn generator. */
7291 insn = GEN_FCN (icode) (reg, temp);
7292 emit_insn (insn);
7294 return reg;
7297 return temp;
7300 case TARGET_MEM_REF:
7302 struct mem_address addr;
7304 get_address_description (exp, &addr);
7305 op0 = addr_for_mem_ref (&addr, true);
7306 op0 = memory_address (mode, op0);
7307 temp = gen_rtx_MEM (mode, op0);
7308 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7310 return temp;
7312 case ARRAY_REF:
7315 tree array = TREE_OPERAND (exp, 0);
7316 tree index = TREE_OPERAND (exp, 1);
7318 /* Fold an expression like: "foo"[2].
7319 This is not done in fold so it won't happen inside &.
7320 Don't fold if this is for wide characters since it's too
7321 difficult to do correctly and this is a very rare case. */
7323 if (modifier != EXPAND_CONST_ADDRESS
7324 && modifier != EXPAND_INITIALIZER
7325 && modifier != EXPAND_MEMORY)
7327 tree t = fold_read_from_constant_string (exp);
7329 if (t)
7330 return expand_expr (t, target, tmode, modifier);
7333 /* If this is a constant index into a constant array,
7334 just get the value from the array. Handle both the cases when
7335 we have an explicit constructor and when our operand is a variable
7336 that was declared const. */
7338 if (modifier != EXPAND_CONST_ADDRESS
7339 && modifier != EXPAND_INITIALIZER
7340 && modifier != EXPAND_MEMORY
7341 && TREE_CODE (array) == CONSTRUCTOR
7342 && ! TREE_SIDE_EFFECTS (array)
7343 && TREE_CODE (index) == INTEGER_CST)
7345 unsigned HOST_WIDE_INT ix;
7346 tree field, value;
7348 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7349 field, value)
7350 if (tree_int_cst_equal (field, index))
7352 if (!TREE_SIDE_EFFECTS (value))
7353 return expand_expr (fold (value), target, tmode, modifier);
7354 break;
7358 else if (optimize >= 1
7359 && modifier != EXPAND_CONST_ADDRESS
7360 && modifier != EXPAND_INITIALIZER
7361 && modifier != EXPAND_MEMORY
7362 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7363 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7364 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7365 && targetm.binds_local_p (array))
7367 if (TREE_CODE (index) == INTEGER_CST)
7369 tree init = DECL_INITIAL (array);
7371 if (TREE_CODE (init) == CONSTRUCTOR)
7373 unsigned HOST_WIDE_INT ix;
7374 tree field, value;
7376 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7377 field, value)
7378 if (tree_int_cst_equal (field, index))
7380 if (!TREE_SIDE_EFFECTS (value))
7381 return expand_expr (fold (value), target, tmode,
7382 modifier);
7383 break;
7386 else if (TREE_CODE (init) == STRING_CST)
7388 tree index1 = index;
7389 tree low_bound = array_ref_low_bound (exp);
7390 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7392 /* Optimize the special-case of a zero lower bound.
7394 We convert the low_bound to sizetype to avoid some problems
7395 with constant folding. (E.g. suppose the lower bound is 1,
7396 and its mode is QI. Without the conversion, (ARRAY
7397 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7398 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7400 if (! integer_zerop (low_bound))
7401 index1 = size_diffop (index1, fold_convert (sizetype,
7402 low_bound));
7404 if (0 > compare_tree_int (index1,
7405 TREE_STRING_LENGTH (init)))
7407 tree type = TREE_TYPE (TREE_TYPE (init));
7408 enum machine_mode mode = TYPE_MODE (type);
7410 if (GET_MODE_CLASS (mode) == MODE_INT
7411 && GET_MODE_SIZE (mode) == 1)
7412 return gen_int_mode (TREE_STRING_POINTER (init)
7413 [TREE_INT_CST_LOW (index1)],
7414 mode);
7420 goto normal_inner_ref;
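      /* Illustrative examples for the ARRAY_REF shortcuts above (a sketch,
         not a specification): a read such as

             char c = "foo"[2];

         is folded by fold_read_from_constant_string to the character 'o',
         and, when optimizing, a constant index into a read-only array, say

             static const int tbl[3] = { 10, 20, 30 };
             int x = tbl[1];

         is typically reduced to the constant 20 by consulting DECL_INITIAL,
         provided the array binds locally and has no side effects.  */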
7422 case COMPONENT_REF:
7423 /* If the operand is a CONSTRUCTOR, we can just extract the
7424 appropriate field if it is present. */
7425 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7427 unsigned HOST_WIDE_INT idx;
7428 tree field, value;
7430 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7431 idx, field, value)
7432 if (field == TREE_OPERAND (exp, 1)
7433 /* We can normally use the value of the field in the
7434 CONSTRUCTOR. However, if this is a bitfield in
7435 an integral mode that we can fit in a HOST_WIDE_INT,
7436 we must mask only the number of bits in the bitfield,
7437 since this is done implicitly by the constructor. If
7438 the bitfield does not meet either of those conditions,
7439 we can't do this optimization. */
7440 && (! DECL_BIT_FIELD (field)
7441 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7442 && (GET_MODE_BITSIZE (DECL_MODE (field))
7443 <= HOST_BITS_PER_WIDE_INT))))
7445 if (DECL_BIT_FIELD (field)
7446 && modifier == EXPAND_STACK_PARM)
7447 target = 0;
7448 op0 = expand_expr (value, target, tmode, modifier);
7449 if (DECL_BIT_FIELD (field))
7451 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7452 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7454 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7456 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7457 op0 = expand_and (imode, op0, op1, target);
7459 else
7461 tree count
7462 = build_int_cst (NULL_TREE,
7463 GET_MODE_BITSIZE (imode) - bitsize);
7465 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7466 target, 0);
7467 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7468 target, 0);
7472 return op0;
7475 goto normal_inner_ref;
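      /* Illustrative example for the CONSTRUCTOR shortcut above: for
         something like

             struct S { unsigned a : 3; int b; };
             ... ((struct S) { 5, 42 }).b ...

         the value 42 is expanded straight from the constructor element.
         Were the bit-field A accessed instead, the stored value would be
         masked (unsigned) or shifted left and back right (signed) so that
         only its low 3 bits survive, mirroring what an actual store into
         the bit-field would have done.  */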
7477 case BIT_FIELD_REF:
7478 case ARRAY_RANGE_REF:
7479 normal_inner_ref:
7481 enum machine_mode mode1;
7482 HOST_WIDE_INT bitsize, bitpos;
7483 tree offset;
7484 int volatilep = 0;
7485 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7486 &mode1, &unsignedp, &volatilep, true);
7487 rtx orig_op0;
7489 /* If we got back the original object, something is wrong. Perhaps
7490 we are evaluating an expression too early. In any event, don't
7491 infinitely recurse. */
7492 gcc_assert (tem != exp);
7494 /* If TEM's type is a union of variable size, pass TARGET to the inner
7495 computation, since it will need a temporary and TARGET is known
7496 to suffice for that. This occurs in unchecked conversion in Ada. */
7498 orig_op0 = op0
7499 = expand_expr (tem,
7500 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7501 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7502 != INTEGER_CST)
7503 && modifier != EXPAND_STACK_PARM
7504 ? target : NULL_RTX),
7505 VOIDmode,
7506 (modifier == EXPAND_INITIALIZER
7507 || modifier == EXPAND_CONST_ADDRESS
7508 || modifier == EXPAND_STACK_PARM)
7509 ? modifier : EXPAND_NORMAL);
7511 /* If this is a constant, put it into a register if it is a legitimate
7512 constant, OFFSET is 0, and we won't try to extract outside the
7513 register (in case we were passed a partially uninitialized object
7514 or a view_conversion to a larger size). Force the constant to
7515 memory otherwise. */
7516 if (CONSTANT_P (op0))
7518 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7519 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7520 && offset == 0
7521 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7522 op0 = force_reg (mode, op0);
7523 else
7524 op0 = validize_mem (force_const_mem (mode, op0));
7527 /* Otherwise, if this object is not in memory and we either have an
7528 offset, a BLKmode result, or a reference outside the object, put it
7529 there. Such cases can occur in Ada if we have unchecked conversion
7530 of an expression from a scalar type to an array or record type or
7531 for an ARRAY_RANGE_REF whose type is BLKmode. */
7532 else if (!MEM_P (op0)
7533 && (offset != 0
7534 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7535 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7537 tree nt = build_qualified_type (TREE_TYPE (tem),
7538 (TYPE_QUALS (TREE_TYPE (tem))
7539 | TYPE_QUAL_CONST));
7540 rtx memloc = assign_temp (nt, 1, 1, 1);
7542 emit_move_insn (memloc, op0);
7543 op0 = memloc;
7546 if (offset != 0)
7548 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7549 EXPAND_SUM);
7551 gcc_assert (MEM_P (op0));
7553 #ifdef POINTERS_EXTEND_UNSIGNED
7554 if (GET_MODE (offset_rtx) != Pmode)
7555 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7556 #else
7557 if (GET_MODE (offset_rtx) != ptr_mode)
7558 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7559 #endif
7561 if (GET_MODE (op0) == BLKmode
7562 /* A constant address in OP0 can have VOIDmode; we must
7563 not try to call force_reg in that case. */
7564 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7565 && bitsize != 0
7566 && (bitpos % bitsize) == 0
7567 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7568 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7570 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7571 bitpos = 0;
7574 op0 = offset_address (op0, offset_rtx,
7575 highest_pow2_factor (offset));
7578 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7579 record its alignment as BIGGEST_ALIGNMENT. */
7580 if (MEM_P (op0) && bitpos == 0 && offset != 0
7581 && is_aligning_offset (offset, tem))
7582 set_mem_align (op0, BIGGEST_ALIGNMENT);
7584 /* Don't forget about volatility even if this is a bitfield. */
7585 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7587 if (op0 == orig_op0)
7588 op0 = copy_rtx (op0);
7590 MEM_VOLATILE_P (op0) = 1;
7593 /* The following code doesn't handle CONCAT.
7594 Assume only bitpos == 0 can be used for CONCAT, due to
7595 one-element arrays having the same mode as their element. */
7596 if (GET_CODE (op0) == CONCAT)
7598 gcc_assert (bitpos == 0
7599 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7600 return op0;
7603 /* In cases where an aligned union has an unaligned object
7604 as a field, we might be extracting a BLKmode value from
7605 an integer-mode (e.g., SImode) object. Handle this case
7606 by doing the extract into an object as wide as the field
7607 (which we know to be the width of a basic mode), then
7608 storing into memory, and changing the mode to BLKmode. */
7609 if (mode1 == VOIDmode
7610 || REG_P (op0) || GET_CODE (op0) == SUBREG
7611 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7612 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7613 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7614 && modifier != EXPAND_CONST_ADDRESS
7615 && modifier != EXPAND_INITIALIZER)
7616 /* If the field isn't aligned enough to fetch as a memref,
7617 fetch it as a bit field. */
7618 || (mode1 != BLKmode
7619 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7620 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7621 || (MEM_P (op0)
7622 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7623 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7624 && ((modifier == EXPAND_CONST_ADDRESS
7625 || modifier == EXPAND_INITIALIZER)
7626 ? STRICT_ALIGNMENT
7627 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7628 || (bitpos % BITS_PER_UNIT != 0)))
7629 /* If the type and the field are a constant size and the
7630 size of the type isn't the same size as the bitfield,
7631 we must use bitfield operations. */
7632 || (bitsize >= 0
7633 && TYPE_SIZE (TREE_TYPE (exp))
7634 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7635 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7636 bitsize)))
7638 enum machine_mode ext_mode = mode;
7640 if (ext_mode == BLKmode
7641 && ! (target != 0 && MEM_P (op0)
7642 && MEM_P (target)
7643 && bitpos % BITS_PER_UNIT == 0))
7644 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7646 if (ext_mode == BLKmode)
7648 if (target == 0)
7649 target = assign_temp (type, 0, 1, 1);
7651 if (bitsize == 0)
7652 return target;
7654 /* In this case, BITPOS must start at a byte boundary and
7655 TARGET, if specified, must be a MEM. */
7656 gcc_assert (MEM_P (op0)
7657 && (!target || MEM_P (target))
7658 && !(bitpos % BITS_PER_UNIT));
7660 emit_block_move (target,
7661 adjust_address (op0, VOIDmode,
7662 bitpos / BITS_PER_UNIT),
7663 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7664 / BITS_PER_UNIT),
7665 (modifier == EXPAND_STACK_PARM
7666 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7668 return target;
7671 op0 = validize_mem (op0);
7673 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7674 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7676 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7677 (modifier == EXPAND_STACK_PARM
7678 ? NULL_RTX : target),
7679 ext_mode, ext_mode);
7681 /* If the result is a record type and BITSIZE is narrower than
7682 the mode of OP0, an integral mode, and this is a big endian
7683 machine, we must put the field into the high-order bits. */
7684 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7685 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7686 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7687 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7688 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7689 - bitsize),
7690 op0, 1);
7692 /* If the result type is BLKmode, store the data into a temporary
7693 of the appropriate type, but with the mode corresponding to the
7694 mode for the data we have (op0's mode). It's tempting to make
7695 this a constant type, since we know it's only being stored once,
7696 but that can cause problems if we are taking the address of this
7697 COMPONENT_REF because the MEM of any reference via that address
7698 will have flags corresponding to the type, which will not
7699 necessarily be constant. */
7700 if (mode == BLKmode)
7702 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
7703 rtx new;
7705 /* If the reference doesn't use the alias set of its type,
7706 we cannot create the temporary using that type. */
7707 if (component_uses_parent_alias_set (exp))
7709 new = assign_stack_local (ext_mode, size, 0);
7710 set_mem_alias_set (new, get_alias_set (exp));
7712 else
7713 new = assign_stack_temp_for_type (ext_mode, size, 0, type);
7715 emit_move_insn (new, op0);
7716 op0 = copy_rtx (new);
7717 PUT_MODE (op0, BLKmode);
7718 set_mem_attributes (op0, exp, 1);
7721 return op0;
7724 /* If the result is BLKmode, use that to access the object
7725 now as well. */
7726 if (mode == BLKmode)
7727 mode1 = BLKmode;
7729 /* Get a reference to just this component. */
7730 if (modifier == EXPAND_CONST_ADDRESS
7731 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7732 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7733 else
7734 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7736 if (op0 == orig_op0)
7737 op0 = copy_rtx (op0);
7739 set_mem_attributes (op0, exp, 0);
7740 if (REG_P (XEXP (op0, 0)))
7741 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7743 MEM_VOLATILE_P (op0) |= volatilep;
7744 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7745 || modifier == EXPAND_CONST_ADDRESS
7746 || modifier == EXPAND_INITIALIZER)
7747 return op0;
7748 else if (target == 0)
7749 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7751 convert_move (target, op0, unsignedp);
7752 return target;
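      /* Sketch of the common bit-field path above, assuming a simple
         target: for

             struct S { int x : 5; } s;
             int v = s.x;

         get_inner_reference returns bitsize == 5 plus the bit position of
         X within S, and extract_bit_field pulls those 5 bits out of the
         containing word, sign-extending because the field is signed.
         Aligned, byte-sized fields take the adjust_address path just
         above instead.  */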
7755 case OBJ_TYPE_REF:
7756 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7758 case CALL_EXPR:
7759 /* Check for a built-in function. */
7760 if (TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
7761 && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
7762 == FUNCTION_DECL)
7763 && DECL_BUILT_IN (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
7765 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
7766 == BUILT_IN_FRONTEND)
7767 return lang_hooks.expand_expr (exp, original_target,
7768 tmode, modifier,
7769 alt_rtl);
7770 else
7771 return expand_builtin (exp, target, subtarget, tmode, ignore);
7774 return expand_call (exp, target, ignore);
7776 case NON_LVALUE_EXPR:
7777 case NOP_EXPR:
7778 case CONVERT_EXPR:
7779 if (TREE_OPERAND (exp, 0) == error_mark_node)
7780 return const0_rtx;
7782 if (TREE_CODE (type) == UNION_TYPE)
7784 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7786 /* If both input and output are BLKmode, this conversion isn't doing
7787 anything except possibly changing memory attributes. */
7788 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7790 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7791 modifier);
7793 result = copy_rtx (result);
7794 set_mem_attributes (result, exp, 0);
7795 return result;
7798 if (target == 0)
7800 if (TYPE_MODE (type) != BLKmode)
7801 target = gen_reg_rtx (TYPE_MODE (type));
7802 else
7803 target = assign_temp (type, 0, 1, 1);
7806 if (MEM_P (target))
7807 /* Store data into beginning of memory target. */
7808 store_expr (TREE_OPERAND (exp, 0),
7809 adjust_address (target, TYPE_MODE (valtype), 0),
7810 modifier == EXPAND_STACK_PARM);
7812 else
7814 gcc_assert (REG_P (target));
7816 /* Store this field into a union of the proper type. */
7817 store_field (target,
7818 MIN ((int_size_in_bytes (TREE_TYPE
7819 (TREE_OPERAND (exp, 0)))
7820 * BITS_PER_UNIT),
7821 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7822 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7823 type, 0);
7826 /* Return the entire union. */
7827 return target;
7830 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7832 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7833 modifier);
7835 /* If the signedness of the conversion differs and OP0 is
7836 a promoted SUBREG, clear that indication since we now
7837 have to do the proper extension. */
7838 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7839 && GET_CODE (op0) == SUBREG)
7840 SUBREG_PROMOTED_VAR_P (op0) = 0;
7842 return REDUCE_BIT_FIELD (op0);
7845 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
7846 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7847 if (GET_MODE (op0) == mode)
7850 /* If OP0 is a constant, just convert it into the proper mode. */
7851 else if (CONSTANT_P (op0))
7853 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7854 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7856 if (modifier == EXPAND_INITIALIZER)
7857 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7858 subreg_lowpart_offset (mode,
7859 inner_mode));
7860 else
7861 op0 = convert_modes (mode, inner_mode, op0,
7862 TYPE_UNSIGNED (inner_type));
7865 else if (modifier == EXPAND_INITIALIZER)
7866 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7868 else if (target == 0)
7869 op0 = convert_to_mode (mode, op0,
7870 TYPE_UNSIGNED (TREE_TYPE
7871 (TREE_OPERAND (exp, 0))));
7872 else
7874 convert_move (target, op0,
7875 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7876 op0 = target;
7879 return REDUCE_BIT_FIELD (op0);
7881 case VIEW_CONVERT_EXPR:
7882 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7884 /* If the input and output modes are both the same, we are done. */
7885 if (TYPE_MODE (type) == GET_MODE (op0))
7887 /* If neither mode is BLKmode, and both modes are the same size
7888 then we can use gen_lowpart. */
7889 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7890 && GET_MODE_SIZE (TYPE_MODE (type))
7891 == GET_MODE_SIZE (GET_MODE (op0)))
7893 if (GET_CODE (op0) == SUBREG)
7894 op0 = force_reg (GET_MODE (op0), op0);
7895 op0 = gen_lowpart (TYPE_MODE (type), op0);
7897 /* If both modes are integral, then we can convert from one to the
7898 other. */
7899 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7900 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7901 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7902 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7903 /* As a last resort, spill op0 to memory, and reload it in a
7904 different mode. */
7905 else if (!MEM_P (op0))
7907 /* If the operand is not a MEM, force it into memory. Since we
7908 are going to be changing the mode of the MEM, don't call
7909 force_const_mem for constants because we don't allow pool
7910 constants to change mode. */
7911 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7913 gcc_assert (!TREE_ADDRESSABLE (exp));
7915 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7916 target
7917 = assign_stack_temp_for_type
7918 (TYPE_MODE (inner_type),
7919 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7921 emit_move_insn (target, op0);
7922 op0 = target;
7925 /* At this point, OP0 is in the correct mode. If the output type is such
7926 that the operand is known to be aligned, indicate that it is.
7927 Otherwise, we need only be concerned about alignment for non-BLKmode
7928 results. */
7929 if (MEM_P (op0))
7931 op0 = copy_rtx (op0);
7933 if (TYPE_ALIGN_OK (type))
7934 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7935 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7936 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7938 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7939 HOST_WIDE_INT temp_size
7940 = MAX (int_size_in_bytes (inner_type),
7941 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7942 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7943 temp_size, 0, type);
7944 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7946 gcc_assert (!TREE_ADDRESSABLE (exp));
7948 if (GET_MODE (op0) == BLKmode)
7949 emit_block_move (new_with_op0_mode, op0,
7950 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7951 (modifier == EXPAND_STACK_PARM
7952 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7953 else
7954 emit_move_insn (new_with_op0_mode, op0);
7956 op0 = new;
7959 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7962 return op0;
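      /* Illustrative note for VIEW_CONVERT_EXPR: the operand is
         reinterpreted bit-for-bit, e.g. viewing a float as a 32-bit
         integer.  When both modes have the same size, as SFmode and SImode
         typically do, the gen_lowpart branch above applies; otherwise the
         value is spilled to a stack temporary and re-read in the new mode,
         as in the memory fallback.  */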
7964 case PLUS_EXPR:
7965 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7966 something else, make sure we add the register to the constant and
7967 then to the other thing. This case can occur during strength
7968 reduction and doing it this way will produce better code if the
7969 frame pointer or argument pointer is eliminated.
7971 fold-const.c will ensure that the constant is always in the inner
7972 PLUS_EXPR, so the only case we need to do anything about is if
7973 sp, ap, or fp is our second argument, in which case we must swap
7974 the innermost first argument and our second argument. */
7976 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7977 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7978 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7979 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7980 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7981 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7983 tree t = TREE_OPERAND (exp, 1);
7985 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7986 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7989 /* If the result is to be ptr_mode and we are adding an integer to
7990 something, we might be forming a constant. So try to use
7991 plus_constant. If it produces a sum and we can't accept it,
7992 use force_operand. This allows P = &ARR[const] to generate
7993 efficient code on machines where a SYMBOL_REF is not a valid
7994 address.
7996 If this is an EXPAND_SUM call, always return the sum. */
7997 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7998 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8000 if (modifier == EXPAND_STACK_PARM)
8001 target = 0;
8002 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8003 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8004 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8006 rtx constant_part;
8008 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8009 EXPAND_SUM);
8010 /* Use immed_double_const to ensure that the constant is
8011 truncated according to the mode of OP1, then sign extended
8012 to a HOST_WIDE_INT. Using the constant directly can result
8013 in non-canonical RTL in a 64x32 cross compile. */
8014 constant_part
8015 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8016 (HOST_WIDE_INT) 0,
8017 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8018 op1 = plus_constant (op1, INTVAL (constant_part));
8019 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8020 op1 = force_operand (op1, target);
8021 return REDUCE_BIT_FIELD (op1);
8024 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8025 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8026 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8028 rtx constant_part;
8030 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8031 (modifier == EXPAND_INITIALIZER
8032 ? EXPAND_INITIALIZER : EXPAND_SUM));
8033 if (! CONSTANT_P (op0))
8035 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8036 VOIDmode, modifier);
8037 /* Return a PLUS if modifier says it's OK. */
8038 if (modifier == EXPAND_SUM
8039 || modifier == EXPAND_INITIALIZER)
8040 return simplify_gen_binary (PLUS, mode, op0, op1);
8041 goto binop2;
8043 /* Use immed_double_const to ensure that the constant is
8044 truncated according to the mode of OP0, then sign extended
8045 to a HOST_WIDE_INT. Using the constant directly can result
8046 in non-canonical RTL in a 64x32 cross compile. */
8047 constant_part
8048 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8049 (HOST_WIDE_INT) 0,
8050 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8051 op0 = plus_constant (op0, INTVAL (constant_part));
8052 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8053 op0 = force_operand (op0, target);
8054 return REDUCE_BIT_FIELD (op0);
8058 /* No sense saving up arithmetic to be done
8059 if it's all in the wrong mode to form part of an address.
8060 And force_operand won't know whether to sign-extend or
8061 zero-extend. */
8062 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8063 || mode != ptr_mode)
8065 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8066 subtarget, &op0, &op1, 0);
8067 if (op0 == const0_rtx)
8068 return op1;
8069 if (op1 == const0_rtx)
8070 return op0;
8071 goto binop2;
8074 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8075 subtarget, &op0, &op1, modifier);
8076 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
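      /* Worked example for the constant handling above (a sketch, assuming
         4-byte int): for P = &ARR[10], where ARR is a global array, the
         address is expanded with EXPAND_SUM and plus_constant folds the
         SYMBOL_REF for ARR together with the byte offset 40 into roughly
         (const (plus (symbol_ref "arr") (const_int 40))), so no addition
         needs to be performed at run time.  */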
8078 case MINUS_EXPR:
8079 /* For initializers, we are allowed to return a MINUS of two
8080 symbolic constants. Here we handle all cases when both operands
8081 are constant. */
8082 /* Handle difference of two symbolic constants,
8083 for the sake of an initializer. */
8084 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8085 && really_constant_p (TREE_OPERAND (exp, 0))
8086 && really_constant_p (TREE_OPERAND (exp, 1)))
8088 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8089 NULL_RTX, &op0, &op1, modifier);
8091 /* If the last operand is a CONST_INT, use plus_constant of
8092 the negated constant. Else make the MINUS. */
8093 if (GET_CODE (op1) == CONST_INT)
8094 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8095 else
8096 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8099 /* No sense saving up arithmetic to be done
8100 if it's all in the wrong mode to form part of an address.
8101 And force_operand won't know whether to sign-extend or
8102 zero-extend. */
8103 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8104 || mode != ptr_mode)
8105 goto binop;
8107 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8108 subtarget, &op0, &op1, modifier);
8110 /* Convert A - const to A + (-const). */
8111 if (GET_CODE (op1) == CONST_INT)
8113 op1 = negate_rtx (mode, op1);
8114 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8117 goto binop2;
8119 case MULT_EXPR:
8120 /* If first operand is constant, swap them.
8121 Thus the following special case checks need only
8122 check the second operand. */
8123 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8125 tree t1 = TREE_OPERAND (exp, 0);
8126 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8127 TREE_OPERAND (exp, 1) = t1;
8130 /* Attempt to return something suitable for generating an
8131 indexed address, for machines that support that. */
8133 if (modifier == EXPAND_SUM && mode == ptr_mode
8134 && host_integerp (TREE_OPERAND (exp, 1), 0))
8136 tree exp1 = TREE_OPERAND (exp, 1);
8138 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8139 EXPAND_SUM);
8141 if (!REG_P (op0))
8142 op0 = force_operand (op0, NULL_RTX);
8143 if (!REG_P (op0))
8144 op0 = copy_to_mode_reg (mode, op0);
8146 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8147 gen_int_mode (tree_low_cst (exp1, 0),
8148 TYPE_MODE (TREE_TYPE (exp1)))));
8151 if (modifier == EXPAND_STACK_PARM)
8152 target = 0;
8154 /* Check for multiplying things that have been extended
8155 from a narrower type. If this machine supports multiplying
8156 in that narrower type with a result in the desired type,
8157 do it that way, and avoid the explicit type-conversion. */
8159 subexp0 = TREE_OPERAND (exp, 0);
8160 subexp1 = TREE_OPERAND (exp, 1);
8161 /* First, check if we have a multiplication of one signed and one
8162 unsigned operand. */
8163 if (TREE_CODE (subexp0) == NOP_EXPR
8164 && TREE_CODE (subexp1) == NOP_EXPR
8165 && TREE_CODE (type) == INTEGER_TYPE
8166 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8167 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8168 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8169 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8170 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8171 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8173 enum machine_mode innermode
8174 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8175 this_optab = usmul_widen_optab;
8176 if (mode == GET_MODE_WIDER_MODE (innermode))
8178 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8180 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8181 expand_operands (TREE_OPERAND (subexp0, 0),
8182 TREE_OPERAND (subexp1, 0),
8183 NULL_RTX, &op0, &op1, 0);
8184 else
8185 expand_operands (TREE_OPERAND (subexp0, 0),
8186 TREE_OPERAND (subexp1, 0),
8187 NULL_RTX, &op1, &op0, 0);
8189 goto binop3;
8193 /* Check for a multiplication with matching signedness. */
8194 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8195 && TREE_CODE (type) == INTEGER_TYPE
8196 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8197 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8198 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8199 && int_fits_type_p (TREE_OPERAND (exp, 1),
8200 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8201 /* Don't use a widening multiply if a shift will do. */
8202 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8203 > HOST_BITS_PER_WIDE_INT)
8204 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8206 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8207 && (TYPE_PRECISION (TREE_TYPE
8208 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8209 == TYPE_PRECISION (TREE_TYPE
8210 (TREE_OPERAND
8211 (TREE_OPERAND (exp, 0), 0))))
8212 /* If both operands are extended, they must either both
8213 be zero-extended or both be sign-extended. */
8214 && (TYPE_UNSIGNED (TREE_TYPE
8215 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8216 == TYPE_UNSIGNED (TREE_TYPE
8217 (TREE_OPERAND
8218 (TREE_OPERAND (exp, 0), 0)))))))
8220 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8221 enum machine_mode innermode = TYPE_MODE (op0type);
8222 bool zextend_p = TYPE_UNSIGNED (op0type);
8223 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8224 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8226 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8228 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8230 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8231 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8232 TREE_OPERAND (exp, 1),
8233 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8234 else
8235 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8236 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8237 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8238 goto binop3;
8240 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8241 && innermode == word_mode)
8243 rtx htem, hipart;
8244 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8245 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8246 op1 = convert_modes (innermode, mode,
8247 expand_normal (TREE_OPERAND (exp, 1)),
8248 unsignedp);
8249 else
8250 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8251 temp = expand_binop (mode, other_optab, op0, op1, target,
8252 unsignedp, OPTAB_LIB_WIDEN);
8253 hipart = gen_highpart (innermode, temp);
8254 htem = expand_mult_highpart_adjust (innermode, hipart,
8255 op0, op1, hipart,
8256 zextend_p);
8257 if (htem != hipart)
8258 emit_move_insn (hipart, htem);
8259 return REDUCE_BIT_FIELD (temp);
8263 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8264 subtarget, &op0, &op1, 0);
8265 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
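      /* Illustrative example of the widening-multiply detection above:
         for

             short a, b;
             int prod = (int) a * (int) b;

         both operands are NOP_EXPRs from a narrower type of equal
         precision and signedness, so smul_widen_optab is tried and, on a
         target providing a HImode-to-SImode widening multiply (a
         mulhisi3-style pattern, say), the expansion uses it directly
         instead of extending both operands and doing a full SImode
         multiply.  */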
8267 case TRUNC_DIV_EXPR:
8268 case FLOOR_DIV_EXPR:
8269 case CEIL_DIV_EXPR:
8270 case ROUND_DIV_EXPR:
8271 case EXACT_DIV_EXPR:
8272 if (modifier == EXPAND_STACK_PARM)
8273 target = 0;
8274 /* Possible optimization: compute the dividend with EXPAND_SUM
8275 then, if the divisor is constant, optimize the case
8276 where some terms of the dividend have coefficients divisible by it. */
8277 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8278 subtarget, &op0, &op1, 0);
8279 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8281 case RDIV_EXPR:
8282 goto binop;
8284 case TRUNC_MOD_EXPR:
8285 case FLOOR_MOD_EXPR:
8286 case CEIL_MOD_EXPR:
8287 case ROUND_MOD_EXPR:
8288 if (modifier == EXPAND_STACK_PARM)
8289 target = 0;
8290 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8291 subtarget, &op0, &op1, 0);
8292 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8294 case FIX_TRUNC_EXPR:
8295 op0 = expand_normal (TREE_OPERAND (exp, 0));
8296 if (target == 0 || modifier == EXPAND_STACK_PARM)
8297 target = gen_reg_rtx (mode);
8298 expand_fix (target, op0, unsignedp);
8299 return target;
8301 case FLOAT_EXPR:
8302 op0 = expand_normal (TREE_OPERAND (exp, 0));
8303 if (target == 0 || modifier == EXPAND_STACK_PARM)
8304 target = gen_reg_rtx (mode);
8305 /* expand_float can't figure out what to do if FROM has VOIDmode.
8306 So give it the correct mode. With -O, cse will optimize this. */
8307 if (GET_MODE (op0) == VOIDmode)
8308 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8309 op0);
8310 expand_float (target, op0,
8311 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8312 return target;
8314 case NEGATE_EXPR:
8315 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8316 if (modifier == EXPAND_STACK_PARM)
8317 target = 0;
8318 temp = expand_unop (mode,
8319 optab_for_tree_code (NEGATE_EXPR, type),
8320 op0, target, 0);
8321 gcc_assert (temp);
8322 return REDUCE_BIT_FIELD (temp);
8324 case ABS_EXPR:
8325 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8326 if (modifier == EXPAND_STACK_PARM)
8327 target = 0;
8329 /* ABS_EXPR is not valid for complex arguments. */
8330 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8331 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8333 /* Unsigned abs is simply the operand. Testing here means we don't
8334 risk generating incorrect code below. */
8335 if (TYPE_UNSIGNED (type))
8336 return op0;
8338 return expand_abs (mode, op0, target, unsignedp,
8339 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8341 case MAX_EXPR:
8342 case MIN_EXPR:
8343 target = original_target;
8344 if (target == 0
8345 || modifier == EXPAND_STACK_PARM
8346 || (MEM_P (target) && MEM_VOLATILE_P (target))
8347 || GET_MODE (target) != mode
8348 || (REG_P (target)
8349 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8350 target = gen_reg_rtx (mode);
8351 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8352 target, &op0, &op1, 0);
8354 /* First try to do it with a special MIN or MAX instruction.
8355 If that does not win, use a conditional jump to select the proper
8356 value. */
8357 this_optab = optab_for_tree_code (code, type);
8358 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8359 OPTAB_WIDEN);
8360 if (temp != 0)
8361 return temp;
8363 /* At this point, a MEM target is no longer useful; we will get better
8364 code without it. */
8366 if (! REG_P (target))
8367 target = gen_reg_rtx (mode);
8369 /* If op1 was placed in target, swap op0 and op1. */
8370 if (target != op0 && target == op1)
8372 temp = op0;
8373 op0 = op1;
8374 op1 = temp;
8377 /* We generate better code and avoid problems with op1 mentioning
8378 target by forcing op1 into a pseudo if it isn't a constant. */
8379 if (! CONSTANT_P (op1))
8380 op1 = force_reg (mode, op1);
8383 enum rtx_code comparison_code;
8384 rtx cmpop1 = op1;
8386 if (code == MAX_EXPR)
8387 comparison_code = unsignedp ? GEU : GE;
8388 else
8389 comparison_code = unsignedp ? LEU : LE;
8391 /* Canonicalize to comparisons against 0. */
8392 if (op1 == const1_rtx)
8394 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8395 or (a != 0 ? a : 1) for unsigned.
8396 For MIN we are safe converting (a <= 1 ? a : 1)
8397 into (a <= 0 ? a : 1) */
8398 cmpop1 = const0_rtx;
8399 if (code == MAX_EXPR)
8400 comparison_code = unsignedp ? NE : GT;
8402 if (op1 == constm1_rtx && !unsignedp)
8404 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8405 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8406 cmpop1 = const0_rtx;
8407 if (code == MIN_EXPR)
8408 comparison_code = LT;
8410 #ifdef HAVE_conditional_move
8411 /* Use a conditional move if possible. */
8412 if (can_conditionally_move_p (mode))
8414 rtx insn;
8416 /* ??? Same problem as in expmed.c: emit_conditional_move
8417 forces a stack adjustment via compare_from_rtx, and we
8418 lose the stack adjustment if the sequence we are about
8419 to create is discarded. */
8420 do_pending_stack_adjust ();
8422 start_sequence ();
8424 /* Try to emit the conditional move. */
8425 insn = emit_conditional_move (target, comparison_code,
8426 op0, cmpop1, mode,
8427 op0, op1, mode,
8428 unsignedp);
8430 /* If we could do the conditional move, emit the sequence,
8431 and return. */
8432 if (insn)
8434 rtx seq = get_insns ();
8435 end_sequence ();
8436 emit_insn (seq);
8437 return target;
8440 /* Otherwise discard the sequence and fall back to code with
8441 branches. */
8442 end_sequence ();
8444 #endif
8445 if (target != op0)
8446 emit_move_insn (target, op0);
8448 temp = gen_label_rtx ();
8449 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8450 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8452 emit_move_insn (target, op1);
8453 emit_label (temp);
8454 return target;
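      /* Example of the canonicalization above: MAX_EXPR <a, 1> compares
         against 0 instead, so (a >= 1 ? a : 1) is computed as
         (a != 0 ? a : 1) when unsigned or (a > 0 ? a : 1) when signed.
         A conditional move is used when the target has one; otherwise
         the compare-and-jump sequence at the end of the case is
         emitted.  */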
8456 case BIT_NOT_EXPR:
8457 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8458 if (modifier == EXPAND_STACK_PARM)
8459 target = 0;
8460 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8461 gcc_assert (temp);
8462 return temp;
8464 /* ??? Can optimize bitwise operations with one arg constant.
8465 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8466 and (a bitwise1 b) bitwise2 b (etc)
8467 but that is probably not worth while. */
8469 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8470 boolean values when we want in all cases to compute both of them. In
8471 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8472 as actual zero-or-1 values and then bitwise anding. In cases where
8473 there cannot be any side effects, better code would be made by
8474 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8475 how to recognize those cases. */
8477 case TRUTH_AND_EXPR:
8478 code = BIT_AND_EXPR;
8479 case BIT_AND_EXPR:
8480 goto binop;
8482 case TRUTH_OR_EXPR:
8483 code = BIT_IOR_EXPR;
8484 case BIT_IOR_EXPR:
8485 goto binop;
8487 case TRUTH_XOR_EXPR:
8488 code = BIT_XOR_EXPR;
8489 case BIT_XOR_EXPR:
8490 goto binop;
8492 case LSHIFT_EXPR:
8493 case RSHIFT_EXPR:
8494 case LROTATE_EXPR:
8495 case RROTATE_EXPR:
8496 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8497 subtarget = 0;
8498 if (modifier == EXPAND_STACK_PARM)
8499 target = 0;
8500 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8501 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8502 unsignedp);
8504 /* Could determine the answer when only additive constants differ. Also,
8505 the addition of one can be handled by changing the condition. */
8506 case LT_EXPR:
8507 case LE_EXPR:
8508 case GT_EXPR:
8509 case GE_EXPR:
8510 case EQ_EXPR:
8511 case NE_EXPR:
8512 case UNORDERED_EXPR:
8513 case ORDERED_EXPR:
8514 case UNLT_EXPR:
8515 case UNLE_EXPR:
8516 case UNGT_EXPR:
8517 case UNGE_EXPR:
8518 case UNEQ_EXPR:
8519 case LTGT_EXPR:
8520 temp = do_store_flag (exp,
8521 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8522 tmode != VOIDmode ? tmode : mode, 0);
8523 if (temp != 0)
8524 return temp;
8526 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8527 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8528 && original_target
8529 && REG_P (original_target)
8530 && (GET_MODE (original_target)
8531 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8533 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8534 VOIDmode, 0);
8536 /* If temp is constant, we can just compute the result. */
8537 if (GET_CODE (temp) == CONST_INT)
8539 if (INTVAL (temp) != 0)
8540 emit_move_insn (target, const1_rtx);
8541 else
8542 emit_move_insn (target, const0_rtx);
8544 return target;
8547 if (temp != original_target)
8549 enum machine_mode mode1 = GET_MODE (temp);
8550 if (mode1 == VOIDmode)
8551 mode1 = tmode != VOIDmode ? tmode : mode;
8553 temp = copy_to_mode_reg (mode1, temp);
8556 op1 = gen_label_rtx ();
8557 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8558 GET_MODE (temp), unsignedp, op1);
8559 emit_move_insn (temp, const1_rtx);
8560 emit_label (op1);
8561 return temp;
8564 /* If no set-flag instruction, must generate a conditional store
8565 into a temporary variable. Drop through and handle this
8566 like && and ||. */
8568 if (! ignore
8569 && (target == 0
8570 || modifier == EXPAND_STACK_PARM
8571 || ! safe_from_p (target, exp, 1)
8572 /* Make sure we don't have a hard reg (such as function's return
8573 value) live across basic blocks, if not optimizing. */
8574 || (!optimize && REG_P (target)
8575 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8576 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8578 if (target)
8579 emit_move_insn (target, const0_rtx);
8581 op1 = gen_label_rtx ();
8582 jumpifnot (exp, op1);
8584 if (target)
8585 emit_move_insn (target, const1_rtx);
8587 emit_label (op1);
8588 return ignore ? const0_rtx : target;
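      /* Sketch of the fallback just above: when do_store_flag cannot use
         a set-flag instruction for, say, (x < y), the result is built
         with branches, roughly

             target = 0;
             if (x < y)
               target = 1;

         via jumpifnot and the const0_rtx/const1_rtx moves.  */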
8590 case TRUTH_NOT_EXPR:
8591 if (modifier == EXPAND_STACK_PARM)
8592 target = 0;
8593 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8594 /* The parser is careful to generate TRUTH_NOT_EXPR
8595 only with operands that are always zero or one. */
8596 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8597 target, 1, OPTAB_LIB_WIDEN);
8598 gcc_assert (temp);
8599 return temp;
8601 case STATEMENT_LIST:
8603 tree_stmt_iterator iter;
8605 gcc_assert (ignore);
8607 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8608 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8610 return const0_rtx;
8612 case COND_EXPR:
8613 /* A COND_EXPR with its type being VOID_TYPE represents a
8614 conditional jump and is handled in
8615 expand_gimple_cond_expr. */
8616 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8618 /* Note that COND_EXPRs whose type is a structure or union
8619 are required to be constructed to contain assignments of
8620 a temporary variable, so that we can evaluate them here
8621 for side effect only. If type is void, we must do likewise. */
8623 gcc_assert (!TREE_ADDRESSABLE (type)
8624 && !ignore
8625 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8626 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8628 /* If we are not to produce a result, we have no target. Otherwise,
8629 if a target was specified use it; it will not be used as an
8630 intermediate target unless it is safe. If no target, use a
8631 temporary. */
8633 if (modifier != EXPAND_STACK_PARM
8634 && original_target
8635 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8636 && GET_MODE (original_target) == mode
8637 #ifdef HAVE_conditional_move
8638 && (! can_conditionally_move_p (mode)
8639 || REG_P (original_target))
8640 #endif
8641 && !MEM_P (original_target))
8642 temp = original_target;
8643 else
8644 temp = assign_temp (type, 0, 0, 1);
8646 do_pending_stack_adjust ();
8647 NO_DEFER_POP;
8648 op0 = gen_label_rtx ();
8649 op1 = gen_label_rtx ();
8650 jumpifnot (TREE_OPERAND (exp, 0), op0);
8651 store_expr (TREE_OPERAND (exp, 1), temp,
8652 modifier == EXPAND_STACK_PARM);
8654 emit_jump_insn (gen_jump (op1));
8655 emit_barrier ();
8656 emit_label (op0);
8657 store_expr (TREE_OPERAND (exp, 2), temp,
8658 modifier == EXPAND_STACK_PARM);
8660 emit_label (op1);
8661 OK_DEFER_POP;
8662 return temp;
8664 case VEC_COND_EXPR:
8665 target = expand_vec_cond_expr (exp, target);
8666 return target;
8668 case MODIFY_EXPR:
8670 tree lhs = TREE_OPERAND (exp, 0);
8671 tree rhs = TREE_OPERAND (exp, 1);
8672 gcc_assert (ignore);
8673 expand_assignment (lhs, rhs);
8674 return const0_rtx;
8677 case GIMPLE_MODIFY_STMT:
8679 tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
8680 tree rhs = GIMPLE_STMT_OPERAND (exp, 1);
8682 gcc_assert (ignore);
8684 /* Check for |= or &= of a bitfield of size one into another bitfield
8685 of size 1. In this case, (unless we need the result of the
8686 assignment) we can do this more efficiently with a
8687 test followed by an assignment, if necessary.
8689 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8690 things change so we do, this code should be enhanced to
8691 support it. */
8692 if (TREE_CODE (lhs) == COMPONENT_REF
8693 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8694 || TREE_CODE (rhs) == BIT_AND_EXPR)
8695 && TREE_OPERAND (rhs, 0) == lhs
8696 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8697 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8698 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8700 rtx label = gen_label_rtx ();
8701 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
8702 do_jump (TREE_OPERAND (rhs, 1),
8703 value ? label : 0,
8704 value ? 0 : label);
8705 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value));
8706 do_pending_stack_adjust ();
8707 emit_label (label);
8708 return const0_rtx;
8711 expand_assignment (lhs, rhs);
8712 return const0_rtx;
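      /* Illustrative example of the one-bit |=/&= shortcut above: for

             struct S { unsigned f : 1; } s, t;
             s.f |= t.f;

         the expansion tests T.F and stores 1 into S.F only when T.F is
         set, avoiding a read-modify-write of S.F; the analogous &= case
         stores 0 when the tested bit is clear.  */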
8715 case RETURN_EXPR:
8716 if (!TREE_OPERAND (exp, 0))
8717 expand_null_return ();
8718 else
8719 expand_return (TREE_OPERAND (exp, 0));
8720 return const0_rtx;
8722 case ADDR_EXPR:
8723 return expand_expr_addr_expr (exp, target, tmode, modifier);
8725 case COMPLEX_EXPR:
8726 /* Get the rtx code of the operands. */
8727 op0 = expand_normal (TREE_OPERAND (exp, 0));
8728 op1 = expand_normal (TREE_OPERAND (exp, 1));
8730 if (!target)
8731 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8733 /* Move the real (op0) and imaginary (op1) parts to their location. */
8734 write_complex_part (target, op0, false);
8735 write_complex_part (target, op1, true);
8737 return target;
8739 case REALPART_EXPR:
8740 op0 = expand_normal (TREE_OPERAND (exp, 0));
8741 return read_complex_part (op0, false);
8743 case IMAGPART_EXPR:
8744 op0 = expand_normal (TREE_OPERAND (exp, 0));
8745 return read_complex_part (op0, true);
8747 case RESX_EXPR:
8748 expand_resx_expr (exp);
8749 return const0_rtx;
8751 case TRY_CATCH_EXPR:
8752 case CATCH_EXPR:
8753 case EH_FILTER_EXPR:
8754 case TRY_FINALLY_EXPR:
8755 /* Lowered by tree-eh.c. */
8756 gcc_unreachable ();
8758 case WITH_CLEANUP_EXPR:
8759 case CLEANUP_POINT_EXPR:
8760 case TARGET_EXPR:
8761 case CASE_LABEL_EXPR:
8762 case VA_ARG_EXPR:
8763 case BIND_EXPR:
8764 case INIT_EXPR:
8765 case CONJ_EXPR:
8766 case COMPOUND_EXPR:
8767 case PREINCREMENT_EXPR:
8768 case PREDECREMENT_EXPR:
8769 case POSTINCREMENT_EXPR:
8770 case POSTDECREMENT_EXPR:
8771 case LOOP_EXPR:
8772 case EXIT_EXPR:
8773 case TRUTH_ANDIF_EXPR:
8774 case TRUTH_ORIF_EXPR:
8775 /* Lowered by gimplify.c. */
8776 gcc_unreachable ();
8778 case EXC_PTR_EXPR:
8779 return get_exception_pointer (cfun);
8781 case FILTER_EXPR:
8782 return get_exception_filter (cfun);
8784 case FDESC_EXPR:
8785 /* Function descriptors are not valid except for as
8786 initialization constants, and should not be expanded. */
8787 gcc_unreachable ();
8789 case SWITCH_EXPR:
8790 expand_case (exp);
8791 return const0_rtx;
8793 case LABEL_EXPR:
8794 expand_label (TREE_OPERAND (exp, 0));
8795 return const0_rtx;
8797 case ASM_EXPR:
8798 expand_asm_expr (exp);
8799 return const0_rtx;
8801 case WITH_SIZE_EXPR:
8802 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8803 have pulled out the size to use in whatever context it needed. */
8804 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8805 modifier, alt_rtl);
8807 case REALIGN_LOAD_EXPR:
8809 tree oprnd0 = TREE_OPERAND (exp, 0);
8810 tree oprnd1 = TREE_OPERAND (exp, 1);
8811 tree oprnd2 = TREE_OPERAND (exp, 2);
8812 rtx op2;
8814 this_optab = optab_for_tree_code (code, type);
8815 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8816 op2 = expand_normal (oprnd2);
8817 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8818 target, unsignedp);
8819 gcc_assert (temp);
8820 return temp;
8823 case DOT_PROD_EXPR:
8825 tree oprnd0 = TREE_OPERAND (exp, 0);
8826 tree oprnd1 = TREE_OPERAND (exp, 1);
8827 tree oprnd2 = TREE_OPERAND (exp, 2);
8828 rtx op2;
8830 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8831 op2 = expand_normal (oprnd2);
8832 target = expand_widen_pattern_expr (exp, op0, op1, op2,
8833 target, unsignedp);
8834 return target;
8837 case WIDEN_SUM_EXPR:
8839 tree oprnd0 = TREE_OPERAND (exp, 0);
8840 tree oprnd1 = TREE_OPERAND (exp, 1);
8842 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8843 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
8844 target, unsignedp);
8845 return target;
8848 case REDUC_MAX_EXPR:
8849 case REDUC_MIN_EXPR:
8850 case REDUC_PLUS_EXPR:
8852 op0 = expand_normal (TREE_OPERAND (exp, 0));
8853 this_optab = optab_for_tree_code (code, type);
8854 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8855 gcc_assert (temp);
8856 return temp;
8859 case VEC_EXTRACT_EVEN_EXPR:
8860 case VEC_EXTRACT_ODD_EXPR:
8862 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8863 NULL_RTX, &op0, &op1, 0);
8864 this_optab = optab_for_tree_code (code, type);
8865 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8866 OPTAB_WIDEN);
8867 gcc_assert (temp);
8868 return temp;
8871 case VEC_INTERLEAVE_HIGH_EXPR:
8872 case VEC_INTERLEAVE_LOW_EXPR:
8874 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8875 NULL_RTX, &op0, &op1, 0);
8876 this_optab = optab_for_tree_code (code, type);
8877 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8878 OPTAB_WIDEN);
8879 gcc_assert (temp);
8880 return temp;
8883 case VEC_LSHIFT_EXPR:
8884 case VEC_RSHIFT_EXPR:
8886 target = expand_vec_shift_expr (exp, target);
8887 return target;
8890 case VEC_UNPACK_HI_EXPR:
8891 case VEC_UNPACK_LO_EXPR:
8893 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8894 this_optab = optab_for_tree_code (code, type);
8895 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
8896 target, unsignedp);
8897 gcc_assert (temp);
8898 return temp;
8901 case VEC_WIDEN_MULT_HI_EXPR:
8902 case VEC_WIDEN_MULT_LO_EXPR:
8904 tree oprnd0 = TREE_OPERAND (exp, 0);
8905 tree oprnd1 = TREE_OPERAND (exp, 1);
8907 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8908 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
8909 target, unsignedp);
8910 gcc_assert (target);
8911 return target;
8914 case VEC_PACK_MOD_EXPR:
8915 case VEC_PACK_SAT_EXPR:
8917 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8918 goto binop;
8921 default:
8922 return lang_hooks.expand_expr (exp, original_target, tmode,
8923 modifier, alt_rtl);
8926 /* Here to do an ordinary binary operator. */
8927 binop:
8928 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8929 subtarget, &op0, &op1, 0);
8930 binop2:
8931 this_optab = optab_for_tree_code (code, type);
8932 binop3:
8933 if (modifier == EXPAND_STACK_PARM)
8934 target = 0;
8935 temp = expand_binop (mode, this_optab, op0, op1, target,
8936 unsignedp, OPTAB_LIB_WIDEN);
8937 gcc_assert (temp);
8938 return REDUCE_BIT_FIELD (temp);
8940 #undef REDUCE_BIT_FIELD
8942 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8943 signedness of TYPE), possibly returning the result in TARGET. */
8944 static rtx
8945 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8947 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8948 if (target && GET_MODE (target) != GET_MODE (exp))
8949 target = 0;
8950 if (TYPE_UNSIGNED (type))
8952 rtx mask;
8953 if (prec < HOST_BITS_PER_WIDE_INT)
8954 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8955 GET_MODE (exp));
8956 else
8957 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8958 ((unsigned HOST_WIDE_INT) 1
8959 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8960 GET_MODE (exp));
8961 return expand_and (GET_MODE (exp), exp, mask, target);
8963 else
8965 tree count = build_int_cst (NULL_TREE,
8966 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8967 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8968 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
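/* Worked example for reduce_to_bit_field_precision above (a sketch,
   assuming a 32-bit SImode value): reducing to a signed 3-bit type shifts
   left by 29 and arithmetically right by 29, so e.g. 5 becomes -3, while
   the unsigned case simply ANDs with the mask (1 << 3) - 1 == 7.  */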
8972 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8973 when applied to the address of EXP produces an address known to be
8974 aligned more than BIGGEST_ALIGNMENT. */
8976 static int
8977 is_aligning_offset (tree offset, tree exp)
8979 /* Strip off any conversions. */
8980 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8981 || TREE_CODE (offset) == NOP_EXPR
8982 || TREE_CODE (offset) == CONVERT_EXPR)
8983 offset = TREE_OPERAND (offset, 0);
8985 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8986 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8987 if (TREE_CODE (offset) != BIT_AND_EXPR
8988 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8989 || compare_tree_int (TREE_OPERAND (offset, 1),
8990 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8991 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8992 return 0;
8994 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8995 It must be NEGATE_EXPR. Then strip any more conversions. */
8996 offset = TREE_OPERAND (offset, 0);
8997 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8998 || TREE_CODE (offset) == NOP_EXPR
8999 || TREE_CODE (offset) == CONVERT_EXPR)
9000 offset = TREE_OPERAND (offset, 0);
9002 if (TREE_CODE (offset) != NEGATE_EXPR)
9003 return 0;
9005 offset = TREE_OPERAND (offset, 0);
9006 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9007 || TREE_CODE (offset) == NOP_EXPR
9008 || TREE_CODE (offset) == CONVERT_EXPR)
9009 offset = TREE_OPERAND (offset, 0);
9011 /* This must now be the address of EXP. */
9012 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
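/* Illustrative form recognized by is_aligning_offset above: an offset
   written roughly as

       (sizetype) (-(char *) &EXP & (ALIGN - 1))

   i.e. a BIT_AND_EXPR of the negated address of EXP with ALIGN - 1, where
   ALIGN - 1 is one less than a power of two and larger than
   BIGGEST_ALIGNMENT / BITS_PER_UNIT.  Adding such an offset rounds the
   address of EXP up to an ALIGN boundary, so the resulting reference may
   be marked as BIGGEST_ALIGNMENT-aligned.  */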
9015 /* Return the tree node if an ARG corresponds to a string constant or zero
9016 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9017 in bytes within the string that ARG is accessing. The type of the
9018 offset will be `sizetype'. */
9020 tree
9021 string_constant (tree arg, tree *ptr_offset)
9023 tree array, offset, lower_bound;
9024 STRIP_NOPS (arg);
9026 if (TREE_CODE (arg) == ADDR_EXPR)
9028 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9030 *ptr_offset = size_zero_node;
9031 return TREE_OPERAND (arg, 0);
9033 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9035 array = TREE_OPERAND (arg, 0);
9036 offset = size_zero_node;
9038 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9040 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9041 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9042 if (TREE_CODE (array) != STRING_CST
9043 && TREE_CODE (array) != VAR_DECL)
9044 return 0;
9046 /* Check if the array has a nonzero lower bound. */
9047 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9048 if (!integer_zerop (lower_bound))
9050 /* If the offset and base aren't both constants, return 0. */
9051 if (TREE_CODE (lower_bound) != INTEGER_CST)
9052 return 0;
9053 if (TREE_CODE (offset) != INTEGER_CST)
9054 return 0;
9055 /* Adjust offset by the lower bound. */
9056 offset = size_diffop (fold_convert (sizetype, offset),
9057 fold_convert (sizetype, lower_bound));
9060 else
9061 return 0;
9063 else if (TREE_CODE (arg) == PLUS_EXPR)
9065 tree arg0 = TREE_OPERAND (arg, 0);
9066 tree arg1 = TREE_OPERAND (arg, 1);
9068 STRIP_NOPS (arg0);
9069 STRIP_NOPS (arg1);
9071 if (TREE_CODE (arg0) == ADDR_EXPR
9072 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9073 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9075 array = TREE_OPERAND (arg0, 0);
9076 offset = arg1;
9078 else if (TREE_CODE (arg1) == ADDR_EXPR
9079 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9080 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9082 array = TREE_OPERAND (arg1, 0);
9083 offset = arg0;
9085 else
9086 return 0;
9088 else
9089 return 0;
9091 if (TREE_CODE (array) == STRING_CST)
9093 *ptr_offset = fold_convert (sizetype, offset);
9094 return array;
9096 else if (TREE_CODE (array) == VAR_DECL)
9098 int length;
9100 /* Variables initialized to string literals can be handled too. */
9101 if (DECL_INITIAL (array) == NULL_TREE
9102 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9103 return 0;
9105 /* Only handle variables that are read-only, non-volatile and bind locally. */
9106 if (! TREE_READONLY (array)
9107 || TREE_SIDE_EFFECTS (array)
9108 || ! targetm.binds_local_p (array))
9109 return 0;
9111 /* Avoid const char foo[4] = "abcde"; */
9112 if (DECL_SIZE_UNIT (array) == NULL_TREE
9113 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9114 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9115 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9116 return 0;
9118 /* If the variable is bigger than the string literal, OFFSET must be constant
9119 and within the bounds of the string literal. */
9120 offset = fold_convert (sizetype, offset);
9121 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9122 && (! host_integerp (offset, 1)
9123 || compare_tree_int (offset, length) >= 0))
9124 return 0;
9126 *ptr_offset = offset;
9127 return DECL_INITIAL (array);
9130 return 0;
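/* Informal examples for string_constant above: for ARG == &"hello"[2] the
   ADDR_EXPR/ARRAY_REF branch returns the STRING_CST "hello" with
   *PTR_OFFSET == 2, and for

       static const char msg[] = "hi there";
       ... msg + 3 ...

   the PLUS_EXPR branch finds the VAR_DECL, verifies that its initializer
   is a string literal of suitable size that binds locally, and returns
   that STRING_CST with an offset of 3.  */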
9133 /* Generate code to calculate EXP using a store-flag instruction
9134 and return an rtx for the result. EXP is either a comparison
9135 or a TRUTH_NOT_EXPR whose operand is a comparison.
9137 If TARGET is nonzero, store the result there if convenient.
9139 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9140 cheap.
9142 Return zero if there is no suitable set-flag instruction
9143 available on this machine.
9145 Once expand_expr has been called on the arguments of the comparison,
9146 we are committed to doing the store flag, since it is not safe to
9147 re-evaluate the expression. We emit the store-flag insn by calling
9148 emit_store_flag, but only expand the arguments if we have a reason
9149 to believe that emit_store_flag will be successful. If we think that
9150 it will, but it isn't, we have to simulate the store-flag with a
9151 set/jump/set sequence. */
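/* The fallback sequence emitted at the end of this function corresponds
   roughly to

	target = 1;	(or 0 if the result must be inverted)
	if (op0 <cond> op1) goto label;
	target = 0;	(or 1)
      label:

   which computes the same 0/1 value a store-flag insn would have
   produced directly.  */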
9153 static rtx
9154 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9156 enum rtx_code code;
9157 tree arg0, arg1, type;
9158 tree tem;
9159 enum machine_mode operand_mode;
9160 int invert = 0;
9161 int unsignedp;
9162 rtx op0, op1;
9163 enum insn_code icode;
9164 rtx subtarget = target;
9165 rtx result, label;
9167 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9168 result at the end. We can't simply invert the test since it would
9169 have already been inverted if it were valid. This case occurs for
9170 some floating-point comparisons. */
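/* For example, ! (a > b) on floating-point operands is not equivalent to
   a <= b when either operand is a NaN, so the comparison is expanded as
   written and the 0/1 result is inverted with an XOR at the end.  */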
9172 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9173 invert = 1, exp = TREE_OPERAND (exp, 0);
9175 arg0 = TREE_OPERAND (exp, 0);
9176 arg1 = TREE_OPERAND (exp, 1);
9178 /* Don't crash if the comparison was erroneous. */
9179 if (arg0 == error_mark_node || arg1 == error_mark_node)
9180 return const0_rtx;
9182 type = TREE_TYPE (arg0);
9183 operand_mode = TYPE_MODE (type);
9184 unsignedp = TYPE_UNSIGNED (type);
9186 /* We won't bother with BLKmode store-flag operations because it would mean
9187 passing a lot of information to emit_store_flag. */
9188 if (operand_mode == BLKmode)
9189 return 0;
9191 /* We won't bother with store-flag operations involving function pointers
9192 when function pointers must be canonicalized before comparisons. */
9193 #ifdef HAVE_canonicalize_funcptr_for_compare
9194 if (HAVE_canonicalize_funcptr_for_compare
9195 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9196 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9197 == FUNCTION_TYPE))
9198 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9199 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9200 == FUNCTION_TYPE))))
9201 return 0;
9202 #endif
9204 STRIP_NOPS (arg0);
9205 STRIP_NOPS (arg1);
9207 /* Get the rtx comparison code to use. We know that EXP is a comparison
9208 operation of some type. Some comparisons against 1 and -1 can be
9209 converted to comparisons with zero. Do so here so that the tests
9210 below will be aware that we have a comparison with zero. These
9211 tests will not catch constants in the first operand, but constants
9212 are rarely passed as the first operand. */
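/* For example, X < 1 becomes X <= 0 (LE, or LEU when unsigned), X >= 1
   becomes X > 0, and for signed operands X <= -1 becomes X < 0 and
   X > -1 becomes X >= 0.  */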
9214 switch (TREE_CODE (exp))
9216 case EQ_EXPR:
9217 code = EQ;
9218 break;
9219 case NE_EXPR:
9220 code = NE;
9221 break;
9222 case LT_EXPR:
9223 if (integer_onep (arg1))
9224 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9225 else
9226 code = unsignedp ? LTU : LT;
9227 break;
9228 case LE_EXPR:
9229 if (! unsignedp && integer_all_onesp (arg1))
9230 arg1 = integer_zero_node, code = LT;
9231 else
9232 code = unsignedp ? LEU : LE;
9233 break;
9234 case GT_EXPR:
9235 if (! unsignedp && integer_all_onesp (arg1))
9236 arg1 = integer_zero_node, code = GE;
9237 else
9238 code = unsignedp ? GTU : GT;
9239 break;
9240 case GE_EXPR:
9241 if (integer_onep (arg1))
9242 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9243 else
9244 code = unsignedp ? GEU : GE;
9245 break;
9247 case UNORDERED_EXPR:
9248 code = UNORDERED;
9249 break;
9250 case ORDERED_EXPR:
9251 code = ORDERED;
9252 break;
9253 case UNLT_EXPR:
9254 code = UNLT;
9255 break;
9256 case UNLE_EXPR:
9257 code = UNLE;
9258 break;
9259 case UNGT_EXPR:
9260 code = UNGT;
9261 break;
9262 case UNGE_EXPR:
9263 code = UNGE;
9264 break;
9265 case UNEQ_EXPR:
9266 code = UNEQ;
9267 break;
9268 case LTGT_EXPR:
9269 code = LTGT;
9270 break;
9272 default:
9273 gcc_unreachable ();
9276 /* Put a constant second. */
9277 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9279 tem = arg0; arg0 = arg1; arg1 = tem;
9280 code = swap_condition (code);
9283 /* If this is an equality or inequality test of a single bit, we can
9284 do this by shifting the bit being tested to the low-order bit and
9285 masking the result with the constant 1. If the condition was EQ,
9286 we xor it with 1. This does not require an scc insn and is faster
9287 than an scc insn even if we have it.
9289 The code to make this transformation was moved into fold_single_bit_test,
9290 so we just call into the folder and expand its result. */
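/* For instance, a test such as (x & 8) != 0 is rewritten as (x >> 3) & 1,
   and the EQ form additionally xors the result with 1.  */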
9292 if ((code == NE || code == EQ)
9293 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9294 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9296 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9297 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9298 arg0, arg1, type),
9299 target, VOIDmode, EXPAND_NORMAL);
9302 /* Now see if we are likely to be able to do this. Return if not. */
9303 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9304 return 0;
9306 icode = setcc_gen_code[(int) code];
9308 if (icode == CODE_FOR_nothing)
9310 enum machine_mode wmode;
9312 for (wmode = operand_mode;
9313 icode == CODE_FOR_nothing && wmode != VOIDmode;
9314 wmode = GET_MODE_WIDER_MODE (wmode))
9315 icode = cstore_optab->handlers[(int) wmode].insn_code;
9318 if (icode == CODE_FOR_nothing
9319 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9321 /* We can only do this if it is one of the special cases that
9322 can be handled without an scc insn. */
9323 if ((code == LT && integer_zerop (arg1))
9324 || (! only_cheap && code == GE && integer_zerop (arg1)))
9326 else if (! only_cheap && (code == NE || code == EQ)
9327 && TREE_CODE (type) != REAL_TYPE
9328 && ((abs_optab->handlers[(int) operand_mode].insn_code
9329 != CODE_FOR_nothing)
9330 || (ffs_optab->handlers[(int) operand_mode].insn_code
9331 != CODE_FOR_nothing)))
9333 else
9334 return 0;
9337 if (! get_subtarget (target)
9338 || GET_MODE (subtarget) != operand_mode)
9339 subtarget = 0;
9341 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9343 if (target == 0)
9344 target = gen_reg_rtx (mode);
9346 result = emit_store_flag (target, code, op0, op1,
9347 operand_mode, unsignedp, 1);
9349 if (result)
9351 if (invert)
9352 result = expand_binop (mode, xor_optab, result, const1_rtx,
9353 result, 0, OPTAB_LIB_WIDEN);
9354 return result;
9357 /* If this failed, we have to do this with set/compare/jump/set code. */
9358 if (!REG_P (target)
9359 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9360 target = gen_reg_rtx (GET_MODE (target));
9362 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9363 label = gen_label_rtx ();
9364 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9365 NULL_RTX, label);
9367 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9368 emit_label (label);
9370 return target;
9374 /* Stubs in case we haven't got a casesi insn. */
9375 #ifndef HAVE_casesi
9376 # define HAVE_casesi 0
9377 # define gen_casesi(a, b, c, d, e) (0)
9378 # define CODE_FOR_casesi CODE_FOR_nothing
9379 #endif
9381 /* If the machine does not have a case insn that compares the bounds,
9382 this means extra overhead for dispatch tables, which raises the
9383 threshold for using them. */
9384 #ifndef CASE_VALUES_THRESHOLD
9385 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9386 #endif /* CASE_VALUES_THRESHOLD */
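/* With the default definition above, a dispatch table is considered only
   when the switch has at least four distinct case values (five when there
   is no casesi pattern), subject to further density checks elsewhere.  */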
9388 unsigned int
9389 case_values_threshold (void)
9391 return CASE_VALUES_THRESHOLD;
9394 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9395 0 otherwise (i.e. if there is no casesi instruction). */
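/* The casesi pattern takes five operands: the index, the minimum case
   value, the range (maximum minus minimum), the table label and the
   default label.  The code below converts the first three to the modes
   demanded by the pattern's operand predicates.  */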
9396 int
9397 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9398 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9400 enum machine_mode index_mode = SImode;
9401 int index_bits = GET_MODE_BITSIZE (index_mode);
9402 rtx op1, op2, index;
9403 enum machine_mode op_mode;
9405 if (! HAVE_casesi)
9406 return 0;
9408 /* Convert the index to SImode. */
9409 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9411 enum machine_mode omode = TYPE_MODE (index_type);
9412 rtx rangertx = expand_normal (range);
9414 /* We must handle the endpoints in the original mode. */
9415 index_expr = build2 (MINUS_EXPR, index_type,
9416 index_expr, minval);
9417 minval = integer_zero_node;
9418 index = expand_normal (index_expr);
9419 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9420 omode, 1, default_label);
9421 /* Now we can safely truncate. */
9422 index = convert_to_mode (index_mode, index, 0);
9424 else
9426 if (TYPE_MODE (index_type) != index_mode)
9428 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9429 index_expr = fold_convert (index_type, index_expr);
9432 index = expand_normal (index_expr);
9435 do_pending_stack_adjust ();
9437 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9438 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9439 (index, op_mode))
9440 index = copy_to_mode_reg (op_mode, index);
9442 op1 = expand_normal (minval);
9444 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9445 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9446 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9447 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9448 (op1, op_mode))
9449 op1 = copy_to_mode_reg (op_mode, op1);
9451 op2 = expand_normal (range);
9453 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9454 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9455 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9456 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9457 (op2, op_mode))
9458 op2 = copy_to_mode_reg (op_mode, op2);
9460 emit_jump_insn (gen_casesi (index, op1, op2,
9461 table_label, default_label));
9462 return 1;
9465 /* Attempt to generate a tablejump instruction; same concept. */
9466 #ifndef HAVE_tablejump
9467 #define HAVE_tablejump 0
9468 #define gen_tablejump(x, y) (0)
9469 #endif
9471 /* Subroutine of the next function.
9473 INDEX is the value being switched on, with the lowest value
9474 in the table already subtracted.
9475 MODE is its expected mode (needed if INDEX is constant).
9476 RANGE is the length of the jump table.
9477 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9479 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9480 index value is out of range. */
9482 static void
9483 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9484 rtx default_label)
9486 rtx temp, vector;
9488 if (INTVAL (range) > cfun->max_jumptable_ents)
9489 cfun->max_jumptable_ents = INTVAL (range);
9491 /* Do an unsigned comparison (in the proper mode) between the index
9492 expression and the value which represents the length of the range.
9493 Since we just finished subtracting the lower bound of the range
9494 from the index expression, this comparison allows us to simultaneously
9495 check that the original index expression value is both greater than
9496 or equal to the minimum value of the range and less than or equal to
9497 the maximum value of the range. */
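/* For instance, for case values 10 through 17 RANGE is 7; an original
   index of 9 becomes (unsigned) -1 after the subtraction and compares
   greater than 7, so it branches to DEFAULT_LABEL, while 17 becomes 7
   and falls through to the table jump.  */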
9499 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9500 default_label);
9502 /* If index is in range, it must fit in Pmode.
9503 Convert to Pmode so we can index with it. */
9504 if (mode != Pmode)
9505 index = convert_to_mode (Pmode, index, 1);
9507 /* Don't let a MEM slip through, because then INDEX that comes
9508 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9509 and break_out_memory_refs will go to work on it and mess it up. */
9510 #ifdef PIC_CASE_VECTOR_ADDRESS
9511 if (flag_pic && !REG_P (index))
9512 index = copy_to_mode_reg (Pmode, index);
9513 #endif
9515 /* If flag_force_addr were to affect this address
9516 it could interfere with the tricky assumptions made
9517 about addresses that contain label-refs,
9518 which may be valid only very near the tablejump itself. */
9519 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9520 GET_MODE_SIZE, because this indicates how large insns are. The other
9521 uses should all be Pmode, because they are addresses. This code
9522 could fail if addresses and insns are not the same size. */
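/* The address built below is

	table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE)

   i.e. the index scaled by the size of one table entry.  */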
9523 index = gen_rtx_PLUS (Pmode,
9524 gen_rtx_MULT (Pmode, index,
9525 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9526 gen_rtx_LABEL_REF (Pmode, table_label));
9527 #ifdef PIC_CASE_VECTOR_ADDRESS
9528 if (flag_pic)
9529 index = PIC_CASE_VECTOR_ADDRESS (index);
9530 else
9531 #endif
9532 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9533 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9534 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9535 convert_move (temp, vector, 0);
9537 emit_jump_insn (gen_tablejump (temp, table_label));
9539 /* If we are generating PIC code or if the table is PC-relative, the
9540 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9541 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9542 emit_barrier ();
9545 int
9546 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9547 rtx table_label, rtx default_label)
9549 rtx index;
9551 if (! HAVE_tablejump)
9552 return 0;
9554 index_expr = fold_build2 (MINUS_EXPR, index_type,
9555 fold_convert (index_type, index_expr),
9556 fold_convert (index_type, minval));
9557 index = expand_normal (index_expr);
9558 do_pending_stack_adjust ();
9560 do_tablejump (index, TYPE_MODE (index_type),
9561 convert_modes (TYPE_MODE (index_type),
9562 TYPE_MODE (TREE_TYPE (range)),
9563 expand_normal (range),
9564 TYPE_UNSIGNED (TREE_TYPE (range))),
9565 table_label, default_label);
9566 return 1;
9569 /* Nonzero if the mode is a valid vector mode for this architecture.
9570 This returns nonzero even if there is no hardware support for the
9571 vector mode, but we can emulate with narrower modes. */
9573 int
9574 vector_mode_valid_p (enum machine_mode mode)
9576 enum mode_class class = GET_MODE_CLASS (mode);
9577 enum machine_mode innermode;
9579 /* Doh! What's going on? */
9580 if (class != MODE_VECTOR_INT
9581 && class != MODE_VECTOR_FLOAT)
9582 return 0;
9584 /* Hardware support. Woo hoo! */
9585 if (targetm.vector_mode_supported_p (mode))
9586 return 1;
9588 innermode = GET_MODE_INNER (mode);
9590 /* We should probably return 1 if requesting V4DI when we have no DI
9591 but do have V2DI; however, that case is probably very unlikely. */
9593 /* If we have support for the inner mode, we can safely emulate it.
9594 We may not have V2DI, but we can emulate with a pair of DIs. */
9595 return targetm.scalar_mode_supported_p (innermode);
9598 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
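/* For instance, a V4SImode VECTOR_CST listing the elements 1, 2 and 3
   yields (const_vector:V4SI [1 2 3 0]); elements missing from the
   constant's chain are filled in with zero after the loop below.  */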
9599 static rtx
9600 const_vector_from_tree (tree exp)
9602 rtvec v;
9603 int units, i;
9604 tree link, elt;
9605 enum machine_mode inner, mode;
9607 mode = TYPE_MODE (TREE_TYPE (exp));
9609 if (initializer_zerop (exp))
9610 return CONST0_RTX (mode);
9612 units = GET_MODE_NUNITS (mode);
9613 inner = GET_MODE_INNER (mode);
9615 v = rtvec_alloc (units);
9617 link = TREE_VECTOR_CST_ELTS (exp);
9618 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9620 elt = TREE_VALUE (link);
9622 if (TREE_CODE (elt) == REAL_CST)
9623 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9624 inner);
9625 else
9626 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9627 TREE_INT_CST_HIGH (elt),
9628 inner);
9631 /* Initialize remaining elements to 0. */
9632 for (; i < units; ++i)
9633 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9635 return gen_rtx_CONST_VECTOR (mode, v);
9637 #include "gt-expr.h"