gcc/expr.c (official-gcc.git): blob 1e22c8ca3f82377cf4404ba7a906cc63ac146730, from commit "Add BID decimal support"
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "machmode.h"
28 #include "real.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "reload.h"
44 #include "output.h"
45 #include "typeclass.h"
46 #include "toplev.h"
47 #include "ggc.h"
48 #include "langhooks.h"
49 #include "intl.h"
50 #include "tm_p.h"
51 #include "tree-iterator.h"
52 #include "tree-pass.h"
53 #include "tree-flow.h"
54 #include "target.h"
55 #include "timevar.h"
57 /* Decide whether a function's arguments should be processed
58 from first to last or from last to first.
60 They should if the stack and args grow in opposite directions, but
61 only if we have push insns. */
63 #ifdef PUSH_ROUNDING
65 #ifndef PUSH_ARGS_REVERSED
66 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
67 #define PUSH_ARGS_REVERSED /* If it's last to first. */
68 #endif
69 #endif
71 #endif
73 #ifndef STACK_PUSH_CODE
74 #ifdef STACK_GROWS_DOWNWARD
75 #define STACK_PUSH_CODE PRE_DEC
76 #else
77 #define STACK_PUSH_CODE PRE_INC
78 #endif
79 #endif
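/* Illustrative note (not part of expr.c): on a typical target where the
   stack grows downward but ARGS_GROW_DOWNWARD is left undefined, the two
   defined() tests above disagree, so PUSH_ARGS_REVERSED gets defined and
   arguments are expanded last-to-first; STACK_PUSH_CODE then defaults to
   PRE_DEC, i.e. a push is a store through (pre_dec stack_pointer).  */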
82 /* If this is nonzero, we do not bother generating VOLATILE
83 around volatile memory references, and we are willing to
84 output indirect addresses. If cse is to follow, we reject
85 indirect addresses so a useful potential cse is generated;
86 if it is used only once, instruction combination will produce
87 the same indirect address eventually. */
88 int cse_not_expected;
90 /* This structure is used by move_by_pieces to describe the move to
91 be performed. */
92 struct move_by_pieces
93 {
94 rtx to;
95 rtx to_addr;
96 int autinc_to;
97 int explicit_inc_to;
98 rtx from;
99 rtx from_addr;
100 int autinc_from;
101 int explicit_inc_from;
102 unsigned HOST_WIDE_INT len;
103 HOST_WIDE_INT offset;
104 int reverse;
105 };
107 /* This structure is used by store_by_pieces to describe the clear to
108 be performed. */
110 struct store_by_pieces
111 {
112 rtx to;
113 rtx to_addr;
114 int autinc_to;
115 int explicit_inc_to;
116 unsigned HOST_WIDE_INT len;
117 HOST_WIDE_INT offset;
118 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
119 void *constfundata;
120 int reverse;
121 };
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124 unsigned int,
125 unsigned int);
126 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *);
128 static bool block_move_libcall_safe_for_call_parm (void);
129 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static tree clear_storage_libcall_fn (int);
138 static rtx compress_float_constant (rtx, rtx);
139 static rtx get_subtarget (rtx);
140 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
141 HOST_WIDE_INT, enum machine_mode,
142 tree, tree, int, int);
143 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
144 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
145 tree, tree, int);
147 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
149 static int is_aligning_offset (tree, tree);
150 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
151 enum expand_modifier);
152 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
153 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
154 #ifdef PUSH_ROUNDING
155 static void emit_single_push_insn (enum machine_mode, rtx, tree);
156 #endif
157 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
158 static rtx const_vector_from_tree (tree);
159 static void write_complex_part (rtx, rtx, bool);
161 /* Record for each mode whether we can move a register directly to or
162 from an object of that mode in memory. If we can't, we won't try
163 to use that mode directly when accessing a field of that mode. */
165 static char direct_load[NUM_MACHINE_MODES];
166 static char direct_store[NUM_MACHINE_MODES];
168 /* Record for each mode whether we can float-extend from memory. */
170 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
172 /* This macro is used to determine whether move_by_pieces should be called
173 to perform a structure copy. */
174 #ifndef MOVE_BY_PIECES_P
175 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
176 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
177 < (unsigned int) MOVE_RATIO)
178 #endif
180 /* This macro is used to determine whether clear_by_pieces should be
181 called to clear storage. */
182 #ifndef CLEAR_BY_PIECES_P
183 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
185 < (unsigned int) CLEAR_RATIO)
186 #endif
188 /* This macro is used to determine whether store_by_pieces should be
189 called to "memset" storage with byte values other than zero, or
190 to "memcpy" storage when the source is a constant string. */
191 #ifndef STORE_BY_PIECES_P
192 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
193 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
194 < (unsigned int) MOVE_RATIO)
195 #endif
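/* Illustrative sketch (not part of expr.c): these predicates gate the
   "by pieces" strategies.  A caller that knows the byte count at compile
   time does roughly

     if (GET_CODE (size) == CONST_INT
         && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (dst, src, INTVAL (size), align, 0);

   and otherwise falls back to a movmem pattern or a memcpy libcall, so
   open-coding is chosen only when the estimated insn count from
   move_by_pieces_ninsns beats the target's MOVE_RATIO.  */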
197 /* This array records the insn_code of insns to perform block moves. */
198 enum insn_code movmem_optab[NUM_MACHINE_MODES];
200 /* This array records the insn_code of insns to perform block sets. */
201 enum insn_code setmem_optab[NUM_MACHINE_MODES];
203 /* These arrays record the insn_code of three different kinds of insns
204 to perform block compares. */
205 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
206 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
207 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
209 /* Synchronization primitives. */
210 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
211 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
212 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
213 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
214 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
215 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
216 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
217 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
229 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
230 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
231 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
233 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
235 #ifndef SLOW_UNALIGNED_ACCESS
236 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
237 #endif
239 /* This is run once per compilation to set up which modes can be used
240 directly in memory and to initialize the block move optab. */
242 void
243 init_expr_once (void)
245 rtx insn, pat;
246 enum machine_mode mode;
247 int num_clobbers;
248 rtx mem, mem1;
249 rtx reg;
251 /* Try indexing by frame ptr and try by stack ptr.
252 It is known that on the Convex the stack ptr isn't a valid index.
253 With luck, one or the other is valid on any machine. */
254 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
255 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
257 /* A scratch register we can modify in-place below to avoid
258 useless RTL allocations. */
259 reg = gen_rtx_REG (VOIDmode, -1);
261 insn = rtx_alloc (INSN);
262 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
263 PATTERN (insn) = pat;
265 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
266 mode = (enum machine_mode) ((int) mode + 1))
268 int regno;
270 direct_load[(int) mode] = direct_store[(int) mode] = 0;
271 PUT_MODE (mem, mode);
272 PUT_MODE (mem1, mode);
273 PUT_MODE (reg, mode);
275 /* See if there is some register that can be used in this mode and
276 directly loaded or stored from memory. */
278 if (mode != VOIDmode && mode != BLKmode)
279 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
280 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
281 regno++)
283 if (! HARD_REGNO_MODE_OK (regno, mode))
284 continue;
286 REGNO (reg) = regno;
288 SET_SRC (pat) = mem;
289 SET_DEST (pat) = reg;
290 if (recog (pat, insn, &num_clobbers) >= 0)
291 direct_load[(int) mode] = 1;
293 SET_SRC (pat) = mem1;
294 SET_DEST (pat) = reg;
295 if (recog (pat, insn, &num_clobbers) >= 0)
296 direct_load[(int) mode] = 1;
298 SET_SRC (pat) = reg;
299 SET_DEST (pat) = mem;
300 if (recog (pat, insn, &num_clobbers) >= 0)
301 direct_store[(int) mode] = 1;
303 SET_SRC (pat) = reg;
304 SET_DEST (pat) = mem1;
305 if (recog (pat, insn, &num_clobbers) >= 0)
306 direct_store[(int) mode] = 1;
310 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
312 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
313 mode = GET_MODE_WIDER_MODE (mode))
315 enum machine_mode srcmode;
316 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
317 srcmode = GET_MODE_WIDER_MODE (srcmode))
319 enum insn_code ic;
321 ic = can_extend_p (mode, srcmode, 0);
322 if (ic == CODE_FOR_nothing)
323 continue;
325 PUT_MODE (mem, srcmode);
327 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
328 float_extend_from_mem[mode][srcmode] = true;
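/* Illustrative note (not part of expr.c): the tables filled in above are
   consulted by the rest of the expander, e.g. convert_move checks
   direct_load[(int) to_mode] before truncating straight out of a MEM.
   direct_load/direct_store record whether some hard register can be
   loaded from or stored to memory in a given mode by a single recognized
   move insn; float_extend_from_mem records whether a float extension can
   take its operand directly from memory.  */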
333 /* This is run at the start of compiling a function. */
335 void
336 init_expr (void)
338 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
341 /* Copy data from FROM to TO, where the machine modes are not the same.
342 Both modes may be integer, or both may be floating.
343 UNSIGNEDP should be nonzero if FROM is an unsigned type.
344 This causes zero-extension instead of sign-extension. */
346 void
347 convert_move (rtx to, rtx from, int unsignedp)
349 enum machine_mode to_mode = GET_MODE (to);
350 enum machine_mode from_mode = GET_MODE (from);
351 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
352 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
353 enum insn_code code;
354 rtx libcall;
356 /* rtx code for making an equivalent value. */
357 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
358 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
361 gcc_assert (to_real == from_real);
363 /* If the source and destination are already the same, then there's
364 nothing to do. */
365 if (to == from)
366 return;
368 /* If FROM is a SUBREG that indicates that we have already done at least
369 the required extension, strip it. We don't handle such SUBREGs as
370 TO here. */
372 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
373 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
374 >= GET_MODE_SIZE (to_mode))
375 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
376 from = gen_lowpart (to_mode, from), from_mode = to_mode;
378 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
380 if (to_mode == from_mode
381 || (from_mode == VOIDmode && CONSTANT_P (from)))
383 emit_move_insn (to, from);
384 return;
387 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
389 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
391 if (VECTOR_MODE_P (to_mode))
392 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
393 else
394 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
396 emit_move_insn (to, from);
397 return;
400 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
402 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
403 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
404 return;
407 if (to_real)
409 rtx value, insns;
410 convert_optab tab;
412 gcc_assert ((GET_MODE_PRECISION (from_mode)
413 != GET_MODE_PRECISION (to_mode))
414 || (DECIMAL_FLOAT_MODE_P (from_mode)
415 != DECIMAL_FLOAT_MODE_P (to_mode)));
417 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
418 /* Conversion between decimal float and binary float, same size. */
419 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
420 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
421 tab = sext_optab;
422 else
423 tab = trunc_optab;
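/* Illustrative note (not part of expr.c): for a same-size decimal/binary
   pair such as SDmode (_Decimal32) and SFmode (float), neither direction
   is an extension, so the optab is keyed off which side is decimal: a
   decimal source uses trunc_optab, a binary source uses sext_optab.
   Width-changing conversions use sext_optab to widen and trunc_optab to
   narrow as usual.  */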
425 /* Try converting directly if the insn is supported. */
427 code = tab->handlers[to_mode][from_mode].insn_code;
428 if (code != CODE_FOR_nothing)
430 emit_unop_insn (code, to, from,
431 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
432 return;
435 /* Otherwise use a libcall. */
436 libcall = tab->handlers[to_mode][from_mode].libfunc;
438 /* Is this conversion implemented yet? */
439 gcc_assert (libcall);
441 start_sequence ();
442 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
443 1, from, from_mode);
444 insns = get_insns ();
445 end_sequence ();
446 emit_libcall_block (insns, to, value,
447 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
448 from)
449 : gen_rtx_FLOAT_EXTEND (to_mode, from));
450 return;
453 /* Handle pointer conversion. */ /* SPEE 900220. */
454 /* Targets are expected to provide conversion insns between PxImode and
455 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
456 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
458 enum machine_mode full_mode
459 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
461 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
462 != CODE_FOR_nothing);
464 if (full_mode != from_mode)
465 from = convert_to_mode (full_mode, from, unsignedp);
466 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
467 to, from, UNKNOWN);
468 return;
470 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
472 rtx new_from;
473 enum machine_mode full_mode
474 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
476 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
477 != CODE_FOR_nothing);
479 if (to_mode == full_mode)
481 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
482 to, from, UNKNOWN);
483 return;
486 new_from = gen_reg_rtx (full_mode);
487 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
488 new_from, from, UNKNOWN);
490 /* else proceed to integer conversions below. */
491 from_mode = full_mode;
492 from = new_from;
495 /* Now both modes are integers. */
497 /* Handle expanding beyond a word. */
498 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
499 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
501 rtx insns;
502 rtx lowpart;
503 rtx fill_value;
504 rtx lowfrom;
505 int i;
506 enum machine_mode lowpart_mode;
507 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
509 /* Try converting directly if the insn is supported. */
510 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
511 != CODE_FOR_nothing)
513 /* If FROM is a SUBREG, put it into a register. Do this
514 so that we always generate the same set of insns for
515 better cse'ing; if an intermediate assignment occurred,
516 we won't be doing the operation directly on the SUBREG. */
517 if (optimize > 0 && GET_CODE (from) == SUBREG)
518 from = force_reg (from_mode, from);
519 emit_unop_insn (code, to, from, equiv_code);
520 return;
522 /* Next, try converting via full word. */
523 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
524 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
525 != CODE_FOR_nothing))
527 if (REG_P (to))
529 if (reg_overlap_mentioned_p (to, from))
530 from = force_reg (from_mode, from);
531 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
533 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
534 emit_unop_insn (code, to,
535 gen_lowpart (word_mode, to), equiv_code);
536 return;
539 /* No special multiword conversion insn; do it by hand. */
540 start_sequence ();
542 /* Since we will turn this into a no conflict block, we must ensure
543 that the source does not overlap the target. */
545 if (reg_overlap_mentioned_p (to, from))
546 from = force_reg (from_mode, from);
548 /* Get a copy of FROM widened to a word, if necessary. */
549 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
550 lowpart_mode = word_mode;
551 else
552 lowpart_mode = from_mode;
554 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
556 lowpart = gen_lowpart (lowpart_mode, to);
557 emit_move_insn (lowpart, lowfrom);
559 /* Compute the value to put in each remaining word. */
560 if (unsignedp)
561 fill_value = const0_rtx;
562 else
564 #ifdef HAVE_slt
565 if (HAVE_slt
566 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
567 && STORE_FLAG_VALUE == -1)
569 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
570 lowpart_mode, 0);
571 fill_value = gen_reg_rtx (word_mode);
572 emit_insn (gen_slt (fill_value));
574 else
575 #endif
577 fill_value
578 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
579 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
580 NULL_RTX, 0);
581 fill_value = convert_to_mode (word_mode, fill_value, 1);
585 /* Fill the remaining words. */
586 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
588 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
589 rtx subword = operand_subword (to, index, 1, to_mode);
591 gcc_assert (subword);
593 if (fill_value != subword)
594 emit_move_insn (subword, fill_value);
597 insns = get_insns ();
598 end_sequence ();
600 emit_no_conflict_block (insns, to, from, NULL_RTX,
601 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
602 return;
605 /* Truncating multi-word to a word or less. */
606 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
607 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
609 if (!((MEM_P (from)
610 && ! MEM_VOLATILE_P (from)
611 && direct_load[(int) to_mode]
612 && ! mode_dependent_address_p (XEXP (from, 0)))
613 || REG_P (from)
614 || GET_CODE (from) == SUBREG))
615 from = force_reg (from_mode, from);
616 convert_move (to, gen_lowpart (word_mode, from), 0);
617 return;
620 /* Now follow all the conversions between integers
621 no more than a word long. */
623 /* For truncation, usually we can just refer to FROM in a narrower mode. */
624 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
625 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
626 GET_MODE_BITSIZE (from_mode)))
628 if (!((MEM_P (from)
629 && ! MEM_VOLATILE_P (from)
630 && direct_load[(int) to_mode]
631 && ! mode_dependent_address_p (XEXP (from, 0)))
632 || REG_P (from)
633 || GET_CODE (from) == SUBREG))
634 from = force_reg (from_mode, from);
635 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
636 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
637 from = copy_to_reg (from);
638 emit_move_insn (to, gen_lowpart (to_mode, from));
639 return;
642 /* Handle extension. */
643 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
645 /* Convert directly if that works. */
646 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
647 != CODE_FOR_nothing)
649 emit_unop_insn (code, to, from, equiv_code);
650 return;
652 else
654 enum machine_mode intermediate;
655 rtx tmp;
656 tree shift_amount;
658 /* Search for a mode to convert via. */
659 for (intermediate = from_mode; intermediate != VOIDmode;
660 intermediate = GET_MODE_WIDER_MODE (intermediate))
661 if (((can_extend_p (to_mode, intermediate, unsignedp)
662 != CODE_FOR_nothing)
663 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
664 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
665 GET_MODE_BITSIZE (intermediate))))
666 && (can_extend_p (intermediate, from_mode, unsignedp)
667 != CODE_FOR_nothing))
669 convert_move (to, convert_to_mode (intermediate, from,
670 unsignedp), unsignedp);
671 return;
674 /* No suitable intermediate mode.
675 Generate what we need with shifts. */
676 shift_amount = build_int_cst (NULL_TREE,
677 GET_MODE_BITSIZE (to_mode)
678 - GET_MODE_BITSIZE (from_mode));
679 from = gen_lowpart (to_mode, force_reg (from_mode, from));
680 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
681 to, unsignedp);
682 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
683 to, unsignedp);
684 if (tmp != to)
685 emit_move_insn (to, tmp);
686 return;
690 /* Support special truncate insns for certain modes. */
691 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
693 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
694 to, from, UNKNOWN);
695 return;
698 /* Handle truncation of volatile memrefs, and so on;
699 the things that couldn't be truncated directly,
700 and for which there was no special instruction.
702 ??? Code above formerly short-circuited this, for most integer
703 mode pairs, with a force_reg in from_mode followed by a recursive
704 call to this routine. Appears always to have been wrong. */
705 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
707 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
708 emit_move_insn (to, temp);
709 return;
712 /* Mode combination is not recognized. */
713 gcc_unreachable ();
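/* Illustrative sketch (not part of expr.c): a typical caller of
   convert_move already has both operands with their modes attached and
   simply requests the conversion, e.g. zero-extending a hypothetical
   SImode pseudo NARROW into a fresh DImode register:

     rtx wide = gen_reg_rtx (DImode);
     convert_move (wide, narrow, 1);

   To compute a converted value rather than fill a fixed destination, use
   convert_to_mode or convert_modes below.  */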
716 /* Return an rtx for a value that would result
717 from converting X to mode MODE.
718 Both X and MODE may be floating, or both integer.
719 UNSIGNEDP is nonzero if X is an unsigned value.
720 This can be done by referring to a part of X in place
721 or by copying to a new temporary with conversion. */
723 rtx
724 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
726 return convert_modes (mode, VOIDmode, x, unsignedp);
729 /* Return an rtx for a value that would result
730 from converting X from mode OLDMODE to mode MODE.
731 Both modes may be floating, or both integer.
732 UNSIGNEDP is nonzero if X is an unsigned value.
734 This can be done by referring to a part of X in place
735 or by copying to a new temporary with conversion.
737 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
739 rtx
740 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
742 rtx temp;
744 /* If FROM is a SUBREG that indicates that we have already done at least
745 the required extension, strip it. */
747 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
748 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
749 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
750 x = gen_lowpart (mode, x);
752 if (GET_MODE (x) != VOIDmode)
753 oldmode = GET_MODE (x);
755 if (mode == oldmode)
756 return x;
758 /* There is one case that we must handle specially: If we are converting
759 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
760 we are to interpret the constant as unsigned, gen_lowpart will do
761 the wrong thing if the constant appears negative. What we want to do is
762 make the high-order word of the constant zero, not all ones. */
764 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
765 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
766 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
768 HOST_WIDE_INT val = INTVAL (x);
770 if (oldmode != VOIDmode
771 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
773 int width = GET_MODE_BITSIZE (oldmode);
775 /* We need to zero extend VAL. */
776 val &= ((HOST_WIDE_INT) 1 << width) - 1;
779 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
782 /* We can do this with a gen_lowpart if both desired and current modes
783 are integer, and this is either a constant integer, a register, or a
784 non-volatile MEM. Except for the constant case where MODE is no
785 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
787 if ((GET_CODE (x) == CONST_INT
788 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
789 || (GET_MODE_CLASS (mode) == MODE_INT
790 && GET_MODE_CLASS (oldmode) == MODE_INT
791 && (GET_CODE (x) == CONST_DOUBLE
792 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
793 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
794 && direct_load[(int) mode])
795 || (REG_P (x)
796 && (! HARD_REGISTER_P (x)
797 || HARD_REGNO_MODE_OK (REGNO (x), mode))
798 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
799 GET_MODE_BITSIZE (GET_MODE (x)))))))))
801 /* ?? If we don't know OLDMODE, we have to assume here that
802 X does not need sign- or zero-extension. This may not be
803 the case, but it's the best we can do. */
804 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
805 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
807 HOST_WIDE_INT val = INTVAL (x);
808 int width = GET_MODE_BITSIZE (oldmode);
810 /* We must sign or zero-extend in this case. Start by
811 zero-extending, then sign extend if we need to. */
812 val &= ((HOST_WIDE_INT) 1 << width) - 1;
813 if (! unsignedp
814 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
815 val |= (HOST_WIDE_INT) (-1) << width;
817 return gen_int_mode (val, mode);
820 return gen_lowpart (mode, x);
823 /* Converting an integer constant into MODE is always equivalent to a
824 subreg operation. */
825 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
827 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
828 return simplify_gen_subreg (mode, x, oldmode, 0);
831 temp = gen_reg_rtx (mode);
832 convert_move (temp, x, unsignedp);
833 return temp;
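/* Illustrative example (not part of expr.c): convert_modes is also how a
   mode-less CONST_INT is given a definite interpretation, e.g.

     rtx x = convert_modes (DImode, SImode, GEN_INT (-1), 1);

   reads (const_int -1) as an unsigned SImode value and widens it, so the
   result is the constant 0xffffffff rather than -1.  convert_to_mode
   passes VOIDmode for OLDMODE and therefore skips that reinterpretation.  */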
836 /* STORE_MAX_PIECES is the number of bytes at a time that we can
837 store efficiently. Due to internal GCC limitations, this is
838 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
839 for an immediate constant. */
841 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
843 /* Determine whether the LEN bytes can be moved by using several move
844 instructions. Return nonzero if a call to move_by_pieces should
845 succeed. */
847 int
848 can_move_by_pieces (unsigned HOST_WIDE_INT len,
849 unsigned int align ATTRIBUTE_UNUSED)
851 return MOVE_BY_PIECES_P (len, align);
854 /* Generate several move instructions to copy LEN bytes from block FROM to
855 block TO. (These are MEM rtx's with BLKmode).
857 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
858 used to push FROM to the stack.
860 ALIGN is maximum stack alignment we can assume.
862 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
863 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
864 stpcpy. */
866 rtx
867 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
868 unsigned int align, int endp)
870 struct move_by_pieces data;
871 rtx to_addr, from_addr = XEXP (from, 0);
872 unsigned int max_size = MOVE_MAX_PIECES + 1;
873 enum machine_mode mode = VOIDmode, tmode;
874 enum insn_code icode;
876 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
878 data.offset = 0;
879 data.from_addr = from_addr;
880 if (to)
882 to_addr = XEXP (to, 0);
883 data.to = to;
884 data.autinc_to
885 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
886 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
887 data.reverse
888 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
890 else
892 to_addr = NULL_RTX;
893 data.to = NULL_RTX;
894 data.autinc_to = 1;
895 #ifdef STACK_GROWS_DOWNWARD
896 data.reverse = 1;
897 #else
898 data.reverse = 0;
899 #endif
901 data.to_addr = to_addr;
902 data.from = from;
903 data.autinc_from
904 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
905 || GET_CODE (from_addr) == POST_INC
906 || GET_CODE (from_addr) == POST_DEC);
908 data.explicit_inc_from = 0;
909 data.explicit_inc_to = 0;
910 if (data.reverse) data.offset = len;
911 data.len = len;
913 /* If copying requires more than two move insns,
914 copy addresses to registers (to make displacements shorter)
915 and use post-increment if available. */
916 if (!(data.autinc_from && data.autinc_to)
917 && move_by_pieces_ninsns (len, align, max_size) > 2)
919 /* Find the mode of the largest move... */
920 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
921 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
922 if (GET_MODE_SIZE (tmode) < max_size)
923 mode = tmode;
925 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
927 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
928 data.autinc_from = 1;
929 data.explicit_inc_from = -1;
931 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
933 data.from_addr = copy_addr_to_reg (from_addr);
934 data.autinc_from = 1;
935 data.explicit_inc_from = 1;
937 if (!data.autinc_from && CONSTANT_P (from_addr))
938 data.from_addr = copy_addr_to_reg (from_addr);
939 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
941 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
942 data.autinc_to = 1;
943 data.explicit_inc_to = -1;
945 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
947 data.to_addr = copy_addr_to_reg (to_addr);
948 data.autinc_to = 1;
949 data.explicit_inc_to = 1;
951 if (!data.autinc_to && CONSTANT_P (to_addr))
952 data.to_addr = copy_addr_to_reg (to_addr);
955 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
956 if (align >= GET_MODE_ALIGNMENT (tmode))
957 align = GET_MODE_ALIGNMENT (tmode);
958 else
960 enum machine_mode xmode;
962 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
963 tmode != VOIDmode;
964 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
965 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
966 || SLOW_UNALIGNED_ACCESS (tmode, align))
967 break;
969 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
972 /* First move what we can in the largest integer mode, then go to
973 successively smaller modes. */
975 while (max_size > 1)
977 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
978 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
979 if (GET_MODE_SIZE (tmode) < max_size)
980 mode = tmode;
982 if (mode == VOIDmode)
983 break;
985 icode = mov_optab->handlers[(int) mode].insn_code;
986 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
987 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
989 max_size = GET_MODE_SIZE (mode);
992 /* The code above should have handled everything. */
993 gcc_assert (!data.len);
995 if (endp)
997 rtx to1;
999 gcc_assert (!data.reverse);
1000 if (data.autinc_to)
1002 if (endp == 2)
1004 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1005 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1006 else
1007 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1008 -1));
1010 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1011 data.offset);
1013 else
1015 if (endp == 2)
1016 --data.offset;
1017 to1 = adjust_address (data.to, QImode, data.offset);
1019 return to1;
1021 else
1022 return data.to;
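/* Illustrative note (not part of expr.c): the ENDP argument exists for
   the string builtins.  For a hypothetical 7-byte mempcpy expansion a
   caller would do roughly

     rtx end = move_by_pieces (dst, src, 7, align, 1);

   and END is a QImode MEM just past the last byte written; ENDP == 2
   (stpcpy) yields the byte before that instead, and ENDP == 0 simply
   returns the destination block.  */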
1025 /* Return number of insns required to move L bytes by pieces.
1026 ALIGN (in bits) is maximum alignment we can assume. */
1028 static unsigned HOST_WIDE_INT
1029 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1030 unsigned int max_size)
1032 unsigned HOST_WIDE_INT n_insns = 0;
1033 enum machine_mode tmode;
1035 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1036 if (align >= GET_MODE_ALIGNMENT (tmode))
1037 align = GET_MODE_ALIGNMENT (tmode);
1038 else
1040 enum machine_mode tmode, xmode;
1042 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1043 tmode != VOIDmode;
1044 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1045 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1046 || SLOW_UNALIGNED_ACCESS (tmode, align))
1047 break;
1049 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1052 while (max_size > 1)
1054 enum machine_mode mode = VOIDmode;
1055 enum insn_code icode;
1057 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1058 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1059 if (GET_MODE_SIZE (tmode) < max_size)
1060 mode = tmode;
1062 if (mode == VOIDmode)
1063 break;
1065 icode = mov_optab->handlers[(int) mode].insn_code;
1066 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1067 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1069 max_size = GET_MODE_SIZE (mode);
1072 gcc_assert (!l);
1073 return n_insns;
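/* Illustrative example (not part of expr.c), assuming a 32-bit target
   with word-sized moves and sufficient alignment: for L == 11 the loop
   above picks SImode first (11 / 4 = 2 insns, 3 bytes left), then HImode
   (1 insn, 1 byte left), then QImode (1 insn), for a total of 4.  That
   total is what MOVE_BY_PIECES_P compares against MOVE_RATIO.  */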
1076 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1077 with move instructions for mode MODE. GENFUN is the gen_... function
1078 to make a move insn for that mode. DATA has all the other info. */
1080 static void
1081 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1082 struct move_by_pieces *data)
1084 unsigned int size = GET_MODE_SIZE (mode);
1085 rtx to1 = NULL_RTX, from1;
1087 while (data->len >= size)
1089 if (data->reverse)
1090 data->offset -= size;
1092 if (data->to)
1094 if (data->autinc_to)
1095 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1096 data->offset);
1097 else
1098 to1 = adjust_address (data->to, mode, data->offset);
1101 if (data->autinc_from)
1102 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1103 data->offset);
1104 else
1105 from1 = adjust_address (data->from, mode, data->offset);
1107 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1108 emit_insn (gen_add2_insn (data->to_addr,
1109 GEN_INT (-(HOST_WIDE_INT)size)));
1110 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1111 emit_insn (gen_add2_insn (data->from_addr,
1112 GEN_INT (-(HOST_WIDE_INT)size)));
1114 if (data->to)
1115 emit_insn ((*genfun) (to1, from1));
1116 else
1118 #ifdef PUSH_ROUNDING
1119 emit_single_push_insn (mode, from1, NULL);
1120 #else
1121 gcc_unreachable ();
1122 #endif
1125 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1126 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1127 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1128 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1130 if (! data->reverse)
1131 data->offset += size;
1133 data->len -= size;
1137 /* Emit code to move a block Y to a block X. This may be done with
1138 string-move instructions, with multiple scalar move instructions,
1139 or with a library call.
1141 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1142 SIZE is an rtx that says how long they are.
1143 ALIGN is the maximum alignment we can assume they have.
1144 METHOD describes what kind of copy this is, and what mechanisms may be used.
1146 Return the address of the new block, if memcpy is called and returns it,
1147 0 otherwise. */
1149 rtx
1150 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1151 unsigned int expected_align, HOST_WIDE_INT expected_size)
1153 bool may_use_call;
1154 rtx retval = 0;
1155 unsigned int align;
1157 switch (method)
1159 case BLOCK_OP_NORMAL:
1160 case BLOCK_OP_TAILCALL:
1161 may_use_call = true;
1162 break;
1164 case BLOCK_OP_CALL_PARM:
1165 may_use_call = block_move_libcall_safe_for_call_parm ();
1167 /* Make inhibit_defer_pop nonzero around the library call
1168 to force it to pop the arguments right away. */
1169 NO_DEFER_POP;
1170 break;
1172 case BLOCK_OP_NO_LIBCALL:
1173 may_use_call = false;
1174 break;
1176 default:
1177 gcc_unreachable ();
1180 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1182 gcc_assert (MEM_P (x));
1183 gcc_assert (MEM_P (y));
1184 gcc_assert (size);
1186 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1187 block copy is more efficient for other large modes, e.g. DCmode. */
1188 x = adjust_address (x, BLKmode, 0);
1189 y = adjust_address (y, BLKmode, 0);
1191 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1192 can be incorrect is coming from __builtin_memcpy. */
1193 if (GET_CODE (size) == CONST_INT)
1195 if (INTVAL (size) == 0)
1196 return 0;
1198 x = shallow_copy_rtx (x);
1199 y = shallow_copy_rtx (y);
1200 set_mem_size (x, size);
1201 set_mem_size (y, size);
1204 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1205 move_by_pieces (x, y, INTVAL (size), align, 0);
1206 else if (emit_block_move_via_movmem (x, y, size, align,
1207 expected_align, expected_size))
1209 else if (may_use_call)
1210 retval = emit_block_move_via_libcall (x, y, size,
1211 method == BLOCK_OP_TAILCALL);
1212 else
1213 emit_block_move_via_loop (x, y, size, align);
1215 if (method == BLOCK_OP_CALL_PARM)
1216 OK_DEFER_POP;
1218 return retval;
1221 rtx
1222 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1224 return emit_block_move_hints (x, y, size, method, 0, -1);
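/* Illustrative sketch (not part of expr.c): a structure assignment whose
   type is BLKmode typically reaches this point as

     emit_block_move (dst_mem, src_mem, GEN_INT (bytes), BLOCK_OP_NORMAL);

   with DST_MEM/SRC_MEM BLKmode MEMs and BYTES a hypothetical constant
   size.  Depending on size, alignment and the target's movmem patterns
   this becomes open-coded moves, a movmem insn, a memcpy libcall, or
   (for BLOCK_OP_NO_LIBCALL) the byte loop below.  */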
1227 /* A subroutine of emit_block_move. Returns true if calling the
1228 block move libcall will not clobber any parameters which may have
1229 already been placed on the stack. */
1231 static bool
1232 block_move_libcall_safe_for_call_parm (void)
1234 /* If arguments are pushed on the stack, then they're safe. */
1235 if (PUSH_ARGS)
1236 return true;
1238 /* If registers go on the stack anyway, any argument is sure to clobber
1239 an outgoing argument. */
1240 #if defined (REG_PARM_STACK_SPACE)
1241 if (OUTGOING_REG_PARM_STACK_SPACE)
1243 tree fn;
1244 fn = emit_block_move_libcall_fn (false);
1245 if (REG_PARM_STACK_SPACE (fn) != 0)
1246 return false;
1248 #endif
1250 /* If any argument goes in memory, then it might clobber an outgoing
1251 argument. */
1253 CUMULATIVE_ARGS args_so_far;
1254 tree fn, arg;
1256 fn = emit_block_move_libcall_fn (false);
1257 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1259 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1260 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1262 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1263 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1264 if (!tmp || !REG_P (tmp))
1265 return false;
1266 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1267 return false;
1268 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1271 return true;
1274 /* A subroutine of emit_block_move. Expand a movmem pattern;
1275 return true if successful. */
1277 static bool
1278 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1279 unsigned int expected_align, HOST_WIDE_INT expected_size)
1281 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1282 int save_volatile_ok = volatile_ok;
1283 enum machine_mode mode;
1285 if (expected_align < align)
1286 expected_align = align;
1288 /* Since this is a move insn, we don't care about volatility. */
1289 volatile_ok = 1;
1291 /* Try the most limited insn first, because there's no point
1292 including more than one in the machine description unless
1293 the more limited one has some advantage. */
1295 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1296 mode = GET_MODE_WIDER_MODE (mode))
1298 enum insn_code code = movmem_optab[(int) mode];
1299 insn_operand_predicate_fn pred;
1301 if (code != CODE_FOR_nothing
1302 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1303 here because if SIZE is less than the mode mask, as it is
1304 returned by the macro, it will definitely be less than the
1305 actual mode mask. */
1306 && ((GET_CODE (size) == CONST_INT
1307 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1308 <= (GET_MODE_MASK (mode) >> 1)))
1309 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1310 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1311 || (*pred) (x, BLKmode))
1312 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1313 || (*pred) (y, BLKmode))
1314 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1315 || (*pred) (opalign, VOIDmode)))
1317 rtx op2;
1318 rtx last = get_last_insn ();
1319 rtx pat;
1321 op2 = convert_to_mode (mode, size, 1);
1322 pred = insn_data[(int) code].operand[2].predicate;
1323 if (pred != 0 && ! (*pred) (op2, mode))
1324 op2 = copy_to_mode_reg (mode, op2);
1326 /* ??? When called via emit_block_move_for_call, it'd be
1327 nice if there were some way to inform the backend, so
1328 that it doesn't fail the expansion because it thinks
1329 emitting the libcall would be more efficient. */
1331 if (insn_data[(int) code].n_operands == 4)
1332 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1333 else
1334 pat = GEN_FCN ((int) code) (x, y, op2, opalign,
1335 GEN_INT (expected_align),
1336 GEN_INT (expected_size));
1337 if (pat)
1339 emit_insn (pat);
1340 volatile_ok = save_volatile_ok;
1341 return true;
1343 else
1344 delete_insns_since (last);
1348 volatile_ok = save_volatile_ok;
1349 return false;
1352 /* A subroutine of emit_block_move. Expand a call to memcpy.
1353 Return the return value from memcpy, 0 otherwise. */
1355 rtx
1356 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1358 rtx dst_addr, src_addr;
1359 tree call_expr, fn, src_tree, dst_tree, size_tree;
1360 enum machine_mode size_mode;
1361 rtx retval;
1363 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1364 pseudos. We can then place those new pseudos into a VAR_DECL and
1365 use them later. */
1367 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1368 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1370 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1371 src_addr = convert_memory_address (ptr_mode, src_addr);
1373 dst_tree = make_tree (ptr_type_node, dst_addr);
1374 src_tree = make_tree (ptr_type_node, src_addr);
1376 size_mode = TYPE_MODE (sizetype);
1378 size = convert_to_mode (size_mode, size, 1);
1379 size = copy_to_mode_reg (size_mode, size);
1381 /* It is incorrect to use the libcall calling conventions to call
1382 memcpy in this context. This could be a user call to memcpy and
1383 the user may wish to examine the return value from memcpy. For
1384 targets where libcalls and normal calls have different conventions
1385 for returning pointers, we could end up generating incorrect code. */
1387 size_tree = make_tree (sizetype, size);
1389 fn = emit_block_move_libcall_fn (true);
1390 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1391 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1393 retval = expand_normal (call_expr);
1395 return retval;
1398 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1399 for the function we use for block copies. The first time FOR_CALL
1400 is true, we call assemble_external. */
1402 static GTY(()) tree block_move_fn;
1404 void
1405 init_block_move_fn (const char *asmspec)
1407 if (!block_move_fn)
1409 tree args, fn;
1411 fn = get_identifier ("memcpy");
1412 args = build_function_type_list (ptr_type_node, ptr_type_node,
1413 const_ptr_type_node, sizetype,
1414 NULL_TREE);
1416 fn = build_decl (FUNCTION_DECL, fn, args);
1417 DECL_EXTERNAL (fn) = 1;
1418 TREE_PUBLIC (fn) = 1;
1419 DECL_ARTIFICIAL (fn) = 1;
1420 TREE_NOTHROW (fn) = 1;
1421 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1422 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1424 block_move_fn = fn;
1427 if (asmspec)
1428 set_user_assembler_name (block_move_fn, asmspec);
1431 static tree
1432 emit_block_move_libcall_fn (int for_call)
1434 static bool emitted_extern;
1436 if (!block_move_fn)
1437 init_block_move_fn (NULL);
1439 if (for_call && !emitted_extern)
1441 emitted_extern = true;
1442 make_decl_rtl (block_move_fn);
1443 assemble_external (block_move_fn);
1446 return block_move_fn;
1449 /* A subroutine of emit_block_move. Copy the data via an explicit
1450 loop. This is used only when libcalls are forbidden. */
1451 /* ??? It'd be nice to copy in hunks larger than QImode. */
1453 static void
1454 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1455 unsigned int align ATTRIBUTE_UNUSED)
1457 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1458 enum machine_mode iter_mode;
1460 iter_mode = GET_MODE (size);
1461 if (iter_mode == VOIDmode)
1462 iter_mode = word_mode;
1464 top_label = gen_label_rtx ();
1465 cmp_label = gen_label_rtx ();
1466 iter = gen_reg_rtx (iter_mode);
1468 emit_move_insn (iter, const0_rtx);
1470 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1471 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1472 do_pending_stack_adjust ();
1474 emit_jump (cmp_label);
1475 emit_label (top_label);
1477 tmp = convert_modes (Pmode, iter_mode, iter, true);
1478 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1479 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1480 x = change_address (x, QImode, x_addr);
1481 y = change_address (y, QImode, y_addr);
1483 emit_move_insn (x, y);
1485 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1486 true, OPTAB_LIB_WIDEN);
1487 if (tmp != iter)
1488 emit_move_insn (iter, tmp);
1490 emit_label (cmp_label);
1492 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1493 true, top_label);
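/* Illustrative note (not part of expr.c): the RTL emitted above is just

     for (i = 0; i < size; i++)
       ((char *) x)[i] = ((char *) y)[i];

   with the comparison at the bottom of the loop (we jump to cmp_label
   first), so a zero SIZE falls straight through without copying.  */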
1496 /* Copy all or part of a value X into registers starting at REGNO.
1497 The number of registers to be filled is NREGS. */
1499 void
1500 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1502 int i;
1503 #ifdef HAVE_load_multiple
1504 rtx pat;
1505 rtx last;
1506 #endif
1508 if (nregs == 0)
1509 return;
1511 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1512 x = validize_mem (force_const_mem (mode, x));
1514 /* See if the machine can do this with a load multiple insn. */
1515 #ifdef HAVE_load_multiple
1516 if (HAVE_load_multiple)
1518 last = get_last_insn ();
1519 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1520 GEN_INT (nregs));
1521 if (pat)
1523 emit_insn (pat);
1524 return;
1526 else
1527 delete_insns_since (last);
1529 #endif
1531 for (i = 0; i < nregs; i++)
1532 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1533 operand_subword_force (x, i, mode));
1536 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1537 The number of registers to be filled is NREGS. */
1539 void
1540 move_block_from_reg (int regno, rtx x, int nregs)
1542 int i;
1544 if (nregs == 0)
1545 return;
1547 /* See if the machine can do this with a store multiple insn. */
1548 #ifdef HAVE_store_multiple
1549 if (HAVE_store_multiple)
1551 rtx last = get_last_insn ();
1552 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1553 GEN_INT (nregs));
1554 if (pat)
1556 emit_insn (pat);
1557 return;
1559 else
1560 delete_insns_since (last);
1562 #endif
1564 for (i = 0; i < nregs; i++)
1566 rtx tem = operand_subword (x, i, 1, BLKmode);
1568 gcc_assert (tem);
1570 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1574 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1575 ORIG, where ORIG is a non-consecutive group of registers represented by
1576 a PARALLEL. The clone is identical to the original except in that the
1577 original set of registers is replaced by a new set of pseudo registers.
1578 The new set has the same modes as the original set. */
1580 rtx
1581 gen_group_rtx (rtx orig)
1583 int i, length;
1584 rtx *tmps;
1586 gcc_assert (GET_CODE (orig) == PARALLEL);
1588 length = XVECLEN (orig, 0);
1589 tmps = alloca (sizeof (rtx) * length);
1591 /* Skip a NULL entry in first slot. */
1592 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1594 if (i)
1595 tmps[0] = 0;
1597 for (; i < length; i++)
1599 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1600 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1602 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1605 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
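/* Illustrative note (not part of expr.c): a register group PARALLEL as
   used here looks like, for a 16-byte value spread over two hypothetical
   DImode hard registers 0 and 1,

     (parallel [(expr_list (reg:DI 0) (const_int 0))
                (expr_list (reg:DI 1) (const_int 8))])

   where each element pairs a register with the byte offset it covers;
   gen_group_rtx clones such a group onto fresh pseudos of the same
   modes.  */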
1608 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1609 except that values are placed in TMPS[i], and must later be moved
1610 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1612 static void
1613 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1615 rtx src;
1616 int start, i;
1617 enum machine_mode m = GET_MODE (orig_src);
1619 gcc_assert (GET_CODE (dst) == PARALLEL);
1621 if (m != VOIDmode
1622 && !SCALAR_INT_MODE_P (m)
1623 && !MEM_P (orig_src)
1624 && GET_CODE (orig_src) != CONCAT)
1626 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1627 if (imode == BLKmode)
1628 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1629 else
1630 src = gen_reg_rtx (imode);
1631 if (imode != BLKmode)
1632 src = gen_lowpart (GET_MODE (orig_src), src);
1633 emit_move_insn (src, orig_src);
1634 /* ...and back again. */
1635 if (imode != BLKmode)
1636 src = gen_lowpart (imode, src);
1637 emit_group_load_1 (tmps, dst, src, type, ssize);
1638 return;
1641 /* Check for a NULL entry, used to indicate that the parameter goes
1642 both on the stack and in registers. */
1643 if (XEXP (XVECEXP (dst, 0, 0), 0))
1644 start = 0;
1645 else
1646 start = 1;
1648 /* Process the pieces. */
1649 for (i = start; i < XVECLEN (dst, 0); i++)
1651 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1652 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1653 unsigned int bytelen = GET_MODE_SIZE (mode);
1654 int shift = 0;
1656 /* Handle trailing fragments that run over the size of the struct. */
1657 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1659 /* Arrange to shift the fragment to where it belongs.
1660 extract_bit_field loads to the lsb of the reg. */
1661 if (
1662 #ifdef BLOCK_REG_PADDING
1663 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1664 == (BYTES_BIG_ENDIAN ? upward : downward)
1665 #else
1666 BYTES_BIG_ENDIAN
1667 #endif
1669 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1670 bytelen = ssize - bytepos;
1671 gcc_assert (bytelen > 0);
1674 /* If we won't be loading directly from memory, protect the real source
1675 from strange tricks we might play; but make sure that the source can
1676 be loaded directly into the destination. */
1677 src = orig_src;
1678 if (!MEM_P (orig_src)
1679 && (!CONSTANT_P (orig_src)
1680 || (GET_MODE (orig_src) != mode
1681 && GET_MODE (orig_src) != VOIDmode)))
1683 if (GET_MODE (orig_src) == VOIDmode)
1684 src = gen_reg_rtx (mode);
1685 else
1686 src = gen_reg_rtx (GET_MODE (orig_src));
1688 emit_move_insn (src, orig_src);
1691 /* Optimize the access just a bit. */
1692 if (MEM_P (src)
1693 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1694 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1695 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1696 && bytelen == GET_MODE_SIZE (mode))
1698 tmps[i] = gen_reg_rtx (mode);
1699 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1701 else if (COMPLEX_MODE_P (mode)
1702 && GET_MODE (src) == mode
1703 && bytelen == GET_MODE_SIZE (mode))
1704 /* Let emit_move_complex do the bulk of the work. */
1705 tmps[i] = src;
1706 else if (GET_CODE (src) == CONCAT)
1708 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1709 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1711 if ((bytepos == 0 && bytelen == slen0)
1712 || (bytepos != 0 && bytepos + bytelen <= slen))
1714 /* The following assumes that the concatenated objects all
1715 have the same size. In this case, a simple calculation
1716 can be used to determine the object and the bit field
1717 to be extracted. */
1718 tmps[i] = XEXP (src, bytepos / slen0);
1719 if (! CONSTANT_P (tmps[i])
1720 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1721 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1722 (bytepos % slen0) * BITS_PER_UNIT,
1723 1, NULL_RTX, mode, mode);
1725 else
1727 rtx mem;
1729 gcc_assert (!bytepos);
1730 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1731 emit_move_insn (mem, src);
1732 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1733 0, 1, NULL_RTX, mode, mode);
1736 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1737 SIMD register, which is currently broken. While we get GCC
1738 to emit proper RTL for these cases, let's dump to memory. */
1739 else if (VECTOR_MODE_P (GET_MODE (dst))
1740 && REG_P (src))
1742 int slen = GET_MODE_SIZE (GET_MODE (src));
1743 rtx mem;
1745 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1746 emit_move_insn (mem, src);
1747 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1749 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1750 && XVECLEN (dst, 0) > 1)
1751 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1752 else if (CONSTANT_P (src)
1753 || (REG_P (src) && GET_MODE (src) == mode))
1754 tmps[i] = src;
1755 else
1756 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1757 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1758 mode, mode);
1760 if (shift)
1761 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1762 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1766 /* Emit code to move a block SRC of type TYPE to a block DST,
1767 where DST is non-consecutive registers represented by a PARALLEL.
1768 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1769 if not known. */
1771 void
1772 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1774 rtx *tmps;
1775 int i;
1777 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1778 emit_group_load_1 (tmps, dst, src, type, ssize);
1780 /* Copy the extracted pieces into the proper (probable) hard regs. */
1781 for (i = 0; i < XVECLEN (dst, 0); i++)
1783 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1784 if (d == NULL)
1785 continue;
1786 emit_move_insn (d, tmps[i]);
1790 /* Similar, but load SRC into new pseudos in a format that looks like
1791 PARALLEL. This can later be fed to emit_group_move to get things
1792 in the right place. */
1794 rtx
1795 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1797 rtvec vec;
1798 int i;
1800 vec = rtvec_alloc (XVECLEN (parallel, 0));
1801 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1803 /* Convert the vector to look just like the original PARALLEL, except
1804 with the computed values. */
1805 for (i = 0; i < XVECLEN (parallel, 0); i++)
1807 rtx e = XVECEXP (parallel, 0, i);
1808 rtx d = XEXP (e, 0);
1810 if (d)
1812 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1813 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1815 RTVEC_ELT (vec, i) = e;
1818 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1821 /* Emit code to move a block SRC to block DST, where SRC and DST are
1822 non-consecutive groups of registers, each represented by a PARALLEL. */
1824 void
1825 emit_group_move (rtx dst, rtx src)
1827 int i;
1829 gcc_assert (GET_CODE (src) == PARALLEL
1830 && GET_CODE (dst) == PARALLEL
1831 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1833 /* Skip first entry if NULL. */
1834 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1835 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1836 XEXP (XVECEXP (src, 0, i), 0));
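/* Illustrative sketch (not part of expr.c): one way these routines are
   meant to combine is to load into temporaries first and commit later,
   e.g.

     rtx tmp_group = emit_group_load_into_temps (reg_parallel, src, type,
                                                 ssize);
     ...
     emit_group_move (reg_parallel, tmp_group);

   which leaves the (probable) hard registers in REG_PARALLEL untouched
   until the final group move.  */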
1839 /* Move a group of registers represented by a PARALLEL into pseudos. */
1841 rtx
1842 emit_group_move_into_temps (rtx src)
1844 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1845 int i;
1847 for (i = 0; i < XVECLEN (src, 0); i++)
1849 rtx e = XVECEXP (src, 0, i);
1850 rtx d = XEXP (e, 0);
1852 if (d)
1853 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1854 RTVEC_ELT (vec, i) = e;
1857 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1860 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1861 where SRC is non-consecutive registers represented by a PARALLEL.
1862 SSIZE represents the total size of block ORIG_DST, or -1 if not
1863 known. */
1865 void
1866 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1868 rtx *tmps, dst;
1869 int start, finish, i;
1870 enum machine_mode m = GET_MODE (orig_dst);
1872 gcc_assert (GET_CODE (src) == PARALLEL);
1874 if (!SCALAR_INT_MODE_P (m)
1875 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1877 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1878 if (imode == BLKmode)
1879 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1880 else
1881 dst = gen_reg_rtx (imode);
1882 emit_group_store (dst, src, type, ssize);
1883 if (imode != BLKmode)
1884 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1885 emit_move_insn (orig_dst, dst);
1886 return;
1889 /* Check for a NULL entry, used to indicate that the parameter goes
1890 both on the stack and in registers. */
1891 if (XEXP (XVECEXP (src, 0, 0), 0))
1892 start = 0;
1893 else
1894 start = 1;
1895 finish = XVECLEN (src, 0);
1897 tmps = alloca (sizeof (rtx) * finish);
1899 /* Copy the (probable) hard regs into pseudos. */
1900 for (i = start; i < finish; i++)
1902 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1903 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1905 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1906 emit_move_insn (tmps[i], reg);
1908 else
1909 tmps[i] = reg;
1912 /* If we won't be storing directly into memory, protect the real destination
1913 from strange tricks we might play. */
1914 dst = orig_dst;
1915 if (GET_CODE (dst) == PARALLEL)
1917 rtx temp;
1919 /* We can get a PARALLEL dst if there is a conditional expression in
1920 a return statement. In that case, the dst and src are the same,
1921 so no action is necessary. */
1922 if (rtx_equal_p (dst, src))
1923 return;
1925 /* It is unclear if we can ever reach here, but we may as well handle
1926 it. Allocate a temporary, and split this into a store/load to/from
1927 the temporary. */
1929 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1930 emit_group_store (temp, src, type, ssize);
1931 emit_group_load (dst, temp, type, ssize);
1932 return;
1934 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1936 enum machine_mode outer = GET_MODE (dst);
1937 enum machine_mode inner;
1938 HOST_WIDE_INT bytepos;
1939 bool done = false;
1940 rtx temp;
1942 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1943 dst = gen_reg_rtx (outer);
1945 /* Make life a bit easier for combine. */
1946 /* If the first element of the vector is the low part
1947 of the destination mode, use a paradoxical subreg to
1948 initialize the destination. */
1949 if (start < finish)
1951 inner = GET_MODE (tmps[start]);
1952 bytepos = subreg_lowpart_offset (inner, outer);
1953 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1955 temp = simplify_gen_subreg (outer, tmps[start],
1956 inner, 0);
1957 if (temp)
1959 emit_move_insn (dst, temp);
1960 done = true;
1961 start++;
1966 /* If the first element wasn't the low part, try the last. */
1967 if (!done
1968 && start < finish - 1)
1970 inner = GET_MODE (tmps[finish - 1]);
1971 bytepos = subreg_lowpart_offset (inner, outer);
1972 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1974 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1975 inner, 0);
1976 if (temp)
1978 emit_move_insn (dst, temp);
1979 done = true;
1980 finish--;
1985 /* Otherwise, simply initialize the result to zero. */
1986 if (!done)
1987 emit_move_insn (dst, CONST0_RTX (outer));
1990 /* Process the pieces. */
1991 for (i = start; i < finish; i++)
1993 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1994 enum machine_mode mode = GET_MODE (tmps[i]);
1995 unsigned int bytelen = GET_MODE_SIZE (mode);
1996 rtx dest = dst;
1998 /* Handle trailing fragments that run over the size of the struct. */
1999 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2001 /* store_bit_field always takes its value from the lsb.
2002 Move the fragment to the lsb if it's not already there. */
2003 if (
2004 #ifdef BLOCK_REG_PADDING
2005 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2006 == (BYTES_BIG_ENDIAN ? upward : downward)
2007 #else
2008 BYTES_BIG_ENDIAN
2009 #endif
2012 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2013 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2014 build_int_cst (NULL_TREE, shift),
2015 tmps[i], 0);
2017 bytelen = ssize - bytepos;
2020 if (GET_CODE (dst) == CONCAT)
2022 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2023 dest = XEXP (dst, 0);
2024 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2026 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2027 dest = XEXP (dst, 1);
2029 else
2031 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2032 dest = assign_stack_temp (GET_MODE (dest),
2033 GET_MODE_SIZE (GET_MODE (dest)), 0);
2034 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2035 tmps[i]);
2036 dst = dest;
2037 break;
2041 /* Optimize the access just a bit. */
2042 if (MEM_P (dest)
2043 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2044 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2045 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2046 && bytelen == GET_MODE_SIZE (mode))
2047 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2048 else
2049 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2050 mode, tmps[i]);
2053 /* Copy from the pseudo into the (probable) hard reg. */
2054 if (orig_dst != dst)
2055 emit_move_insn (orig_dst, dst);
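/* As a usage sketch of the routine above (the names and the size are
assumptions for illustration): a value returned in the non-consecutive
registers of RETVAL_PARALLEL can be spilled to the stack by pairing it
with a BLKmode temporary:
    rtx mem = assign_stack_temp (BLKmode, int_size_in_bytes (type), 0);
    emit_group_store (mem, retval_parallel, type,
                      int_size_in_bytes (type));  */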
2058 /* Generate code to copy a BLKmode object of TYPE out of a
2059 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2060 is null, a stack temporary is created. TGTBLK is returned.
2062 The purpose of this routine is to handle functions that return
2063 BLKmode structures in registers. Some machines (the PA for example)
2064 want to return all small structures in registers regardless of the
2065 structure's alignment. */
2068 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2070 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2071 rtx src = NULL, dst = NULL;
2072 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2073 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2075 if (tgtblk == 0)
2077 tgtblk = assign_temp (build_qualified_type (type,
2078 (TYPE_QUALS (type)
2079 | TYPE_QUAL_CONST)),
2080 0, 1, 1);
2081 preserve_temp_slots (tgtblk);
2084 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2085 into a new pseudo which is a full word. */
2087 if (GET_MODE (srcreg) != BLKmode
2088 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2089 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2091 /* If the structure doesn't take up a whole number of words, see whether
2092 SRCREG is padded on the left or on the right. If it's on the left,
2093 set PADDING_CORRECTION to the number of bits to skip.
2095 In most ABIs, the structure will be returned at the least significant end of
2096 the register, which translates to right padding on little-endian
2097 targets and left padding on big-endian targets. The opposite
2098 holds if the structure is returned at the most significant
2099 end of the register. */
2100 if (bytes % UNITS_PER_WORD != 0
2101 && (targetm.calls.return_in_msb (type)
2102 ? !BYTES_BIG_ENDIAN
2103 : BYTES_BIG_ENDIAN))
2104 padding_correction
2105 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2107 /* Copy the structure BITSIZE bits at a time.
2109 We could probably emit more efficient code for machines which do not use
2110 strict alignment, but it doesn't seem worth the effort at the current
2111 time. */
2112 for (bitpos = 0, xbitpos = padding_correction;
2113 bitpos < bytes * BITS_PER_UNIT;
2114 bitpos += bitsize, xbitpos += bitsize)
2116 /* We need a new source operand each time xbitpos is on a
2117 word boundary and when xbitpos == padding_correction
2118 (the first time through). */
2119 if (xbitpos % BITS_PER_WORD == 0
2120 || xbitpos == padding_correction)
2121 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2122 GET_MODE (srcreg));
2124 /* We need a new destination operand each time bitpos is on
2125 a word boundary. */
2126 if (bitpos % BITS_PER_WORD == 0)
2127 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2129 /* Use xbitpos for the source extraction (right justified) and
2130 bitpos for the destination store (left justified). */
2131 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2132 extract_bit_field (src, bitsize,
2133 xbitpos % BITS_PER_WORD, 1,
2134 NULL_RTX, word_mode, word_mode));
2137 return tgtblk;
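/* A worked example of the padding correction above: assume a
big-endian target with 32-bit words returning a 6-byte structure at
the least significant end of the register.  Then
    bytes % UNITS_PER_WORD = 6 % 4 = 2
    padding_correction = 32 - 2 * 8 = 16 bits
so the first extraction starts 16 bits into SRCREG, skipping the
left padding.  */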
2140 /* Add a USE expression for REG to the (possibly empty) list pointed
2141 to by CALL_FUSAGE. REG must denote a hard register. */
2143 void
2144 use_reg (rtx *call_fusage, rtx reg)
2146 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2148 *call_fusage
2149 = gen_rtx_EXPR_LIST (VOIDmode,
2150 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2153 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2154 starting at REGNO. All of these registers must be hard registers. */
2156 void
2157 use_regs (rtx *call_fusage, int regno, int nregs)
2159 int i;
2161 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2163 for (i = 0; i < nregs; i++)
2164 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2167 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2168 PARALLEL REGS. This is for calls that pass values in multiple
2169 non-contiguous locations. The Irix 6 ABI has examples of this. */
2171 void
2172 use_group_regs (rtx *call_fusage, rtx regs)
2174 int i;
2176 for (i = 0; i < XVECLEN (regs, 0); i++)
2178 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2180 /* A NULL entry means the parameter goes both on the stack and in
2181 registers. This can also be a MEM for targets that pass values
2182 partially on the stack and partially in registers. */
2183 if (reg != 0 && REG_P (reg))
2184 use_reg (call_fusage, reg);
2189 /* Determine whether the LEN bytes generated by CONSTFUN can be
2190 stored to memory using several move instructions. CONSTFUNDATA is
2191 a pointer which will be passed as argument in every CONSTFUN call.
2192 ALIGN is maximum alignment we can assume. Return nonzero if a
2193 call to store_by_pieces should succeed. */
2196 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2197 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2198 void *constfundata, unsigned int align)
2200 unsigned HOST_WIDE_INT l;
2201 unsigned int max_size;
2202 HOST_WIDE_INT offset = 0;
2203 enum machine_mode mode, tmode;
2204 enum insn_code icode;
2205 int reverse;
2206 rtx cst;
2208 if (len == 0)
2209 return 1;
2211 if (! STORE_BY_PIECES_P (len, align))
2212 return 0;
2214 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2215 if (align >= GET_MODE_ALIGNMENT (tmode))
2216 align = GET_MODE_ALIGNMENT (tmode);
2217 else
2219 enum machine_mode xmode;
2221 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2222 tmode != VOIDmode;
2223 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2224 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2225 || SLOW_UNALIGNED_ACCESS (tmode, align))
2226 break;
2228 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2231 /* We would first store what we can in the largest integer mode, then go to
2232 successively smaller modes. */
2234 for (reverse = 0;
2235 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2236 reverse++)
2238 l = len;
2239 mode = VOIDmode;
2240 max_size = STORE_MAX_PIECES + 1;
2241 while (max_size > 1)
2243 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2244 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2245 if (GET_MODE_SIZE (tmode) < max_size)
2246 mode = tmode;
2248 if (mode == VOIDmode)
2249 break;
2251 icode = mov_optab->handlers[(int) mode].insn_code;
2252 if (icode != CODE_FOR_nothing
2253 && align >= GET_MODE_ALIGNMENT (mode))
2255 unsigned int size = GET_MODE_SIZE (mode);
2257 while (l >= size)
2259 if (reverse)
2260 offset -= size;
2262 cst = (*constfun) (constfundata, offset, mode);
2263 if (!LEGITIMATE_CONSTANT_P (cst))
2264 return 0;
2266 if (!reverse)
2267 offset += size;
2269 l -= size;
2273 max_size = GET_MODE_SIZE (mode);
2276 /* The code above should have handled everything. */
2277 gcc_assert (!l);
2280 return 1;
2283 /* Generate several move instructions to store LEN bytes generated by
2284 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2285 pointer which will be passed as argument in every CONSTFUN call.
2286 ALIGN is maximum alignment we can assume.
2287 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2288 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2289 stpcpy. */
2292 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2293 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2294 void *constfundata, unsigned int align, int endp)
2296 struct store_by_pieces data;
2298 if (len == 0)
2300 gcc_assert (endp != 2);
2301 return to;
2304 gcc_assert (STORE_BY_PIECES_P (len, align));
2305 data.constfun = constfun;
2306 data.constfundata = constfundata;
2307 data.len = len;
2308 data.to = to;
2309 store_by_pieces_1 (&data, align);
2310 if (endp)
2312 rtx to1;
2314 gcc_assert (!data.reverse);
2315 if (data.autinc_to)
2317 if (endp == 2)
2319 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2320 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2321 else
2322 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2323 -1));
2325 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2326 data.offset);
2328 else
2330 if (endp == 2)
2331 --data.offset;
2332 to1 = adjust_address (data.to, QImode, data.offset);
2334 return to1;
2336 else
2337 return data.to;
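/* A minimal sketch of how the two entry points above pair up in a
caller (CONSTFUN and DATA stand for whatever callback produces the
constant for a given offset and mode, and are assumptions here):
    if (can_store_by_pieces (len, constfun, data, align))
      dest_mem = store_by_pieces (dest_mem, len, constfun, data,
                                  align, 0);
Passing 1 or 2 for ENDP instead returns the memory at, or one byte
before, the end of the stored block.  */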
2340 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2341 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2343 static void
2344 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2346 struct store_by_pieces data;
2348 if (len == 0)
2349 return;
2351 data.constfun = clear_by_pieces_1;
2352 data.constfundata = NULL;
2353 data.len = len;
2354 data.to = to;
2355 store_by_pieces_1 (&data, align);
2358 /* Callback routine for clear_by_pieces.
2359 Return const0_rtx unconditionally. */
2361 static rtx
2362 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2363 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2364 enum machine_mode mode ATTRIBUTE_UNUSED)
2366 return const0_rtx;
2369 /* Subroutine of clear_by_pieces and store_by_pieces.
2370 Generate several move instructions to store LEN bytes of block TO. (A MEM
2371 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2373 static void
2374 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2375 unsigned int align ATTRIBUTE_UNUSED)
2377 rtx to_addr = XEXP (data->to, 0);
2378 unsigned int max_size = STORE_MAX_PIECES + 1;
2379 enum machine_mode mode = VOIDmode, tmode;
2380 enum insn_code icode;
2382 data->offset = 0;
2383 data->to_addr = to_addr;
2384 data->autinc_to
2385 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2386 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2388 data->explicit_inc_to = 0;
2389 data->reverse
2390 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2391 if (data->reverse)
2392 data->offset = data->len;
2394 /* If storing requires more than two move insns,
2395 copy addresses to registers (to make displacements shorter)
2396 and use post-increment if available. */
2397 if (!data->autinc_to
2398 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2400 /* Determine the main mode we'll be using. */
2401 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2402 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2403 if (GET_MODE_SIZE (tmode) < max_size)
2404 mode = tmode;
2406 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2408 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2409 data->autinc_to = 1;
2410 data->explicit_inc_to = -1;
2413 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2414 && ! data->autinc_to)
2416 data->to_addr = copy_addr_to_reg (to_addr);
2417 data->autinc_to = 1;
2418 data->explicit_inc_to = 1;
2421 if (!data->autinc_to && CONSTANT_P (to_addr))
2422 data->to_addr = copy_addr_to_reg (to_addr);
2425 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2426 if (align >= GET_MODE_ALIGNMENT (tmode))
2427 align = GET_MODE_ALIGNMENT (tmode);
2428 else
2430 enum machine_mode xmode;
2432 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2433 tmode != VOIDmode;
2434 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2435 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2436 || SLOW_UNALIGNED_ACCESS (tmode, align))
2437 break;
2439 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2442 /* First store what we can in the largest integer mode, then go to
2443 successively smaller modes. */
2445 while (max_size > 1)
2447 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2448 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2449 if (GET_MODE_SIZE (tmode) < max_size)
2450 mode = tmode;
2452 if (mode == VOIDmode)
2453 break;
2455 icode = mov_optab->handlers[(int) mode].insn_code;
2456 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2457 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2459 max_size = GET_MODE_SIZE (mode);
2462 /* The code above should have handled everything. */
2463 gcc_assert (!data->len);
2466 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2467 with move instructions for mode MODE. GENFUN is the gen_... function
2468 to make a move insn for that mode. DATA has all the other info. */
2470 static void
2471 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2472 struct store_by_pieces *data)
2474 unsigned int size = GET_MODE_SIZE (mode);
2475 rtx to1, cst;
2477 while (data->len >= size)
2479 if (data->reverse)
2480 data->offset -= size;
2482 if (data->autinc_to)
2483 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2484 data->offset);
2485 else
2486 to1 = adjust_address (data->to, mode, data->offset);
2488 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2489 emit_insn (gen_add2_insn (data->to_addr,
2490 GEN_INT (-(HOST_WIDE_INT) size)));
2492 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2493 emit_insn ((*genfun) (to1, cst));
2495 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2496 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2498 if (! data->reverse)
2499 data->offset += size;
2501 data->len -= size;
2505 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2506 its length in bytes. */
2509 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2510 unsigned int expected_align, HOST_WIDE_INT expected_size)
2512 enum machine_mode mode = GET_MODE (object);
2513 unsigned int align;
2515 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2517 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2518 just move a zero. Otherwise, do this a piece at a time. */
2519 if (mode != BLKmode
2520 && GET_CODE (size) == CONST_INT
2521 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2523 rtx zero = CONST0_RTX (mode);
2524 if (zero != NULL)
2526 emit_move_insn (object, zero);
2527 return NULL;
2530 if (COMPLEX_MODE_P (mode))
2532 zero = CONST0_RTX (GET_MODE_INNER (mode));
2533 if (zero != NULL)
2535 write_complex_part (object, zero, 0);
2536 write_complex_part (object, zero, 1);
2537 return NULL;
2542 if (size == const0_rtx)
2543 return NULL;
2545 align = MEM_ALIGN (object);
2547 if (GET_CODE (size) == CONST_INT
2548 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2549 clear_by_pieces (object, INTVAL (size), align);
2550 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2551 expected_align, expected_size))
2553 else
2554 return set_storage_via_libcall (object, size, const0_rtx,
2555 method == BLOCK_OP_TAILCALL);
2557 return NULL;
2561 clear_storage (rtx object, rtx size, enum block_op_methods method)
2563 return clear_storage_hints (object, size, method, 0, -1);
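/* As a usage sketch (the MEM and the length are assumptions for
illustration): zeroing a 64-byte BLKmode object without allowing a
tail call would be
    clear_storage (blk_mem, GEN_INT (64), BLOCK_OP_NORMAL);
which emits a single zero move, clears by pieces, expands a setmem
pattern, or falls back to the memset libcall, as decided above.  */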
2567 /* A subroutine of clear_storage. Expand a call to memset.
2568 Return the return value of memset, 0 otherwise. */
2571 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2573 tree call_expr, fn, object_tree, size_tree, val_tree;
2574 enum machine_mode size_mode;
2575 rtx retval;
2577 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2578 place those pseudos into a VAR_DECL and use them later. */
2580 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2582 size_mode = TYPE_MODE (sizetype);
2583 size = convert_to_mode (size_mode, size, 1);
2584 size = copy_to_mode_reg (size_mode, size);
2586 /* It is incorrect to use the libcall calling conventions to call
2587 memset in this context. This could be a user call to memset and
2588 the user may wish to examine the return value from memset. For
2589 targets where libcalls and normal calls have different conventions
2590 for returning pointers, we could end up generating incorrect code. */
2592 object_tree = make_tree (ptr_type_node, object);
2593 if (GET_CODE (val) != CONST_INT)
2594 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2595 size_tree = make_tree (sizetype, size);
2596 val_tree = make_tree (integer_type_node, val);
2598 fn = clear_storage_libcall_fn (true);
2599 call_expr = build_call_expr (fn, 3,
2600 object_tree, val_tree, size_tree);
2601 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2603 retval = expand_normal (call_expr);
2605 return retval;
2608 /* A subroutine of set_storage_via_libcall. Create the tree node
2609 for the function we use for block clears. The first time FOR_CALL
2610 is true, we call assemble_external. */
2612 static GTY(()) tree block_clear_fn;
2614 void
2615 init_block_clear_fn (const char *asmspec)
2617 if (!block_clear_fn)
2619 tree fn, args;
2621 fn = get_identifier ("memset");
2622 args = build_function_type_list (ptr_type_node, ptr_type_node,
2623 integer_type_node, sizetype,
2624 NULL_TREE);
2626 fn = build_decl (FUNCTION_DECL, fn, args);
2627 DECL_EXTERNAL (fn) = 1;
2628 TREE_PUBLIC (fn) = 1;
2629 DECL_ARTIFICIAL (fn) = 1;
2630 TREE_NOTHROW (fn) = 1;
2631 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2632 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2634 block_clear_fn = fn;
2637 if (asmspec)
2638 set_user_assembler_name (block_clear_fn, asmspec);
2641 static tree
2642 clear_storage_libcall_fn (int for_call)
2644 static bool emitted_extern;
2646 if (!block_clear_fn)
2647 init_block_clear_fn (NULL);
2649 if (for_call && !emitted_extern)
2651 emitted_extern = true;
2652 make_decl_rtl (block_clear_fn);
2653 assemble_external (block_clear_fn);
2656 return block_clear_fn;
2659 /* Expand a setmem pattern; return true if successful. */
2661 bool
2662 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2663 unsigned int expected_align, HOST_WIDE_INT expected_size)
2665 /* Try the most limited insn first, because there's no point
2666 including more than one in the machine description unless
2667 the more limited one has some advantage. */
2669 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2670 enum machine_mode mode;
2672 if (expected_align < align)
2673 expected_align = align;
2675 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2676 mode = GET_MODE_WIDER_MODE (mode))
2678 enum insn_code code = setmem_optab[(int) mode];
2679 insn_operand_predicate_fn pred;
2681 if (code != CODE_FOR_nothing
2682 /* We don't need MODE to be narrower than
2683 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2684 the mode mask, as it is returned by the macro, it will
2685 definitely be less than the actual mode mask. */
2686 && ((GET_CODE (size) == CONST_INT
2687 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2688 <= (GET_MODE_MASK (mode) >> 1)))
2689 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2690 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2691 || (*pred) (object, BLKmode))
2692 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2693 || (*pred) (opalign, VOIDmode)))
2695 rtx opsize, opchar;
2696 enum machine_mode char_mode;
2697 rtx last = get_last_insn ();
2698 rtx pat;
2700 opsize = convert_to_mode (mode, size, 1);
2701 pred = insn_data[(int) code].operand[1].predicate;
2702 if (pred != 0 && ! (*pred) (opsize, mode))
2703 opsize = copy_to_mode_reg (mode, opsize);
2705 opchar = val;
2706 char_mode = insn_data[(int) code].operand[2].mode;
2707 if (char_mode != VOIDmode)
2709 opchar = convert_to_mode (char_mode, opchar, 1);
2710 pred = insn_data[(int) code].operand[2].predicate;
2711 if (pred != 0 && ! (*pred) (opchar, char_mode))
2712 opchar = copy_to_mode_reg (char_mode, opchar);
2715 if (insn_data[(int) code].n_operands == 4)
2716 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2717 else
2718 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2719 GEN_INT (expected_align),
2720 GEN_INT (expected_size));
2721 if (pat)
2723 emit_insn (pat);
2724 return true;
2726 else
2727 delete_insns_since (last);
2731 return false;
2735 /* Write to one of the components of the complex value CPLX. Write VAL to
2736 the real part if IMAG_P is false, and the imaginary part if it's true. */
2738 static void
2739 write_complex_part (rtx cplx, rtx val, bool imag_p)
2741 enum machine_mode cmode;
2742 enum machine_mode imode;
2743 unsigned ibitsize;
2745 if (GET_CODE (cplx) == CONCAT)
2747 emit_move_insn (XEXP (cplx, imag_p), val);
2748 return;
2751 cmode = GET_MODE (cplx);
2752 imode = GET_MODE_INNER (cmode);
2753 ibitsize = GET_MODE_BITSIZE (imode);
2755 /* For MEMs simplify_gen_subreg may generate an invalid new address
2756 because, e.g., the original address is considered mode-dependent
2757 by the target, which restricts simplify_subreg from invoking
2758 adjust_address_nv. Instead of preparing fallback support for an
2759 invalid address, we call adjust_address_nv directly. */
2760 if (MEM_P (cplx))
2762 emit_move_insn (adjust_address_nv (cplx, imode,
2763 imag_p ? GET_MODE_SIZE (imode) : 0),
2764 val);
2765 return;
2768 /* If the sub-object is at least word sized, then we know that subregging
2769 will work. This special case is important, since store_bit_field
2770 wants to operate on integer modes, and there's rarely an OImode to
2771 correspond to TCmode. */
2772 if (ibitsize >= BITS_PER_WORD
2773 /* For hard regs we have exact predicates. Assume we can split
2774 the original object if it spans an even number of hard regs.
2775 This special case is important for SCmode on 64-bit platforms
2776 where the natural size of floating-point regs is 32-bit. */
2777 || (REG_P (cplx)
2778 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2779 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2781 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2782 imag_p ? GET_MODE_SIZE (imode) : 0);
2783 if (part)
2785 emit_move_insn (part, val);
2786 return;
2788 else
2789 /* simplify_gen_subreg may fail for sub-word MEMs. */
2790 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2793 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2796 /* Extract one of the components of the complex value CPLX. Extract the
2797 real part if IMAG_P is false, and the imaginary part if it's true. */
2799 static rtx
2800 read_complex_part (rtx cplx, bool imag_p)
2802 enum machine_mode cmode, imode;
2803 unsigned ibitsize;
2805 if (GET_CODE (cplx) == CONCAT)
2806 return XEXP (cplx, imag_p);
2808 cmode = GET_MODE (cplx);
2809 imode = GET_MODE_INNER (cmode);
2810 ibitsize = GET_MODE_BITSIZE (imode);
2812 /* Special case reads from complex constants that got spilled to memory. */
2813 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2815 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2816 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2818 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2819 if (CONSTANT_CLASS_P (part))
2820 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2824 /* For MEMs simplify_gen_subreg may generate an invalid new address
2825 because, e.g., the original address is considered mode-dependent
2826 by the target, which restricts simplify_subreg from invoking
2827 adjust_address_nv. Instead of preparing fallback support for an
2828 invalid address, we call adjust_address_nv directly. */
2829 if (MEM_P (cplx))
2830 return adjust_address_nv (cplx, imode,
2831 imag_p ? GET_MODE_SIZE (imode) : 0);
2833 /* If the sub-object is at least word sized, then we know that subregging
2834 will work. This special case is important, since extract_bit_field
2835 wants to operate on integer modes, and there's rarely an OImode to
2836 correspond to TCmode. */
2837 if (ibitsize >= BITS_PER_WORD
2838 /* For hard regs we have exact predicates. Assume we can split
2839 the original object if it spans an even number of hard regs.
2840 This special case is important for SCmode on 64-bit platforms
2841 where the natural size of floating-point regs is 32-bit. */
2842 || (REG_P (cplx)
2843 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2844 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2846 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2847 imag_p ? GET_MODE_SIZE (imode) : 0);
2848 if (ret)
2849 return ret;
2850 else
2851 /* simplify_gen_subreg may fail for sub-word MEMs. */
2852 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2855 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2856 true, NULL_RTX, imode, imode);
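/* For illustration, on a CONCAT the two helpers above reduce to plain
operand accesses:
    read_complex_part (cplx, true)         is XEXP (cplx, 1)
    write_complex_part (cplx, val, false)  is emit_move_insn (XEXP (cplx, 0), val)
while MEMs and sufficiently wide REGs go through adjust_address_nv or
subregs as handled above, and only the remaining cases need the
bit-field machinery.  */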
2859 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2860 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2861 represented in NEW_MODE. If FORCE is true, this will never happen, as
2862 we'll force-create a SUBREG if needed. */
2864 static rtx
2865 emit_move_change_mode (enum machine_mode new_mode,
2866 enum machine_mode old_mode, rtx x, bool force)
2868 rtx ret;
2870 if (MEM_P (x))
2872 /* We don't have to worry about changing the address since the
2873 size in bytes is supposed to be the same. */
2874 if (reload_in_progress)
2876 /* Copy the MEM to change the mode and move any
2877 substitutions from the old MEM to the new one. */
2878 ret = adjust_address_nv (x, new_mode, 0);
2879 copy_replacements (x, ret);
2881 else
2882 ret = adjust_address (x, new_mode, 0);
2884 else
2886 /* Note that we do want simplify_subreg's behavior of validating
2887 that the new mode is ok for a hard register. If we were to use
2888 simplify_gen_subreg, we would create the subreg, but would
2889 probably run into the target not being able to implement it. */
2890 /* Except, of course, when FORCE is true, when this is exactly what
2891 we want. Which is needed for CCmodes on some targets. */
2892 if (force)
2893 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2894 else
2895 ret = simplify_subreg (new_mode, x, old_mode, 0);
2898 return ret;
2901 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2902 an integer mode of the same size as MODE. Returns the instruction
2903 emitted, or NULL if such a move could not be generated. */
2905 static rtx
2906 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2908 enum machine_mode imode;
2909 enum insn_code code;
2911 /* There must exist a mode of the exact size we require. */
2912 imode = int_mode_for_mode (mode);
2913 if (imode == BLKmode)
2914 return NULL_RTX;
2916 /* The target must support moves in this mode. */
2917 code = mov_optab->handlers[imode].insn_code;
2918 if (code == CODE_FOR_nothing)
2919 return NULL_RTX;
2921 x = emit_move_change_mode (imode, mode, x, force);
2922 if (x == NULL_RTX)
2923 return NULL_RTX;
2924 y = emit_move_change_mode (imode, mode, y, force);
2925 if (y == NULL_RTX)
2926 return NULL_RTX;
2927 return emit_insn (GEN_FCN (code) (x, y));
2930 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2931 Return an equivalent MEM that does not use an auto-increment. */
2933 static rtx
2934 emit_move_resolve_push (enum machine_mode mode, rtx x)
2936 enum rtx_code code = GET_CODE (XEXP (x, 0));
2937 HOST_WIDE_INT adjust;
2938 rtx temp;
2940 adjust = GET_MODE_SIZE (mode);
2941 #ifdef PUSH_ROUNDING
2942 adjust = PUSH_ROUNDING (adjust);
2943 #endif
2944 if (code == PRE_DEC || code == POST_DEC)
2945 adjust = -adjust;
2946 else if (code == PRE_MODIFY || code == POST_MODIFY)
2948 rtx expr = XEXP (XEXP (x, 0), 1);
2949 HOST_WIDE_INT val;
2951 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2952 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2953 val = INTVAL (XEXP (expr, 1));
2954 if (GET_CODE (expr) == MINUS)
2955 val = -val;
2956 gcc_assert (adjust == val || adjust == -val);
2957 adjust = val;
2960 /* Do not use anti_adjust_stack, since we don't want to update
2961 stack_pointer_delta. */
2962 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2963 GEN_INT (adjust), stack_pointer_rtx,
2964 0, OPTAB_LIB_WIDEN);
2965 if (temp != stack_pointer_rtx)
2966 emit_move_insn (stack_pointer_rtx, temp);
2968 switch (code)
2970 case PRE_INC:
2971 case PRE_DEC:
2972 case PRE_MODIFY:
2973 temp = stack_pointer_rtx;
2974 break;
2975 case POST_INC:
2976 case POST_DEC:
2977 case POST_MODIFY:
2978 temp = plus_constant (stack_pointer_rtx, -adjust);
2979 break;
2980 default:
2981 gcc_unreachable ();
2984 return replace_equiv_address (x, temp);
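/* A worked example of the transformation above, assuming a 4-byte
mode that PUSH_ROUNDING leaves unchanged:
    X = (mem:SI (pre_dec (reg sp)))      adjust = -4
The explicit addition emits sp := sp + (-4), and since PRE_DEC takes
effect before the access, the equivalent MEM returned is
(mem:SI (reg sp)).  A post_dec address would instead yield
(mem:SI (plus (reg sp) (const_int 4))).  */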
2987 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2988 X is known to satisfy push_operand, and MODE is known to be complex.
2989 Returns the last instruction emitted. */
2992 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2994 enum machine_mode submode = GET_MODE_INNER (mode);
2995 bool imag_first;
2997 #ifdef PUSH_ROUNDING
2998 unsigned int submodesize = GET_MODE_SIZE (submode);
3000 /* In case we output to the stack, but the size is smaller than what the
3001 machine can push exactly, we need to use move instructions. */
3002 if (PUSH_ROUNDING (submodesize) != submodesize)
3004 x = emit_move_resolve_push (mode, x);
3005 return emit_move_insn (x, y);
3007 #endif
3009 /* Note that the real part always precedes the imag part in memory
3010 regardless of the machine's endianness. */
3011 switch (GET_CODE (XEXP (x, 0)))
3013 case PRE_DEC:
3014 case POST_DEC:
3015 imag_first = true;
3016 break;
3017 case PRE_INC:
3018 case POST_INC:
3019 imag_first = false;
3020 break;
3021 default:
3022 gcc_unreachable ();
3025 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3026 read_complex_part (y, imag_first));
3027 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3028 read_complex_part (y, !imag_first));
3031 /* A subroutine of emit_move_complex. Perform the move from Y to X
3032 via two moves of the parts. Returns the last instruction emitted. */
3035 emit_move_complex_parts (rtx x, rtx y)
3037 /* Show the output dies here. This is necessary for SUBREGs
3038 of pseudos since we cannot track their lifetimes correctly;
3039 hard regs shouldn't appear here except as return values. */
3040 if (!reload_completed && !reload_in_progress
3041 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3042 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3044 write_complex_part (x, read_complex_part (y, false), false);
3045 write_complex_part (x, read_complex_part (y, true), true);
3047 return get_last_insn ();
3050 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3051 MODE is known to be complex. Returns the last instruction emitted. */
3053 static rtx
3054 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3056 bool try_int;
3058 /* Need to take special care for pushes, to maintain proper ordering
3059 of the data, and possibly extra padding. */
3060 if (push_operand (x, mode))
3061 return emit_move_complex_push (mode, x, y);
3063 /* See if we can coerce the target into moving both values at once. */
3065 /* Move floating point as parts. */
3066 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3067 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
3068 try_int = false;
3069 /* Not possible if the values are inherently not adjacent. */
3070 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3071 try_int = false;
3072 /* Is possible if both are registers (or subregs of registers). */
3073 else if (register_operand (x, mode) && register_operand (y, mode))
3074 try_int = true;
3075 /* If one of the operands is a memory, and alignment constraints
3076 are friendly enough, we may be able to do combined memory operations.
3077 We do not attempt this if Y is a constant because that combination is
3078 usually better with the by-parts thing below. */
3079 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3080 && (!STRICT_ALIGNMENT
3081 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3082 try_int = true;
3083 else
3084 try_int = false;
3086 if (try_int)
3088 rtx ret;
3090 /* For memory to memory moves, optimal behavior can be had with the
3091 existing block move logic. */
3092 if (MEM_P (x) && MEM_P (y))
3094 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3095 BLOCK_OP_NO_LIBCALL);
3096 return get_last_insn ();
3099 ret = emit_move_via_integer (mode, x, y, true);
3100 if (ret)
3101 return ret;
3104 return emit_move_complex_parts (x, y);
3107 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3108 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3110 static rtx
3111 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3113 rtx ret;
3115 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3116 if (mode != CCmode)
3118 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3119 if (code != CODE_FOR_nothing)
3121 x = emit_move_change_mode (CCmode, mode, x, true);
3122 y = emit_move_change_mode (CCmode, mode, y, true);
3123 return emit_insn (GEN_FCN (code) (x, y));
3127 /* Otherwise, find the MODE_INT mode of the same width. */
3128 ret = emit_move_via_integer (mode, x, y, false);
3129 gcc_assert (ret != NULL);
3130 return ret;
3133 /* Return true if word I of OP lies entirely in the
3134 undefined bits of a paradoxical subreg. */
3136 static bool
3137 undefined_operand_subword_p (rtx op, int i)
3139 enum machine_mode innermode, innermostmode;
3140 int offset;
3141 if (GET_CODE (op) != SUBREG)
3142 return false;
3143 innermode = GET_MODE (op);
3144 innermostmode = GET_MODE (SUBREG_REG (op));
3145 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3146 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3147 memory, except for a paradoxical subreg where we define
3148 SUBREG_BYTE to be 0; undo this exception as in
3149 simplify_subreg. */
3150 if (SUBREG_BYTE (op) == 0
3151 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3153 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3154 if (WORDS_BIG_ENDIAN)
3155 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3156 if (BYTES_BIG_ENDIAN)
3157 offset += difference % UNITS_PER_WORD;
3159 if (offset >= GET_MODE_SIZE (innermostmode)
3160 || offset <= -GET_MODE_SIZE (word_mode))
3161 return true;
3162 return false;
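/* For example, with 32-bit words on a little-endian target,
(subreg:DI (reg:SI x) 0) is a paradoxical subreg whose second word
(I == 1) lies entirely outside the SImode source, so the predicate
above returns true for it and emit_move_multi_word below skips
emitting a move for that word.  */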
3165 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3166 MODE is any multi-word or full-word mode that lacks a move_insn
3167 pattern. Note that you will get better code if you define such
3168 patterns, even if they must turn into multiple assembler instructions. */
3170 static rtx
3171 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3173 rtx last_insn = 0;
3174 rtx seq, inner;
3175 bool need_clobber;
3176 int i;
3178 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3180 /* If X is a push on the stack, do the push now and replace
3181 X with a reference to the stack pointer. */
3182 if (push_operand (x, mode))
3183 x = emit_move_resolve_push (mode, x);
3185 /* If we are in reload, see if either operand is a MEM whose address
3186 is scheduled for replacement. */
3187 if (reload_in_progress && MEM_P (x)
3188 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3189 x = replace_equiv_address_nv (x, inner);
3190 if (reload_in_progress && MEM_P (y)
3191 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3192 y = replace_equiv_address_nv (y, inner);
3194 start_sequence ();
3196 need_clobber = false;
3197 for (i = 0;
3198 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3199 i++)
3201 rtx xpart = operand_subword (x, i, 1, mode);
3202 rtx ypart;
3204 /* Do not generate code for a move if it would come entirely
3205 from the undefined bits of a paradoxical subreg. */
3206 if (undefined_operand_subword_p (y, i))
3207 continue;
3209 ypart = operand_subword (y, i, 1, mode);
3211 /* If we can't get a part of Y, put Y into memory if it is a
3212 constant. Otherwise, force it into a register. Then we must
3213 be able to get a part of Y. */
3214 if (ypart == 0 && CONSTANT_P (y))
3216 y = use_anchored_address (force_const_mem (mode, y));
3217 ypart = operand_subword (y, i, 1, mode);
3219 else if (ypart == 0)
3220 ypart = operand_subword_force (y, i, mode);
3222 gcc_assert (xpart && ypart);
3224 need_clobber |= (GET_CODE (xpart) == SUBREG);
3226 last_insn = emit_move_insn (xpart, ypart);
3229 seq = get_insns ();
3230 end_sequence ();
3232 /* Show the output dies here. This is necessary for SUBREGs
3233 of pseudos since we cannot track their lifetimes correctly;
3234 hard regs shouldn't appear here except as return values.
3235 We never want to emit such a clobber after reload. */
3236 if (x != y
3237 && ! (reload_in_progress || reload_completed)
3238 && need_clobber != 0)
3239 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3241 emit_insn (seq);
3243 return last_insn;
3246 /* Low level part of emit_move_insn.
3247 Called just like emit_move_insn, but assumes X and Y
3248 are basically valid. */
3251 emit_move_insn_1 (rtx x, rtx y)
3253 enum machine_mode mode = GET_MODE (x);
3254 enum insn_code code;
3256 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3258 code = mov_optab->handlers[mode].insn_code;
3259 if (code != CODE_FOR_nothing)
3260 return emit_insn (GEN_FCN (code) (x, y));
3262 /* Expand complex moves by moving real part and imag part. */
3263 if (COMPLEX_MODE_P (mode))
3264 return emit_move_complex (mode, x, y);
3266 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3268 rtx result = emit_move_via_integer (mode, x, y, true);
3270 /* If we can't find an integer mode, use multi words. */
3271 if (result)
3272 return result;
3273 else
3274 return emit_move_multi_word (mode, x, y);
3277 if (GET_MODE_CLASS (mode) == MODE_CC)
3278 return emit_move_ccmode (mode, x, y);
3280 /* Try using a move pattern for the corresponding integer mode. This is
3281 only safe when simplify_subreg can convert MODE constants into integer
3282 constants. At present, it can only do this reliably if the value
3283 fits within a HOST_WIDE_INT. */
3284 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3286 rtx ret = emit_move_via_integer (mode, x, y, false);
3287 if (ret)
3288 return ret;
3291 return emit_move_multi_word (mode, x, y);
3294 /* Generate code to copy Y into X.
3295 Both Y and X must have the same mode, except that
3296 Y can be a constant with VOIDmode.
3297 This mode cannot be BLKmode; use emit_block_move for that.
3299 Return the last instruction emitted. */
3302 emit_move_insn (rtx x, rtx y)
3304 enum machine_mode mode = GET_MODE (x);
3305 rtx y_cst = NULL_RTX;
3306 rtx last_insn, set;
3308 gcc_assert (mode != BLKmode
3309 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3311 if (CONSTANT_P (y))
3313 if (optimize
3314 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3315 && (last_insn = compress_float_constant (x, y)))
3316 return last_insn;
3318 y_cst = y;
3320 if (!LEGITIMATE_CONSTANT_P (y))
3322 y = force_const_mem (mode, y);
3324 /* If the target's cannot_force_const_mem prevented the spill,
3325 assume that the target's move expanders will also take care
3326 of the non-legitimate constant. */
3327 if (!y)
3328 y = y_cst;
3329 else
3330 y = use_anchored_address (y);
3334 /* If X or Y are memory references, verify that their addresses are valid
3335 for the machine. */
3336 if (MEM_P (x)
3337 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3338 && ! push_operand (x, GET_MODE (x)))
3339 || (flag_force_addr
3340 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3341 x = validize_mem (x);
3343 if (MEM_P (y)
3344 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3345 || (flag_force_addr
3346 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3347 y = validize_mem (y);
3349 gcc_assert (mode != BLKmode);
3351 last_insn = emit_move_insn_1 (x, y);
3353 if (y_cst && REG_P (x)
3354 && (set = single_set (last_insn)) != NULL_RTX
3355 && SET_DEST (set) == x
3356 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3357 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3359 return last_insn;
3362 /* If Y is representable exactly in a narrower mode, and the target can
3363 perform the extension directly from constant or memory, then emit the
3364 move as an extension. */
3366 static rtx
3367 compress_float_constant (rtx x, rtx y)
3369 enum machine_mode dstmode = GET_MODE (x);
3370 enum machine_mode orig_srcmode = GET_MODE (y);
3371 enum machine_mode srcmode;
3372 REAL_VALUE_TYPE r;
3373 int oldcost, newcost;
3375 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3377 if (LEGITIMATE_CONSTANT_P (y))
3378 oldcost = rtx_cost (y, SET);
3379 else
3380 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3382 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3383 srcmode != orig_srcmode;
3384 srcmode = GET_MODE_WIDER_MODE (srcmode))
3386 enum insn_code ic;
3387 rtx trunc_y, last_insn;
3389 /* Skip if the target can't extend this way. */
3390 ic = can_extend_p (dstmode, srcmode, 0);
3391 if (ic == CODE_FOR_nothing)
3392 continue;
3394 /* Skip if the narrowed value isn't exact. */
3395 if (! exact_real_truncate (srcmode, &r))
3396 continue;
3398 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3400 if (LEGITIMATE_CONSTANT_P (trunc_y))
3402 /* Skip if the target needs extra instructions to perform
3403 the extension. */
3404 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3405 continue;
3406 /* This is valid, but may not be cheaper than the original. */
3407 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3408 if (oldcost < newcost)
3409 continue;
3411 else if (float_extend_from_mem[dstmode][srcmode])
3413 trunc_y = force_const_mem (srcmode, trunc_y);
3414 /* This is valid, but may not be cheaper than the original. */
3415 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3416 if (oldcost < newcost)
3417 continue;
3418 trunc_y = validize_mem (trunc_y);
3420 else
3421 continue;
3423 /* For CSE's benefit, force the compressed constant pool entry
3424 into a new pseudo. This constant may be used in different modes,
3425 and if not, combine will put things back together for us. */
3426 trunc_y = force_reg (srcmode, trunc_y);
3427 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3428 last_insn = get_last_insn ();
3430 if (REG_P (x))
3431 set_unique_reg_note (last_insn, REG_EQUAL, y);
3433 return last_insn;
3436 return NULL_RTX;
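/* A concrete example of the compression above: moving the DFmode
constant 1.5, which is exactly representable in SFmode.  If the
target can extend SFmode to DFmode no more expensively than loading
the full DFmode constant, the SFmode constant is forced into a pseudo
and the move is emitted as
    (set (reg:DF x) (float_extend:DF (reg:SF tmp)))
and, when X is a register, a REG_EQUAL note records the original
DFmode value.  */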
3439 /* Pushing data onto the stack. */
3441 /* Push a block of length SIZE (perhaps variable)
3442 and return an rtx to address the beginning of the block.
3443 The value may be virtual_outgoing_args_rtx.
3445 EXTRA is the number of bytes of padding to push in addition to SIZE.
3446 BELOW nonzero means this padding comes at low addresses;
3447 otherwise, the padding comes at high addresses. */
3450 push_block (rtx size, int extra, int below)
3452 rtx temp;
3454 size = convert_modes (Pmode, ptr_mode, size, 1);
3455 if (CONSTANT_P (size))
3456 anti_adjust_stack (plus_constant (size, extra));
3457 else if (REG_P (size) && extra == 0)
3458 anti_adjust_stack (size);
3459 else
3461 temp = copy_to_mode_reg (Pmode, size);
3462 if (extra != 0)
3463 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3464 temp, 0, OPTAB_LIB_WIDEN);
3465 anti_adjust_stack (temp);
3468 #ifndef STACK_GROWS_DOWNWARD
3469 if (0)
3470 #else
3471 if (1)
3472 #endif
3474 temp = virtual_outgoing_args_rtx;
3475 if (extra != 0 && below)
3476 temp = plus_constant (temp, extra);
3478 else
3480 if (GET_CODE (size) == CONST_INT)
3481 temp = plus_constant (virtual_outgoing_args_rtx,
3482 -INTVAL (size) - (below ? 0 : extra));
3483 else if (extra != 0 && !below)
3484 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3485 negate_rtx (Pmode, plus_constant (size, extra)));
3486 else
3487 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3488 negate_rtx (Pmode, size));
3491 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3494 #ifdef PUSH_ROUNDING
3496 /* Emit single push insn. */
3498 static void
3499 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3501 rtx dest_addr;
3502 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3503 rtx dest;
3504 enum insn_code icode;
3505 insn_operand_predicate_fn pred;
3507 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3508 /* If there is a push pattern, use it. Otherwise try the old way of throwing a
3509 MEM representing the push operation to the move expander. */
3510 icode = push_optab->handlers[(int) mode].insn_code;
3511 if (icode != CODE_FOR_nothing)
3513 if (((pred = insn_data[(int) icode].operand[0].predicate)
3514 && !((*pred) (x, mode))))
3515 x = force_reg (mode, x);
3516 emit_insn (GEN_FCN (icode) (x));
3517 return;
3519 if (GET_MODE_SIZE (mode) == rounded_size)
3520 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3521 /* If we are to pad downward, adjust the stack pointer first and
3522 then store X into the stack location using an offset. This is
3523 because emit_move_insn does not know how to pad; it does not have
3524 access to type. */
3525 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3527 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3528 HOST_WIDE_INT offset;
3530 emit_move_insn (stack_pointer_rtx,
3531 expand_binop (Pmode,
3532 #ifdef STACK_GROWS_DOWNWARD
3533 sub_optab,
3534 #else
3535 add_optab,
3536 #endif
3537 stack_pointer_rtx,
3538 GEN_INT (rounded_size),
3539 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3541 offset = (HOST_WIDE_INT) padding_size;
3542 #ifdef STACK_GROWS_DOWNWARD
3543 if (STACK_PUSH_CODE == POST_DEC)
3544 /* We have already decremented the stack pointer, so get the
3545 previous value. */
3546 offset += (HOST_WIDE_INT) rounded_size;
3547 #else
3548 if (STACK_PUSH_CODE == POST_INC)
3549 /* We have already incremented the stack pointer, so get the
3550 previous value. */
3551 offset -= (HOST_WIDE_INT) rounded_size;
3552 #endif
3553 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3555 else
3557 #ifdef STACK_GROWS_DOWNWARD
3558 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3559 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3560 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3561 #else
3562 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3563 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3564 GEN_INT (rounded_size));
3565 #endif
3566 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3569 dest = gen_rtx_MEM (mode, dest_addr);
3571 if (type != 0)
3573 set_mem_attributes (dest, type, 1);
3575 if (flag_optimize_sibling_calls)
3576 /* Function incoming arguments may overlap with sibling call
3577 outgoing arguments and we cannot allow reordering of reads
3578 from function arguments with stores to outgoing arguments
3579 of sibling calls. */
3580 set_mem_alias_set (dest, 0);
3582 emit_move_insn (dest, x);
3584 #endif
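/* A worked example of the downward-padding path in
emit_single_push_insn, assuming a downward-growing stack with
STACK_PUSH_CODE == PRE_DEC and an HImode (2-byte) argument whose push
slot is rounded up to 4 bytes:
    rounded_size = PUSH_ROUNDING (2) = 4
    padding_size = 4 - 2 = 2
The stack pointer is first lowered by 4 and X is then stored at
sp + 2, so the two padding bytes occupy the low addresses of the slot
and the value the high ones.  */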
3586 /* Generate code to push X onto the stack, assuming it has mode MODE and
3587 type TYPE.
3588 MODE is redundant except when X is a CONST_INT (since they don't
3589 carry mode info).
3590 SIZE is an rtx for the size of data to be copied (in bytes),
3591 needed only if X is BLKmode.
3593 ALIGN (in bits) is maximum alignment we can assume.
3595 If PARTIAL and REG are both nonzero, then copy that many of the first
3596 bytes of X into registers starting with REG, and push the rest of X.
3597 The amount of space pushed is decreased by PARTIAL bytes.
3598 REG must be a hard register in this case.
3599 If REG is zero but PARTIAL is not, take all other actions for an
3600 argument partially in registers, but do not actually load any
3601 registers.
3603 EXTRA is the amount in bytes of extra space to leave next to this arg.
3604 This is ignored if an argument block has already been allocated.
3606 On a machine that lacks real push insns, ARGS_ADDR is the address of
3607 the bottom of the argument block for this call. We use indexing off there
3608 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3609 argument block has not been preallocated.
3611 ARGS_SO_FAR is the size of args previously pushed for this call.
3613 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3614 for arguments passed in registers. If nonzero, it will be the number
3615 of bytes required. */
3617 void
3618 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3619 unsigned int align, int partial, rtx reg, int extra,
3620 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3621 rtx alignment_pad)
3623 rtx xinner;
3624 enum direction stack_direction
3625 #ifdef STACK_GROWS_DOWNWARD
3626 = downward;
3627 #else
3628 = upward;
3629 #endif
3631 /* Decide where to pad the argument: `downward' for below,
3632 `upward' for above, or `none' for don't pad it.
3633 Default is below for small data on big-endian machines; else above. */
3634 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3636 /* Invert direction if stack is post-decrement.
3637 FIXME: why? */
3638 if (STACK_PUSH_CODE == POST_DEC)
3639 if (where_pad != none)
3640 where_pad = (where_pad == downward ? upward : downward);
3642 xinner = x;
3644 if (mode == BLKmode
3645 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3647 /* Copy a block into the stack, entirely or partially. */
3649 rtx temp;
3650 int used;
3651 int offset;
3652 int skip;
3654 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3655 used = partial - offset;
3657 if (mode != BLKmode)
3659 /* A value is to be stored in an insufficiently aligned
3660 stack slot; copy via a suitably aligned slot if
3661 necessary. */
3662 size = GEN_INT (GET_MODE_SIZE (mode));
3663 if (!MEM_P (xinner))
3665 temp = assign_temp (type, 0, 1, 1);
3666 emit_move_insn (temp, xinner);
3667 xinner = temp;
3671 gcc_assert (size);
3673 /* USED is now the # of bytes we need not copy to the stack
3674 because registers will take care of them. */
3676 if (partial != 0)
3677 xinner = adjust_address (xinner, BLKmode, used);
3679 /* If the partial register-part of the arg counts in its stack size,
3680 skip the part of stack space corresponding to the registers.
3681 Otherwise, start copying to the beginning of the stack space,
3682 by setting SKIP to 0. */
3683 skip = (reg_parm_stack_space == 0) ? 0 : used;
3685 #ifdef PUSH_ROUNDING
3686 /* Do it with several push insns if that doesn't take lots of insns
3687 and if there is no difficulty with push insns that skip bytes
3688 on the stack for alignment purposes. */
3689 if (args_addr == 0
3690 && PUSH_ARGS
3691 && GET_CODE (size) == CONST_INT
3692 && skip == 0
3693 && MEM_ALIGN (xinner) >= align
3694 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3695 /* Here we avoid the case of a structure whose weak alignment
3696 forces many pushes of a small amount of data,
3697 and such small pushes do rounding that causes trouble. */
3698 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3699 || align >= BIGGEST_ALIGNMENT
3700 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3701 == (align / BITS_PER_UNIT)))
3702 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3704 /* Push padding now if padding above and stack grows down,
3705 or if padding below and stack grows up.
3706 But if space already allocated, this has already been done. */
3707 if (extra && args_addr == 0
3708 && where_pad != none && where_pad != stack_direction)
3709 anti_adjust_stack (GEN_INT (extra));
3711 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3713 else
3714 #endif /* PUSH_ROUNDING */
3716 rtx target;
3718 /* Otherwise make space on the stack and copy the data
3719 to the address of that space. */
3721 /* Deduct words put into registers from the size we must copy. */
3722 if (partial != 0)
3724 if (GET_CODE (size) == CONST_INT)
3725 size = GEN_INT (INTVAL (size) - used);
3726 else
3727 size = expand_binop (GET_MODE (size), sub_optab, size,
3728 GEN_INT (used), NULL_RTX, 0,
3729 OPTAB_LIB_WIDEN);
3732 /* Get the address of the stack space.
3733 In this case, we do not deal with EXTRA separately.
3734 A single stack adjust will do. */
3735 if (! args_addr)
3737 temp = push_block (size, extra, where_pad == downward);
3738 extra = 0;
3740 else if (GET_CODE (args_so_far) == CONST_INT)
3741 temp = memory_address (BLKmode,
3742 plus_constant (args_addr,
3743 skip + INTVAL (args_so_far)));
3744 else
3745 temp = memory_address (BLKmode,
3746 plus_constant (gen_rtx_PLUS (Pmode,
3747 args_addr,
3748 args_so_far),
3749 skip));
3751 if (!ACCUMULATE_OUTGOING_ARGS)
3753 /* If the source is referenced relative to the stack pointer,
3754 copy it to another register to stabilize it. We do not need
3755 to do this if we know that we won't be changing sp. */
3757 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3758 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3759 temp = copy_to_reg (temp);
3762 target = gen_rtx_MEM (BLKmode, temp);
3764 /* We do *not* set_mem_attributes here, because incoming arguments
3765 may overlap with sibling call outgoing arguments and we cannot
3766 allow reordering of reads from function arguments with stores
3767 to outgoing arguments of sibling calls. We do, however, want
3768 to record the alignment of the stack slot. */
3769 /* ALIGN may well be better aligned than TYPE, e.g. due to
3770 PARM_BOUNDARY. Assume the caller isn't lying. */
3771 set_mem_align (target, align);
3773 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3776 else if (partial > 0)
3778 /* Scalar partly in registers. */
3780 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3781 int i;
3782 int not_stack;
3783 /* # bytes of start of argument
3784 that we must make space for but need not store. */
3785 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3786 int args_offset = INTVAL (args_so_far);
3787 int skip;
3789 /* Push padding now if padding above and stack grows down,
3790 or if padding below and stack grows up.
3791 But if space already allocated, this has already been done. */
3792 if (extra && args_addr == 0
3793 && where_pad != none && where_pad != stack_direction)
3794 anti_adjust_stack (GEN_INT (extra));
3796 /* If we make space by pushing it, we might as well push
3797 the real data. Otherwise, we can leave OFFSET nonzero
3798 and leave the space uninitialized. */
3799 if (args_addr == 0)
3800 offset = 0;
3802 /* Now NOT_STACK gets the number of words that we don't need to
3803 allocate on the stack. Convert OFFSET to words too. */
3804 not_stack = (partial - offset) / UNITS_PER_WORD;
3805 offset /= UNITS_PER_WORD;
3807 /* If the partial register-part of the arg counts in its stack size,
3808 skip the part of stack space corresponding to the registers.
3809 Otherwise, start copying to the beginning of the stack space,
3810 by setting SKIP to 0. */
3811 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3813 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3814 x = validize_mem (force_const_mem (mode, x));
3816 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3817 SUBREGs of such registers are not allowed. */
3818 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3819 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3820 x = copy_to_reg (x);
3822 /* Loop over all the words allocated on the stack for this arg. */
3823 /* We can do it by words, because any scalar bigger than a word
3824 has a size that is a multiple of a word. */
3825 #ifndef PUSH_ARGS_REVERSED
3826 for (i = not_stack; i < size; i++)
3827 #else
3828 for (i = size - 1; i >= not_stack; i--)
3829 #endif
3830 if (i >= not_stack + offset)
3831 emit_push_insn (operand_subword_force (x, i, mode),
3832 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3833 0, args_addr,
3834 GEN_INT (args_offset + ((i - not_stack + skip)
3835 * UNITS_PER_WORD)),
3836 reg_parm_stack_space, alignment_pad);
3838 else
3840 rtx addr;
3841 rtx dest;
3843 /* Push padding now if padding above and stack grows down,
3844 or if padding below and stack grows up.
3845 But if space already allocated, this has already been done. */
3846 if (extra && args_addr == 0
3847 && where_pad != none && where_pad != stack_direction)
3848 anti_adjust_stack (GEN_INT (extra));
3850 #ifdef PUSH_ROUNDING
3851 if (args_addr == 0 && PUSH_ARGS)
3852 emit_single_push_insn (mode, x, type);
3853 else
3854 #endif
3856 if (GET_CODE (args_so_far) == CONST_INT)
3857 addr
3858 = memory_address (mode,
3859 plus_constant (args_addr,
3860 INTVAL (args_so_far)));
3861 else
3862 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3863 args_so_far));
3864 dest = gen_rtx_MEM (mode, addr);
3866 /* We do *not* set_mem_attributes here, because incoming arguments
3867 may overlap with sibling call outgoing arguments and we cannot
3868 allow reordering of reads from function arguments with stores
3869 to outgoing arguments of sibling calls. We do, however, want
3870 to record the alignment of the stack slot. */
3871 /* ALIGN may well be better aligned than TYPE, e.g. due to
3872 PARM_BOUNDARY. Assume the caller isn't lying. */
3873 set_mem_align (dest, align);
3875 emit_move_insn (dest, x);
3879 /* If part should go in registers, copy that part
3880 into the appropriate registers. Do this now, at the end,
3881 since mem-to-mem copies above may do function calls. */
3882 if (partial > 0 && reg != 0)
3884 /* Handle calls that pass values in multiple non-contiguous locations.
3885 The Irix 6 ABI has examples of this. */
3886 if (GET_CODE (reg) == PARALLEL)
3887 emit_group_load (reg, x, type, -1);
3888 else
3890 gcc_assert (partial % UNITS_PER_WORD == 0);
3891 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3895 if (extra && args_addr == 0 && where_pad == stack_direction)
3896 anti_adjust_stack (GEN_INT (extra));
3898 if (alignment_pad && args_addr == 0)
3899 anti_adjust_stack (alignment_pad);
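/* For illustration only: a minimal sketch, not compiled (guarded by
   "#if 0"), of the simplest use of emit_push_insn above -- pushing one
   word-mode value with no partial-register part, no preallocated
   argument block and no extra padding.  It mirrors the recursive call
   made in the scalar-partly-in-registers case; the wrapper name and
   the constant arguments are hypothetical.  */
#if 0
static void
example_push_one_word (rtx x)
{
  emit_push_insn (x, word_mode, NULL_TREE, /* size */ NULL_RTX,
                  /* align */ BITS_PER_WORD, /* partial */ 0,
                  /* reg */ NULL_RTX, /* extra */ 0,
                  /* args_addr */ NULL_RTX, /* args_so_far */ const0_rtx,
                  /* reg_parm_stack_space */ 0, /* alignment_pad */ NULL_RTX);
}
#endif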
3902 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3903 operations. */
3905 static rtx
3906 get_subtarget (rtx x)
3908 return (optimize
3909 || x == 0
3910 /* Only registers can be subtargets. */
3911 || !REG_P (x)
3912 /* Don't use hard regs to avoid extending their life. */
3913 || REGNO (x) < FIRST_PSEUDO_REGISTER
3914 ? 0 : x);
3917 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3918 FIELD is a bitfield. Returns true if the optimization was successful,
3919 and there's nothing else to do. */
3921 static bool
3922 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3923 unsigned HOST_WIDE_INT bitpos,
3924 enum machine_mode mode1, rtx str_rtx,
3925 tree to, tree src)
3927 enum machine_mode str_mode = GET_MODE (str_rtx);
3928 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3929 tree op0, op1;
3930 rtx value, result;
3931 optab binop;
3933 if (mode1 != VOIDmode
3934 || bitsize >= BITS_PER_WORD
3935 || str_bitsize > BITS_PER_WORD
3936 || TREE_SIDE_EFFECTS (to)
3937 || TREE_THIS_VOLATILE (to))
3938 return false;
3940 STRIP_NOPS (src);
3941 if (!BINARY_CLASS_P (src)
3942 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3943 return false;
3945 op0 = TREE_OPERAND (src, 0);
3946 op1 = TREE_OPERAND (src, 1);
3947 STRIP_NOPS (op0);
3949 if (!operand_equal_p (to, op0, 0))
3950 return false;
3952 if (MEM_P (str_rtx))
3954 unsigned HOST_WIDE_INT offset1;
3956 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3957 str_mode = word_mode;
3958 str_mode = get_best_mode (bitsize, bitpos,
3959 MEM_ALIGN (str_rtx), str_mode, 0);
3960 if (str_mode == VOIDmode)
3961 return false;
3962 str_bitsize = GET_MODE_BITSIZE (str_mode);
3964 offset1 = bitpos;
3965 bitpos %= str_bitsize;
3966 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3967 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3969 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3970 return false;
3972 /* If the bit field covers the whole REG/MEM, store_field
3973 will likely generate better code. */
3974 if (bitsize >= str_bitsize)
3975 return false;
3977 /* We can't handle fields split across multiple entities. */
3978 if (bitpos + bitsize > str_bitsize)
3979 return false;
3981 if (BYTES_BIG_ENDIAN)
3982 bitpos = str_bitsize - bitpos - bitsize;
3984 switch (TREE_CODE (src))
3986 case PLUS_EXPR:
3987 case MINUS_EXPR:
3988 /* For now, just optimize the case of the topmost bitfield
3989 where we don't need to do any masking and also
3990 1-bit bitfields where xor can be used.
3991 We might win by one instruction for the other bitfields
3992 too if insv/extv instructions aren't used, so that
3993 can be added later. */
3994 if (bitpos + bitsize != str_bitsize
3995 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3996 break;
3998 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3999 value = convert_modes (str_mode,
4000 TYPE_MODE (TREE_TYPE (op1)), value,
4001 TYPE_UNSIGNED (TREE_TYPE (op1)));
4003 /* We may be accessing data outside the field, which means
4004 we can alias adjacent data. */
4005 if (MEM_P (str_rtx))
4007 str_rtx = shallow_copy_rtx (str_rtx);
4008 set_mem_alias_set (str_rtx, 0);
4009 set_mem_expr (str_rtx, 0);
4012 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4013 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4015 value = expand_and (str_mode, value, const1_rtx, NULL);
4016 binop = xor_optab;
4018 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4019 build_int_cst (NULL_TREE, bitpos),
4020 NULL_RTX, 1);
4021 result = expand_binop (str_mode, binop, str_rtx,
4022 value, str_rtx, 1, OPTAB_WIDEN);
4023 if (result != str_rtx)
4024 emit_move_insn (str_rtx, result);
4025 return true;
4027 case BIT_IOR_EXPR:
4028 case BIT_XOR_EXPR:
4029 if (TREE_CODE (op1) != INTEGER_CST)
4030 break;
4031 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
4032 value = convert_modes (GET_MODE (str_rtx),
4033 TYPE_MODE (TREE_TYPE (op1)), value,
4034 TYPE_UNSIGNED (TREE_TYPE (op1)));
4036 /* We may be accessing data outside the field, which means
4037 we can alias adjacent data. */
4038 if (MEM_P (str_rtx))
4040 str_rtx = shallow_copy_rtx (str_rtx);
4041 set_mem_alias_set (str_rtx, 0);
4042 set_mem_expr (str_rtx, 0);
4045 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4046 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4048 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4049 - 1);
4050 value = expand_and (GET_MODE (str_rtx), value, mask,
4051 NULL_RTX);
4053 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4054 build_int_cst (NULL_TREE, bitpos),
4055 NULL_RTX, 1);
4056 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4057 value, str_rtx, 1, OPTAB_WIDEN);
4058 if (result != str_rtx)
4059 emit_move_insn (str_rtx, result);
4060 return true;
4062 default:
4063 break;
4066 return false;
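/* For illustration only: the kind of source-level stores the routine
   above tries to catch.  A sketch, not compiled (guarded by "#if 0");
   the struct and function names are hypothetical.  */
#if 0
struct example_bits
{
  unsigned int flag : 1;
  unsigned int rest : 31;
};

static void
example_bitfield_updates (struct example_bits *p)
{
  p->flag += 1;     /* 1-bit field: handled with a single xor or top-bit add.  */
  p->rest |= 0x10;  /* BIT_IOR_EXPR case with a constant operand.  */
}
#endif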
4070 /* Expand an assignment that stores the value of FROM into TO. */
4072 void
4073 expand_assignment (tree to, tree from)
4075 rtx to_rtx = 0;
4076 rtx result;
4078 /* Don't crash if the lhs of the assignment was erroneous. */
4079 if (TREE_CODE (to) == ERROR_MARK)
4081 result = expand_normal (from);
4082 return;
4085 /* Optimize away no-op moves without side-effects. */
4086 if (operand_equal_p (to, from, 0))
4087 return;
4089 /* Assignment of a structure component needs special treatment
4090 if the structure component's rtx is not simply a MEM.
4091 Assignment of an array element at a constant index, and assignment of
4092 an array element in an unaligned packed structure field, has the same
4093 problem. */
4094 if (handled_component_p (to)
4095 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4097 enum machine_mode mode1;
4098 HOST_WIDE_INT bitsize, bitpos;
4099 tree offset;
4100 int unsignedp;
4101 int volatilep = 0;
4102 tree tem;
4104 push_temp_slots ();
4105 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4106 &unsignedp, &volatilep, true);
4108 /* If we are going to use store_bit_field and extract_bit_field,
4109 make sure to_rtx will be safe for multiple use. */
4111 to_rtx = expand_normal (tem);
4113 if (offset != 0)
4115 rtx offset_rtx;
4117 if (!MEM_P (to_rtx))
4119 /* We can get constant negative offsets into arrays with broken
4120 user code. Translate this to a trap instead of ICEing. */
4121 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4122 expand_builtin_trap ();
4123 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4126 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4127 #ifdef POINTERS_EXTEND_UNSIGNED
4128 if (GET_MODE (offset_rtx) != Pmode)
4129 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4130 #else
4131 if (GET_MODE (offset_rtx) != ptr_mode)
4132 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4133 #endif
4135 /* A constant address in TO_RTX can have VOIDmode, we must not try
4136 to call force_reg for that case. Avoid that case. */
4137 if (MEM_P (to_rtx)
4138 && GET_MODE (to_rtx) == BLKmode
4139 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4140 && bitsize > 0
4141 && (bitpos % bitsize) == 0
4142 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4143 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4145 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4146 bitpos = 0;
4149 to_rtx = offset_address (to_rtx, offset_rtx,
4150 highest_pow2_factor_for_target (to,
4151 offset));
4154 /* Handle expand_expr of a complex value returning a CONCAT. */
4155 if (GET_CODE (to_rtx) == CONCAT)
4157 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4159 gcc_assert (bitpos == 0);
4160 result = store_expr (from, to_rtx, false);
4162 else
4164 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4165 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
4168 else
4170 if (MEM_P (to_rtx))
4172 /* If the field is at offset zero, we could have been given the
4173 DECL_RTX of the parent struct. Don't munge it. */
4174 to_rtx = shallow_copy_rtx (to_rtx);
4176 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4178 /* Deal with volatile and readonly fields. The former is only
4179 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4180 if (volatilep)
4181 MEM_VOLATILE_P (to_rtx) = 1;
4182 if (component_uses_parent_alias_set (to))
4183 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4186 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4187 to_rtx, to, from))
4188 result = NULL;
4189 else
4190 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4191 TREE_TYPE (tem), get_alias_set (to));
4194 if (result)
4195 preserve_temp_slots (result);
4196 free_temp_slots ();
4197 pop_temp_slots ();
4198 return;
4201 /* If the rhs is a function call and its value is not an aggregate,
4202 call the function before we start to compute the lhs.
4203 This is needed for correct code for cases such as
4204 val = setjmp (buf) on machines where reference to val
4205 requires loading up part of an address in a separate insn.
4207 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is a REG,
4208 since it might be a promoted variable where the zero- or sign-extension
4209 needs to be done. Handling this in the normal way is safe because no
4210 computation is done before the call. */
4211 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4212 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4213 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4214 && REG_P (DECL_RTL (to))))
4216 rtx value;
4218 push_temp_slots ();
4219 value = expand_normal (from);
4220 if (to_rtx == 0)
4221 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4223 /* Handle calls that return values in multiple non-contiguous locations.
4224 The Irix 6 ABI has examples of this. */
4225 if (GET_CODE (to_rtx) == PARALLEL)
4226 emit_group_load (to_rtx, value, TREE_TYPE (from),
4227 int_size_in_bytes (TREE_TYPE (from)));
4228 else if (GET_MODE (to_rtx) == BLKmode)
4229 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4230 else
4232 if (POINTER_TYPE_P (TREE_TYPE (to)))
4233 value = convert_memory_address (GET_MODE (to_rtx), value);
4234 emit_move_insn (to_rtx, value);
4236 preserve_temp_slots (to_rtx);
4237 free_temp_slots ();
4238 pop_temp_slots ();
4239 return;
4242 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4243 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4245 if (to_rtx == 0)
4246 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4248 /* Don't move directly into a return register. */
4249 if (TREE_CODE (to) == RESULT_DECL
4250 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4252 rtx temp;
4254 push_temp_slots ();
4255 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4257 if (GET_CODE (to_rtx) == PARALLEL)
4258 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4259 int_size_in_bytes (TREE_TYPE (from)));
4260 else
4261 emit_move_insn (to_rtx, temp);
4263 preserve_temp_slots (to_rtx);
4264 free_temp_slots ();
4265 pop_temp_slots ();
4266 return;
4269 /* In case we are returning the contents of an object which overlaps
4270 the place the value is being stored, use a safe function when copying
4271 a value through a pointer into a structure value return block. */
4272 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4273 && current_function_returns_struct
4274 && !current_function_returns_pcc_struct)
4276 rtx from_rtx, size;
4278 push_temp_slots ();
4279 size = expr_size (from);
4280 from_rtx = expand_normal (from);
4282 emit_library_call (memmove_libfunc, LCT_NORMAL,
4283 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4284 XEXP (from_rtx, 0), Pmode,
4285 convert_to_mode (TYPE_MODE (sizetype),
4286 size, TYPE_UNSIGNED (sizetype)),
4287 TYPE_MODE (sizetype));
4289 preserve_temp_slots (to_rtx);
4290 free_temp_slots ();
4291 pop_temp_slots ();
4292 return;
4295 /* Compute FROM and store the value in the rtx we got. */
4297 push_temp_slots ();
4298 result = store_expr (from, to_rtx, 0);
4299 preserve_temp_slots (result);
4300 free_temp_slots ();
4301 pop_temp_slots ();
4302 return;
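/* For illustration only: a sketch, not compiled (guarded by "#if 0"),
   of the way this file itself calls expand_assignment -- e.g. to emit
   INDEX = INDEX + 1 when expanding the array-range loop in
   store_constructor below.  The wrapper name is hypothetical.  */
#if 0
static void
example_increment (tree index)
{
  expand_assignment (index,
                     build2 (PLUS_EXPR, TREE_TYPE (index),
                             index, integer_one_node));
}
#endif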
4305 /* Generate code for computing expression EXP,
4306 and storing the value into TARGET.
4308 If the mode is BLKmode then we may return TARGET itself.
4309 It turns out that in BLKmode it doesn't cause a problem,
4310 because C has no operators that could combine two different
4311 assignments into the same BLKmode object with different values
4312 with no sequence point. Will other languages need this to
4313 be more thorough?
4315 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4316 stack, and block moves may need to be treated specially. */
4318 rtx
4319 store_expr (tree exp, rtx target, int call_param_p)
4321 rtx temp;
4322 rtx alt_rtl = NULL_RTX;
4323 int dont_return_target = 0;
4325 if (VOID_TYPE_P (TREE_TYPE (exp)))
4327 /* C++ can generate ?: expressions with a throw expression in one
4328 branch and an rvalue in the other. Here, we resolve attempts to
4329 store the throw expression's nonexistent result. */
4330 gcc_assert (!call_param_p);
4331 expand_expr (exp, const0_rtx, VOIDmode, 0);
4332 return NULL_RTX;
4334 if (TREE_CODE (exp) == COMPOUND_EXPR)
4336 /* Perform first part of compound expression, then assign from second
4337 part. */
4338 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4339 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4340 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4342 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4344 /* For a conditional expression, get a safe form of the target. Then
4345 test the condition, doing the appropriate assignment on either
4346 side. This avoids the creation of unnecessary temporaries.
4347 For non-BLKmode, it is more efficient not to do this. */
4349 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4351 do_pending_stack_adjust ();
4352 NO_DEFER_POP;
4353 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4354 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4355 emit_jump_insn (gen_jump (lab2));
4356 emit_barrier ();
4357 emit_label (lab1);
4358 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4359 emit_label (lab2);
4360 OK_DEFER_POP;
4362 return NULL_RTX;
4364 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4365 /* If this is a scalar in a register that is stored in a wider mode
4366 than the declared mode, compute the result into its declared mode
4367 and then convert to the wider mode. Our value is the computed
4368 expression. */
4370 rtx inner_target = 0;
4372 /* We can do the conversion inside EXP, which will often result
4373 in some optimizations. Do the conversion in two steps: first
4374 change the signedness, if needed, then the extend. But don't
4375 do this if the type of EXP is a subtype of something else
4376 since then the conversion might involve more than just
4377 converting modes. */
4378 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4379 && TREE_TYPE (TREE_TYPE (exp)) == 0
4380 && (!lang_hooks.reduce_bit_field_operations
4381 || (GET_MODE_PRECISION (GET_MODE (target))
4382 == TYPE_PRECISION (TREE_TYPE (exp)))))
4384 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4385 != SUBREG_PROMOTED_UNSIGNED_P (target))
4386 exp = fold_convert
4387 (lang_hooks.types.signed_or_unsigned_type
4388 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4390 exp = fold_convert (lang_hooks.types.type_for_mode
4391 (GET_MODE (SUBREG_REG (target)),
4392 SUBREG_PROMOTED_UNSIGNED_P (target)),
4393 exp);
4395 inner_target = SUBREG_REG (target);
4398 temp = expand_expr (exp, inner_target, VOIDmode,
4399 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4401 /* If TEMP is a VOIDmode constant, use convert_modes to make
4402 sure that we properly convert it. */
4403 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4405 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4406 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4407 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4408 GET_MODE (target), temp,
4409 SUBREG_PROMOTED_UNSIGNED_P (target));
4412 convert_move (SUBREG_REG (target), temp,
4413 SUBREG_PROMOTED_UNSIGNED_P (target));
4415 return NULL_RTX;
4417 else
4419 temp = expand_expr_real (exp, target, GET_MODE (target),
4420 (call_param_p
4421 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4422 &alt_rtl);
4423 /* Return TARGET if it's a specified hardware register.
4424 If TARGET is a volatile mem ref, either return TARGET
4425 or return a reg copied *from* TARGET; ANSI requires this.
4427 Otherwise, if TEMP is not TARGET, return TEMP
4428 if it is constant (for efficiency),
4429 or if we really want the correct value. */
4430 if (!(target && REG_P (target)
4431 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4432 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4433 && ! rtx_equal_p (temp, target)
4434 && CONSTANT_P (temp))
4435 dont_return_target = 1;
4438 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4439 the same as that of TARGET, adjust the constant. This is needed, for
4440 example, in case it is a CONST_DOUBLE and we want only a word-sized
4441 value. */
4442 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4443 && TREE_CODE (exp) != ERROR_MARK
4444 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4445 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4446 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4448 /* If value was not generated in the target, store it there.
4449 Convert the value to TARGET's type first if necessary and emit the
4450 pending incrementations that have been queued when expanding EXP.
4451 Note that we cannot emit the whole queue blindly because this will
4452 effectively disable the POST_INC optimization later.
4454 If TEMP and TARGET compare equal according to rtx_equal_p, but
4455 one or both of them are volatile memory refs, we have to distinguish
4456 two cases:
4457 - expand_expr has used TARGET. In this case, we must not generate
4458 another copy. This can be detected by TARGET being equal according
4459 to == .
4460 - expand_expr has not used TARGET - that means that the source just
4461 happens to have the same RTX form. Since temp will have been created
4462 by expand_expr, it will compare unequal according to == .
4463 We must generate a copy in this case, to reach the correct number
4464 of volatile memory references. */
4466 if ((! rtx_equal_p (temp, target)
4467 || (temp != target && (side_effects_p (temp)
4468 || side_effects_p (target))))
4469 && TREE_CODE (exp) != ERROR_MARK
4470 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4471 but TARGET is not valid memory reference, TEMP will differ
4472 from TARGET although it is really the same location. */
4473 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4474 /* If there's nothing to copy, don't bother. Don't call
4475 expr_size unless necessary, because some front ends' (e.g. C++)
4476 expr_size hook must not be given objects that are not
4477 supposed to be bit-copied or bit-initialized. */
4478 && expr_size (exp) != const0_rtx)
4480 if (GET_MODE (temp) != GET_MODE (target)
4481 && GET_MODE (temp) != VOIDmode)
4483 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4484 if (dont_return_target)
4486 /* In this case, we will return TEMP,
4487 so make sure it has the proper mode.
4488 But don't forget to store the value into TARGET. */
4489 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4490 emit_move_insn (target, temp);
4492 else
4493 convert_move (target, temp, unsignedp);
4496 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4498 /* Handle copying a string constant into an array. The string
4499 constant may be shorter than the array. So copy just the string's
4500 actual length, and clear the rest. First get the size of the data
4501 type of the string, which is actually the size of the target. */
4502 rtx size = expr_size (exp);
4504 if (GET_CODE (size) == CONST_INT
4505 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4506 emit_block_move (target, temp, size,
4507 (call_param_p
4508 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4509 else
4511 /* Compute the size of the data to copy from the string. */
4512 tree copy_size
4513 = size_binop (MIN_EXPR,
4514 make_tree (sizetype, size),
4515 size_int (TREE_STRING_LENGTH (exp)));
4516 rtx copy_size_rtx
4517 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4518 (call_param_p
4519 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4520 rtx label = 0;
4522 /* Copy that much. */
4523 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4524 TYPE_UNSIGNED (sizetype));
4525 emit_block_move (target, temp, copy_size_rtx,
4526 (call_param_p
4527 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4529 /* Figure out how much is left in TARGET that we have to clear.
4530 Do all calculations in ptr_mode. */
4531 if (GET_CODE (copy_size_rtx) == CONST_INT)
4533 size = plus_constant (size, -INTVAL (copy_size_rtx));
4534 target = adjust_address (target, BLKmode,
4535 INTVAL (copy_size_rtx));
4537 else
4539 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4540 copy_size_rtx, NULL_RTX, 0,
4541 OPTAB_LIB_WIDEN);
4543 #ifdef POINTERS_EXTEND_UNSIGNED
4544 if (GET_MODE (copy_size_rtx) != Pmode)
4545 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4546 TYPE_UNSIGNED (sizetype));
4547 #endif
4549 target = offset_address (target, copy_size_rtx,
4550 highest_pow2_factor (copy_size));
4551 label = gen_label_rtx ();
4552 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4553 GET_MODE (size), 0, label);
4556 if (size != const0_rtx)
4557 clear_storage (target, size, BLOCK_OP_NORMAL);
4559 if (label)
4560 emit_label (label);
4563 /* Handle calls that return values in multiple non-contiguous locations.
4564 The Irix 6 ABI has examples of this. */
4565 else if (GET_CODE (target) == PARALLEL)
4566 emit_group_load (target, temp, TREE_TYPE (exp),
4567 int_size_in_bytes (TREE_TYPE (exp)));
4568 else if (GET_MODE (temp) == BLKmode)
4569 emit_block_move (target, temp, expr_size (exp),
4570 (call_param_p
4571 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4572 else
4574 temp = force_operand (temp, target);
4575 if (temp != target)
4576 emit_move_insn (target, temp);
4580 return NULL_RTX;
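/* For illustration only: a minimal sketch, not compiled (guarded by
   "#if 0"), of the usual call pattern for store_expr -- expand EXP
   into a stack temporary of its own type, outside of any call-argument
   context (CALL_PARAM_P == 0).  The wrapper name is hypothetical.  */
#if 0
static rtx
example_store_to_temp (tree exp)
{
  rtx target = assign_temp (TREE_TYPE (exp), 0, 1, 1);
  store_expr (exp, target, /* call_param_p */ 0);
  return target;
}
#endif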
4583 /* Helper for categorize_ctor_elements. Identical interface. */
4585 static bool
4586 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4587 HOST_WIDE_INT *p_elt_count,
4588 bool *p_must_clear)
4590 unsigned HOST_WIDE_INT idx;
4591 HOST_WIDE_INT nz_elts, elt_count;
4592 tree value, purpose;
4594 /* Whether CTOR is a valid constant initializer, in accordance with what
4595 initializer_constant_valid_p does. If inferred from the constructor
4596 elements, true until proven otherwise. */
4597 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4598 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4600 nz_elts = 0;
4601 elt_count = 0;
4603 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4605 HOST_WIDE_INT mult;
4607 mult = 1;
4608 if (TREE_CODE (purpose) == RANGE_EXPR)
4610 tree lo_index = TREE_OPERAND (purpose, 0);
4611 tree hi_index = TREE_OPERAND (purpose, 1);
4613 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4614 mult = (tree_low_cst (hi_index, 1)
4615 - tree_low_cst (lo_index, 1) + 1);
4618 switch (TREE_CODE (value))
4620 case CONSTRUCTOR:
4622 HOST_WIDE_INT nz = 0, ic = 0;
4624 bool const_elt_p
4625 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4627 nz_elts += mult * nz;
4628 elt_count += mult * ic;
4630 if (const_from_elts_p && const_p)
4631 const_p = const_elt_p;
4633 break;
4635 case INTEGER_CST:
4636 case REAL_CST:
4637 if (!initializer_zerop (value))
4638 nz_elts += mult;
4639 elt_count += mult;
4640 break;
4642 case STRING_CST:
4643 nz_elts += mult * TREE_STRING_LENGTH (value);
4644 elt_count += mult * TREE_STRING_LENGTH (value);
4645 break;
4647 case COMPLEX_CST:
4648 if (!initializer_zerop (TREE_REALPART (value)))
4649 nz_elts += mult;
4650 if (!initializer_zerop (TREE_IMAGPART (value)))
4651 nz_elts += mult;
4652 elt_count += mult;
4653 break;
4655 case VECTOR_CST:
4657 tree v;
4658 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4660 if (!initializer_zerop (TREE_VALUE (v)))
4661 nz_elts += mult;
4662 elt_count += mult;
4665 break;
4667 default:
4668 nz_elts += mult;
4669 elt_count += mult;
4671 if (const_from_elts_p && const_p)
4672 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4673 != NULL_TREE;
4674 break;
4678 if (!*p_must_clear
4679 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4680 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4682 tree init_sub_type;
4683 bool clear_this = true;
4685 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4687 /* We don't expect more than one element of the union to be
4688 initialized. Not sure what we should do otherwise... */
4689 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4690 == 1);
4692 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4693 CONSTRUCTOR_ELTS (ctor),
4694 0)->value);
4696 /* ??? We could look at each element of the union, and find the
4697 largest element, which would avoid comparing the size of the
4698 initialized element against any tail padding in the union.
4699 Doesn't seem worth the effort... */
4700 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4701 TYPE_SIZE (init_sub_type)) == 1)
4703 /* And now we have to find out if the element itself is fully
4704 constructed. E.g. for union { struct { int a, b; } s; } u
4705 = { .s = { .a = 1 } }. */
4706 if (elt_count == count_type_elements (init_sub_type, false))
4707 clear_this = false;
4711 *p_must_clear = clear_this;
4714 *p_nz_elts += nz_elts;
4715 *p_elt_count += elt_count;
4717 return const_p;
4720 /* Examine CTOR to discover:
4721 * how many scalar fields are set to nonzero values,
4722 and place that count in *P_NZ_ELTS;
4723 * how many scalar fields in total are in CTOR,
4724 and place that count in *P_ELT_COUNT;
4725 * if a type is a union, and the initializer from the constructor
4726 is not the largest element in the union, then set *P_MUST_CLEAR.
4728 Return whether or not CTOR is a valid static constant initializer, the same
4729 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4731 bool
4732 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4733 HOST_WIDE_INT *p_elt_count,
4734 bool *p_must_clear)
4736 *p_nz_elts = 0;
4737 *p_elt_count = 0;
4738 *p_must_clear = false;
4740 return
4741 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4744 /* Count the number of scalars in TYPE. Return -1 if the count overflows
4745 or TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a
4746 flexible array member at the end of the structure. */
4748 HOST_WIDE_INT
4749 count_type_elements (tree type, bool allow_flexarr)
4751 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4752 switch (TREE_CODE (type))
4754 case ARRAY_TYPE:
4756 tree telts = array_type_nelts (type);
4757 if (telts && host_integerp (telts, 1))
4759 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4760 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4761 if (n == 0)
4762 return 0;
4763 else if (max / n > m)
4764 return n * m;
4766 return -1;
4769 case RECORD_TYPE:
4771 HOST_WIDE_INT n = 0, t;
4772 tree f;
4774 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4775 if (TREE_CODE (f) == FIELD_DECL)
4777 t = count_type_elements (TREE_TYPE (f), false);
4778 if (t < 0)
4780 /* Check for structures with flexible array member. */
4781 tree tf = TREE_TYPE (f);
4782 if (allow_flexarr
4783 && TREE_CHAIN (f) == NULL
4784 && TREE_CODE (tf) == ARRAY_TYPE
4785 && TYPE_DOMAIN (tf)
4786 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4787 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4788 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4789 && int_size_in_bytes (type) >= 0)
4790 break;
4792 return -1;
4794 n += t;
4797 return n;
4800 case UNION_TYPE:
4801 case QUAL_UNION_TYPE:
4803 /* Ho hum. How in the world do we guess here? Clearly it isn't
4804 right to count the fields. Guess based on the number of words. */
4805 HOST_WIDE_INT n = int_size_in_bytes (type);
4806 if (n < 0)
4807 return -1;
4808 return n / UNITS_PER_WORD;
4811 case COMPLEX_TYPE:
4812 return 2;
4814 case VECTOR_TYPE:
4815 return TYPE_VECTOR_SUBPARTS (type);
4817 case INTEGER_TYPE:
4818 case REAL_TYPE:
4819 case ENUMERAL_TYPE:
4820 case BOOLEAN_TYPE:
4821 case POINTER_TYPE:
4822 case OFFSET_TYPE:
4823 case REFERENCE_TYPE:
4824 return 1;
4826 case VOID_TYPE:
4827 case METHOD_TYPE:
4828 case FUNCTION_TYPE:
4829 case LANG_TYPE:
4830 default:
4831 gcc_unreachable ();
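/* For illustration only, not compiled (guarded by "#if 0"): what the
   count above works out to for one concrete (hypothetical) C type.  */
#if 0
struct example_s
{
  int a;                /* 1 scalar  */
  int b[3];             /* 3 scalars */
  _Complex double c;    /* 2 scalars (COMPLEX_TYPE counts as 2) */
};
/* count_type_elements (the tree for struct example_s, false) == 6;
   an array whose bound is not a compile-time constant yields -1.  */
#endif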
4835 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4837 static int
4838 mostly_zeros_p (tree exp)
4840 if (TREE_CODE (exp) == CONSTRUCTOR)
4843 HOST_WIDE_INT nz_elts, count, elts;
4844 bool must_clear;
4846 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4847 if (must_clear)
4848 return 1;
4850 elts = count_type_elements (TREE_TYPE (exp), false);
4852 return nz_elts < elts / 4;
4855 return initializer_zerop (exp);
4858 /* Return 1 if EXP contains all zeros. */
4860 static int
4861 all_zeros_p (tree exp)
4863 if (TREE_CODE (exp) == CONSTRUCTOR)
4866 HOST_WIDE_INT nz_elts, count;
4867 bool must_clear;
4869 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4870 return nz_elts == 0;
4873 return initializer_zerop (exp);
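/* For illustration only, not compiled (guarded by "#if 0"): local
   initializers (hypothetical) showing what the two predicates above
   accept.  Fewer than a quarter of X's elements are nonzero, so
   mostly_zeros_p returns 1 for its constructor; Y's constructor has
   no nonzero elements, so all_zeros_p returns 1.  */
#if 0
static void
example_zero_heavy_ctors (void)
{
  int x[8] = { 0, 0, 1 };   /* mostly zeros */
  int y[8] = { 0 };         /* all zeros */
}
#endif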
4876 /* Helper function for store_constructor.
4877 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4878 TYPE is the type of the CONSTRUCTOR, not the element type.
4879 CLEARED is as for store_constructor.
4880 ALIAS_SET is the alias set to use for any stores.
4882 This provides a recursive shortcut back to store_constructor when it isn't
4883 necessary to go through store_field. This is so that we can pass through
4884 the cleared field to let store_constructor know that we may not have to
4885 clear a substructure if the outer structure has already been cleared. */
4887 static void
4888 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4889 HOST_WIDE_INT bitpos, enum machine_mode mode,
4890 tree exp, tree type, int cleared, int alias_set)
4892 if (TREE_CODE (exp) == CONSTRUCTOR
4893 /* We can only call store_constructor recursively if the size and
4894 bit position are on a byte boundary. */
4895 && bitpos % BITS_PER_UNIT == 0
4896 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4897 /* If we have a nonzero bitpos for a register target, then we just
4898 let store_field do the bitfield handling. This is unlikely to
4899 generate unnecessary clear instructions anyways. */
4900 && (bitpos == 0 || MEM_P (target)))
4902 if (MEM_P (target))
4903 target
4904 = adjust_address (target,
4905 GET_MODE (target) == BLKmode
4906 || 0 != (bitpos
4907 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4908 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4911 /* Update the alias set, if required. */
4912 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4913 && MEM_ALIAS_SET (target) != 0)
4915 target = copy_rtx (target);
4916 set_mem_alias_set (target, alias_set);
4919 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4921 else
4922 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4925 /* Store the value of constructor EXP into the rtx TARGET.
4926 TARGET is either a REG or a MEM; we know it cannot conflict, since
4927 safe_from_p has been called.
4928 CLEARED is true if TARGET is known to have been zero'd.
4929 SIZE is the number of bytes of TARGET we are allowed to modify: this
4930 may not be the same as the size of EXP if we are assigning to a field
4931 which has been packed to exclude padding bits. */
4933 static void
4934 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4936 tree type = TREE_TYPE (exp);
4937 #ifdef WORD_REGISTER_OPERATIONS
4938 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4939 #endif
4941 switch (TREE_CODE (type))
4943 case RECORD_TYPE:
4944 case UNION_TYPE:
4945 case QUAL_UNION_TYPE:
4947 unsigned HOST_WIDE_INT idx;
4948 tree field, value;
4950 /* If size is zero or the target is already cleared, do nothing. */
4951 if (size == 0 || cleared)
4952 cleared = 1;
4953 /* We either clear the aggregate or indicate the value is dead. */
4954 else if ((TREE_CODE (type) == UNION_TYPE
4955 || TREE_CODE (type) == QUAL_UNION_TYPE)
4956 && ! CONSTRUCTOR_ELTS (exp))
4957 /* If the constructor is empty, clear the union. */
4959 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4960 cleared = 1;
4963 /* If we are building a static constructor into a register,
4964 set the initial value as zero so we can fold the value into
4965 a constant. But if more than one register is involved,
4966 this probably loses. */
4967 else if (REG_P (target) && TREE_STATIC (exp)
4968 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4970 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4971 cleared = 1;
4974 /* If the constructor has fewer fields than the structure or
4975 if we are initializing the structure to mostly zeros, clear
4976 the whole structure first. Don't do this if TARGET is a
4977 register whose mode size isn't equal to SIZE since
4978 clear_storage can't handle this case. */
4979 else if (size > 0
4980 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4981 != fields_length (type))
4982 || mostly_zeros_p (exp))
4983 && (!REG_P (target)
4984 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4985 == size)))
4987 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4988 cleared = 1;
4991 if (! cleared)
4992 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4994 /* Store each element of the constructor into the
4995 corresponding field of TARGET. */
4996 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4998 enum machine_mode mode;
4999 HOST_WIDE_INT bitsize;
5000 HOST_WIDE_INT bitpos = 0;
5001 tree offset;
5002 rtx to_rtx = target;
5004 /* Just ignore missing fields. We cleared the whole
5005 structure, above, if any fields are missing. */
5006 if (field == 0)
5007 continue;
5009 if (cleared && initializer_zerop (value))
5010 continue;
5012 if (host_integerp (DECL_SIZE (field), 1))
5013 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5014 else
5015 bitsize = -1;
5017 mode = DECL_MODE (field);
5018 if (DECL_BIT_FIELD (field))
5019 mode = VOIDmode;
5021 offset = DECL_FIELD_OFFSET (field);
5022 if (host_integerp (offset, 0)
5023 && host_integerp (bit_position (field), 0))
5025 bitpos = int_bit_position (field);
5026 offset = 0;
5028 else
5029 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5031 if (offset)
5033 rtx offset_rtx;
5035 offset
5036 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5037 make_tree (TREE_TYPE (exp),
5038 target));
5040 offset_rtx = expand_normal (offset);
5041 gcc_assert (MEM_P (to_rtx));
5043 #ifdef POINTERS_EXTEND_UNSIGNED
5044 if (GET_MODE (offset_rtx) != Pmode)
5045 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5046 #else
5047 if (GET_MODE (offset_rtx) != ptr_mode)
5048 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5049 #endif
5051 to_rtx = offset_address (to_rtx, offset_rtx,
5052 highest_pow2_factor (offset));
5055 #ifdef WORD_REGISTER_OPERATIONS
5056 /* If this initializes a field that is smaller than a
5057 word, at the start of a word, try to widen it to a full
5058 word. This special case allows us to output C++ member
5059 function initializations in a form that the optimizers
5060 can understand. */
5061 if (REG_P (target)
5062 && bitsize < BITS_PER_WORD
5063 && bitpos % BITS_PER_WORD == 0
5064 && GET_MODE_CLASS (mode) == MODE_INT
5065 && TREE_CODE (value) == INTEGER_CST
5066 && exp_size >= 0
5067 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5069 tree type = TREE_TYPE (value);
5071 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5073 type = lang_hooks.types.type_for_size
5074 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5075 value = fold_convert (type, value);
5078 if (BYTES_BIG_ENDIAN)
5079 value
5080 = fold_build2 (LSHIFT_EXPR, type, value,
5081 build_int_cst (type,
5082 BITS_PER_WORD - bitsize));
5083 bitsize = BITS_PER_WORD;
5084 mode = word_mode;
5086 #endif
5088 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5089 && DECL_NONADDRESSABLE_P (field))
5091 to_rtx = copy_rtx (to_rtx);
5092 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5095 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5096 value, type, cleared,
5097 get_alias_set (TREE_TYPE (field)));
5099 break;
5101 case ARRAY_TYPE:
5103 tree value, index;
5104 unsigned HOST_WIDE_INT i;
5105 int need_to_clear;
5106 tree domain;
5107 tree elttype = TREE_TYPE (type);
5108 int const_bounds_p;
5109 HOST_WIDE_INT minelt = 0;
5110 HOST_WIDE_INT maxelt = 0;
5112 domain = TYPE_DOMAIN (type);
5113 const_bounds_p = (TYPE_MIN_VALUE (domain)
5114 && TYPE_MAX_VALUE (domain)
5115 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5116 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5118 /* If we have constant bounds for the range of the type, get them. */
5119 if (const_bounds_p)
5121 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5122 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5125 /* If the constructor has fewer elements than the array, clear
5126 the whole array first. Similarly if this is a static
5127 constructor of a non-BLKmode object. */
5128 if (cleared)
5129 need_to_clear = 0;
5130 else if (REG_P (target) && TREE_STATIC (exp))
5131 need_to_clear = 1;
5132 else
5134 unsigned HOST_WIDE_INT idx;
5135 tree index, value;
5136 HOST_WIDE_INT count = 0, zero_count = 0;
5137 need_to_clear = ! const_bounds_p;
5139 /* This loop is a more accurate version of the loop in
5140 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5141 is also needed to check for missing elements. */
5142 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5144 HOST_WIDE_INT this_node_count;
5146 if (need_to_clear)
5147 break;
5149 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5151 tree lo_index = TREE_OPERAND (index, 0);
5152 tree hi_index = TREE_OPERAND (index, 1);
5154 if (! host_integerp (lo_index, 1)
5155 || ! host_integerp (hi_index, 1))
5157 need_to_clear = 1;
5158 break;
5161 this_node_count = (tree_low_cst (hi_index, 1)
5162 - tree_low_cst (lo_index, 1) + 1);
5164 else
5165 this_node_count = 1;
5167 count += this_node_count;
5168 if (mostly_zeros_p (value))
5169 zero_count += this_node_count;
5172 /* Clear the entire array first if there are any missing
5173 elements, or if the incidence of zero elements is >=
5174 75%. */
5175 if (! need_to_clear
5176 && (count < maxelt - minelt + 1
5177 || 4 * zero_count >= 3 * count))
5178 need_to_clear = 1;
5181 if (need_to_clear && size > 0)
5183 if (REG_P (target))
5184 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5185 else
5186 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5187 cleared = 1;
5190 if (!cleared && REG_P (target))
5191 /* Inform later passes that the old value is dead. */
5192 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5194 /* Store each element of the constructor into the
5195 corresponding element of TARGET, determined by counting the
5196 elements. */
5197 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5199 enum machine_mode mode;
5200 HOST_WIDE_INT bitsize;
5201 HOST_WIDE_INT bitpos;
5202 int unsignedp;
5203 rtx xtarget = target;
5205 if (cleared && initializer_zerop (value))
5206 continue;
5208 unsignedp = TYPE_UNSIGNED (elttype);
5209 mode = TYPE_MODE (elttype);
5210 if (mode == BLKmode)
5211 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5212 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5213 : -1);
5214 else
5215 bitsize = GET_MODE_BITSIZE (mode);
5217 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5219 tree lo_index = TREE_OPERAND (index, 0);
5220 tree hi_index = TREE_OPERAND (index, 1);
5221 rtx index_r, pos_rtx;
5222 HOST_WIDE_INT lo, hi, count;
5223 tree position;
5225 /* If the range is constant and "small", unroll the loop. */
5226 if (const_bounds_p
5227 && host_integerp (lo_index, 0)
5228 && host_integerp (hi_index, 0)
5229 && (lo = tree_low_cst (lo_index, 0),
5230 hi = tree_low_cst (hi_index, 0),
5231 count = hi - lo + 1,
5232 (!MEM_P (target)
5233 || count <= 2
5234 || (host_integerp (TYPE_SIZE (elttype), 1)
5235 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5236 <= 40 * 8)))))
5238 lo -= minelt; hi -= minelt;
5239 for (; lo <= hi; lo++)
5241 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5243 if (MEM_P (target)
5244 && !MEM_KEEP_ALIAS_SET_P (target)
5245 && TREE_CODE (type) == ARRAY_TYPE
5246 && TYPE_NONALIASED_COMPONENT (type))
5248 target = copy_rtx (target);
5249 MEM_KEEP_ALIAS_SET_P (target) = 1;
5252 store_constructor_field
5253 (target, bitsize, bitpos, mode, value, type, cleared,
5254 get_alias_set (elttype));
5257 else
5259 rtx loop_start = gen_label_rtx ();
5260 rtx loop_end = gen_label_rtx ();
5261 tree exit_cond;
5263 expand_normal (hi_index);
5264 unsignedp = TYPE_UNSIGNED (domain);
5266 index = build_decl (VAR_DECL, NULL_TREE, domain);
5268 index_r
5269 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5270 &unsignedp, 0));
5271 SET_DECL_RTL (index, index_r);
5272 store_expr (lo_index, index_r, 0);
5274 /* Build the head of the loop. */
5275 do_pending_stack_adjust ();
5276 emit_label (loop_start);
5278 /* Assign value to element index. */
5279 position =
5280 fold_convert (ssizetype,
5281 fold_build2 (MINUS_EXPR,
5282 TREE_TYPE (index),
5283 index,
5284 TYPE_MIN_VALUE (domain)));
5286 position =
5287 size_binop (MULT_EXPR, position,
5288 fold_convert (ssizetype,
5289 TYPE_SIZE_UNIT (elttype)));
5291 pos_rtx = expand_normal (position);
5292 xtarget = offset_address (target, pos_rtx,
5293 highest_pow2_factor (position));
5294 xtarget = adjust_address (xtarget, mode, 0);
5295 if (TREE_CODE (value) == CONSTRUCTOR)
5296 store_constructor (value, xtarget, cleared,
5297 bitsize / BITS_PER_UNIT);
5298 else
5299 store_expr (value, xtarget, 0);
5301 /* Generate a conditional jump to exit the loop. */
5302 exit_cond = build2 (LT_EXPR, integer_type_node,
5303 index, hi_index);
5304 jumpif (exit_cond, loop_end);
5306 /* Update the loop counter, and jump to the head of
5307 the loop. */
5308 expand_assignment (index,
5309 build2 (PLUS_EXPR, TREE_TYPE (index),
5310 index, integer_one_node));
5312 emit_jump (loop_start);
5314 /* Build the end of the loop. */
5315 emit_label (loop_end);
5318 else if ((index != 0 && ! host_integerp (index, 0))
5319 || ! host_integerp (TYPE_SIZE (elttype), 1))
5321 tree position;
5323 if (index == 0)
5324 index = ssize_int (1);
5326 if (minelt)
5327 index = fold_convert (ssizetype,
5328 fold_build2 (MINUS_EXPR,
5329 TREE_TYPE (index),
5330 index,
5331 TYPE_MIN_VALUE (domain)));
5333 position =
5334 size_binop (MULT_EXPR, index,
5335 fold_convert (ssizetype,
5336 TYPE_SIZE_UNIT (elttype)));
5337 xtarget = offset_address (target,
5338 expand_normal (position),
5339 highest_pow2_factor (position));
5340 xtarget = adjust_address (xtarget, mode, 0);
5341 store_expr (value, xtarget, 0);
5343 else
5345 if (index != 0)
5346 bitpos = ((tree_low_cst (index, 0) - minelt)
5347 * tree_low_cst (TYPE_SIZE (elttype), 1));
5348 else
5349 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5351 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5352 && TREE_CODE (type) == ARRAY_TYPE
5353 && TYPE_NONALIASED_COMPONENT (type))
5355 target = copy_rtx (target);
5356 MEM_KEEP_ALIAS_SET_P (target) = 1;
5358 store_constructor_field (target, bitsize, bitpos, mode, value,
5359 type, cleared, get_alias_set (elttype));
5362 break;
5365 case VECTOR_TYPE:
5367 unsigned HOST_WIDE_INT idx;
5368 constructor_elt *ce;
5369 int i;
5370 int need_to_clear;
5371 int icode = 0;
5372 tree elttype = TREE_TYPE (type);
5373 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5374 enum machine_mode eltmode = TYPE_MODE (elttype);
5375 HOST_WIDE_INT bitsize;
5376 HOST_WIDE_INT bitpos;
5377 rtvec vector = NULL;
5378 unsigned n_elts;
5380 gcc_assert (eltmode != BLKmode);
5382 n_elts = TYPE_VECTOR_SUBPARTS (type);
5383 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5385 enum machine_mode mode = GET_MODE (target);
5387 icode = (int) vec_init_optab->handlers[mode].insn_code;
5388 if (icode != CODE_FOR_nothing)
5390 unsigned int i;
5392 vector = rtvec_alloc (n_elts);
5393 for (i = 0; i < n_elts; i++)
5394 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5398 /* If the constructor has fewer elements than the vector,
5399 clear the whole vector first. Similarly if this is a static
5400 constructor of a non-BLKmode object. */
5401 if (cleared)
5402 need_to_clear = 0;
5403 else if (REG_P (target) && TREE_STATIC (exp))
5404 need_to_clear = 1;
5405 else
5407 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5408 tree value;
5410 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5412 int n_elts_here = tree_low_cst
5413 (int_const_binop (TRUNC_DIV_EXPR,
5414 TYPE_SIZE (TREE_TYPE (value)),
5415 TYPE_SIZE (elttype), 0), 1);
5417 count += n_elts_here;
5418 if (mostly_zeros_p (value))
5419 zero_count += n_elts_here;
5422 /* Clear the entire vector first if there are any missing elements,
5423 or if the incidence of zero elements is >= 75%. */
5424 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5427 if (need_to_clear && size > 0 && !vector)
5429 if (REG_P (target))
5430 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5431 else
5432 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5433 cleared = 1;
5436 /* Inform later passes that the old value is dead. */
5437 if (!cleared && !vector && REG_P (target))
5438 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5440 /* Store each element of the constructor into the corresponding
5441 element of TARGET, determined by counting the elements. */
5442 for (idx = 0, i = 0;
5443 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5444 idx++, i += bitsize / elt_size)
5446 HOST_WIDE_INT eltpos;
5447 tree value = ce->value;
5449 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5450 if (cleared && initializer_zerop (value))
5451 continue;
5453 if (ce->index)
5454 eltpos = tree_low_cst (ce->index, 1);
5455 else
5456 eltpos = i;
5458 if (vector)
5460 /* Vector CONSTRUCTORs should only be built from smaller
5461 vectors in the case of BLKmode vectors. */
5462 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5463 RTVEC_ELT (vector, eltpos)
5464 = expand_normal (value);
5466 else
5468 enum machine_mode value_mode =
5469 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5470 ? TYPE_MODE (TREE_TYPE (value))
5471 : eltmode;
5472 bitpos = eltpos * elt_size;
5473 store_constructor_field (target, bitsize, bitpos,
5474 value_mode, value, type,
5475 cleared, get_alias_set (elttype));
5479 if (vector)
5480 emit_insn (GEN_FCN (icode)
5481 (target,
5482 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5483 break;
5486 default:
5487 gcc_unreachable ();
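/* For illustration only, not compiled (guarded by "#if 0"): automatic
   initializers (hypothetical, using the GNU C range-designator
   extension) that exercise the ARRAY_TYPE code above.  Both arrays are
   cleared first because elements are missing from the constructors;
   the first range is small enough to be unrolled element by element,
   while the second is stored with the generated runtime loop.  */
#if 0
static void
example_array_ctors (void)
{
  int small_a[8] = { [1 ... 3] = 7 };
  int big_a[256] = { [10 ... 200] = 7 };
}
#endif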
5491 /* Store the value of EXP (an expression tree)
5492 into a subfield of TARGET which has mode MODE and occupies
5493 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5494 If MODE is VOIDmode, it means that we are storing into a bit-field.
5496 Always return const0_rtx unless we have something particular to
5497 return.
5499 TYPE is the type of the underlying object,
5501 ALIAS_SET is the alias set for the destination. This value will
5502 (in general) be different from that for TARGET, since TARGET is a
5503 reference to the containing structure. */
5505 static rtx
5506 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5507 enum machine_mode mode, tree exp, tree type, int alias_set)
5509 HOST_WIDE_INT width_mask = 0;
5511 if (TREE_CODE (exp) == ERROR_MARK)
5512 return const0_rtx;
5514 /* If we have nothing to store, do nothing unless the expression has
5515 side-effects. */
5516 if (bitsize == 0)
5517 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5518 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5519 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5521 /* If we are storing into an unaligned field of an aligned union that is
5522 in a register, we may have the mode of TARGET being an integer mode but
5523 MODE == BLKmode. In that case, get an aligned object whose size and
5524 alignment are the same as TARGET and store TARGET into it (we can avoid
5525 the store if the field being stored is the entire width of TARGET). Then
5526 call ourselves recursively to store the field into a BLKmode version of
5527 that object. Finally, load from the object into TARGET. This is not
5528 very efficient in general, but should only be slightly more expensive
5529 than the otherwise-required unaligned accesses. Perhaps this can be
5530 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5531 twice, once with emit_move_insn and once via store_field. */
5533 if (mode == BLKmode
5534 && (REG_P (target) || GET_CODE (target) == SUBREG))
5536 rtx object = assign_temp (type, 0, 1, 1);
5537 rtx blk_object = adjust_address (object, BLKmode, 0);
5539 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5540 emit_move_insn (object, target);
5542 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5544 emit_move_insn (target, object);
5546 /* We want to return the BLKmode version of the data. */
5547 return blk_object;
5550 if (GET_CODE (target) == CONCAT)
5552 /* We're storing into a struct containing a single __complex. */
5554 gcc_assert (!bitpos);
5555 return store_expr (exp, target, 0);
5558 /* If the structure is in a register or if the component
5559 is a bit field, we cannot use addressing to access it.
5560 Use bit-field techniques or SUBREG to store in it. */
5562 if (mode == VOIDmode
5563 || (mode != BLKmode && ! direct_store[(int) mode]
5564 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5565 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5566 || REG_P (target)
5567 || GET_CODE (target) == SUBREG
5568 /* If the field isn't aligned enough to store as an ordinary memref,
5569 store it as a bit field. */
5570 || (mode != BLKmode
5571 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5572 || bitpos % GET_MODE_ALIGNMENT (mode))
5573 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5574 || (bitpos % BITS_PER_UNIT != 0)))
5575 /* If the RHS and field are a constant size and the size of the
5576 RHS isn't the same size as the bitfield, we must use bitfield
5577 operations. */
5578 || (bitsize >= 0
5579 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5580 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5582 rtx temp;
5584 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5585 implies a mask operation. If the precision is the same size as
5586 the field we're storing into, that mask is redundant. This is
5587 particularly common with bit field assignments generated by the
5588 C front end. */
5589 if (TREE_CODE (exp) == NOP_EXPR)
5591 tree type = TREE_TYPE (exp);
5592 if (INTEGRAL_TYPE_P (type)
5593 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5594 && bitsize == TYPE_PRECISION (type))
5596 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5597 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5598 exp = TREE_OPERAND (exp, 0);
5602 temp = expand_normal (exp);
5604 /* If BITSIZE is narrower than the size of the type of EXP
5605 we will be narrowing TEMP. Normally, what's wanted are the
5606 low-order bits. However, if EXP's type is a record and this is
5607 a big-endian machine, we want the upper BITSIZE bits. */
5608 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5609 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5610 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5611 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5612 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5613 - bitsize),
5614 NULL_RTX, 1);
5616 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5617 MODE. */
5618 if (mode != VOIDmode && mode != BLKmode
5619 && mode != TYPE_MODE (TREE_TYPE (exp)))
5620 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5622 /* If the modes of TARGET and TEMP are both BLKmode, both
5623 must be in memory and BITPOS must be aligned on a byte
5624 boundary. If so, we simply do a block copy. */
5625 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5627 gcc_assert (MEM_P (target) && MEM_P (temp)
5628 && !(bitpos % BITS_PER_UNIT));
5630 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5631 emit_block_move (target, temp,
5632 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5633 / BITS_PER_UNIT),
5634 BLOCK_OP_NORMAL);
5636 return const0_rtx;
5639 /* Store the value in the bitfield. */
5640 store_bit_field (target, bitsize, bitpos, mode, temp);
5642 return const0_rtx;
5644 else
5646 /* Now build a reference to just the desired component. */
5647 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5649 if (to_rtx == target)
5650 to_rtx = copy_rtx (to_rtx);
5652 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5653 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5654 set_mem_alias_set (to_rtx, alias_set);
5656 return store_expr (exp, to_rtx, 0);
5660 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5661 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5662 codes and find the ultimate containing object, which we return.
5664 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5665 bit position, and *PUNSIGNEDP to the signedness of the field.
5666 If the position of the field is variable, we store a tree
5667 giving the variable offset (in units) in *POFFSET.
5668 This offset is in addition to the bit position.
5669 If the position is not variable, we store 0 in *POFFSET.
5671 If any of the extraction expressions is volatile,
5672 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5674 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5675 is a mode that can be used to access the field. In that case, *PBITSIZE
5676 is redundant.
5678 If the field describes a variable-sized object, *PMODE is set to
5679 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5680 this case, but the address of the object can be found.
5682 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5683 look through nodes that serve as markers of a greater alignment than
5684 the one that can be deduced from the expression. These nodes make it
5685 possible for front-ends to prevent temporaries from being created by
5686 the middle-end on alignment considerations. For that purpose, the
5687 normal operating mode at high-level is to always pass FALSE so that
5688 the ultimate containing object is really returned; moreover, the
5689 associated predicate handled_component_p will always return TRUE
5690 on these nodes, thus indicating that they are essentially handled
5691 by get_inner_reference. TRUE should only be passed when the caller
5692 is scanning the expression in order to build another representation
5693 and specifically knows how to handle these nodes; as such, this is
5694 the normal operating mode in the RTL expanders. */
5696 tree
5697 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5698 HOST_WIDE_INT *pbitpos, tree *poffset,
5699 enum machine_mode *pmode, int *punsignedp,
5700 int *pvolatilep, bool keep_aligning)
5702 tree size_tree = 0;
5703 enum machine_mode mode = VOIDmode;
5704 tree offset = size_zero_node;
5705 tree bit_offset = bitsize_zero_node;
5706 tree tem;
5708 /* First get the mode, signedness, and size. We do this from just the
5709 outermost expression. */
5710 if (TREE_CODE (exp) == COMPONENT_REF)
5712 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5713 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5714 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5716 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5718 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5720 size_tree = TREE_OPERAND (exp, 1);
5721 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5723 /* For vector types, with the correct size of access, use the mode of
5724 inner type. */
5725 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5726 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5727 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5728 mode = TYPE_MODE (TREE_TYPE (exp));
5730 else
5732 mode = TYPE_MODE (TREE_TYPE (exp));
5733 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5735 if (mode == BLKmode)
5736 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5737 else
5738 *pbitsize = GET_MODE_BITSIZE (mode);
5741 if (size_tree != 0)
5743 if (! host_integerp (size_tree, 1))
5744 mode = BLKmode, *pbitsize = -1;
5745 else
5746 *pbitsize = tree_low_cst (size_tree, 1);
5749 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5750 and find the ultimate containing object. */
5751 while (1)
5753 switch (TREE_CODE (exp))
5755 case BIT_FIELD_REF:
5756 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5757 TREE_OPERAND (exp, 2));
5758 break;
5760 case COMPONENT_REF:
5762 tree field = TREE_OPERAND (exp, 1);
5763 tree this_offset = component_ref_field_offset (exp);
5765 /* If this field hasn't been filled in yet, don't go past it.
5766 This should only happen when folding expressions made during
5767 type construction. */
5768 if (this_offset == 0)
5769 break;
5771 offset = size_binop (PLUS_EXPR, offset, this_offset);
5772 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5773 DECL_FIELD_BIT_OFFSET (field));
5775 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5777 break;
5779 case ARRAY_REF:
5780 case ARRAY_RANGE_REF:
5782 tree index = TREE_OPERAND (exp, 1);
5783 tree low_bound = array_ref_low_bound (exp);
5784 tree unit_size = array_ref_element_size (exp);
5786 /* We assume all arrays have sizes that are a multiple of a byte.
5787 First subtract the lower bound, if any, in the type of the
5788 index, then convert to sizetype and multiply by the size of
5789 the array element. */
5790 if (! integer_zerop (low_bound))
5791 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5792 index, low_bound);
5794 offset = size_binop (PLUS_EXPR, offset,
5795 size_binop (MULT_EXPR,
5796 fold_convert (sizetype, index),
5797 unit_size));
5799 break;
5801 case REALPART_EXPR:
5802 break;
5804 case IMAGPART_EXPR:
5805 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5806 bitsize_int (*pbitsize));
5807 break;
5809 case VIEW_CONVERT_EXPR:
5810 if (keep_aligning && STRICT_ALIGNMENT
5811 && (TYPE_ALIGN (TREE_TYPE (exp))
5812 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5813 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5814 < BIGGEST_ALIGNMENT)
5815 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5816 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5817 goto done;
5818 break;
5820 default:
5821 goto done;
5824 /* If any reference in the chain is volatile, the effect is volatile. */
5825 if (TREE_THIS_VOLATILE (exp))
5826 *pvolatilep = 1;
5828 exp = TREE_OPERAND (exp, 0);
5830 done:
5832 /* If OFFSET is constant, see if we can return the whole thing as a
5833 constant bit position. Otherwise, split it up. */
5834 if (host_integerp (offset, 0)
5835 && 0 != (tem = size_binop (MULT_EXPR,
5836 fold_convert (bitsizetype, offset),
5837 bitsize_unit_node))
5838 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5839 && host_integerp (tem, 0))
5840 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5841 else
5842 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5844 *pmode = mode;
5845 return exp;
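/* Illustrative sketch (not part of the original file): a typical caller
   decomposes a reference such as a COMPONENT_REF or ARRAY_REF into the
   outputs documented above.  This mirrors the call made later in this file
   by expand_expr_addr_expr_1; REF and the other variable names are
   hypothetical.  */
#if 0
  /* REF is some COMPONENT_REF, ARRAY_REF, etc. obtained by the caller.  */
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode1;
  int unsignedp, volatilep = 0;

  /* On return, INNER is the ultimate containing object, BITPOS/BITSIZE
     locate the accessed bits within it, and OFFSET, if nonzero, is an
     additional variable byte offset.  */
  tree inner = get_inner_reference (ref, &bitsize, &bitpos, &offset,
				    &mode1, &unsignedp, &volatilep,
				    /*keep_aligning=*/false);
#endif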
5848 /* Return a tree of sizetype representing the size, in bytes, of the element
5849 of EXP, an ARRAY_REF. */
5851 tree
5852 array_ref_element_size (tree exp)
5854 tree aligned_size = TREE_OPERAND (exp, 3);
5855 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5857 /* If a size was specified in the ARRAY_REF, it's the size measured
5858 in alignment units of the element type. So multiply by that value. */
5859 if (aligned_size)
5861 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5862 sizetype from another type of the same width and signedness. */
5863 if (TREE_TYPE (aligned_size) != sizetype)
5864 aligned_size = fold_convert (sizetype, aligned_size);
5865 return size_binop (MULT_EXPR, aligned_size,
5866 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5869 /* Otherwise, take the size from that of the element type. Substitute
5870 any PLACEHOLDER_EXPR that we have. */
5871 else
5872 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
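/* Worked example (illustrative, not from the original source): if the
   ARRAY_REF carries an explicit operand 3 equal to 3 and the element type
   has TYPE_ALIGN_UNIT == 4, the size returned above is 3 * 4 = 12 bytes;
   when operand 3 is absent, the result is simply the element type's
   TYPE_SIZE_UNIT.  */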
5875 /* Return a tree representing the lower bound of the array mentioned in
5876 EXP, an ARRAY_REF. */
5878 tree
5879 array_ref_low_bound (tree exp)
5881 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5883 /* If a lower bound is specified in EXP, use it. */
5884 if (TREE_OPERAND (exp, 2))
5885 return TREE_OPERAND (exp, 2);
5887 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5888 substituting for a PLACEHOLDER_EXPR as needed. */
5889 if (domain_type && TYPE_MIN_VALUE (domain_type))
5890 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5892 /* Otherwise, return a zero of the appropriate type. */
5893 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5896 /* Return a tree representing the upper bound of the array mentioned in
5897 EXP, an ARRAY_REF. */
5899 tree
5900 array_ref_up_bound (tree exp)
5902 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5904 /* If there is a domain type and it has an upper bound, use it, substituting
5905 for a PLACEHOLDER_EXPR as needed. */
5906 if (domain_type && TYPE_MAX_VALUE (domain_type))
5907 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5909 /* Otherwise fail. */
5910 return NULL_TREE;
5913 /* Return a tree representing the offset, in bytes, of the field referenced
5914 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5916 tree
5917 component_ref_field_offset (tree exp)
5919 tree aligned_offset = TREE_OPERAND (exp, 2);
5920 tree field = TREE_OPERAND (exp, 1);
5922 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5923 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5924 value. */
5925 if (aligned_offset)
5927 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5928 sizetype from another type of the same width and signedness. */
5929 if (TREE_TYPE (aligned_offset) != sizetype)
5930 aligned_offset = fold_convert (sizetype, aligned_offset);
5931 return size_binop (MULT_EXPR, aligned_offset,
5932 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5935 /* Otherwise, take the offset from that of the field. Substitute
5936 any PLACEHOLDER_EXPR that we have. */
5937 else
5938 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5941 /* Return 1 if T is an expression that get_inner_reference handles. */
5943 int
5944 handled_component_p (tree t)
5946 switch (TREE_CODE (t))
5948 case BIT_FIELD_REF:
5949 case COMPONENT_REF:
5950 case ARRAY_REF:
5951 case ARRAY_RANGE_REF:
5952 case VIEW_CONVERT_EXPR:
5953 case REALPART_EXPR:
5954 case IMAGPART_EXPR:
5955 return 1;
5957 default:
5958 return 0;
5962 /* Given an rtx VALUE that may contain additions and multiplications, return
5963 an equivalent value that just refers to a register, memory, or constant.
5964 This is done by generating instructions to perform the arithmetic and
5965 returning a pseudo-register containing the value.
5967 The returned value may be a REG, SUBREG, MEM or constant. */
5969 rtx
5970 force_operand (rtx value, rtx target)
5972 rtx op1, op2;
5973 /* Use subtarget as the target for operand 0 of a binary operation. */
5974 rtx subtarget = get_subtarget (target);
5975 enum rtx_code code = GET_CODE (value);
5977 /* Check for subreg applied to an expression produced by loop optimizer. */
5978 if (code == SUBREG
5979 && !REG_P (SUBREG_REG (value))
5980 && !MEM_P (SUBREG_REG (value)))
5982 value = simplify_gen_subreg (GET_MODE (value),
5983 force_reg (GET_MODE (SUBREG_REG (value)),
5984 force_operand (SUBREG_REG (value),
5985 NULL_RTX)),
5986 GET_MODE (SUBREG_REG (value)),
5987 SUBREG_BYTE (value));
5988 code = GET_CODE (value);
5991 /* Check for a PIC address load. */
5992 if ((code == PLUS || code == MINUS)
5993 && XEXP (value, 0) == pic_offset_table_rtx
5994 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5995 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5996 || GET_CODE (XEXP (value, 1)) == CONST))
5998 if (!subtarget)
5999 subtarget = gen_reg_rtx (GET_MODE (value));
6000 emit_move_insn (subtarget, value);
6001 return subtarget;
6004 if (ARITHMETIC_P (value))
6006 op2 = XEXP (value, 1);
6007 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6008 subtarget = 0;
6009 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6011 code = PLUS;
6012 op2 = negate_rtx (GET_MODE (value), op2);
6015 /* Check for an addition with OP2 a constant integer and our first
6016 operand a PLUS of a virtual register and something else. In that
6017 case, we want to emit the sum of the virtual register and the
6018 constant first and then add the other value. This allows virtual
6019 register instantiation to simply modify the constant rather than
6020 creating another one around this addition. */
6021 if (code == PLUS && GET_CODE (op2) == CONST_INT
6022 && GET_CODE (XEXP (value, 0)) == PLUS
6023 && REG_P (XEXP (XEXP (value, 0), 0))
6024 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6025 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6027 rtx temp = expand_simple_binop (GET_MODE (value), code,
6028 XEXP (XEXP (value, 0), 0), op2,
6029 subtarget, 0, OPTAB_LIB_WIDEN);
6030 return expand_simple_binop (GET_MODE (value), code, temp,
6031 force_operand (XEXP (XEXP (value,
6032 0), 1), 0),
6033 target, 0, OPTAB_LIB_WIDEN);
6036 op1 = force_operand (XEXP (value, 0), subtarget);
6037 op2 = force_operand (op2, NULL_RTX);
6038 switch (code)
6040 case MULT:
6041 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6042 case DIV:
6043 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6044 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6045 target, 1, OPTAB_LIB_WIDEN);
6046 else
6047 return expand_divmod (0,
6048 FLOAT_MODE_P (GET_MODE (value))
6049 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6050 GET_MODE (value), op1, op2, target, 0);
6051 break;
6052 case MOD:
6053 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6054 target, 0);
6055 break;
6056 case UDIV:
6057 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6058 target, 1);
6059 break;
6060 case UMOD:
6061 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6062 target, 1);
6063 break;
6064 case ASHIFTRT:
6065 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6066 target, 0, OPTAB_LIB_WIDEN);
6067 break;
6068 default:
6069 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6070 target, 1, OPTAB_LIB_WIDEN);
6073 if (UNARY_P (value))
6075 if (!target)
6076 target = gen_reg_rtx (GET_MODE (value));
6077 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6078 switch (code)
6080 case ZERO_EXTEND:
6081 case SIGN_EXTEND:
6082 case TRUNCATE:
6083 case FLOAT_EXTEND:
6084 case FLOAT_TRUNCATE:
6085 convert_move (target, op1, code == ZERO_EXTEND);
6086 return target;
6088 case FIX:
6089 case UNSIGNED_FIX:
6090 expand_fix (target, op1, code == UNSIGNED_FIX);
6091 return target;
6093 case FLOAT:
6094 case UNSIGNED_FLOAT:
6095 expand_float (target, op1, code == UNSIGNED_FLOAT);
6096 return target;
6098 default:
6099 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6103 #ifdef INSN_SCHEDULING
6104 /* On machines that have insn scheduling, we want all memory references to be
6105 explicit, so we need to deal with such paradoxical SUBREGs. */
6106 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6107 && (GET_MODE_SIZE (GET_MODE (value))
6108 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6109 value
6110 = simplify_gen_subreg (GET_MODE (value),
6111 force_reg (GET_MODE (SUBREG_REG (value)),
6112 force_operand (SUBREG_REG (value),
6113 NULL_RTX)),
6114 GET_MODE (SUBREG_REG (value)),
6115 SUBREG_BYTE (value));
6116 #endif
6118 return value;
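/* Illustrative sketch (not part of the original file): force_operand is
   typically used to flatten address arithmetic into a single pseudo before
   the value is used as an operand.  The registers below are made up for
   the example.  */
#if 0
  rtx base = gen_reg_rtx (Pmode);
  rtx index = gen_reg_rtx (Pmode);
  /* (plus base (mult index 4)) is not a valid operand on most machines;
     force_operand emits the multiply and add and returns a pseudo holding
     the result.  Passing NULL_RTX lets it choose the register.  */
  rtx addr = gen_rtx_PLUS (Pmode, base,
			   gen_rtx_MULT (Pmode, index, GEN_INT (4)));
  rtx flat = force_operand (addr, NULL_RTX);
#endif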
6121 /* Subroutine of expand_expr: return nonzero iff there is no way that
6122 EXP can reference X, which is being modified. TOP_P is nonzero if this
6123 call is going to be used to determine whether we need a temporary
6124 for EXP, as opposed to a recursive call to this function.
6126 It is always safe for this routine to return zero since it merely
6127 searches for optimization opportunities. */
6129 static int
6130 safe_from_p (rtx x, tree exp, int top_p)
6132 rtx exp_rtl = 0;
6133 int i, nops;
6135 if (x == 0
6136 /* If EXP has varying size, we MUST use a target since we currently
6137 have no way of allocating temporaries of variable size
6138 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6139 So we assume here that something at a higher level has prevented a
6140 clash. This is somewhat bogus, but the best we can do. Only
6141 do this when X is BLKmode and when we are at the top level. */
6142 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6143 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6144 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6145 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6146 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6147 != INTEGER_CST)
6148 && GET_MODE (x) == BLKmode)
6149 /* If X is in the outgoing argument area, it is always safe. */
6150 || (MEM_P (x)
6151 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6152 || (GET_CODE (XEXP (x, 0)) == PLUS
6153 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6154 return 1;
6156 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6157 find the underlying pseudo. */
6158 if (GET_CODE (x) == SUBREG)
6160 x = SUBREG_REG (x);
6161 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6162 return 0;
6165 /* Now look at our tree code and possibly recurse. */
6166 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6168 case tcc_declaration:
6169 exp_rtl = DECL_RTL_IF_SET (exp);
6170 break;
6172 case tcc_constant:
6173 return 1;
6175 case tcc_exceptional:
6176 if (TREE_CODE (exp) == TREE_LIST)
6178 while (1)
6180 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6181 return 0;
6182 exp = TREE_CHAIN (exp);
6183 if (!exp)
6184 return 1;
6185 if (TREE_CODE (exp) != TREE_LIST)
6186 return safe_from_p (x, exp, 0);
6189 else if (TREE_CODE (exp) == CONSTRUCTOR)
6191 constructor_elt *ce;
6192 unsigned HOST_WIDE_INT idx;
6194 for (idx = 0;
6195 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6196 idx++)
6197 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6198 || !safe_from_p (x, ce->value, 0))
6199 return 0;
6200 return 1;
6202 else if (TREE_CODE (exp) == ERROR_MARK)
6203 return 1; /* An already-visited SAVE_EXPR? */
6204 else
6205 return 0;
6207 case tcc_statement:
6208 /* The only case we look at here is the DECL_INITIAL inside a
6209 DECL_EXPR. */
6210 return (TREE_CODE (exp) != DECL_EXPR
6211 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6212 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6213 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6215 case tcc_binary:
6216 case tcc_comparison:
6217 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6218 return 0;
6219 /* Fall through. */
6221 case tcc_unary:
6222 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6224 case tcc_expression:
6225 case tcc_reference:
6226 case tcc_vl_exp:
6227 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6228 the expression. If it is set, we conflict iff we are that rtx or
6229 both are in memory. Otherwise, we check all operands of the
6230 expression recursively. */
6232 switch (TREE_CODE (exp))
6234 case ADDR_EXPR:
6235 /* If the operand is static or we are static, we can't conflict.
6236 Likewise if we don't conflict with the operand at all. */
6237 if (staticp (TREE_OPERAND (exp, 0))
6238 || TREE_STATIC (exp)
6239 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6240 return 1;
6242 /* Otherwise, the only way this can conflict is if we are taking
6243 the address of a DECL whose address is part of X, which is
6244 very rare. */
6245 exp = TREE_OPERAND (exp, 0);
6246 if (DECL_P (exp))
6248 if (!DECL_RTL_SET_P (exp)
6249 || !MEM_P (DECL_RTL (exp)))
6250 return 0;
6251 else
6252 exp_rtl = XEXP (DECL_RTL (exp), 0);
6254 break;
6256 case MISALIGNED_INDIRECT_REF:
6257 case ALIGN_INDIRECT_REF:
6258 case INDIRECT_REF:
6259 if (MEM_P (x)
6260 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6261 get_alias_set (exp)))
6262 return 0;
6263 break;
6265 case CALL_EXPR:
6266 /* Assume that the call will clobber all hard registers and
6267 all of memory. */
6268 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6269 || MEM_P (x))
6270 return 0;
6271 break;
6273 case WITH_CLEANUP_EXPR:
6274 case CLEANUP_POINT_EXPR:
6275 /* Lowered by gimplify.c. */
6276 gcc_unreachable ();
6278 case SAVE_EXPR:
6279 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6281 default:
6282 break;
6285 /* If we have an rtx, we do not need to scan our operands. */
6286 if (exp_rtl)
6287 break;
6289 nops = TREE_OPERAND_LENGTH (exp);
6290 for (i = 0; i < nops; i++)
6291 if (TREE_OPERAND (exp, i) != 0
6292 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6293 return 0;
6295 /* If this is a language-specific tree code, it may require
6296 special handling. */
6297 if ((unsigned int) TREE_CODE (exp)
6298 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6299 && !lang_hooks.safe_from_p (x, exp))
6300 return 0;
6301 break;
6303 case tcc_type:
6304 /* Should never get a type here. */
6305 gcc_unreachable ();
6307 case tcc_gimple_stmt:
6308 gcc_unreachable ();
6311 /* If we have an rtl, find any enclosed object. Then see if we conflict
6312 with it. */
6313 if (exp_rtl)
6315 if (GET_CODE (exp_rtl) == SUBREG)
6317 exp_rtl = SUBREG_REG (exp_rtl);
6318 if (REG_P (exp_rtl)
6319 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6320 return 0;
6323 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6324 are memory and they conflict. */
6325 return ! (rtx_equal_p (x, exp_rtl)
6326 || (MEM_P (x) && MEM_P (exp_rtl)
6327 && true_dependence (exp_rtl, VOIDmode, x,
6328 rtx_addr_varies_p)));
6331 /* If we reach here, it is safe. */
6332 return 1;
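/* Illustrative sketch (not part of the original file): the usual pattern,
   as in expand_operands just below, is to give up on reusing TARGET when
   it might be referenced by the other operand; TARGET and EXP1 here are
   the caller's names.  */
#if 0
  if (! safe_from_p (target, exp1, 1))
    target = 0;	/* EXP1 may read TARGET, so expand into a fresh place.  */
#endif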
6336 /* Return the highest power of two that EXP is known to be a multiple of.
6337 This is used in updating alignment of MEMs in array references. */
6339 unsigned HOST_WIDE_INT
6340 highest_pow2_factor (tree exp)
6342 unsigned HOST_WIDE_INT c0, c1;
6344 switch (TREE_CODE (exp))
6346 case INTEGER_CST:
6347 /* We can find the lowest bit that's a one. If the low
6348 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6349 We need to handle this case since we can find it in a COND_EXPR,
6350 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6351 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6352 later ICE. */
6353 if (TREE_OVERFLOW (exp))
6354 return BIGGEST_ALIGNMENT;
6355 else
6357 /* Note: tree_low_cst is intentionally not used here,
6358 we don't care about the upper bits. */
6359 c0 = TREE_INT_CST_LOW (exp);
6360 c0 &= -c0;
6361 return c0 ? c0 : BIGGEST_ALIGNMENT;
6363 break;
6365 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6366 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6367 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6368 return MIN (c0, c1);
6370 case MULT_EXPR:
6371 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6372 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6373 return c0 * c1;
6375 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6376 case CEIL_DIV_EXPR:
6377 if (integer_pow2p (TREE_OPERAND (exp, 1))
6378 && host_integerp (TREE_OPERAND (exp, 1), 1))
6380 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6381 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6382 return MAX (1, c0 / c1);
6384 break;
6386 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6387 case SAVE_EXPR:
6388 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6390 case COMPOUND_EXPR:
6391 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6393 case COND_EXPR:
6394 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6395 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6396 return MIN (c0, c1);
6398 default:
6399 break;
6402 return 1;
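/* Worked example (illustrative, not from the original source): for a tree
   representing (x * 12) + 8, the MULT_EXPR case yields 1 * 4 = 4 (nothing
   is known about x, so its factor is 1, and the lowest set bit of 12 is 4),
   the INTEGER_CST case yields 8 for the constant, and the PLUS_EXPR case
   returns MIN (4, 8) = 4.  */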
6405 /* Similar, except that the alignment requirements of TARGET are
6406 taken into account. Assume it is at least as aligned as its
6407 type, unless it is a COMPONENT_REF in which case the layout of
6408 the structure gives the alignment. */
6410 static unsigned HOST_WIDE_INT
6411 highest_pow2_factor_for_target (tree target, tree exp)
6413 unsigned HOST_WIDE_INT target_align, factor;
6415 factor = highest_pow2_factor (exp);
6416 if (TREE_CODE (target) == COMPONENT_REF)
6417 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6418 else
6419 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6420 return MAX (factor, target_align);
6423 /* Return &VAR expression for emulated thread local VAR. */
6425 static tree
6426 emutls_var_address (tree var)
6428 tree emuvar = emutls_decl (var);
6429 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6430 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6431 tree arglist = build_tree_list (NULL_TREE, arg);
6432 tree call = build_function_call_expr (fn, arglist);
6433 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
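/* Illustrative example (not part of the original file): at the source
   level the emulation above rewrites an address-of roughly as

     __thread int i;    ...  &i  ...

   into

     ...  (int *) __emutls_get_address (&control_var_for_i)  ...

   where the control variable is the one produced by emutls_decl for I;
   the exact name of that variable is an implementation detail.  */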
6436 /* Expands variable VAR. */
6438 void
6439 expand_var (tree var)
6441 if (DECL_EXTERNAL (var))
6442 return;
6444 if (TREE_STATIC (var))
6445 /* If this is an inlined copy of a static local variable,
6446 look up the original decl. */
6447 var = DECL_ORIGIN (var);
6449 if (TREE_STATIC (var)
6450 ? !TREE_ASM_WRITTEN (var)
6451 : !DECL_RTL_SET_P (var))
6453 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6454 /* Should be ignored. */;
6455 else if (lang_hooks.expand_decl (var))
6456 /* OK. */;
6457 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6458 expand_decl (var);
6459 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6460 rest_of_decl_compilation (var, 0, 0);
6461 else
6462 /* No expansion needed. */
6463 gcc_assert (TREE_CODE (var) == TYPE_DECL
6464 || TREE_CODE (var) == CONST_DECL
6465 || TREE_CODE (var) == FUNCTION_DECL
6466 || TREE_CODE (var) == LABEL_DECL);
6470 /* Subroutine of expand_expr. Expand the two operands of a binary
6471 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6472 The value may be stored in TARGET if TARGET is nonzero. The
6473 MODIFIER argument is as documented by expand_expr. */
6475 static void
6476 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6477 enum expand_modifier modifier)
6479 if (! safe_from_p (target, exp1, 1))
6480 target = 0;
6481 if (operand_equal_p (exp0, exp1, 0))
6483 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6484 *op1 = copy_rtx (*op0);
6486 else
6488 /* If we need to preserve evaluation order, copy exp0 into its own
6489 temporary variable so that it can't be clobbered by exp1. */
6490 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6491 exp0 = save_expr (exp0);
6492 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6493 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
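/* Illustrative sketch (not part of the original file): a binary operation
   in expand_expr_real_1 is typically expanded through this helper, e.g.
   for a PLUS_EXPR; EXP and SUBTARGET are assumed to be the expression and
   the preferred subtarget computed by the caller.  */
#if 0
  rtx op0, op1;
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, EXPAND_NORMAL);
  /* OP0 and OP1 now hold rtx for the two operands and can be handed to
     expand_binop or a similar routine.  */
#endif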
6498 /* Return a MEM that contains constant EXP. DEFER is as for
6499 output_constant_def and MODIFIER is as for expand_expr. */
6501 static rtx
6502 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6504 rtx mem;
6506 mem = output_constant_def (exp, defer);
6507 if (modifier != EXPAND_INITIALIZER)
6508 mem = use_anchored_address (mem);
6509 return mem;
6512 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6513 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6515 static rtx
6516 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6517 enum expand_modifier modifier)
6519 rtx result, subtarget;
6520 tree inner, offset;
6521 HOST_WIDE_INT bitsize, bitpos;
6522 int volatilep, unsignedp;
6523 enum machine_mode mode1;
6525 /* If we are taking the address of a constant and are at the top level,
6526 we have to use output_constant_def since we can't call force_const_mem
6527 at top level. */
6528 /* ??? This should be considered a front-end bug. We should not be
6529 generating ADDR_EXPR of something that isn't an LVALUE. The only
6530 exception here is STRING_CST. */
6531 if (TREE_CODE (exp) == CONSTRUCTOR
6532 || CONSTANT_CLASS_P (exp))
6533 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6535 /* Everything must be something allowed by is_gimple_addressable. */
6536 switch (TREE_CODE (exp))
6538 case INDIRECT_REF:
6539 /* This case will happen via recursion for &a->b. */
6540 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6542 case CONST_DECL:
6543 /* Recurse and make the output_constant_def clause above handle this. */
6544 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6545 tmode, modifier);
6547 case REALPART_EXPR:
6548 /* The real part of the complex number is always first, therefore
6549 the address is the same as the address of the parent object. */
6550 offset = 0;
6551 bitpos = 0;
6552 inner = TREE_OPERAND (exp, 0);
6553 break;
6555 case IMAGPART_EXPR:
6556 /* The imaginary part of the complex number is always second.
6557 The expression is therefore always offset by the size of the
6558 scalar type. */
6559 offset = 0;
6560 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6561 inner = TREE_OPERAND (exp, 0);
6562 break;
6564 case VAR_DECL:
6565 /* TLS emulation hook - replace __thread VAR's &VAR with
6566 __emutls_get_address (&_emutls.VAR). */
6567 if (! targetm.have_tls
6568 && TREE_CODE (exp) == VAR_DECL
6569 && DECL_THREAD_LOCAL_P (exp))
6571 exp = emutls_var_address (exp);
6572 return expand_expr (exp, target, tmode, modifier);
6574 /* Fall through. */
6576 default:
6577 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6578 expand_expr, as that can have various side effects; LABEL_DECLs for
6579 example, may not have their DECL_RTL set yet. Assume language
6580 specific tree nodes can be expanded in some interesting way. */
6581 if (DECL_P (exp)
6582 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6584 result = expand_expr (exp, target, tmode,
6585 modifier == EXPAND_INITIALIZER
6586 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6588 /* If the DECL isn't in memory, then the DECL wasn't properly
6589 marked TREE_ADDRESSABLE, which will be either a front-end
6590 or a tree optimizer bug. */
6591 gcc_assert (MEM_P (result));
6592 result = XEXP (result, 0);
6594 /* ??? Is this needed anymore? */
6595 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6597 assemble_external (exp);
6598 TREE_USED (exp) = 1;
6601 if (modifier != EXPAND_INITIALIZER
6602 && modifier != EXPAND_CONST_ADDRESS)
6603 result = force_operand (result, target);
6604 return result;
6607 /* Pass FALSE as the last argument to get_inner_reference although
6608 we are expanding to RTL. The rationale is that we know how to
6609 handle "aligning nodes" here: we can just bypass them because
6610 they won't change the final object whose address will be returned
6611 (they actually exist only for that purpose). */
6612 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6613 &mode1, &unsignedp, &volatilep, false);
6614 break;
6617 /* We must have made progress. */
6618 gcc_assert (inner != exp);
6620 subtarget = offset || bitpos ? NULL_RTX : target;
6621 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6623 if (offset)
6625 rtx tmp;
6627 if (modifier != EXPAND_NORMAL)
6628 result = force_operand (result, NULL);
6629 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6631 result = convert_memory_address (tmode, result);
6632 tmp = convert_memory_address (tmode, tmp);
6634 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6635 result = gen_rtx_PLUS (tmode, result, tmp);
6636 else
6638 subtarget = bitpos ? NULL_RTX : target;
6639 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6640 1, OPTAB_LIB_WIDEN);
6644 if (bitpos)
6646 /* Someone beforehand should have rejected taking the address
6647 of such an object. */
6648 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6650 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6651 if (modifier < EXPAND_SUM)
6652 result = force_operand (result, target);
6655 return result;
6658 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6659 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6661 static rtx
6662 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6663 enum expand_modifier modifier)
6665 enum machine_mode rmode;
6666 rtx result;
6668 /* Target mode of VOIDmode says "whatever's natural". */
6669 if (tmode == VOIDmode)
6670 tmode = TYPE_MODE (TREE_TYPE (exp));
6672 /* We can get called with some Weird Things if the user does silliness
6673 like "(short) &a". In that case, convert_memory_address won't do
6674 the right thing, so ignore the given target mode. */
6675 if (tmode != Pmode && tmode != ptr_mode)
6676 tmode = Pmode;
6678 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6679 tmode, modifier);
6681 /* Despite expand_expr claims concerning ignoring TMODE when not
6682 strictly convenient, stuff breaks if we don't honor it. Note
6683 that combined with the above, we only do this for pointer modes. */
6684 rmode = GET_MODE (result);
6685 if (rmode == VOIDmode)
6686 rmode = tmode;
6687 if (rmode != tmode)
6688 result = convert_memory_address (tmode, result);
6690 return result;
6694 /* expand_expr: generate code for computing expression EXP.
6695 An rtx for the computed value is returned. The value is never null.
6696 In the case of a void EXP, const0_rtx is returned.
6698 The value may be stored in TARGET if TARGET is nonzero.
6699 TARGET is just a suggestion; callers must assume that
6700 the rtx returned may not be the same as TARGET.
6702 If TARGET is CONST0_RTX, it means that the value will be ignored.
6704 If TMODE is not VOIDmode, it suggests generating the
6705 result in mode TMODE. But this is done only when convenient.
6706 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6707 TMODE is just a suggestion; callers must assume that
6708 the rtx returned may not have mode TMODE.
6710 Note that TARGET may have neither TMODE nor MODE. In that case, it
6711 probably will not be used.
6713 If MODIFIER is EXPAND_SUM then when EXP is an addition
6714 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6715 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6716 products as above, or REG or MEM, or constant.
6717 Ordinarily in such cases we would output mul or add instructions
6718 and then return a pseudo reg containing the sum.
6720 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6721 it also marks a label as absolutely required (it can't be dead).
6722 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6723 This is used for outputting expressions used in initializers.
6725 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6726 with a constant address even if that address is not normally legitimate.
6727 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6729 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6730 a call parameter. Such targets require special care as we haven't yet
6731 marked TARGET so that it's safe from being trashed by libcalls. We
6732 don't want to use TARGET for anything but the final result;
6733 Intermediate values must go elsewhere. Additionally, calls to
6734 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6736 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6737 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6738 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6739 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6740 recursively. */
6742 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6743 enum expand_modifier, rtx *);
6745 rtx
6746 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6747 enum expand_modifier modifier, rtx *alt_rtl)
6749 int rn = -1;
6750 rtx ret, last = NULL;
6752 /* Handle ERROR_MARK before anybody tries to access its type. */
6753 if (TREE_CODE (exp) == ERROR_MARK
6754 || (!GIMPLE_TUPLE_P (exp) && TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
6756 ret = CONST0_RTX (tmode);
6757 return ret ? ret : const0_rtx;
6760 if (flag_non_call_exceptions)
6762 rn = lookup_stmt_eh_region (exp);
6763 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6764 if (rn >= 0)
6765 last = get_last_insn ();
6768 /* If this is an expression of some kind and it has an associated line
6769 number, then emit the line number before expanding the expression.
6771 We need to save and restore the file and line information so that
6772 errors discovered during expansion are emitted with the right
6773 information. It would be better if the diagnostic routines
6774 used the file/line information embedded in the tree nodes rather
6775 than globals. */
6776 if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6778 location_t saved_location = input_location;
6779 input_location = EXPR_LOCATION (exp);
6780 emit_line_note (input_location);
6782 /* Record where the insns produced belong. */
6783 record_block_change (TREE_BLOCK (exp));
6785 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6787 input_location = saved_location;
6789 else
6791 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6794 /* If using non-call exceptions, mark all insns that may trap.
6795 expand_call() will mark CALL_INSNs before we get to this code,
6796 but it doesn't handle libcalls, and these may trap. */
6797 if (rn >= 0)
6799 rtx insn;
6800 for (insn = next_real_insn (last); insn;
6801 insn = next_real_insn (insn))
6803 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6804 /* If we want exceptions for non-call insns, any
6805 may_trap_p instruction may throw. */
6806 && GET_CODE (PATTERN (insn)) != CLOBBER
6807 && GET_CODE (PATTERN (insn)) != USE
6808 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6810 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6811 REG_NOTES (insn));
6816 return ret;
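/* Illustrative sketch (not part of the original file): callers reach this
   routine through expand_expr with an explicit modifier, e.g. (EXP is the
   caller's expression tree).  */
#if 0
  /* Ordinary value, computed wherever is convenient.  */
  rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

  /* Address-style expansion that may stay as a (plus ...) nest instead of
     being committed to a pseudo register right away.  */
  rtx sum = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_SUM);
#endif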
6819 static rtx
6820 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6821 enum expand_modifier modifier, rtx *alt_rtl)
6823 rtx op0, op1, temp, decl_rtl;
6824 tree type;
6825 int unsignedp;
6826 enum machine_mode mode;
6827 enum tree_code code = TREE_CODE (exp);
6828 optab this_optab;
6829 rtx subtarget, original_target;
6830 int ignore;
6831 tree context, subexp0, subexp1;
6832 bool reduce_bit_field = false;
6833 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6834 ? reduce_to_bit_field_precision ((expr), \
6835 target, \
6836 type) \
6837 : (expr))
6839 if (GIMPLE_STMT_P (exp))
6841 type = void_type_node;
6842 mode = VOIDmode;
6843 unsignedp = 0;
6845 else
6847 type = TREE_TYPE (exp);
6848 mode = TYPE_MODE (type);
6849 unsignedp = TYPE_UNSIGNED (type);
6851 if (lang_hooks.reduce_bit_field_operations
6852 && TREE_CODE (type) == INTEGER_TYPE
6853 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6855 /* An operation in what may be a bit-field type needs the
6856 result to be reduced to the precision of the bit-field type,
6857 which is narrower than that of the type's mode. */
6858 reduce_bit_field = true;
6859 if (modifier == EXPAND_STACK_PARM)
6860 target = 0;
6863 /* Use subtarget as the target for operand 0 of a binary operation. */
6864 subtarget = get_subtarget (target);
6865 original_target = target;
6866 ignore = (target == const0_rtx
6867 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6868 || code == CONVERT_EXPR || code == COND_EXPR
6869 || code == VIEW_CONVERT_EXPR)
6870 && TREE_CODE (type) == VOID_TYPE));
6872 /* If we are going to ignore this result, we need only do something
6873 if there is a side-effect somewhere in the expression. If there
6874 is, short-circuit the most common cases here. Note that we must
6875 not call expand_expr with anything but const0_rtx in case this
6876 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6878 if (ignore)
6880 if (! TREE_SIDE_EFFECTS (exp))
6881 return const0_rtx;
6883 /* Ensure we reference a volatile object even if value is ignored, but
6884 don't do this if all we are doing is taking its address. */
6885 if (TREE_THIS_VOLATILE (exp)
6886 && TREE_CODE (exp) != FUNCTION_DECL
6887 && mode != VOIDmode && mode != BLKmode
6888 && modifier != EXPAND_CONST_ADDRESS)
6890 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6891 if (MEM_P (temp))
6892 temp = copy_to_reg (temp);
6893 return const0_rtx;
6896 if (TREE_CODE_CLASS (code) == tcc_unary
6897 || code == COMPONENT_REF || code == INDIRECT_REF)
6898 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6899 modifier);
6901 else if (TREE_CODE_CLASS (code) == tcc_binary
6902 || TREE_CODE_CLASS (code) == tcc_comparison
6903 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6905 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6906 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6907 return const0_rtx;
6909 else if (code == BIT_FIELD_REF)
6911 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6912 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6913 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6914 return const0_rtx;
6917 target = 0;
6921 switch (code)
6923 case LABEL_DECL:
6925 tree function = decl_function_context (exp);
6927 temp = label_rtx (exp);
6928 temp = gen_rtx_LABEL_REF (Pmode, temp);
6930 if (function != current_function_decl
6931 && function != 0)
6932 LABEL_REF_NONLOCAL_P (temp) = 1;
6934 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6935 return temp;
6938 case SSA_NAME:
6939 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6940 NULL);
6942 case PARM_DECL:
6943 case VAR_DECL:
6944 /* If a static var's type was incomplete when the decl was written,
6945 but the type is complete now, lay out the decl now. */
6946 if (DECL_SIZE (exp) == 0
6947 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6948 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6949 layout_decl (exp, 0);
6951 /* TLS emulation hook - replace __thread vars with
6952 *__emutls_get_address (&_emutls.var). */
6953 if (! targetm.have_tls
6954 && TREE_CODE (exp) == VAR_DECL
6955 && DECL_THREAD_LOCAL_P (exp))
6957 exp = build_fold_indirect_ref (emutls_var_address (exp));
6958 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
6961 /* ... fall through ... */
6963 case FUNCTION_DECL:
6964 case RESULT_DECL:
6965 decl_rtl = DECL_RTL (exp);
6966 gcc_assert (decl_rtl);
6968 /* Ensure variable marked as used even if it doesn't go through
6969 a parser. If it hasn't been used yet, write out an external
6970 definition. */
6971 if (! TREE_USED (exp))
6973 assemble_external (exp);
6974 TREE_USED (exp) = 1;
6977 /* Show we haven't gotten RTL for this yet. */
6978 temp = 0;
6980 /* Variables inherited from containing functions should have
6981 been lowered by this point. */
6982 context = decl_function_context (exp);
6983 gcc_assert (!context
6984 || context == current_function_decl
6985 || TREE_STATIC (exp)
6986 /* ??? C++ creates functions that are not TREE_STATIC. */
6987 || TREE_CODE (exp) == FUNCTION_DECL);
6989 /* This is the case of an array whose size is to be determined
6990 from its initializer, while the initializer is still being parsed.
6991 See expand_decl. */
6993 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
6994 temp = validize_mem (decl_rtl);
6996 /* If DECL_RTL is memory, we are in the normal case and either
6997 the address is not valid or it is not a register and -fforce-addr
6998 is specified, get the address into a register. */
7000 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
7002 if (alt_rtl)
7003 *alt_rtl = decl_rtl;
7004 decl_rtl = use_anchored_address (decl_rtl);
7005 if (modifier != EXPAND_CONST_ADDRESS
7006 && modifier != EXPAND_SUM
7007 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
7008 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
7009 temp = replace_equiv_address (decl_rtl,
7010 copy_rtx (XEXP (decl_rtl, 0)));
7013 /* If we got something, return it. But first, set the alignment
7014 if the address is a register. */
7015 if (temp != 0)
7017 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
7018 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
7020 return temp;
7023 /* If the mode of DECL_RTL does not match that of the decl, it
7024 must be a promoted value. We return a SUBREG of the wanted mode,
7025 but mark it so that we know that it was already extended. */
7027 if (REG_P (decl_rtl)
7028 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7030 enum machine_mode pmode;
7032 /* Get the signedness used for this variable. Ensure we get the
7033 same mode we got when the variable was declared. */
7034 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7035 (TREE_CODE (exp) == RESULT_DECL
7036 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7037 gcc_assert (GET_MODE (decl_rtl) == pmode);
7039 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7040 SUBREG_PROMOTED_VAR_P (temp) = 1;
7041 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7042 return temp;
7045 return decl_rtl;
7047 case INTEGER_CST:
7048 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7049 TREE_INT_CST_HIGH (exp), mode);
7051 /* ??? If overflow is set, fold will have done an incomplete job,
7052 which can result in (plus xx (const_int 0)), which can get
7053 simplified by validate_replace_rtx during virtual register
7054 instantiation, which can result in unrecognizable insns.
7055 Avoid this by forcing all overflows into registers. */
7056 if (TREE_OVERFLOW (exp)
7057 && modifier != EXPAND_INITIALIZER)
7058 temp = force_reg (mode, temp);
7060 return temp;
7062 case VECTOR_CST:
7064 tree tmp = NULL_TREE;
7065 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7066 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
7067 return const_vector_from_tree (exp);
7068 if (GET_MODE_CLASS (mode) == MODE_INT)
7070 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7071 if (type_for_mode)
7072 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7074 if (!tmp)
7075 tmp = build_constructor_from_list (type,
7076 TREE_VECTOR_CST_ELTS (exp));
7077 return expand_expr (tmp, ignore ? const0_rtx : target,
7078 tmode, modifier);
7081 case CONST_DECL:
7082 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7084 case REAL_CST:
7085 /* If optimized, generate immediate CONST_DOUBLE
7086 which will be turned into memory by reload if necessary.
7088 We used to force a register so that loop.c could see it. But
7089 this does not allow gen_* patterns to perform optimizations with
7090 the constants. It also produces two insns in cases like "x = 1.0;".
7091 On most machines, floating-point constants are not permitted in
7092 many insns, so we'd end up copying it to a register in any case.
7094 Now, we do the copying in expand_binop, if appropriate. */
7095 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7096 TYPE_MODE (TREE_TYPE (exp)));
7098 case COMPLEX_CST:
7099 /* Handle evaluating a complex constant in a CONCAT target. */
7100 if (original_target && GET_CODE (original_target) == CONCAT)
7102 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7103 rtx rtarg, itarg;
7105 rtarg = XEXP (original_target, 0);
7106 itarg = XEXP (original_target, 1);
7108 /* Move the real and imaginary parts separately. */
7109 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
7110 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
7112 if (op0 != rtarg)
7113 emit_move_insn (rtarg, op0);
7114 if (op1 != itarg)
7115 emit_move_insn (itarg, op1);
7117 return original_target;
7120 /* ... fall through ... */
7122 case STRING_CST:
7123 temp = expand_expr_constant (exp, 1, modifier);
7125 /* temp contains a constant address.
7126 On RISC machines where a constant address isn't valid,
7127 make some insns to get that address into a register. */
7128 if (modifier != EXPAND_CONST_ADDRESS
7129 && modifier != EXPAND_INITIALIZER
7130 && modifier != EXPAND_SUM
7131 && (! memory_address_p (mode, XEXP (temp, 0))
7132 || flag_force_addr))
7133 return replace_equiv_address (temp,
7134 copy_rtx (XEXP (temp, 0)));
7135 return temp;
7137 case SAVE_EXPR:
7139 tree val = TREE_OPERAND (exp, 0);
7140 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7142 if (!SAVE_EXPR_RESOLVED_P (exp))
7144 /* We can indeed still hit this case, typically via builtin
7145 expanders calling save_expr immediately before expanding
7146 something. Assume this means that we only have to deal
7147 with non-BLKmode values. */
7148 gcc_assert (GET_MODE (ret) != BLKmode);
7150 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7151 DECL_ARTIFICIAL (val) = 1;
7152 DECL_IGNORED_P (val) = 1;
7153 TREE_OPERAND (exp, 0) = val;
7154 SAVE_EXPR_RESOLVED_P (exp) = 1;
7156 if (!CONSTANT_P (ret))
7157 ret = copy_to_reg (ret);
7158 SET_DECL_RTL (val, ret);
7161 return ret;
7164 case GOTO_EXPR:
7165 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7166 expand_goto (TREE_OPERAND (exp, 0));
7167 else
7168 expand_computed_goto (TREE_OPERAND (exp, 0));
7169 return const0_rtx;
7171 case CONSTRUCTOR:
7172 /* If we don't need the result, just ensure we evaluate any
7173 subexpressions. */
7174 if (ignore)
7176 unsigned HOST_WIDE_INT idx;
7177 tree value;
7179 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7180 expand_expr (value, const0_rtx, VOIDmode, 0);
7182 return const0_rtx;
7185 /* Try to avoid creating a temporary at all. This is possible
7186 if all of the initializer is zero.
7187 FIXME: try to handle all [0..255] initializers we can handle
7188 with memset. */
7189 else if (TREE_STATIC (exp)
7190 && !TREE_ADDRESSABLE (exp)
7191 && target != 0 && mode == BLKmode
7192 && all_zeros_p (exp))
7194 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7195 return target;
7198 /* All elts simple constants => refer to a constant in memory. But
7199 if this is a non-BLKmode mode, let it store a field at a time
7200 since that should make a CONST_INT or CONST_DOUBLE when we
7201 fold. Likewise, if we have a target we can use, it is best to
7202 store directly into the target unless the type is large enough
7203 that memcpy will be used. If we are making an initializer and
7204 all operands are constant, put it in memory as well.
7206 FIXME: Avoid trying to fill vector constructors piece-meal.
7207 Output them with output_constant_def below unless we're sure
7208 they're zeros. This should go away when vector initializers
7209 are treated like VECTOR_CST instead of arrays.
7210 */
7211 else if ((TREE_STATIC (exp)
7212 && ((mode == BLKmode
7213 && ! (target != 0 && safe_from_p (target, exp, 1)))
7214 || TREE_ADDRESSABLE (exp)
7215 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7216 && (! MOVE_BY_PIECES_P
7217 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7218 TYPE_ALIGN (type)))
7219 && ! mostly_zeros_p (exp))))
7220 || ((modifier == EXPAND_INITIALIZER
7221 || modifier == EXPAND_CONST_ADDRESS)
7222 && TREE_CONSTANT (exp)))
7224 rtx constructor = expand_expr_constant (exp, 1, modifier);
7226 if (modifier != EXPAND_CONST_ADDRESS
7227 && modifier != EXPAND_INITIALIZER
7228 && modifier != EXPAND_SUM)
7229 constructor = validize_mem (constructor);
7231 return constructor;
7233 else
7235 /* Handle calls that pass values in multiple non-contiguous
7236 locations. The Irix 6 ABI has examples of this. */
7237 if (target == 0 || ! safe_from_p (target, exp, 1)
7238 || GET_CODE (target) == PARALLEL
7239 || modifier == EXPAND_STACK_PARM)
7240 target
7241 = assign_temp (build_qualified_type (type,
7242 (TYPE_QUALS (type)
7243 | (TREE_READONLY (exp)
7244 * TYPE_QUAL_CONST))),
7245 0, TREE_ADDRESSABLE (exp), 1);
7247 store_constructor (exp, target, 0, int_expr_size (exp));
7248 return target;
7251 case MISALIGNED_INDIRECT_REF:
7252 case ALIGN_INDIRECT_REF:
7253 case INDIRECT_REF:
7255 tree exp1 = TREE_OPERAND (exp, 0);
7257 if (modifier != EXPAND_WRITE)
7259 tree t;
7261 t = fold_read_from_constant_string (exp);
7262 if (t)
7263 return expand_expr (t, target, tmode, modifier);
7266 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7267 op0 = memory_address (mode, op0);
7269 if (code == ALIGN_INDIRECT_REF)
7271 int align = TYPE_ALIGN_UNIT (type);
7272 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7273 op0 = memory_address (mode, op0);
7276 temp = gen_rtx_MEM (mode, op0);
7278 set_mem_attributes (temp, exp, 0);
7280 /* Resolve the misalignment now, so that we don't have to remember
7281 to resolve it later. Of course, this only works for reads. */
7282 /* ??? When we get around to supporting writes, we'll have to handle
7283 this in store_expr directly. The vectorizer isn't generating
7284 those yet, however. */
7285 if (code == MISALIGNED_INDIRECT_REF)
7287 int icode;
7288 rtx reg, insn;
7290 gcc_assert (modifier == EXPAND_NORMAL
7291 || modifier == EXPAND_STACK_PARM);
7293 /* The vectorizer should have already checked the mode. */
7294 icode = movmisalign_optab->handlers[mode].insn_code;
7295 gcc_assert (icode != CODE_FOR_nothing);
7297 /* We've already validated the memory, and we're creating a
7298 new pseudo destination. The predicates really can't fail. */
7299 reg = gen_reg_rtx (mode);
7301 /* Nor can the insn generator. */
7302 insn = GEN_FCN (icode) (reg, temp);
7303 emit_insn (insn);
7305 return reg;
7308 return temp;
7311 case TARGET_MEM_REF:
7313 struct mem_address addr;
7315 get_address_description (exp, &addr);
7316 op0 = addr_for_mem_ref (&addr, true);
7317 op0 = memory_address (mode, op0);
7318 temp = gen_rtx_MEM (mode, op0);
7319 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7321 return temp;
7323 case ARRAY_REF:
7326 tree array = TREE_OPERAND (exp, 0);
7327 tree index = TREE_OPERAND (exp, 1);
7329 /* Fold an expression like: "foo"[2].
7330 This is not done in fold so it won't happen inside &.
7331 Don't fold if this is for wide characters since it's too
7332 difficult to do correctly and this is a very rare case. */
7334 if (modifier != EXPAND_CONST_ADDRESS
7335 && modifier != EXPAND_INITIALIZER
7336 && modifier != EXPAND_MEMORY)
7338 tree t = fold_read_from_constant_string (exp);
7340 if (t)
7341 return expand_expr (t, target, tmode, modifier);
7344 /* If this is a constant index into a constant array,
7345 just get the value from the array. Handle both the cases when
7346 we have an explicit constructor and when our operand is a variable
7347 that was declared const. */
7349 if (modifier != EXPAND_CONST_ADDRESS
7350 && modifier != EXPAND_INITIALIZER
7351 && modifier != EXPAND_MEMORY
7352 && TREE_CODE (array) == CONSTRUCTOR
7353 && ! TREE_SIDE_EFFECTS (array)
7354 && TREE_CODE (index) == INTEGER_CST)
7356 unsigned HOST_WIDE_INT ix;
7357 tree field, value;
7359 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7360 field, value)
7361 if (tree_int_cst_equal (field, index))
7363 if (!TREE_SIDE_EFFECTS (value))
7364 return expand_expr (fold (value), target, tmode, modifier);
7365 break;
7369 else if (optimize >= 1
7370 && modifier != EXPAND_CONST_ADDRESS
7371 && modifier != EXPAND_INITIALIZER
7372 && modifier != EXPAND_MEMORY
7373 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7374 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7375 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7376 && targetm.binds_local_p (array))
7378 if (TREE_CODE (index) == INTEGER_CST)
7380 tree init = DECL_INITIAL (array);
7382 if (TREE_CODE (init) == CONSTRUCTOR)
7384 unsigned HOST_WIDE_INT ix;
7385 tree field, value;
7387 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7388 field, value)
7389 if (tree_int_cst_equal (field, index))
7391 if (!TREE_SIDE_EFFECTS (value))
7392 return expand_expr (fold (value), target, tmode,
7393 modifier);
7394 break;
7397 else if (TREE_CODE (init) == STRING_CST)
7399 tree index1 = index;
7400 tree low_bound = array_ref_low_bound (exp);
7401 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7403 /* Optimize the special-case of a zero lower bound.
7405 We convert the low_bound to sizetype to avoid some problems
7406 with constant folding. (E.g. suppose the lower bound is 1,
7407 and its mode is QI. Without the conversion, (ARRAY
7408 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7409 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7411 if (! integer_zerop (low_bound))
7412 index1 = size_diffop (index1, fold_convert (sizetype,
7413 low_bound));
7415 if (0 > compare_tree_int (index1,
7416 TREE_STRING_LENGTH (init)))
7418 tree type = TREE_TYPE (TREE_TYPE (init));
7419 enum machine_mode mode = TYPE_MODE (type);
7421 if (GET_MODE_CLASS (mode) == MODE_INT
7422 && GET_MODE_SIZE (mode) == 1)
7423 return gen_int_mode (TREE_STRING_POINTER (init)
7424 [TREE_INT_CST_LOW (index1)],
7425 mode);
7431 goto normal_inner_ref;
7433 case COMPONENT_REF:
7434 /* If the operand is a CONSTRUCTOR, we can just extract the
7435 appropriate field if it is present. */
7436 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7438 unsigned HOST_WIDE_INT idx;
7439 tree field, value;
7441 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7442 idx, field, value)
7443 if (field == TREE_OPERAND (exp, 1)
7444 /* We can normally use the value of the field in the
7445 CONSTRUCTOR. However, if this is a bitfield in
7446 an integral mode that we can fit in a HOST_WIDE_INT,
7447 we must mask only the number of bits in the bitfield,
7448 since this is done implicitly by the constructor. If
7449 the bitfield does not meet either of those conditions,
7450 we can't do this optimization. */
7451 && (! DECL_BIT_FIELD (field)
7452 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7453 && (GET_MODE_BITSIZE (DECL_MODE (field))
7454 <= HOST_BITS_PER_WIDE_INT))))
7456 if (DECL_BIT_FIELD (field)
7457 && modifier == EXPAND_STACK_PARM)
7458 target = 0;
7459 op0 = expand_expr (value, target, tmode, modifier);
7460 if (DECL_BIT_FIELD (field))
7462 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7463 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7465 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7467 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7468 op0 = expand_and (imode, op0, op1, target);
7470 else
7472 tree count
7473 = build_int_cst (NULL_TREE,
7474 GET_MODE_BITSIZE (imode) - bitsize);
7476 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7477 target, 0);
7478 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7479 target, 0);
7483 return op0;
7486 goto normal_inner_ref;
7488 case BIT_FIELD_REF:
7489 case ARRAY_RANGE_REF:
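/* normal_inner_ref handles ARRAY_REF, ARRAY_RANGE_REF, BIT_FIELD_REF and
   COMPONENT_REF uniformly: decompose the reference with
   get_inner_reference, expand the containing object, and then pull out
   the addressed piece, either as a plain memory reference when the mode
   and alignment allow it or as an explicit bit-field extraction.  */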
7490 normal_inner_ref:
7492 enum machine_mode mode1;
7493 HOST_WIDE_INT bitsize, bitpos;
7494 tree offset;
7495 int volatilep = 0;
7496 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7497 &mode1, &unsignedp, &volatilep, true);
7498 rtx orig_op0;
7500 /* If we got back the original object, something is wrong. Perhaps
7501 we are evaluating an expression too early. In any event, don't
7502 infinitely recurse. */
7503 gcc_assert (tem != exp);
7505 /* If TEM's type is a union of variable size, pass TARGET to the inner
7506 computation, since it will need a temporary and TARGET is known
7507 to be safe to use. This occurs in unchecked conversion in Ada. */
7509 orig_op0 = op0
7510 = expand_expr (tem,
7511 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7512 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7513 != INTEGER_CST)
7514 && modifier != EXPAND_STACK_PARM
7515 ? target : NULL_RTX),
7516 VOIDmode,
7517 (modifier == EXPAND_INITIALIZER
7518 || modifier == EXPAND_CONST_ADDRESS
7519 || modifier == EXPAND_STACK_PARM)
7520 ? modifier : EXPAND_NORMAL);
7522 /* If this is a constant, put it into a register if it is a legitimate
7523 constant, OFFSET is 0, and we won't try to extract outside the
7524 register (in case we were passed a partially uninitialized object
7525 or a view_conversion to a larger size). Force the constant to
7526 memory otherwise. */
7527 if (CONSTANT_P (op0))
7529 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7530 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7531 && offset == 0
7532 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7533 op0 = force_reg (mode, op0);
7534 else
7535 op0 = validize_mem (force_const_mem (mode, op0));
7538 /* Otherwise, if this object is not in memory and we either have an
7539 offset, a BLKmode result, or a reference outside the object, put it
7540 there. Such cases can occur in Ada if we have unchecked conversion
7541 of an expression from a scalar type to an array or record type or
7542 for an ARRAY_RANGE_REF whose type is BLKmode. */
7543 else if (!MEM_P (op0)
7544 && (offset != 0
7545 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7546 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7548 tree nt = build_qualified_type (TREE_TYPE (tem),
7549 (TYPE_QUALS (TREE_TYPE (tem))
7550 | TYPE_QUAL_CONST));
7551 rtx memloc = assign_temp (nt, 1, 1, 1);
7553 emit_move_insn (memloc, op0);
7554 op0 = memloc;
7557 if (offset != 0)
7559 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7560 EXPAND_SUM);
7562 gcc_assert (MEM_P (op0));
7564 #ifdef POINTERS_EXTEND_UNSIGNED
7565 if (GET_MODE (offset_rtx) != Pmode)
7566 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7567 #else
7568 if (GET_MODE (offset_rtx) != ptr_mode)
7569 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7570 #endif
7572 if (GET_MODE (op0) == BLKmode
7573 /* A constant address in OP0 can have VOIDmode, we must
7574 not try to call force_reg in that case. */
7575 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7576 && bitsize != 0
7577 && (bitpos % bitsize) == 0
7578 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7579 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7581 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7582 bitpos = 0;
7585 op0 = offset_address (op0, offset_rtx,
7586 highest_pow2_factor (offset));
7589 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7590 record its alignment as BIGGEST_ALIGNMENT. */
7591 if (MEM_P (op0) && bitpos == 0 && offset != 0
7592 && is_aligning_offset (offset, tem))
7593 set_mem_align (op0, BIGGEST_ALIGNMENT);
7595 /* Don't forget about volatility even if this is a bitfield. */
7596 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7598 if (op0 == orig_op0)
7599 op0 = copy_rtx (op0);
7601 MEM_VOLATILE_P (op0) = 1;
7604 /* The following code doesn't handle CONCAT.
7605 Assume only bitpos == 0 can be used for CONCAT, due to
7606 one-element arrays having the same mode as their element. */
7607 if (GET_CODE (op0) == CONCAT)
7609 gcc_assert (bitpos == 0
7610 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7611 return op0;
7614 /* In cases where an aligned union has an unaligned object
7615 as a field, we might be extracting a BLKmode value from
7616 an integer-mode (e.g., SImode) object. Handle this case
7617 by doing the extract into an object as wide as the field
7618 (which we know to be the width of a basic mode), then
7619 storing into memory, and changing the mode to BLKmode. */
7620 if (mode1 == VOIDmode
7621 || REG_P (op0) || GET_CODE (op0) == SUBREG
7622 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7623 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7624 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7625 && modifier != EXPAND_CONST_ADDRESS
7626 && modifier != EXPAND_INITIALIZER)
7627 /* If the field isn't aligned enough to fetch as a memref,
7628 fetch it as a bit field. */
7629 || (mode1 != BLKmode
7630 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7631 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7632 || (MEM_P (op0)
7633 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7634 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7635 && ((modifier == EXPAND_CONST_ADDRESS
7636 || modifier == EXPAND_INITIALIZER)
7637 ? STRICT_ALIGNMENT
7638 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7639 || (bitpos % BITS_PER_UNIT != 0)))
7640 /* If the type and the field are a constant size and the
7641 size of the type isn't the same size as the bitfield,
7642 we must use bitfield operations. */
7643 || (bitsize >= 0
7644 && TYPE_SIZE (TREE_TYPE (exp))
7645 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7646 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7647 bitsize)))
7649 enum machine_mode ext_mode = mode;
7651 if (ext_mode == BLKmode
7652 && ! (target != 0 && MEM_P (op0)
7653 && MEM_P (target)
7654 && bitpos % BITS_PER_UNIT == 0))
7655 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7657 if (ext_mode == BLKmode)
7659 if (target == 0)
7660 target = assign_temp (type, 0, 1, 1);
7662 if (bitsize == 0)
7663 return target;
7665 /* In this case, BITPOS must start at a byte boundary and
7666 TARGET, if specified, must be a MEM. */
7667 gcc_assert (MEM_P (op0)
7668 && (!target || MEM_P (target))
7669 && !(bitpos % BITS_PER_UNIT));
7671 emit_block_move (target,
7672 adjust_address (op0, VOIDmode,
7673 bitpos / BITS_PER_UNIT),
7674 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7675 / BITS_PER_UNIT),
7676 (modifier == EXPAND_STACK_PARM
7677 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7679 return target;
7682 op0 = validize_mem (op0);
7684 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7685 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7687 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7688 (modifier == EXPAND_STACK_PARM
7689 ? NULL_RTX : target),
7690 ext_mode, ext_mode);
7692 /* If the result is a record type and BITSIZE is narrower than
7693 the mode of OP0, an integral mode, and this is a big endian
7694 machine, we must put the field into the high-order bits. */
7695 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7696 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7697 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7698 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7699 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7700 - bitsize),
7701 op0, 1);
7703 /* If the result type is BLKmode, store the data into a temporary
7704 of the appropriate type, but with the mode corresponding to the
7705 mode for the data we have (op0's mode). It's tempting to make
7706 this a constant type, since we know it's only being stored once,
7707 but that can cause problems if we are taking the address of this
7708 COMPONENT_REF because the MEM of any reference via that address
7709 will have flags corresponding to the type, which will not
7710 necessarily be constant. */
7711 if (mode == BLKmode)
7713 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
7714 rtx new;
7716 /* If the reference doesn't use the alias set of its type,
7717 we cannot create the temporary using that type. */
7718 if (component_uses_parent_alias_set (exp))
7720 new = assign_stack_local (ext_mode, size, 0);
7721 set_mem_alias_set (new, get_alias_set (exp));
7723 else
7724 new = assign_stack_temp_for_type (ext_mode, size, 0, type);
7726 emit_move_insn (new, op0);
7727 op0 = copy_rtx (new);
7728 PUT_MODE (op0, BLKmode);
7729 set_mem_attributes (op0, exp, 1);
7732 return op0;
7735 /* If the result is BLKmode, use that to access the object
7736 now as well. */
7737 if (mode == BLKmode)
7738 mode1 = BLKmode;
7740 /* Get a reference to just this component. */
7741 if (modifier == EXPAND_CONST_ADDRESS
7742 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7743 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7744 else
7745 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7747 if (op0 == orig_op0)
7748 op0 = copy_rtx (op0);
7750 set_mem_attributes (op0, exp, 0);
7751 if (REG_P (XEXP (op0, 0)))
7752 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7754 MEM_VOLATILE_P (op0) |= volatilep;
7755 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7756 || modifier == EXPAND_CONST_ADDRESS
7757 || modifier == EXPAND_INITIALIZER)
7758 return op0;
7759 else if (target == 0)
7760 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7762 convert_move (target, op0, unsignedp);
7763 return target;
7766 case OBJ_TYPE_REF:
7767 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7769 case CALL_EXPR:
7770 /* Check for a built-in function. */
7771 if (TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
7772 && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
7773 == FUNCTION_DECL)
7774 && DECL_BUILT_IN (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
7776 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
7777 == BUILT_IN_FRONTEND)
7778 return lang_hooks.expand_expr (exp, original_target,
7779 tmode, modifier,
7780 alt_rtl);
7781 else
7782 return expand_builtin (exp, target, subtarget, tmode, ignore);
7785 return expand_call (exp, target, ignore);
7787 case NON_LVALUE_EXPR:
7788 case NOP_EXPR:
7789 case CONVERT_EXPR:
7790 if (TREE_OPERAND (exp, 0) == error_mark_node)
7791 return const0_rtx;
7793 if (TREE_CODE (type) == UNION_TYPE)
7795 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7797 /* If both input and output are BLKmode, this conversion isn't doing
7798 anything except possibly changing memory attribute. */
7799 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7801 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7802 modifier);
7804 result = copy_rtx (result);
7805 set_mem_attributes (result, exp, 0);
7806 return result;
7809 if (target == 0)
7811 if (TYPE_MODE (type) != BLKmode)
7812 target = gen_reg_rtx (TYPE_MODE (type));
7813 else
7814 target = assign_temp (type, 0, 1, 1);
7817 if (MEM_P (target))
7818 /* Store data into beginning of memory target. */
7819 store_expr (TREE_OPERAND (exp, 0),
7820 adjust_address (target, TYPE_MODE (valtype), 0),
7821 modifier == EXPAND_STACK_PARM);
7823 else
7825 gcc_assert (REG_P (target));
7827 /* Store this field into a union of the proper type. */
7828 store_field (target,
7829 MIN ((int_size_in_bytes (TREE_TYPE
7830 (TREE_OPERAND (exp, 0)))
7831 * BITS_PER_UNIT),
7832 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7833 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7834 type, 0);
7837 /* Return the entire union. */
7838 return target;
7841 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7843 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7844 modifier);
7846 /* If the signedness of the conversion differs and OP0 is
7847 a promoted SUBREG, clear that indication since we now
7848 have to do the proper extension. */
7849 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7850 && GET_CODE (op0) == SUBREG)
7851 SUBREG_PROMOTED_VAR_P (op0) = 0;
7853 return REDUCE_BIT_FIELD (op0);
7856 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
7857 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7858 if (GET_MODE (op0) == mode)
7861 /* If OP0 is a constant, just convert it into the proper mode. */
7862 else if (CONSTANT_P (op0))
7864 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7865 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7867 if (modifier == EXPAND_INITIALIZER)
7868 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7869 subreg_lowpart_offset (mode,
7870 inner_mode));
7871 else
7872 op0 = convert_modes (mode, inner_mode, op0,
7873 TYPE_UNSIGNED (inner_type));
7876 else if (modifier == EXPAND_INITIALIZER)
7877 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7879 else if (target == 0)
7880 op0 = convert_to_mode (mode, op0,
7881 TYPE_UNSIGNED (TREE_TYPE
7882 (TREE_OPERAND (exp, 0))));
7883 else
7885 convert_move (target, op0,
7886 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7887 op0 = target;
7890 return REDUCE_BIT_FIELD (op0);
7892 case VIEW_CONVERT_EXPR:
7893 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7895 /* If the input and output modes are both the same, we are done. */
7896 if (TYPE_MODE (type) == GET_MODE (op0))
7898 /* If neither mode is BLKmode, and both modes are the same size
7899 then we can use gen_lowpart. */
7900 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7901 && GET_MODE_SIZE (TYPE_MODE (type))
7902 == GET_MODE_SIZE (GET_MODE (op0)))
7904 if (GET_CODE (op0) == SUBREG)
7905 op0 = force_reg (GET_MODE (op0), op0);
7906 op0 = gen_lowpart (TYPE_MODE (type), op0);
7908 /* If both modes are integral, then we can convert from one to the
7909 other. */
7910 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7911 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7912 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7913 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7914 /* As a last resort, spill op0 to memory, and reload it in a
7915 different mode. */
7916 else if (!MEM_P (op0))
7918 /* If the operand is not a MEM, force it into memory. Since we
7919 are going to be changing the mode of the MEM, don't call
7920 force_const_mem for constants because we don't allow pool
7921 constants to change mode. */
7922 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7924 gcc_assert (!TREE_ADDRESSABLE (exp));
7926 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7927 target
7928 = assign_stack_temp_for_type
7929 (TYPE_MODE (inner_type),
7930 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7932 emit_move_insn (target, op0);
7933 op0 = target;
7936 /* At this point, OP0 is in the correct mode. If the output type is such
7937 that the operand is known to be aligned, indicate that it is.
7938 Otherwise, we need only be concerned about alignment for non-BLKmode
7939 results. */
7940 if (MEM_P (op0))
7942 op0 = copy_rtx (op0);
7944 if (TYPE_ALIGN_OK (type))
7945 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7946 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7947 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7949 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7950 HOST_WIDE_INT temp_size
7951 = MAX (int_size_in_bytes (inner_type),
7952 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7953 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7954 temp_size, 0, type);
7955 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7957 gcc_assert (!TREE_ADDRESSABLE (exp));
7959 if (GET_MODE (op0) == BLKmode)
7960 emit_block_move (new_with_op0_mode, op0,
7961 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7962 (modifier == EXPAND_STACK_PARM
7963 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7964 else
7965 emit_move_insn (new_with_op0_mode, op0);
7967 op0 = new;
7970 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7973 return op0;
7975 case PLUS_EXPR:
7976 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7977 something else, make sure we add the register to the constant and
7978 then to the other thing. This case can occur during strength
7979 reduction and doing it this way will produce better code if the
7980 frame pointer or argument pointer is eliminated.
7982 fold-const.c will ensure that the constant is always in the inner
7983 PLUS_EXPR, so the only case we need to do anything about is if
7984 sp, ap, or fp is our second argument, in which case we must swap
7985 the innermost first argument and our second argument. */
7987 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7988 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7989 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7990 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7991 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7992 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7994 tree t = TREE_OPERAND (exp, 1);
7996 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7997 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8000 /* If the result is to be ptr_mode and we are adding an integer to
8001 something, we might be forming a constant. So try to use
8002 plus_constant. If it produces a sum and we can't accept it,
8003 use force_operand. This allows P = &ARR[const] to generate
8004 efficient code on machines where a SYMBOL_REF is not a valid
8005 address.
8007 If this is an EXPAND_SUM call, always return the sum. */
8008 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8009 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8011 if (modifier == EXPAND_STACK_PARM)
8012 target = 0;
8013 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8014 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8015 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8017 rtx constant_part;
8019 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8020 EXPAND_SUM);
8021 /* Use immed_double_const to ensure that the constant is
8022 truncated according to the mode of OP1, then sign extended
8023 to a HOST_WIDE_INT. Using the constant directly can result
8024 in non-canonical RTL in a 64x32 cross compile. */
8025 constant_part
8026 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8027 (HOST_WIDE_INT) 0,
8028 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8029 op1 = plus_constant (op1, INTVAL (constant_part));
8030 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8031 op1 = force_operand (op1, target);
8032 return REDUCE_BIT_FIELD (op1);
8035 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8036 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8037 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8039 rtx constant_part;
8041 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8042 (modifier == EXPAND_INITIALIZER
8043 ? EXPAND_INITIALIZER : EXPAND_SUM));
8044 if (! CONSTANT_P (op0))
8046 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8047 VOIDmode, modifier);
8048 /* Return a PLUS if modifier says it's OK. */
8049 if (modifier == EXPAND_SUM
8050 || modifier == EXPAND_INITIALIZER)
8051 return simplify_gen_binary (PLUS, mode, op0, op1);
8052 goto binop2;
8054 /* Use immed_double_const to ensure that the constant is
8055 truncated according to the mode of OP0, then sign extended
8056 to a HOST_WIDE_INT. Using the constant directly can result
8057 in non-canonical RTL in a 64x32 cross compile. */
8058 constant_part
8059 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8060 (HOST_WIDE_INT) 0,
8061 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8062 op0 = plus_constant (op0, INTVAL (constant_part));
8063 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8064 op0 = force_operand (op0, target);
8065 return REDUCE_BIT_FIELD (op0);
8069 /* No sense saving up arithmetic to be done
8070 if it's all in the wrong mode to form part of an address.
8071 And force_operand won't know whether to sign-extend or
8072 zero-extend. */
8073 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8074 || mode != ptr_mode)
8076 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8077 subtarget, &op0, &op1, 0);
8078 if (op0 == const0_rtx)
8079 return op1;
8080 if (op1 == const0_rtx)
8081 return op0;
8082 goto binop2;
8085 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8086 subtarget, &op0, &op1, modifier);
8087 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8089 case MINUS_EXPR:
8090 /* For initializers, we are allowed to return a MINUS of two
8091 symbolic constants. Here we handle all cases when both operands
8092 are constant. */
8093 /* Handle difference of two symbolic constants,
8094 for the sake of an initializer. */
8095 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8096 && really_constant_p (TREE_OPERAND (exp, 0))
8097 && really_constant_p (TREE_OPERAND (exp, 1)))
8099 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8100 NULL_RTX, &op0, &op1, modifier);
8102 /* If the last operand is a CONST_INT, use plus_constant of
8103 the negated constant. Else make the MINUS. */
8104 if (GET_CODE (op1) == CONST_INT)
8105 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8106 else
8107 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8110 /* No sense saving up arithmetic to be done
8111 if it's all in the wrong mode to form part of an address.
8112 And force_operand won't know whether to sign-extend or
8113 zero-extend. */
8114 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8115 || mode != ptr_mode)
8116 goto binop;
8118 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8119 subtarget, &op0, &op1, modifier);
8121 /* Convert A - const to A + (-const). */
8122 if (GET_CODE (op1) == CONST_INT)
8124 op1 = negate_rtx (mode, op1);
8125 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8128 goto binop2;
8130 case MULT_EXPR:
8131 /* If first operand is constant, swap them.
8132 Thus the following special case checks need only
8133 check the second operand. */
8134 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8136 tree t1 = TREE_OPERAND (exp, 0);
8137 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8138 TREE_OPERAND (exp, 1) = t1;
8141 /* Attempt to return something suitable for generating an
8142 indexed address, for machines that support that. */
8144 if (modifier == EXPAND_SUM && mode == ptr_mode
8145 && host_integerp (TREE_OPERAND (exp, 1), 0))
8147 tree exp1 = TREE_OPERAND (exp, 1);
8149 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8150 EXPAND_SUM);
8152 if (!REG_P (op0))
8153 op0 = force_operand (op0, NULL_RTX);
8154 if (!REG_P (op0))
8155 op0 = copy_to_mode_reg (mode, op0);
8157 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8158 gen_int_mode (tree_low_cst (exp1, 0),
8159 TYPE_MODE (TREE_TYPE (exp1)))));
8162 if (modifier == EXPAND_STACK_PARM)
8163 target = 0;
8165 /* Check for multiplying things that have been extended
8166 from a narrower type. If this machine supports multiplying
8167 in that narrower type with a result in the desired type,
8168 do it that way, and avoid the explicit type-conversion. */
8170 subexp0 = TREE_OPERAND (exp, 0);
8171 subexp1 = TREE_OPERAND (exp, 1);
8172 /* First, check if we have a multiplication of one signed and one
8173 unsigned operand. */
8174 if (TREE_CODE (subexp0) == NOP_EXPR
8175 && TREE_CODE (subexp1) == NOP_EXPR
8176 && TREE_CODE (type) == INTEGER_TYPE
8177 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8178 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8179 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8180 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8181 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8182 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8184 enum machine_mode innermode
8185 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8186 this_optab = usmul_widen_optab;
8187 if (mode == GET_MODE_WIDER_MODE (innermode))
8189 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8191 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8192 expand_operands (TREE_OPERAND (subexp0, 0),
8193 TREE_OPERAND (subexp1, 0),
8194 NULL_RTX, &op0, &op1, 0);
8195 else
8196 expand_operands (TREE_OPERAND (subexp0, 0),
8197 TREE_OPERAND (subexp1, 0),
8198 NULL_RTX, &op1, &op0, 0);
8200 goto binop3;
8204 /* Check for a multiplication with matching signedness. */
8205 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8206 && TREE_CODE (type) == INTEGER_TYPE
8207 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8208 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8209 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8210 && int_fits_type_p (TREE_OPERAND (exp, 1),
8211 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8212 /* Don't use a widening multiply if a shift will do. */
8213 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8214 > HOST_BITS_PER_WIDE_INT)
8215 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8217 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8218 && (TYPE_PRECISION (TREE_TYPE
8219 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8220 == TYPE_PRECISION (TREE_TYPE
8221 (TREE_OPERAND
8222 (TREE_OPERAND (exp, 0), 0))))
8223 /* If both operands are extended, they must either both
8224 be zero-extended or both be sign-extended. */
8225 && (TYPE_UNSIGNED (TREE_TYPE
8226 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8227 == TYPE_UNSIGNED (TREE_TYPE
8228 (TREE_OPERAND
8229 (TREE_OPERAND (exp, 0), 0)))))))
8231 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8232 enum machine_mode innermode = TYPE_MODE (op0type);
8233 bool zextend_p = TYPE_UNSIGNED (op0type);
8234 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8235 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8237 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8239 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8241 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8242 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8243 TREE_OPERAND (exp, 1),
8244 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8245 else
8246 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8247 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8248 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8249 goto binop3;
8251 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8252 && innermode == word_mode)
8254 rtx htem, hipart;
8255 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8256 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8257 op1 = convert_modes (innermode, mode,
8258 expand_normal (TREE_OPERAND (exp, 1)),
8259 unsignedp);
8260 else
8261 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8262 temp = expand_binop (mode, other_optab, op0, op1, target,
8263 unsignedp, OPTAB_LIB_WIDEN);
8264 hipart = gen_highpart (innermode, temp);
8265 htem = expand_mult_highpart_adjust (innermode, hipart,
8266 op0, op1, hipart,
8267 zextend_p);
8268 if (htem != hipart)
8269 emit_move_insn (hipart, htem);
8270 return REDUCE_BIT_FIELD (temp);
8274 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8275 subtarget, &op0, &op1, 0);
8276 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8278 case TRUNC_DIV_EXPR:
8279 case FLOOR_DIV_EXPR:
8280 case CEIL_DIV_EXPR:
8281 case ROUND_DIV_EXPR:
8282 case EXACT_DIV_EXPR:
8283 if (modifier == EXPAND_STACK_PARM)
8284 target = 0;
8285 /* Possible optimization: compute the dividend with EXPAND_SUM
8286 then, if the divisor is constant, we can optimize the case
8287 where some terms of the dividend have coefficients divisible by it. */
8288 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8289 subtarget, &op0, &op1, 0);
8290 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8292 case RDIV_EXPR:
8293 goto binop;
8295 case TRUNC_MOD_EXPR:
8296 case FLOOR_MOD_EXPR:
8297 case CEIL_MOD_EXPR:
8298 case ROUND_MOD_EXPR:
8299 if (modifier == EXPAND_STACK_PARM)
8300 target = 0;
8301 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8302 subtarget, &op0, &op1, 0);
8303 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8305 case FIX_TRUNC_EXPR:
8306 op0 = expand_normal (TREE_OPERAND (exp, 0));
8307 if (target == 0 || modifier == EXPAND_STACK_PARM)
8308 target = gen_reg_rtx (mode);
8309 expand_fix (target, op0, unsignedp);
8310 return target;
8312 case FLOAT_EXPR:
8313 op0 = expand_normal (TREE_OPERAND (exp, 0));
8314 if (target == 0 || modifier == EXPAND_STACK_PARM)
8315 target = gen_reg_rtx (mode);
8316 /* expand_float can't figure out what to do if FROM has VOIDmode.
8317 So give it the correct mode. With -O, cse will optimize this. */
8318 if (GET_MODE (op0) == VOIDmode)
8319 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8320 op0);
8321 expand_float (target, op0,
8322 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8323 return target;
8325 case NEGATE_EXPR:
8326 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8327 if (modifier == EXPAND_STACK_PARM)
8328 target = 0;
8329 temp = expand_unop (mode,
8330 optab_for_tree_code (NEGATE_EXPR, type),
8331 op0, target, 0);
8332 gcc_assert (temp);
8333 return REDUCE_BIT_FIELD (temp);
8335 case ABS_EXPR:
8336 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8337 if (modifier == EXPAND_STACK_PARM)
8338 target = 0;
8340 /* ABS_EXPR is not valid for complex arguments. */
8341 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8342 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8344 /* Unsigned abs is simply the operand. Testing here means we don't
8345 risk generating incorrect code below. */
8346 if (TYPE_UNSIGNED (type))
8347 return op0;
8349 return expand_abs (mode, op0, target, unsignedp,
8350 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8352 case MAX_EXPR:
8353 case MIN_EXPR:
8354 target = original_target;
8355 if (target == 0
8356 || modifier == EXPAND_STACK_PARM
8357 || (MEM_P (target) && MEM_VOLATILE_P (target))
8358 || GET_MODE (target) != mode
8359 || (REG_P (target)
8360 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8361 target = gen_reg_rtx (mode);
8362 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8363 target, &op0, &op1, 0);
8365 /* First try to do it with a special MIN or MAX instruction.
8366 If that does not win, use a conditional jump to select the proper
8367 value. */
8368 this_optab = optab_for_tree_code (code, type);
8369 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8370 OPTAB_WIDEN);
8371 if (temp != 0)
8372 return temp;
8374 /* At this point, a MEM target is no longer useful; we will get better
8375 code without it. */
8377 if (! REG_P (target))
8378 target = gen_reg_rtx (mode);
8380 /* If op1 was placed in target, swap op0 and op1. */
8381 if (target != op0 && target == op1)
8383 temp = op0;
8384 op0 = op1;
8385 op1 = temp;
8388 /* We generate better code and avoid problems with op1 mentioning
8389 target by forcing op1 into a pseudo if it isn't a constant. */
8390 if (! CONSTANT_P (op1))
8391 op1 = force_reg (mode, op1);
8394 enum rtx_code comparison_code;
8395 rtx cmpop1 = op1;
8397 if (code == MAX_EXPR)
8398 comparison_code = unsignedp ? GEU : GE;
8399 else
8400 comparison_code = unsignedp ? LEU : LE;
8402 /* Canonicalize to comparisons against 0. */
8403 if (op1 == const1_rtx)
8405 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8406 or (a != 0 ? a : 1) for unsigned.
8407 For MIN we are safe converting (a <= 1 ? a : 1)
8408 into (a <= 0 ? a : 1) */
8409 cmpop1 = const0_rtx;
8410 if (code == MAX_EXPR)
8411 comparison_code = unsignedp ? NE : GT;
8413 if (op1 == constm1_rtx && !unsignedp)
8415 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8416 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8417 cmpop1 = const0_rtx;
8418 if (code == MIN_EXPR)
8419 comparison_code = LT;
8421 #ifdef HAVE_conditional_move
8422 /* Use a conditional move if possible. */
8423 if (can_conditionally_move_p (mode))
8425 rtx insn;
8427 /* ??? Same problem as in expmed.c: emit_conditional_move
8428 forces a stack adjustment via compare_from_rtx, and we
8429 lose the stack adjustment if the sequence we are about
8430 to create is discarded. */
8431 do_pending_stack_adjust ();
8433 start_sequence ();
8435 /* Try to emit the conditional move. */
8436 insn = emit_conditional_move (target, comparison_code,
8437 op0, cmpop1, mode,
8438 op0, op1, mode,
8439 unsignedp);
8441 /* If we could do the conditional move, emit the sequence,
8442 and return. */
8443 if (insn)
8445 rtx seq = get_insns ();
8446 end_sequence ();
8447 emit_insn (seq);
8448 return target;
8451 /* Otherwise discard the sequence and fall back to code with
8452 branches. */
8453 end_sequence ();
8455 #endif
8456 if (target != op0)
8457 emit_move_insn (target, op0);
8459 temp = gen_label_rtx ();
8460 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8461 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8463 emit_move_insn (target, op1);
8464 emit_label (temp);
8465 return target;
8467 case BIT_NOT_EXPR:
8468 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8469 if (modifier == EXPAND_STACK_PARM)
8470 target = 0;
8471 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8472 gcc_assert (temp);
8473 return temp;
8475 /* ??? Can optimize bitwise operations with one arg constant.
8476 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8477 and (a bitwise1 b) bitwise2 b (etc)
8478 but that is probably not worth while. */
8480 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8481 boolean values when we want in all cases to compute both of them. In
8482 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8483 as actual zero-or-1 values and then bitwise anding. In cases where
8484 there cannot be any side effects, better code would be made by
8485 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8486 how to recognize those cases. */
8488 case TRUTH_AND_EXPR:
8489 code = BIT_AND_EXPR;
8490 case BIT_AND_EXPR:
8491 goto binop;
8493 case TRUTH_OR_EXPR:
8494 code = BIT_IOR_EXPR;
8495 case BIT_IOR_EXPR:
8496 goto binop;
8498 case TRUTH_XOR_EXPR:
8499 code = BIT_XOR_EXPR;
8500 case BIT_XOR_EXPR:
8501 goto binop;
8503 case LSHIFT_EXPR:
8504 case RSHIFT_EXPR:
8505 case LROTATE_EXPR:
8506 case RROTATE_EXPR:
8507 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8508 subtarget = 0;
8509 if (modifier == EXPAND_STACK_PARM)
8510 target = 0;
8511 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8512 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8513 unsignedp);
8515 /* Could determine the answer when only additive constants differ. Also,
8516 the addition of one can be handled by changing the condition. */
8517 case LT_EXPR:
8518 case LE_EXPR:
8519 case GT_EXPR:
8520 case GE_EXPR:
8521 case EQ_EXPR:
8522 case NE_EXPR:
8523 case UNORDERED_EXPR:
8524 case ORDERED_EXPR:
8525 case UNLT_EXPR:
8526 case UNLE_EXPR:
8527 case UNGT_EXPR:
8528 case UNGE_EXPR:
8529 case UNEQ_EXPR:
8530 case LTGT_EXPR:
8531 temp = do_store_flag (exp,
8532 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8533 tmode != VOIDmode ? tmode : mode, 0);
8534 if (temp != 0)
8535 return temp;
8537 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8538 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8539 && original_target
8540 && REG_P (original_target)
8541 && (GET_MODE (original_target)
8542 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8544 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8545 VOIDmode, 0);
8547 /* If temp is constant, we can just compute the result. */
8548 if (GET_CODE (temp) == CONST_INT)
8550 if (INTVAL (temp) != 0)
8551 emit_move_insn (target, const1_rtx);
8552 else
8553 emit_move_insn (target, const0_rtx);
8555 return target;
8558 if (temp != original_target)
8560 enum machine_mode mode1 = GET_MODE (temp);
8561 if (mode1 == VOIDmode)
8562 mode1 = tmode != VOIDmode ? tmode : mode;
8564 temp = copy_to_mode_reg (mode1, temp);
8567 op1 = gen_label_rtx ();
8568 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8569 GET_MODE (temp), unsignedp, op1);
8570 emit_move_insn (temp, const1_rtx);
8571 emit_label (op1);
8572 return temp;
8575 /* If no set-flag instruction, must generate a conditional store
8576 into a temporary variable. Drop through and handle this
8577 like && and ||. */
8579 if (! ignore
8580 && (target == 0
8581 || modifier == EXPAND_STACK_PARM
8582 || ! safe_from_p (target, exp, 1)
8583 /* Make sure we don't have a hard reg (such as function's return
8584 value) live across basic blocks, if not optimizing. */
8585 || (!optimize && REG_P (target)
8586 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8587 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8589 if (target)
8590 emit_move_insn (target, const0_rtx);
8592 op1 = gen_label_rtx ();
8593 jumpifnot (exp, op1);
8595 if (target)
8596 emit_move_insn (target, const1_rtx);
8598 emit_label (op1);
8599 return ignore ? const0_rtx : target;
8601 case TRUTH_NOT_EXPR:
8602 if (modifier == EXPAND_STACK_PARM)
8603 target = 0;
8604 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8605 /* The parser is careful to generate TRUTH_NOT_EXPR
8606 only with operands that are always zero or one. */
8607 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8608 target, 1, OPTAB_LIB_WIDEN);
8609 gcc_assert (temp);
8610 return temp;
8612 case STATEMENT_LIST:
8614 tree_stmt_iterator iter;
8616 gcc_assert (ignore);
8618 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8619 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8621 return const0_rtx;
8623 case COND_EXPR:
8624 /* A COND_EXPR with its type being VOID_TYPE represents a
8625 conditional jump and is handled in
8626 expand_gimple_cond_expr. */
8627 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8629 /* Note that COND_EXPRs whose type is a structure or union
8630 are required to be constructed to contain assignments of
8631 a temporary variable, so that we can evaluate them here
8632 for side effect only. If type is void, we must do likewise. */
8634 gcc_assert (!TREE_ADDRESSABLE (type)
8635 && !ignore
8636 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8637 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8639 /* If we are not to produce a result, we have no target. Otherwise,
8640 if a target was specified use it; it will not be used as an
8641 intermediate target unless it is safe. If no target, use a
8642 temporary. */
8644 if (modifier != EXPAND_STACK_PARM
8645 && original_target
8646 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8647 && GET_MODE (original_target) == mode
8648 #ifdef HAVE_conditional_move
8649 && (! can_conditionally_move_p (mode)
8650 || REG_P (original_target))
8651 #endif
8652 && !MEM_P (original_target))
8653 temp = original_target;
8654 else
8655 temp = assign_temp (type, 0, 0, 1);
8657 do_pending_stack_adjust ();
8658 NO_DEFER_POP;
8659 op0 = gen_label_rtx ();
8660 op1 = gen_label_rtx ();
8661 jumpifnot (TREE_OPERAND (exp, 0), op0);
8662 store_expr (TREE_OPERAND (exp, 1), temp,
8663 modifier == EXPAND_STACK_PARM);
8665 emit_jump_insn (gen_jump (op1));
8666 emit_barrier ();
8667 emit_label (op0);
8668 store_expr (TREE_OPERAND (exp, 2), temp,
8669 modifier == EXPAND_STACK_PARM);
8671 emit_label (op1);
8672 OK_DEFER_POP;
8673 return temp;
8675 case VEC_COND_EXPR:
8676 target = expand_vec_cond_expr (exp, target);
8677 return target;
8679 case MODIFY_EXPR:
8681 tree lhs = TREE_OPERAND (exp, 0);
8682 tree rhs = TREE_OPERAND (exp, 1);
8683 gcc_assert (ignore);
8684 expand_assignment (lhs, rhs);
8685 return const0_rtx;
8688 case GIMPLE_MODIFY_STMT:
8690 tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
8691 tree rhs = GIMPLE_STMT_OPERAND (exp, 1);
8693 gcc_assert (ignore);
8695 /* Check for |= or &= of a bitfield of size one into another bitfield
8696 of size 1. In this case, (unless we need the result of the
8697 assignment) we can do this more efficiently with a
8698 test followed by an assignment, if necessary.
8700 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8701 things change so we do, this code should be enhanced to
8702 support it. */
8703 if (TREE_CODE (lhs) == COMPONENT_REF
8704 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8705 || TREE_CODE (rhs) == BIT_AND_EXPR)
8706 && TREE_OPERAND (rhs, 0) == lhs
8707 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8708 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8709 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8711 rtx label = gen_label_rtx ();
8712 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
8713 do_jump (TREE_OPERAND (rhs, 1),
8714 value ? label : 0,
8715 value ? 0 : label);
8716 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value));
8717 do_pending_stack_adjust ();
8718 emit_label (label);
8719 return const0_rtx;
8722 expand_assignment (lhs, rhs);
8723 return const0_rtx;
8726 case RETURN_EXPR:
8727 if (!TREE_OPERAND (exp, 0))
8728 expand_null_return ();
8729 else
8730 expand_return (TREE_OPERAND (exp, 0));
8731 return const0_rtx;
8733 case ADDR_EXPR:
8734 return expand_expr_addr_expr (exp, target, tmode, modifier);
8736 case COMPLEX_EXPR:
8737 /* Get the rtx code of the operands. */
8738 op0 = expand_normal (TREE_OPERAND (exp, 0));
8739 op1 = expand_normal (TREE_OPERAND (exp, 1));
8741 if (!target)
8742 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8744 /* Move the real (op0) and imaginary (op1) parts to their location. */
8745 write_complex_part (target, op0, false);
8746 write_complex_part (target, op1, true);
8748 return target;
8750 case REALPART_EXPR:
8751 op0 = expand_normal (TREE_OPERAND (exp, 0));
8752 return read_complex_part (op0, false);
8754 case IMAGPART_EXPR:
8755 op0 = expand_normal (TREE_OPERAND (exp, 0));
8756 return read_complex_part (op0, true);
8758 case RESX_EXPR:
8759 expand_resx_expr (exp);
8760 return const0_rtx;
8762 case TRY_CATCH_EXPR:
8763 case CATCH_EXPR:
8764 case EH_FILTER_EXPR:
8765 case TRY_FINALLY_EXPR:
8766 /* Lowered by tree-eh.c. */
8767 gcc_unreachable ();
8769 case WITH_CLEANUP_EXPR:
8770 case CLEANUP_POINT_EXPR:
8771 case TARGET_EXPR:
8772 case CASE_LABEL_EXPR:
8773 case VA_ARG_EXPR:
8774 case BIND_EXPR:
8775 case INIT_EXPR:
8776 case CONJ_EXPR:
8777 case COMPOUND_EXPR:
8778 case PREINCREMENT_EXPR:
8779 case PREDECREMENT_EXPR:
8780 case POSTINCREMENT_EXPR:
8781 case POSTDECREMENT_EXPR:
8782 case LOOP_EXPR:
8783 case EXIT_EXPR:
8784 case TRUTH_ANDIF_EXPR:
8785 case TRUTH_ORIF_EXPR:
8786 /* Lowered by gimplify.c. */
8787 gcc_unreachable ();
8789 case EXC_PTR_EXPR:
8790 return get_exception_pointer (cfun);
8792 case FILTER_EXPR:
8793 return get_exception_filter (cfun);
8795 case FDESC_EXPR:
8796 /* Function descriptors are not valid except as
8797 initialization constants, and should not be expanded. */
8798 gcc_unreachable ();
8800 case SWITCH_EXPR:
8801 expand_case (exp);
8802 return const0_rtx;
8804 case LABEL_EXPR:
8805 expand_label (TREE_OPERAND (exp, 0));
8806 return const0_rtx;
8808 case ASM_EXPR:
8809 expand_asm_expr (exp);
8810 return const0_rtx;
8812 case WITH_SIZE_EXPR:
8813 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8814 have pulled out the size to use in whatever context it needed. */
8815 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8816 modifier, alt_rtl);
8818 case REALIGN_LOAD_EXPR:
8820 tree oprnd0 = TREE_OPERAND (exp, 0);
8821 tree oprnd1 = TREE_OPERAND (exp, 1);
8822 tree oprnd2 = TREE_OPERAND (exp, 2);
8823 rtx op2;
8825 this_optab = optab_for_tree_code (code, type);
8826 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8827 op2 = expand_normal (oprnd2);
8828 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8829 target, unsignedp);
8830 gcc_assert (temp);
8831 return temp;
8834 case DOT_PROD_EXPR:
8836 tree oprnd0 = TREE_OPERAND (exp, 0);
8837 tree oprnd1 = TREE_OPERAND (exp, 1);
8838 tree oprnd2 = TREE_OPERAND (exp, 2);
8839 rtx op2;
8841 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8842 op2 = expand_normal (oprnd2);
8843 target = expand_widen_pattern_expr (exp, op0, op1, op2,
8844 target, unsignedp);
8845 return target;
8848 case WIDEN_SUM_EXPR:
8850 tree oprnd0 = TREE_OPERAND (exp, 0);
8851 tree oprnd1 = TREE_OPERAND (exp, 1);
8853 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8854 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
8855 target, unsignedp);
8856 return target;
8859 case REDUC_MAX_EXPR:
8860 case REDUC_MIN_EXPR:
8861 case REDUC_PLUS_EXPR:
8863 op0 = expand_normal (TREE_OPERAND (exp, 0));
8864 this_optab = optab_for_tree_code (code, type);
8865 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8866 gcc_assert (temp);
8867 return temp;
8870 case VEC_EXTRACT_EVEN_EXPR:
8871 case VEC_EXTRACT_ODD_EXPR:
8873 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8874 NULL_RTX, &op0, &op1, 0);
8875 this_optab = optab_for_tree_code (code, type);
8876 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8877 OPTAB_WIDEN);
8878 gcc_assert (temp);
8879 return temp;
8882 case VEC_INTERLEAVE_HIGH_EXPR:
8883 case VEC_INTERLEAVE_LOW_EXPR:
8885 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8886 NULL_RTX, &op0, &op1, 0);
8887 this_optab = optab_for_tree_code (code, type);
8888 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8889 OPTAB_WIDEN);
8890 gcc_assert (temp);
8891 return temp;
8894 case VEC_LSHIFT_EXPR:
8895 case VEC_RSHIFT_EXPR:
8897 target = expand_vec_shift_expr (exp, target);
8898 return target;
8901 case VEC_UNPACK_HI_EXPR:
8902 case VEC_UNPACK_LO_EXPR:
8904 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8905 this_optab = optab_for_tree_code (code, type);
8906 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
8907 target, unsignedp);
8908 gcc_assert (temp);
8909 return temp;
8912 case VEC_WIDEN_MULT_HI_EXPR:
8913 case VEC_WIDEN_MULT_LO_EXPR:
8915 tree oprnd0 = TREE_OPERAND (exp, 0);
8916 tree oprnd1 = TREE_OPERAND (exp, 1);
8918 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8919 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
8920 target, unsignedp);
8921 gcc_assert (target);
8922 return target;
8925 case VEC_PACK_MOD_EXPR:
8926 case VEC_PACK_SAT_EXPR:
8928 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8929 goto binop;
8932 default:
8933 return lang_hooks.expand_expr (exp, original_target, tmode,
8934 modifier, alt_rtl);
8937 /* Here to do an ordinary binary operator. */
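/* binop expands both operands itself; binop2 is entered with OP0 and OP1
   already expanded but THIS_OPTAB still to be chosen; binop3 is entered
   (from the widening-multiply paths above, for instance) with both the
   operands and THIS_OPTAB already set up.  */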
8938 binop:
8939 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8940 subtarget, &op0, &op1, 0);
8941 binop2:
8942 this_optab = optab_for_tree_code (code, type);
8943 binop3:
8944 if (modifier == EXPAND_STACK_PARM)
8945 target = 0;
8946 temp = expand_binop (mode, this_optab, op0, op1, target,
8947 unsignedp, OPTAB_LIB_WIDEN);
8948 gcc_assert (temp);
8949 return REDUCE_BIT_FIELD (temp);
8951 #undef REDUCE_BIT_FIELD
8953 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8954 signedness of TYPE), possibly returning the result in TARGET. */
8955 static rtx
8956 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8958 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8959 if (target && GET_MODE (target) != GET_MODE (exp))
8960 target = 0;
8961 if (TYPE_UNSIGNED (type))
8963 rtx mask;
8964 if (prec < HOST_BITS_PER_WIDE_INT)
8965 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8966 GET_MODE (exp));
8967 else
8968 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8969 ((unsigned HOST_WIDE_INT) 1
8970 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8971 GET_MODE (exp));
8972 return expand_and (GET_MODE (exp), exp, mask, target);
8974 else
8976 tree count = build_int_cst (NULL_TREE,
8977 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8978 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8979 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
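/* A minimal host-side sketch of the same reduction, for illustration only;
   reduce_host_value_sketch is a hypothetical helper, not something the
   rest of this file uses.  It keeps the low PREC bits and, for signed
   types, sign-extends from bit PREC-1, assuming
   0 < PREC < HOST_BITS_PER_WIDE_INT.  */

static HOST_WIDE_INT
reduce_host_value_sketch (unsigned HOST_WIDE_INT val, int prec, int unsignedp)
{
  unsigned HOST_WIDE_INT mask = ((unsigned HOST_WIDE_INT) 1 << prec) - 1;

  val &= mask;				/* Keep only the low PREC bits.  */
  if (!unsignedp && (val & ((unsigned HOST_WIDE_INT) 1 << (prec - 1))))
    val |= ~mask;			/* Sign-extend from bit PREC-1.  */
  return (HOST_WIDE_INT) val;
}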
8983 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8984 when applied to the address of EXP produces an address known to be
8985 aligned more than BIGGEST_ALIGNMENT. */
8987 static int
8988 is_aligning_offset (tree offset, tree exp)
8990 /* Strip off any conversions. */
8991 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8992 || TREE_CODE (offset) == NOP_EXPR
8993 || TREE_CODE (offset) == CONVERT_EXPR)
8994 offset = TREE_OPERAND (offset, 0);
8996 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8997 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8998 if (TREE_CODE (offset) != BIT_AND_EXPR
8999 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9000 || compare_tree_int (TREE_OPERAND (offset, 1),
9001 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9002 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9003 return 0;
9005 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9006 It must be NEGATE_EXPR. Then strip any more conversions. */
9007 offset = TREE_OPERAND (offset, 0);
9008 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9009 || TREE_CODE (offset) == NOP_EXPR
9010 || TREE_CODE (offset) == CONVERT_EXPR)
9011 offset = TREE_OPERAND (offset, 0);
9013 if (TREE_CODE (offset) != NEGATE_EXPR)
9014 return 0;
9016 offset = TREE_OPERAND (offset, 0);
9017 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9018 || TREE_CODE (offset) == NOP_EXPR
9019 || TREE_CODE (offset) == CONVERT_EXPR)
9020 offset = TREE_OPERAND (offset, 0);
9022 /* This must now be the address of EXP. */
9023 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
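/* Illustration only: aligning_offset_sketch is a hypothetical helper that
   shows the arithmetic behind the pattern recognized above.  For a
   power-of-two BOUNDARY, (-ADDRESS) & (BOUNDARY - 1) is the distance from
   ADDRESS up to the next multiple of BOUNDARY (zero if ADDRESS is already
   aligned), so adding the offset to ADDRESS yields an aligned address.  */

static unsigned HOST_WIDE_INT
aligning_offset_sketch (unsigned HOST_WIDE_INT address,
			unsigned HOST_WIDE_INT boundary)
{
  /* Unsigned negation is well defined (modulo 2**N), and BOUNDARY - 1 is
     an all-ones mask for the bits below the alignment boundary.  */
  return (- address) & (boundary - 1);
}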
9026 /* Return the tree node if an ARG corresponds to a string constant or zero
9027 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9028 in bytes within the string that ARG is accessing. The type of the
9029 offset will be `sizetype'. */
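/* For example, given ARG == &"hello"[2] the result is the STRING_CST
   "hello" and *PTR_OFFSET is set to the sizetype constant 2, so a caller
   can index TREE_STRING_POINTER of the result with that offset.  */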
9031 tree
9032 string_constant (tree arg, tree *ptr_offset)
9034 tree array, offset, lower_bound;
9035 STRIP_NOPS (arg);
9037 if (TREE_CODE (arg) == ADDR_EXPR)
9039 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9041 *ptr_offset = size_zero_node;
9042 return TREE_OPERAND (arg, 0);
9044 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9046 array = TREE_OPERAND (arg, 0);
9047 offset = size_zero_node;
9049 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9051 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9052 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9053 if (TREE_CODE (array) != STRING_CST
9054 && TREE_CODE (array) != VAR_DECL)
9055 return 0;
9057 /* Check if the array has a nonzero lower bound. */
9058 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9059 if (!integer_zerop (lower_bound))
9061 /* If the offset and lower bound aren't both constants, return 0. */
9062 if (TREE_CODE (lower_bound) != INTEGER_CST)
9063 return 0;
9064 if (TREE_CODE (offset) != INTEGER_CST)
9065 return 0;
9066 /* Adjust offset by the lower bound. */
9067 offset = size_diffop (fold_convert (sizetype, offset),
9068 fold_convert (sizetype, lower_bound));
9071 else
9072 return 0;
9074 else if (TREE_CODE (arg) == PLUS_EXPR)
9076 tree arg0 = TREE_OPERAND (arg, 0);
9077 tree arg1 = TREE_OPERAND (arg, 1);
9079 STRIP_NOPS (arg0);
9080 STRIP_NOPS (arg1);
9082 if (TREE_CODE (arg0) == ADDR_EXPR
9083 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9084 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9086 array = TREE_OPERAND (arg0, 0);
9087 offset = arg1;
9089 else if (TREE_CODE (arg1) == ADDR_EXPR
9090 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9091 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9093 array = TREE_OPERAND (arg1, 0);
9094 offset = arg0;
9096 else
9097 return 0;
9099 else
9100 return 0;
9102 if (TREE_CODE (array) == STRING_CST)
9104 *ptr_offset = fold_convert (sizetype, offset);
9105 return array;
9107 else if (TREE_CODE (array) == VAR_DECL)
9109 int length;
9111 /* Variables initialized to string literals can be handled too. */
9112 if (DECL_INITIAL (array) == NULL_TREE
9113 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9114 return 0;
9116 /* Only handle variables that are read-only, non-volatile and bind locally. */
9117 if (! TREE_READONLY (array)
9118 || TREE_SIDE_EFFECTS (array)
9119 || ! targetm.binds_local_p (array))
9120 return 0;
9122 /* Avoid const char foo[4] = "abcde"; */
9123 if (DECL_SIZE_UNIT (array) == NULL_TREE
9124 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9125 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9126 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9127 return 0;
9129 /* If variable is bigger than the string literal, OFFSET must be constant
9130 and inside of the bounds of the string literal. */
9131 offset = fold_convert (sizetype, offset);
9132 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9133 && (! host_integerp (offset, 1)
9134 || compare_tree_int (offset, length) >= 0))
9135 return 0;
9137 *ptr_offset = offset;
9138 return DECL_INITIAL (array);
9141 return 0;
9144 /* Generate code to calculate EXP using a store-flag instruction
9145 and return an rtx for the result. EXP is either a comparison
9146 or a TRUTH_NOT_EXPR whose operand is a comparison.
9148 If TARGET is nonzero, store the result there if convenient.
9150 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9151 cheap.
9153 Return zero if there is no suitable set-flag instruction
9154 available on this machine.
9156 Once expand_expr has been called on the arguments of the comparison,
9157 we are committed to doing the store flag, since it is not safe to
9158 re-evaluate the expression. We emit the store-flag insn by calling
9159 emit_store_flag, but only expand the arguments if we have a reason
9160 to believe that emit_store_flag will be successful. If we think that
9161 it will, but it isn't, we have to simulate the store-flag with a
9162 set/jump/set sequence. */
9164 static rtx
9165 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9167 enum rtx_code code;
9168 tree arg0, arg1, type;
9169 tree tem;
9170 enum machine_mode operand_mode;
9171 int invert = 0;
9172 int unsignedp;
9173 rtx op0, op1;
9174 enum insn_code icode;
9175 rtx subtarget = target;
9176 rtx result, label;
9178 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9179 result at the end. We can't simply invert the test since it would
9180 have already been inverted if it were valid. This case occurs for
9181 some floating-point comparisons. */
9183 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9184 invert = 1, exp = TREE_OPERAND (exp, 0);
9186 arg0 = TREE_OPERAND (exp, 0);
9187 arg1 = TREE_OPERAND (exp, 1);
9189 /* Don't crash if the comparison was erroneous. */
9190 if (arg0 == error_mark_node || arg1 == error_mark_node)
9191 return const0_rtx;
9193 type = TREE_TYPE (arg0);
9194 operand_mode = TYPE_MODE (type);
9195 unsignedp = TYPE_UNSIGNED (type);
9197 /* We won't bother with BLKmode store-flag operations because it would mean
9198 passing a lot of information to emit_store_flag. */
9199 if (operand_mode == BLKmode)
9200 return 0;
9202 /* We won't bother with store-flag operations involving function pointers
9203 when function pointers must be canonicalized before comparisons. */
9204 #ifdef HAVE_canonicalize_funcptr_for_compare
9205 if (HAVE_canonicalize_funcptr_for_compare
9206 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9207 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9208 == FUNCTION_TYPE))
9209 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9210 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9211 == FUNCTION_TYPE))))
9212 return 0;
9213 #endif
9215 STRIP_NOPS (arg0);
9216 STRIP_NOPS (arg1);
9218 /* Get the rtx comparison code to use. We know that EXP is a comparison
9219 operation of some type. Some comparisons against 1 and -1 can be
9220 converted to comparisons with zero. Do so here so that the tests
9221 below will be aware that we have a comparison with zero. These
9222 tests will not catch constants in the first operand, but constants
9223 are rarely passed as the first operand. */
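/* For example, with signed X the test X < 1 is converted here to X <= 0
   and X >= 1 to X > 0, while X <= -1 becomes X < 0 and X > -1 becomes
   X >= 0, so the code further down only has to recognize comparisons
   against zero.  (For unsigned operands the LEU/GTU variants are used.)  */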
9225 switch (TREE_CODE (exp))
9227 case EQ_EXPR:
9228 code = EQ;
9229 break;
9230 case NE_EXPR:
9231 code = NE;
9232 break;
9233 case LT_EXPR:
9234 if (integer_onep (arg1))
9235 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9236 else
9237 code = unsignedp ? LTU : LT;
9238 break;
9239 case LE_EXPR:
9240 if (! unsignedp && integer_all_onesp (arg1))
9241 arg1 = integer_zero_node, code = LT;
9242 else
9243 code = unsignedp ? LEU : LE;
9244 break;
9245 case GT_EXPR:
9246 if (! unsignedp && integer_all_onesp (arg1))
9247 arg1 = integer_zero_node, code = GE;
9248 else
9249 code = unsignedp ? GTU : GT;
9250 break;
9251 case GE_EXPR:
9252 if (integer_onep (arg1))
9253 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9254 else
9255 code = unsignedp ? GEU : GE;
9256 break;
9258 case UNORDERED_EXPR:
9259 code = UNORDERED;
9260 break;
9261 case ORDERED_EXPR:
9262 code = ORDERED;
9263 break;
9264 case UNLT_EXPR:
9265 code = UNLT;
9266 break;
9267 case UNLE_EXPR:
9268 code = UNLE;
9269 break;
9270 case UNGT_EXPR:
9271 code = UNGT;
9272 break;
9273 case UNGE_EXPR:
9274 code = UNGE;
9275 break;
9276 case UNEQ_EXPR:
9277 code = UNEQ;
9278 break;
9279 case LTGT_EXPR:
9280 code = LTGT;
9281 break;
9283 default:
9284 gcc_unreachable ();
9287 /* Put a constant second. */
9288 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9290 tem = arg0; arg0 = arg1; arg1 = tem;
9291 code = swap_condition (code);
9294 /* If this is an equality or inequality test of a single bit, we can
9295 do this by shifting the bit being tested to the low-order bit and
9296 masking the result with the constant 1. If the condition was EQ,
9297 we xor it with 1. This does not require an scc insn and is faster
9298 than an scc insn even if we have it.
9300 The code to make this transformation was moved into fold_single_bit_test,
9301 so we just call into the folder and expand its result. */
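/* For instance, compiling

       int has_bit4 (int x) { return (x & 0x10) != 0; }

   reaches this point with code == NE, arg0 == (x & 0x10) and arg1 == 0;
   fold_single_bit_test rewrites the test to roughly (x >> 4) & 1, and for
   an EQ comparison the folded result is additionally XORed with 1.  */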
9303 if ((code == NE || code == EQ)
9304 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9305 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9307 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9308 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9309 arg0, arg1, type),
9310 target, VOIDmode, EXPAND_NORMAL);
9313 /* Now see if we are likely to be able to do this. Return if not. */
9314 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9315 return 0;
9317 icode = setcc_gen_code[(int) code];
9319 if (icode == CODE_FOR_nothing)
9321 enum machine_mode wmode;
9323 for (wmode = operand_mode;
9324 icode == CODE_FOR_nothing && wmode != VOIDmode;
9325 wmode = GET_MODE_WIDER_MODE (wmode))
9326 icode = cstore_optab->handlers[(int) wmode].insn_code;
9329 if (icode == CODE_FOR_nothing
9330 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9332 /* We can only do this if it is one of the special cases that
9333 can be handled without an scc insn. */
9334 if ((code == LT && integer_zerop (arg1))
9335 || (! only_cheap && code == GE && integer_zerop (arg1)))
9336 ;
9337 else if (! only_cheap && (code == NE || code == EQ)
9338 && TREE_CODE (type) != REAL_TYPE
9339 && ((abs_optab->handlers[(int) operand_mode].insn_code
9340 != CODE_FOR_nothing)
9341 || (ffs_optab->handlers[(int) operand_mode].insn_code
9342 != CODE_FOR_nothing)))
9343 ;
9344 else
9345 return 0;
9348 if (! get_subtarget (target)
9349 || GET_MODE (subtarget) != operand_mode)
9350 subtarget = 0;
9352 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9354 if (target == 0)
9355 target = gen_reg_rtx (mode);
9357 result = emit_store_flag (target, code, op0, op1,
9358 operand_mode, unsignedp, 1);
9360 if (result)
9362 if (invert)
9363 result = expand_binop (mode, xor_optab, result, const1_rtx,
9364 result, 0, OPTAB_LIB_WIDEN);
9365 return result;
9368 /* If this failed, we have to do this with set/compare/jump/set code. */
9369 if (!REG_P (target)
9370 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9371 target = gen_reg_rtx (GET_MODE (target));
9373 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9374 label = gen_label_rtx ();
9375 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9376 NULL_RTX, label);
9378 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9379 emit_label (label);
9381 return target;
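/* The set/compare/jump/set fallback above is equivalent to open-coding
   the comparison as, roughly,

       target = invert ? 0 : 1;
       if (op0 <comparison> op1)
         goto label;
       target = invert ? 1 : 0;
     label:

   i.e. the result is preloaded with the value for a true comparison and
   is overwritten only when the conditional jump falls through.  */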
9385 /* Stubs in case we haven't got a casesi insn. */
9386 #ifndef HAVE_casesi
9387 # define HAVE_casesi 0
9388 # define gen_casesi(a, b, c, d, e) (0)
9389 # define CODE_FOR_casesi CODE_FOR_nothing
9390 #endif
9392 /* If the machine does not have a case insn that compares the bounds,
9393 this means extra overhead for dispatch tables, which raises the
9394 threshold for using them. */
9395 #ifndef CASE_VALUES_THRESHOLD
9396 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9397 #endif /* CASE_VALUES_THRESHOLD */
9399 unsigned int
9400 case_values_threshold (void)
9402 return CASE_VALUES_THRESHOLD;
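/* With the default definition above, a dispatch table is considered only
   for switches with at least 4 case values when the target provides a
   casesi pattern, or at least 5 otherwise; smaller switches are expanded
   as compare-and-branch sequences, and the caller applies further checks
   (such as case density) before committing to a table.  */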
9405 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9406 0 otherwise (i.e. if there is no casesi instruction). */
9407 int
9408 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9409 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9411 enum machine_mode index_mode = SImode;
9412 int index_bits = GET_MODE_BITSIZE (index_mode);
9413 rtx op1, op2, index;
9414 enum machine_mode op_mode;
9416 if (! HAVE_casesi)
9417 return 0;
9419 /* Convert the index to SImode. */
9420 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9422 enum machine_mode omode = TYPE_MODE (index_type);
9423 rtx rangertx = expand_normal (range);
9425 /* We must handle the endpoints in the original mode. */
9426 index_expr = build2 (MINUS_EXPR, index_type,
9427 index_expr, minval);
9428 minval = integer_zero_node;
9429 index = expand_normal (index_expr);
9430 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9431 omode, 1, default_label);
9432 /* Now we can safely truncate. */
9433 index = convert_to_mode (index_mode, index, 0);
9435 else
9437 if (TYPE_MODE (index_type) != index_mode)
9439 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9440 index_expr = fold_convert (index_type, index_expr);
9443 index = expand_normal (index_expr);
9446 do_pending_stack_adjust ();
9448 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9449 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9450 (index, op_mode))
9451 index = copy_to_mode_reg (op_mode, index);
9453 op1 = expand_normal (minval);
9455 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9456 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9457 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9458 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9459 (op1, op_mode))
9460 op1 = copy_to_mode_reg (op_mode, op1);
9462 op2 = expand_normal (range);
9464 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9465 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9466 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9467 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9468 (op2, op_mode))
9469 op2 = copy_to_mode_reg (op_mode, op2);
9471 emit_jump_insn (gen_casesi (index, op1, op2,
9472 table_label, default_label));
9473 return 1;
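/* When the switch index is wider than SImode (say a 64-bit index on a
   32-bit target), the code above first subtracts MINVAL and performs the
   range check in the original wide mode, branching to DEFAULT_LABEL when
   the value is out of range; only then is the index truncated to SImode
   for the casesi pattern, so the truncation cannot lose significant bits.  */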
9476 /* Attempt to generate a tablejump instruction; same concept. */
9477 #ifndef HAVE_tablejump
9478 #define HAVE_tablejump 0
9479 #define gen_tablejump(x, y) (0)
9480 #endif
9482 /* Subroutine of the next function.
9484 INDEX is the value being switched on, with the lowest value
9485 in the table already subtracted.
9486 MODE is its expected mode (needed if INDEX is constant).
9487 RANGE is the length of the jump table.
9488 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9490 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9491 index value is out of range. */
9493 static void
9494 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9495 rtx default_label)
9497 rtx temp, vector;
9499 if (INTVAL (range) > cfun->max_jumptable_ents)
9500 cfun->max_jumptable_ents = INTVAL (range);
9502 /* Do an unsigned comparison (in the proper mode) between the index
9503 expression and the value which represents the length of the range.
9504 Since we just finished subtracting the lower bound of the range
9505 from the index expression, this comparison allows us to simultaneously
9506 check that the original index expression value is both greater than
9507 or equal to the minimum value of the range and less than or equal to
9508 the maximum value of the range. */
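/* For instance, for a switch covering case values 5 through 10, RANGE is
   5 and INDEX has already had 5 subtracted from it.  An original value of
   3 gives INDEX == -2, which compared as an unsigned number is larger
   than 5, so this single GTU branch rejects values below the minimum as
   well as values above the maximum.  */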
9510 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9511 default_label);
9513 /* If index is in range, it must fit in Pmode.
9514 Convert to Pmode so we can index with it. */
9515 if (mode != Pmode)
9516 index = convert_to_mode (Pmode, index, 1);
9518 /* Don't let a MEM slip through, because then INDEX that comes
9519 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9520 and break_out_memory_refs will go to work on it and mess it up. */
9521 #ifdef PIC_CASE_VECTOR_ADDRESS
9522 if (flag_pic && !REG_P (index))
9523 index = copy_to_mode_reg (Pmode, index);
9524 #endif
9526 /* If flag_force_addr were to affect this address
9527 it could interfere with the tricky assumptions made
9528 about addresses that contain label-refs,
9529 which may be valid only very near the tablejump itself. */
9530 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9531 GET_MODE_SIZE, because this indicates how large insns are. The other
9532 uses should all be Pmode, because they are addresses. This code
9533 could fail if addresses and insns are not the same size. */
9534 index = gen_rtx_PLUS (Pmode,
9535 gen_rtx_MULT (Pmode, index,
9536 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9537 gen_rtx_LABEL_REF (Pmode, table_label));
9538 #ifdef PIC_CASE_VECTOR_ADDRESS
9539 if (flag_pic)
9540 index = PIC_CASE_VECTOR_ADDRESS (index);
9541 else
9542 #endif
9543 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9544 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9545 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9546 convert_move (temp, vector, 0);
9548 emit_jump_insn (gen_tablejump (temp, table_label));
9550 /* If we are generating PIC code or if the table is PC-relative, the
9551 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9552 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9553 emit_barrier ();
9556 int
9557 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9558 rtx table_label, rtx default_label)
9560 rtx index;
9562 if (! HAVE_tablejump)
9563 return 0;
9565 index_expr = fold_build2 (MINUS_EXPR, index_type,
9566 fold_convert (index_type, index_expr),
9567 fold_convert (index_type, minval));
9568 index = expand_normal (index_expr);
9569 do_pending_stack_adjust ();
9571 do_tablejump (index, TYPE_MODE (index_type),
9572 convert_modes (TYPE_MODE (index_type),
9573 TYPE_MODE (TREE_TYPE (range)),
9574 expand_normal (range),
9575 TYPE_UNSIGNED (TREE_TYPE (range))),
9576 table_label, default_label);
9577 return 1;
9580 /* Nonzero if the mode is a valid vector mode for this architecture.
9581 This returns nonzero even if there is no hardware support for the
9582 vector mode, but we can emulate with narrower modes. */
9584 int
9585 vector_mode_valid_p (enum machine_mode mode)
9587 enum mode_class class = GET_MODE_CLASS (mode);
9588 enum machine_mode innermode;
9590 /* Doh! What's going on? */
9591 if (class != MODE_VECTOR_INT
9592 && class != MODE_VECTOR_FLOAT)
9593 return 0;
9595 /* Hardware support. Woo hoo! */
9596 if (targetm.vector_mode_supported_p (mode))
9597 return 1;
9599 innermode = GET_MODE_INNER (mode);
9601 /* We should probably return 1 if requesting V4DI and we have no DI
9602 but do have V2DI; that case, however, is very unlikely. */
9604 /* If we have support for the inner mode, we can safely emulate it.
9605 We may not have V2DI, but we can emulate with a pair of DIs. */
9606 return targetm.scalar_mode_supported_p (innermode);
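/* For example, a request for V2DImode on a target with no vector unit
   still returns nonzero as long as DImode itself is supported, since a
   V2DI operation can be carried out as two DImode operations, one per
   element.  */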
9609 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9610 static rtx
9611 const_vector_from_tree (tree exp)
9613 rtvec v;
9614 int units, i;
9615 tree link, elt;
9616 enum machine_mode inner, mode;
9618 mode = TYPE_MODE (TREE_TYPE (exp));
9620 if (initializer_zerop (exp))
9621 return CONST0_RTX (mode);
9623 units = GET_MODE_NUNITS (mode);
9624 inner = GET_MODE_INNER (mode);
9626 v = rtvec_alloc (units);
9628 link = TREE_VECTOR_CST_ELTS (exp);
9629 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9631 elt = TREE_VALUE (link);
9633 if (TREE_CODE (elt) == REAL_CST)
9634 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9635 inner);
9636 else
9637 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9638 TREE_INT_CST_HIGH (elt),
9639 inner);
9642 /* Initialize remaining elements to 0. */
9643 for (; i < units; ++i)
9644 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9646 return gen_rtx_CONST_VECTOR (mode, v);
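/* For example, with

       typedef int v4si __attribute__ ((vector_size (16)));
       static const v4si c = { 1, 2 };

   a VECTOR_CST whose element list is shorter than the mode's unit count
   (as can happen with a partially braced initializer) gets CONST_INTs for
   the elements that are present and zeros for the rest, producing a
   CONST_VECTOR equivalent to { 1, 2, 0, 0 } in V4SImode.  */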
9648 #include "gt-expr.h"