ChangeLog/libgcc
[official-gcc.git] / gcc / expr.c
blob9c91c4e0f2466c3b30729fe8b91f25462ef10d09
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "machmode.h"
28 #include "real.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "reload.h"
44 #include "output.h"
45 #include "typeclass.h"
46 #include "toplev.h"
47 #include "ggc.h"
48 #include "langhooks.h"
49 #include "intl.h"
50 #include "tm_p.h"
51 #include "tree-iterator.h"
52 #include "tree-pass.h"
53 #include "tree-flow.h"
54 #include "target.h"
55 #include "timevar.h"
56 #include "df.h"
58 /* Decide whether a function's arguments should be processed
59 from first to last or from last to first.
61 They should if the stack and args grow in opposite directions, but
62 only if we have push insns. */
64 #ifdef PUSH_ROUNDING
66 #ifndef PUSH_ARGS_REVERSED
67 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
68 #define PUSH_ARGS_REVERSED /* If it's last to first. */
69 #endif
70 #endif
72 #endif
74 #ifndef STACK_PUSH_CODE
75 #ifdef STACK_GROWS_DOWNWARD
76 #define STACK_PUSH_CODE PRE_DEC
77 #else
78 #define STACK_PUSH_CODE PRE_INC
79 #endif
80 #endif
83 /* If this is nonzero, we do not bother generating VOLATILE
84 around volatile memory references, and we are willing to
85 output indirect addresses. If cse is to follow, we reject
86 indirect addresses so a useful potential cse is generated;
87 if it is used only once, instruction combination will produce
88 the same indirect address eventually. */
89 int cse_not_expected;
91 /* This structure is used by move_by_pieces to describe the move to
92 be performed. */
93 struct move_by_pieces
95 rtx to;
96 rtx to_addr;
97 int autinc_to;
98 int explicit_inc_to;
99 rtx from;
100 rtx from_addr;
101 int autinc_from;
102 int explicit_inc_from;
103 unsigned HOST_WIDE_INT len;
104 HOST_WIDE_INT offset;
105 int reverse;
108 /* This structure is used by store_by_pieces to describe the clear to
109 be performed. */
111 struct store_by_pieces
113 rtx to;
114 rtx to_addr;
115 int autinc_to;
116 int explicit_inc_to;
117 unsigned HOST_WIDE_INT len;
118 HOST_WIDE_INT offset;
119 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
120 void *constfundata;
121 int reverse;
124 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
125 unsigned int,
126 unsigned int);
127 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
128 struct move_by_pieces *);
129 static bool block_move_libcall_safe_for_call_parm (void);
130 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
131 static tree emit_block_move_libcall_fn (int);
132 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
133 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
134 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
135 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
136 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
137 struct store_by_pieces *);
138 static tree clear_storage_libcall_fn (int);
139 static rtx compress_float_constant (rtx, rtx);
140 static rtx get_subtarget (rtx);
141 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
142 HOST_WIDE_INT, enum machine_mode,
143 tree, tree, int, int);
144 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
145 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
146 tree, tree, int, bool);
148 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
150 static int is_aligning_offset (tree, tree);
151 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
152 enum expand_modifier);
153 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
154 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
155 #ifdef PUSH_ROUNDING
156 static void emit_single_push_insn (enum machine_mode, rtx, tree);
157 #endif
158 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
159 static rtx const_vector_from_tree (tree);
160 static void write_complex_part (rtx, rtx, bool);
162 /* Record for each mode whether we can move a register directly to or
163 from an object of that mode in memory. If we can't, we won't try
164 to use that mode directly when accessing a field of that mode. */
166 static char direct_load[NUM_MACHINE_MODES];
167 static char direct_store[NUM_MACHINE_MODES];
169 /* Record for each mode whether we can float-extend from memory. */
171 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
173 /* This macro is used to determine whether move_by_pieces should be called
174 to perform a structure copy. */
175 #ifndef MOVE_BY_PIECES_P
176 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
177 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
178 < (unsigned int) MOVE_RATIO)
179 #endif
181 /* This macro is used to determine whether clear_by_pieces should be
182 called to clear storage. */
183 #ifndef CLEAR_BY_PIECES_P
184 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
185 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
186 < (unsigned int) CLEAR_RATIO)
187 #endif
189 /* This macro is used to determine whether store_by_pieces should be
190 called to "memset" storage with byte values other than zero, or
191 to "memcpy" storage when the source is a constant string. */
192 #ifndef STORE_BY_PIECES_P
193 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
194 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
195 < (unsigned int) MOVE_RATIO)
196 #endif
198 /* This array records the insn_code of insns to perform block moves. */
199 enum insn_code movmem_optab[NUM_MACHINE_MODES];
201 /* This array records the insn_code of insns to perform block sets. */
202 enum insn_code setmem_optab[NUM_MACHINE_MODES];
204 /* These arrays record the insn_code of three different kinds of insns
205 to perform block compares. */
206 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
207 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
208 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
210 /* Synchronization primitives. */
211 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
212 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
213 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
214 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
215 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
216 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
217 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
229 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
230 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
231 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
232 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
234 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
236 #ifndef SLOW_UNALIGNED_ACCESS
237 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
238 #endif
240 /* This is run once per compilation to set up which modes can be used
241 directly in memory and to initialize the block move optab. */
243 void
244 init_expr_once (void)
246 rtx insn, pat;
247 enum machine_mode mode;
248 int num_clobbers;
249 rtx mem, mem1;
250 rtx reg;
252 /* Try indexing by frame ptr and try by stack ptr.
253 It is known that on the Convex the stack ptr isn't a valid index.
254 With luck, one or the other is valid on any machine. */
255 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
256 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
258 /* A scratch register we can modify in-place below to avoid
259 useless RTL allocations. */
260 reg = gen_rtx_REG (VOIDmode, -1);
262 insn = rtx_alloc (INSN);
263 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
264 PATTERN (insn) = pat;
266 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
267 mode = (enum machine_mode) ((int) mode + 1))
269 int regno;
271 direct_load[(int) mode] = direct_store[(int) mode] = 0;
272 PUT_MODE (mem, mode);
273 PUT_MODE (mem1, mode);
274 PUT_MODE (reg, mode);
276 /* See if there is some register that can be used in this mode and
277 directly loaded or stored from memory. */
279 if (mode != VOIDmode && mode != BLKmode)
280 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
281 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
282 regno++)
284 if (! HARD_REGNO_MODE_OK (regno, mode))
285 continue;
287 SET_REGNO (reg, regno);
289 SET_SRC (pat) = mem;
290 SET_DEST (pat) = reg;
291 if (recog (pat, insn, &num_clobbers) >= 0)
292 direct_load[(int) mode] = 1;
294 SET_SRC (pat) = mem1;
295 SET_DEST (pat) = reg;
296 if (recog (pat, insn, &num_clobbers) >= 0)
297 direct_load[(int) mode] = 1;
299 SET_SRC (pat) = reg;
300 SET_DEST (pat) = mem;
301 if (recog (pat, insn, &num_clobbers) >= 0)
302 direct_store[(int) mode] = 1;
304 SET_SRC (pat) = reg;
305 SET_DEST (pat) = mem1;
306 if (recog (pat, insn, &num_clobbers) >= 0)
307 direct_store[(int) mode] = 1;
311 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
313 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
314 mode = GET_MODE_WIDER_MODE (mode))
316 enum machine_mode srcmode;
317 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
318 srcmode = GET_MODE_WIDER_MODE (srcmode))
320 enum insn_code ic;
322 ic = can_extend_p (mode, srcmode, 0);
323 if (ic == CODE_FOR_nothing)
324 continue;
326 PUT_MODE (mem, srcmode);
328 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
329 float_extend_from_mem[mode][srcmode] = true;
334 /* This is run at the start of compiling a function. */
336 void
337 init_expr (void)
339 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
342 /* Copy data from FROM to TO, where the machine modes are not the same.
343 Both modes may be integer, or both may be floating.
344 UNSIGNEDP should be nonzero if FROM is an unsigned type.
345 This causes zero-extension instead of sign-extension. */
347 void
348 convert_move (rtx to, rtx from, int unsignedp)
350 enum machine_mode to_mode = GET_MODE (to);
351 enum machine_mode from_mode = GET_MODE (from);
352 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
353 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
354 enum insn_code code;
355 rtx libcall;
357 /* rtx code for making an equivalent value. */
358 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
359 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
362 gcc_assert (to_real == from_real);
363 gcc_assert (to_mode != BLKmode);
364 gcc_assert (from_mode != BLKmode);
366 /* If the source and destination are already the same, then there's
367 nothing to do. */
368 if (to == from)
369 return;
371 /* If FROM is a SUBREG that indicates that we have already done at least
372 the required extension, strip it. We don't handle such SUBREGs as
373 TO here. */
375 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
376 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
377 >= GET_MODE_SIZE (to_mode))
378 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
379 from = gen_lowpart (to_mode, from), from_mode = to_mode;
381 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
383 if (to_mode == from_mode
384 || (from_mode == VOIDmode && CONSTANT_P (from)))
386 emit_move_insn (to, from);
387 return;
390 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
392 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
394 if (VECTOR_MODE_P (to_mode))
395 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
396 else
397 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
399 emit_move_insn (to, from);
400 return;
403 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
405 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
406 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
407 return;
410 if (to_real)
412 rtx value, insns;
413 convert_optab tab;
415 gcc_assert ((GET_MODE_PRECISION (from_mode)
416 != GET_MODE_PRECISION (to_mode))
417 || (DECIMAL_FLOAT_MODE_P (from_mode)
418 != DECIMAL_FLOAT_MODE_P (to_mode)));
420 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
421 /* Conversion between decimal float and binary float, same size. */
422 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
423 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
424 tab = sext_optab;
425 else
426 tab = trunc_optab;
428 /* Try converting directly if the insn is supported. */
430 code = tab->handlers[to_mode][from_mode].insn_code;
431 if (code != CODE_FOR_nothing)
433 emit_unop_insn (code, to, from,
434 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
435 return;
438 /* Otherwise use a libcall. */
439 libcall = tab->handlers[to_mode][from_mode].libfunc;
441 /* Is this conversion implemented yet? */
442 gcc_assert (libcall);
444 start_sequence ();
445 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
446 1, from, from_mode);
447 insns = get_insns ();
448 end_sequence ();
449 emit_libcall_block (insns, to, value,
450 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
451 from)
452 : gen_rtx_FLOAT_EXTEND (to_mode, from));
453 return;
456 /* Handle pointer conversion. */ /* SPEE 900220. */
457 /* Targets are expected to provide conversion insns between PxImode and
458 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
459 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
461 enum machine_mode full_mode
462 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
464 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
465 != CODE_FOR_nothing);
467 if (full_mode != from_mode)
468 from = convert_to_mode (full_mode, from, unsignedp);
469 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
470 to, from, UNKNOWN);
471 return;
473 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
475 rtx new_from;
476 enum machine_mode full_mode
477 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
479 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
480 != CODE_FOR_nothing);
482 if (to_mode == full_mode)
484 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
485 to, from, UNKNOWN);
486 return;
489 new_from = gen_reg_rtx (full_mode);
490 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
491 new_from, from, UNKNOWN);
493 /* else proceed to integer conversions below. */
494 from_mode = full_mode;
495 from = new_from;
498 /* Now both modes are integers. */
500 /* Handle expanding beyond a word. */
501 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
502 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
504 rtx insns;
505 rtx lowpart;
506 rtx fill_value;
507 rtx lowfrom;
508 int i;
509 enum machine_mode lowpart_mode;
510 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
512 /* Try converting directly if the insn is supported. */
513 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
514 != CODE_FOR_nothing)
516 /* If FROM is a SUBREG, put it into a register. Do this
517 so that we always generate the same set of insns for
518 better cse'ing; if an intermediate assignment occurred,
519 we won't be doing the operation directly on the SUBREG. */
520 if (optimize > 0 && GET_CODE (from) == SUBREG)
521 from = force_reg (from_mode, from);
522 emit_unop_insn (code, to, from, equiv_code);
523 return;
525 /* Next, try converting via full word. */
526 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
527 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
528 != CODE_FOR_nothing))
530 if (REG_P (to))
532 if (reg_overlap_mentioned_p (to, from))
533 from = force_reg (from_mode, from);
534 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
536 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
537 emit_unop_insn (code, to,
538 gen_lowpart (word_mode, to), equiv_code);
539 return;
542 /* No special multiword conversion insn; do it by hand. */
543 start_sequence ();
545 /* Since we will turn this into a no conflict block, we must ensure
546 that the source does not overlap the target. */
548 if (reg_overlap_mentioned_p (to, from))
549 from = force_reg (from_mode, from);
551 /* Get a copy of FROM widened to a word, if necessary. */
552 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
553 lowpart_mode = word_mode;
554 else
555 lowpart_mode = from_mode;
557 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
559 lowpart = gen_lowpart (lowpart_mode, to);
560 emit_move_insn (lowpart, lowfrom);
562 /* Compute the value to put in each remaining word. */
563 if (unsignedp)
564 fill_value = const0_rtx;
565 else
567 #ifdef HAVE_slt
568 if (HAVE_slt
569 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
570 && STORE_FLAG_VALUE == -1)
572 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
573 lowpart_mode, 0);
574 fill_value = gen_reg_rtx (word_mode);
575 emit_insn (gen_slt (fill_value));
577 else
578 #endif
580 fill_value
581 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
582 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
583 NULL_RTX, 0);
584 fill_value = convert_to_mode (word_mode, fill_value, 1);
588 /* Fill the remaining words. */
589 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
591 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
592 rtx subword = operand_subword (to, index, 1, to_mode);
594 gcc_assert (subword);
596 if (fill_value != subword)
597 emit_move_insn (subword, fill_value);
600 insns = get_insns ();
601 end_sequence ();
603 emit_no_conflict_block (insns, to, from, NULL_RTX,
604 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
605 return;
608 /* Truncating multi-word to a word or less. */
609 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
610 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
612 if (!((MEM_P (from)
613 && ! MEM_VOLATILE_P (from)
614 && direct_load[(int) to_mode]
615 && ! mode_dependent_address_p (XEXP (from, 0)))
616 || REG_P (from)
617 || GET_CODE (from) == SUBREG))
618 from = force_reg (from_mode, from);
619 convert_move (to, gen_lowpart (word_mode, from), 0);
620 return;
623 /* Now follow all the conversions between integers
624 no more than a word long. */
626 /* For truncation, usually we can just refer to FROM in a narrower mode. */
627 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
628 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
629 GET_MODE_BITSIZE (from_mode)))
631 if (!((MEM_P (from)
632 && ! MEM_VOLATILE_P (from)
633 && direct_load[(int) to_mode]
634 && ! mode_dependent_address_p (XEXP (from, 0)))
635 || REG_P (from)
636 || GET_CODE (from) == SUBREG))
637 from = force_reg (from_mode, from);
638 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
639 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
640 from = copy_to_reg (from);
641 emit_move_insn (to, gen_lowpart (to_mode, from));
642 return;
645 /* Handle extension. */
646 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
648 /* Convert directly if that works. */
649 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
650 != CODE_FOR_nothing)
652 emit_unop_insn (code, to, from, equiv_code);
653 return;
655 else
657 enum machine_mode intermediate;
658 rtx tmp;
659 tree shift_amount;
661 /* Search for a mode to convert via. */
662 for (intermediate = from_mode; intermediate != VOIDmode;
663 intermediate = GET_MODE_WIDER_MODE (intermediate))
664 if (((can_extend_p (to_mode, intermediate, unsignedp)
665 != CODE_FOR_nothing)
666 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
667 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
668 GET_MODE_BITSIZE (intermediate))))
669 && (can_extend_p (intermediate, from_mode, unsignedp)
670 != CODE_FOR_nothing))
672 convert_move (to, convert_to_mode (intermediate, from,
673 unsignedp), unsignedp);
674 return;
677 /* No suitable intermediate mode.
678 Generate what we need with shifts. */
679 shift_amount = build_int_cst (NULL_TREE,
680 GET_MODE_BITSIZE (to_mode)
681 - GET_MODE_BITSIZE (from_mode));
682 from = gen_lowpart (to_mode, force_reg (from_mode, from));
683 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
684 to, unsignedp);
685 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
686 to, unsignedp);
687 if (tmp != to)
688 emit_move_insn (to, tmp);
689 return;
693 /* Support special truncate insns for certain modes. */
694 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
696 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
697 to, from, UNKNOWN);
698 return;
701 /* Handle truncation of volatile memrefs, and so on;
702 the things that couldn't be truncated directly,
703 and for which there was no special instruction.
705 ??? Code above formerly short-circuited this, for most integer
706 mode pairs, with a force_reg in from_mode followed by a recursive
707 call to this routine. Appears always to have been wrong. */
708 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
710 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
711 emit_move_insn (to, temp);
712 return;
715 /* Mode combination is not recognized. */
716 gcc_unreachable ();
719 /* Return an rtx for a value that would result
720 from converting X to mode MODE.
721 Both X and MODE may be floating, or both integer.
722 UNSIGNEDP is nonzero if X is an unsigned value.
723 This can be done by referring to a part of X in place
724 or by copying to a new temporary with conversion. */
727 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
729 return convert_modes (mode, VOIDmode, x, unsignedp);
732 /* Return an rtx for a value that would result
733 from converting X from mode OLDMODE to mode MODE.
734 Both modes may be floating, or both integer.
735 UNSIGNEDP is nonzero if X is an unsigned value.
737 This can be done by referring to a part of X in place
738 or by copying to a new temporary with conversion.
740 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
743 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
745 rtx temp;
747 /* If FROM is a SUBREG that indicates that we have already done at least
748 the required extension, strip it. */
750 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
751 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
752 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
753 x = gen_lowpart (mode, x);
755 if (GET_MODE (x) != VOIDmode)
756 oldmode = GET_MODE (x);
758 if (mode == oldmode)
759 return x;
761 /* There is one case that we must handle specially: If we are converting
762 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
763 we are to interpret the constant as unsigned, gen_lowpart will do
764 the wrong if the constant appears negative. What we want to do is
765 make the high-order word of the constant zero, not all ones. */
767 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
768 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
769 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
771 HOST_WIDE_INT val = INTVAL (x);
773 if (oldmode != VOIDmode
774 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
776 int width = GET_MODE_BITSIZE (oldmode);
778 /* We need to zero extend VAL. */
779 val &= ((HOST_WIDE_INT) 1 << width) - 1;
782 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
785 /* We can do this with a gen_lowpart if both desired and current modes
786 are integer, and this is either a constant integer, a register, or a
787 non-volatile MEM. Except for the constant case where MODE is no
788 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
790 if ((GET_CODE (x) == CONST_INT
791 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
792 || (GET_MODE_CLASS (mode) == MODE_INT
793 && GET_MODE_CLASS (oldmode) == MODE_INT
794 && (GET_CODE (x) == CONST_DOUBLE
795 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
796 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
797 && direct_load[(int) mode])
798 || (REG_P (x)
799 && (! HARD_REGISTER_P (x)
800 || HARD_REGNO_MODE_OK (REGNO (x), mode))
801 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
802 GET_MODE_BITSIZE (GET_MODE (x)))))))))
804 /* ?? If we don't know OLDMODE, we have to assume here that
805 X does not need sign- or zero-extension. This may not be
806 the case, but it's the best we can do. */
807 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
808 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
810 HOST_WIDE_INT val = INTVAL (x);
811 int width = GET_MODE_BITSIZE (oldmode);
813 /* We must sign or zero-extend in this case. Start by
814 zero-extending, then sign extend if we need to. */
815 val &= ((HOST_WIDE_INT) 1 << width) - 1;
816 if (! unsignedp
817 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
818 val |= (HOST_WIDE_INT) (-1) << width;
820 return gen_int_mode (val, mode);
823 return gen_lowpart (mode, x);
826 /* Converting from integer constant into mode is always equivalent to an
827 subreg operation. */
828 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
830 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
831 return simplify_gen_subreg (mode, x, oldmode, 0);
834 temp = gen_reg_rtx (mode);
835 convert_move (temp, x, unsignedp);
836 return temp;
839 /* STORE_MAX_PIECES is the number of bytes at a time that we can
840 store efficiently. Due to internal GCC limitations, this is
841 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
842 for an immediate constant. */
844 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
846 /* Determine whether the LEN bytes can be moved by using several move
847 instructions. Return nonzero if a call to move_by_pieces should
848 succeed. */
851 can_move_by_pieces (unsigned HOST_WIDE_INT len,
852 unsigned int align ATTRIBUTE_UNUSED)
854 return MOVE_BY_PIECES_P (len, align);
857 /* Generate several move instructions to copy LEN bytes from block FROM to
858 block TO. (These are MEM rtx's with BLKmode).
860 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
861 used to push FROM to the stack.
863 ALIGN is maximum stack alignment we can assume.
865 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
866 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
867 stpcpy. */
870 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
871 unsigned int align, int endp)
873 struct move_by_pieces data;
874 rtx to_addr, from_addr = XEXP (from, 0);
875 unsigned int max_size = MOVE_MAX_PIECES + 1;
876 enum machine_mode mode = VOIDmode, tmode;
877 enum insn_code icode;
879 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
881 data.offset = 0;
882 data.from_addr = from_addr;
883 if (to)
885 to_addr = XEXP (to, 0);
886 data.to = to;
887 data.autinc_to
888 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
889 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
890 data.reverse
891 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
893 else
895 to_addr = NULL_RTX;
896 data.to = NULL_RTX;
897 data.autinc_to = 1;
898 #ifdef STACK_GROWS_DOWNWARD
899 data.reverse = 1;
900 #else
901 data.reverse = 0;
902 #endif
904 data.to_addr = to_addr;
905 data.from = from;
906 data.autinc_from
907 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
908 || GET_CODE (from_addr) == POST_INC
909 || GET_CODE (from_addr) == POST_DEC);
911 data.explicit_inc_from = 0;
912 data.explicit_inc_to = 0;
913 if (data.reverse) data.offset = len;
914 data.len = len;
916 /* If copying requires more than two move insns,
917 copy addresses to registers (to make displacements shorter)
918 and use post-increment if available. */
919 if (!(data.autinc_from && data.autinc_to)
920 && move_by_pieces_ninsns (len, align, max_size) > 2)
922 /* Find the mode of the largest move... */
923 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
924 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
925 if (GET_MODE_SIZE (tmode) < max_size)
926 mode = tmode;
928 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
930 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
931 data.autinc_from = 1;
932 data.explicit_inc_from = -1;
934 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
936 data.from_addr = copy_addr_to_reg (from_addr);
937 data.autinc_from = 1;
938 data.explicit_inc_from = 1;
940 if (!data.autinc_from && CONSTANT_P (from_addr))
941 data.from_addr = copy_addr_to_reg (from_addr);
942 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
944 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
945 data.autinc_to = 1;
946 data.explicit_inc_to = -1;
948 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
950 data.to_addr = copy_addr_to_reg (to_addr);
951 data.autinc_to = 1;
952 data.explicit_inc_to = 1;
954 if (!data.autinc_to && CONSTANT_P (to_addr))
955 data.to_addr = copy_addr_to_reg (to_addr);
958 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
959 if (align >= GET_MODE_ALIGNMENT (tmode))
960 align = GET_MODE_ALIGNMENT (tmode);
961 else
963 enum machine_mode xmode;
965 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
966 tmode != VOIDmode;
967 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
968 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
969 || SLOW_UNALIGNED_ACCESS (tmode, align))
970 break;
972 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
975 /* First move what we can in the largest integer mode, then go to
976 successively smaller modes. */
978 while (max_size > 1)
980 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
981 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
982 if (GET_MODE_SIZE (tmode) < max_size)
983 mode = tmode;
985 if (mode == VOIDmode)
986 break;
988 icode = mov_optab->handlers[(int) mode].insn_code;
989 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
990 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
992 max_size = GET_MODE_SIZE (mode);
995 /* The code above should have handled everything. */
996 gcc_assert (!data.len);
998 if (endp)
1000 rtx to1;
1002 gcc_assert (!data.reverse);
1003 if (data.autinc_to)
1005 if (endp == 2)
1007 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1008 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1009 else
1010 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1011 -1));
1013 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1014 data.offset);
1016 else
1018 if (endp == 2)
1019 --data.offset;
1020 to1 = adjust_address (data.to, QImode, data.offset);
1022 return to1;
1024 else
1025 return data.to;
1028 /* Return number of insns required to move L bytes by pieces.
1029 ALIGN (in bits) is maximum alignment we can assume. */
1031 static unsigned HOST_WIDE_INT
1032 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1033 unsigned int max_size)
1035 unsigned HOST_WIDE_INT n_insns = 0;
1036 enum machine_mode tmode;
1038 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1039 if (align >= GET_MODE_ALIGNMENT (tmode))
1040 align = GET_MODE_ALIGNMENT (tmode);
1041 else
1043 enum machine_mode tmode, xmode;
1045 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1046 tmode != VOIDmode;
1047 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1048 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1049 || SLOW_UNALIGNED_ACCESS (tmode, align))
1050 break;
1052 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1055 while (max_size > 1)
1057 enum machine_mode mode = VOIDmode;
1058 enum insn_code icode;
1060 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1061 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1062 if (GET_MODE_SIZE (tmode) < max_size)
1063 mode = tmode;
1065 if (mode == VOIDmode)
1066 break;
1068 icode = mov_optab->handlers[(int) mode].insn_code;
1069 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1070 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1072 max_size = GET_MODE_SIZE (mode);
1075 gcc_assert (!l);
1076 return n_insns;
1079 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1080 with move instructions for mode MODE. GENFUN is the gen_... function
1081 to make a move insn for that mode. DATA has all the other info. */
1083 static void
1084 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1085 struct move_by_pieces *data)
1087 unsigned int size = GET_MODE_SIZE (mode);
1088 rtx to1 = NULL_RTX, from1;
1090 while (data->len >= size)
1092 if (data->reverse)
1093 data->offset -= size;
1095 if (data->to)
1097 if (data->autinc_to)
1098 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1099 data->offset);
1100 else
1101 to1 = adjust_address (data->to, mode, data->offset);
1104 if (data->autinc_from)
1105 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1106 data->offset);
1107 else
1108 from1 = adjust_address (data->from, mode, data->offset);
1110 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1111 emit_insn (gen_add2_insn (data->to_addr,
1112 GEN_INT (-(HOST_WIDE_INT)size)));
1113 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1114 emit_insn (gen_add2_insn (data->from_addr,
1115 GEN_INT (-(HOST_WIDE_INT)size)));
1117 if (data->to)
1118 emit_insn ((*genfun) (to1, from1));
1119 else
1121 #ifdef PUSH_ROUNDING
1122 emit_single_push_insn (mode, from1, NULL);
1123 #else
1124 gcc_unreachable ();
1125 #endif
1128 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1129 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1130 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1131 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1133 if (! data->reverse)
1134 data->offset += size;
1136 data->len -= size;
1140 /* Emit code to move a block Y to a block X. This may be done with
1141 string-move instructions, with multiple scalar move instructions,
1142 or with a library call.
1144 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1145 SIZE is an rtx that says how long they are.
1146 ALIGN is the maximum alignment we can assume they have.
1147 METHOD describes what kind of copy this is, and what mechanisms may be used.
1149 Return the address of the new block, if memcpy is called and returns it,
1150 0 otherwise. */
1153 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1154 unsigned int expected_align, HOST_WIDE_INT expected_size)
1156 bool may_use_call;
1157 rtx retval = 0;
1158 unsigned int align;
1160 switch (method)
1162 case BLOCK_OP_NORMAL:
1163 case BLOCK_OP_TAILCALL:
1164 may_use_call = true;
1165 break;
1167 case BLOCK_OP_CALL_PARM:
1168 may_use_call = block_move_libcall_safe_for_call_parm ();
1170 /* Make inhibit_defer_pop nonzero around the library call
1171 to force it to pop the arguments right away. */
1172 NO_DEFER_POP;
1173 break;
1175 case BLOCK_OP_NO_LIBCALL:
1176 may_use_call = false;
1177 break;
1179 default:
1180 gcc_unreachable ();
1183 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1185 gcc_assert (MEM_P (x));
1186 gcc_assert (MEM_P (y));
1187 gcc_assert (size);
1189 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1190 block copy is more efficient for other large modes, e.g. DCmode. */
1191 x = adjust_address (x, BLKmode, 0);
1192 y = adjust_address (y, BLKmode, 0);
1194 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1195 can be incorrect is coming from __builtin_memcpy. */
1196 if (GET_CODE (size) == CONST_INT)
1198 if (INTVAL (size) == 0)
1199 return 0;
1201 x = shallow_copy_rtx (x);
1202 y = shallow_copy_rtx (y);
1203 set_mem_size (x, size);
1204 set_mem_size (y, size);
1207 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1208 move_by_pieces (x, y, INTVAL (size), align, 0);
1209 else if (emit_block_move_via_movmem (x, y, size, align,
1210 expected_align, expected_size))
1212 else if (may_use_call)
1213 retval = emit_block_move_via_libcall (x, y, size,
1214 method == BLOCK_OP_TAILCALL);
1215 else
1216 emit_block_move_via_loop (x, y, size, align);
1218 if (method == BLOCK_OP_CALL_PARM)
1219 OK_DEFER_POP;
1221 return retval;
1225 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1227 return emit_block_move_hints (x, y, size, method, 0, -1);
1230 /* A subroutine of emit_block_move. Returns true if calling the
1231 block move libcall will not clobber any parameters which may have
1232 already been placed on the stack. */
1234 static bool
1235 block_move_libcall_safe_for_call_parm (void)
1237 /* If arguments are pushed on the stack, then they're safe. */
1238 if (PUSH_ARGS)
1239 return true;
1241 /* If registers go on the stack anyway, any argument is sure to clobber
1242 an outgoing argument. */
1243 #if defined (REG_PARM_STACK_SPACE)
1244 if (OUTGOING_REG_PARM_STACK_SPACE)
1246 tree fn;
1247 fn = emit_block_move_libcall_fn (false);
1248 if (REG_PARM_STACK_SPACE (fn) != 0)
1249 return false;
1251 #endif
1253 /* If any argument goes in memory, then it might clobber an outgoing
1254 argument. */
1256 CUMULATIVE_ARGS args_so_far;
1257 tree fn, arg;
1259 fn = emit_block_move_libcall_fn (false);
1260 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1262 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1263 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1265 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1266 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1267 if (!tmp || !REG_P (tmp))
1268 return false;
1269 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1270 return false;
1271 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1274 return true;
1277 /* A subroutine of emit_block_move. Expand a movmem pattern;
1278 return true if successful. */
1280 static bool
1281 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1282 unsigned int expected_align, HOST_WIDE_INT expected_size)
1284 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1285 int save_volatile_ok = volatile_ok;
1286 enum machine_mode mode;
1288 if (expected_align < align)
1289 expected_align = align;
1291 /* Since this is a move insn, we don't care about volatility. */
1292 volatile_ok = 1;
1294 /* Try the most limited insn first, because there's no point
1295 including more than one in the machine description unless
1296 the more limited one has some advantage. */
1298 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1299 mode = GET_MODE_WIDER_MODE (mode))
1301 enum insn_code code = movmem_optab[(int) mode];
1302 insn_operand_predicate_fn pred;
1304 if (code != CODE_FOR_nothing
1305 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1306 here because if SIZE is less than the mode mask, as it is
1307 returned by the macro, it will definitely be less than the
1308 actual mode mask. */
1309 && ((GET_CODE (size) == CONST_INT
1310 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1311 <= (GET_MODE_MASK (mode) >> 1)))
1312 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1313 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1314 || (*pred) (x, BLKmode))
1315 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1316 || (*pred) (y, BLKmode))
1317 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1318 || (*pred) (opalign, VOIDmode)))
1320 rtx op2;
1321 rtx last = get_last_insn ();
1322 rtx pat;
1324 op2 = convert_to_mode (mode, size, 1);
1325 pred = insn_data[(int) code].operand[2].predicate;
1326 if (pred != 0 && ! (*pred) (op2, mode))
1327 op2 = copy_to_mode_reg (mode, op2);
1329 /* ??? When called via emit_block_move_for_call, it'd be
1330 nice if there were some way to inform the backend, so
1331 that it doesn't fail the expansion because it thinks
1332 emitting the libcall would be more efficient. */
1334 if (insn_data[(int) code].n_operands == 4)
1335 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1336 else
1337 pat = GEN_FCN ((int) code) (x, y, op2, opalign,
1338 GEN_INT (expected_align),
1339 GEN_INT (expected_size));
1340 if (pat)
1342 emit_insn (pat);
1343 volatile_ok = save_volatile_ok;
1344 return true;
1346 else
1347 delete_insns_since (last);
1351 volatile_ok = save_volatile_ok;
1352 return false;
1355 /* A subroutine of emit_block_move. Expand a call to memcpy.
1356 Return the return value from memcpy, 0 otherwise. */
1359 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1361 rtx dst_addr, src_addr;
1362 tree call_expr, fn, src_tree, dst_tree, size_tree;
1363 enum machine_mode size_mode;
1364 rtx retval;
1366 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1367 pseudos. We can then place those new pseudos into a VAR_DECL and
1368 use them later. */
1370 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1371 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1373 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1374 src_addr = convert_memory_address (ptr_mode, src_addr);
1376 dst_tree = make_tree (ptr_type_node, dst_addr);
1377 src_tree = make_tree (ptr_type_node, src_addr);
1379 size_mode = TYPE_MODE (sizetype);
1381 size = convert_to_mode (size_mode, size, 1);
1382 size = copy_to_mode_reg (size_mode, size);
1384 /* It is incorrect to use the libcall calling conventions to call
1385 memcpy in this context. This could be a user call to memcpy and
1386 the user may wish to examine the return value from memcpy. For
1387 targets where libcalls and normal calls have different conventions
1388 for returning pointers, we could end up generating incorrect code. */
1390 size_tree = make_tree (sizetype, size);
1392 fn = emit_block_move_libcall_fn (true);
1393 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1394 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1396 retval = expand_normal (call_expr);
1398 return retval;
1401 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1402 for the function we use for block copies. The first time FOR_CALL
1403 is true, we call assemble_external. */
1405 static GTY(()) tree block_move_fn;
1407 void
1408 init_block_move_fn (const char *asmspec)
1410 if (!block_move_fn)
1412 tree args, fn;
1414 fn = get_identifier ("memcpy");
1415 args = build_function_type_list (ptr_type_node, ptr_type_node,
1416 const_ptr_type_node, sizetype,
1417 NULL_TREE);
1419 fn = build_decl (FUNCTION_DECL, fn, args);
1420 DECL_EXTERNAL (fn) = 1;
1421 TREE_PUBLIC (fn) = 1;
1422 DECL_ARTIFICIAL (fn) = 1;
1423 TREE_NOTHROW (fn) = 1;
1424 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1425 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1427 block_move_fn = fn;
1430 if (asmspec)
1431 set_user_assembler_name (block_move_fn, asmspec);
1434 static tree
1435 emit_block_move_libcall_fn (int for_call)
1437 static bool emitted_extern;
1439 if (!block_move_fn)
1440 init_block_move_fn (NULL);
1442 if (for_call && !emitted_extern)
1444 emitted_extern = true;
1445 make_decl_rtl (block_move_fn);
1446 assemble_external (block_move_fn);
1449 return block_move_fn;
1452 /* A subroutine of emit_block_move. Copy the data via an explicit
1453 loop. This is used only when libcalls are forbidden. */
1454 /* ??? It'd be nice to copy in hunks larger than QImode. */
1456 static void
1457 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1458 unsigned int align ATTRIBUTE_UNUSED)
1460 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1461 enum machine_mode iter_mode;
1463 iter_mode = GET_MODE (size);
1464 if (iter_mode == VOIDmode)
1465 iter_mode = word_mode;
1467 top_label = gen_label_rtx ();
1468 cmp_label = gen_label_rtx ();
1469 iter = gen_reg_rtx (iter_mode);
1471 emit_move_insn (iter, const0_rtx);
1473 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1474 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1475 do_pending_stack_adjust ();
1477 emit_jump (cmp_label);
1478 emit_label (top_label);
1480 tmp = convert_modes (Pmode, iter_mode, iter, true);
1481 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1482 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1483 x = change_address (x, QImode, x_addr);
1484 y = change_address (y, QImode, y_addr);
1486 emit_move_insn (x, y);
1488 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1489 true, OPTAB_LIB_WIDEN);
1490 if (tmp != iter)
1491 emit_move_insn (iter, tmp);
1493 emit_label (cmp_label);
1495 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1496 true, top_label);
1499 /* Copy all or part of a value X into registers starting at REGNO.
1500 The number of registers to be filled is NREGS. */
1502 void
1503 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1505 int i;
1506 #ifdef HAVE_load_multiple
1507 rtx pat;
1508 rtx last;
1509 #endif
1511 if (nregs == 0)
1512 return;
1514 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1515 x = validize_mem (force_const_mem (mode, x));
1517 /* See if the machine can do this with a load multiple insn. */
1518 #ifdef HAVE_load_multiple
1519 if (HAVE_load_multiple)
1521 last = get_last_insn ();
1522 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1523 GEN_INT (nregs));
1524 if (pat)
1526 emit_insn (pat);
1527 return;
1529 else
1530 delete_insns_since (last);
1532 #endif
1534 for (i = 0; i < nregs; i++)
1535 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1536 operand_subword_force (x, i, mode));
1539 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1540 The number of registers to be filled is NREGS. */
1542 void
1543 move_block_from_reg (int regno, rtx x, int nregs)
1545 int i;
1547 if (nregs == 0)
1548 return;
1550 /* See if the machine can do this with a store multiple insn. */
1551 #ifdef HAVE_store_multiple
1552 if (HAVE_store_multiple)
1554 rtx last = get_last_insn ();
1555 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1556 GEN_INT (nregs));
1557 if (pat)
1559 emit_insn (pat);
1560 return;
1562 else
1563 delete_insns_since (last);
1565 #endif
1567 for (i = 0; i < nregs; i++)
1569 rtx tem = operand_subword (x, i, 1, BLKmode);
1571 gcc_assert (tem);
1573 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1577 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1578 ORIG, where ORIG is a non-consecutive group of registers represented by
1579 a PARALLEL. The clone is identical to the original except in that the
1580 original set of registers is replaced by a new set of pseudo registers.
1581 The new set has the same modes as the original set. */
1584 gen_group_rtx (rtx orig)
1586 int i, length;
1587 rtx *tmps;
1589 gcc_assert (GET_CODE (orig) == PARALLEL);
1591 length = XVECLEN (orig, 0);
1592 tmps = alloca (sizeof (rtx) * length);
1594 /* Skip a NULL entry in first slot. */
1595 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1597 if (i)
1598 tmps[0] = 0;
1600 for (; i < length; i++)
1602 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1603 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1605 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1608 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1611 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1612 except that values are placed in TMPS[i], and must later be moved
1613 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1615 static void
1616 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1618 rtx src;
1619 int start, i;
1620 enum machine_mode m = GET_MODE (orig_src);
1622 gcc_assert (GET_CODE (dst) == PARALLEL);
1624 if (m != VOIDmode
1625 && !SCALAR_INT_MODE_P (m)
1626 && !MEM_P (orig_src)
1627 && GET_CODE (orig_src) != CONCAT)
1629 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1630 if (imode == BLKmode)
1631 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1632 else
1633 src = gen_reg_rtx (imode);
1634 if (imode != BLKmode)
1635 src = gen_lowpart (GET_MODE (orig_src), src);
1636 emit_move_insn (src, orig_src);
1637 /* ...and back again. */
1638 if (imode != BLKmode)
1639 src = gen_lowpart (imode, src);
1640 emit_group_load_1 (tmps, dst, src, type, ssize);
1641 return;
1644 /* Check for a NULL entry, used to indicate that the parameter goes
1645 both on the stack and in registers. */
1646 if (XEXP (XVECEXP (dst, 0, 0), 0))
1647 start = 0;
1648 else
1649 start = 1;
1651 /* Process the pieces. */
1652 for (i = start; i < XVECLEN (dst, 0); i++)
1654 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1655 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1656 unsigned int bytelen = GET_MODE_SIZE (mode);
1657 int shift = 0;
1659 /* Handle trailing fragments that run over the size of the struct. */
1660 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1662 /* Arrange to shift the fragment to where it belongs.
1663 extract_bit_field loads to the lsb of the reg. */
1664 if (
1665 #ifdef BLOCK_REG_PADDING
1666 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1667 == (BYTES_BIG_ENDIAN ? upward : downward)
1668 #else
1669 BYTES_BIG_ENDIAN
1670 #endif
1672 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1673 bytelen = ssize - bytepos;
1674 gcc_assert (bytelen > 0);
1677 /* If we won't be loading directly from memory, protect the real source
1678 from strange tricks we might play; but make sure that the source can
1679 be loaded directly into the destination. */
1680 src = orig_src;
1681 if (!MEM_P (orig_src)
1682 && (!CONSTANT_P (orig_src)
1683 || (GET_MODE (orig_src) != mode
1684 && GET_MODE (orig_src) != VOIDmode)))
1686 if (GET_MODE (orig_src) == VOIDmode)
1687 src = gen_reg_rtx (mode);
1688 else
1689 src = gen_reg_rtx (GET_MODE (orig_src));
1691 emit_move_insn (src, orig_src);
1694 /* Optimize the access just a bit. */
1695 if (MEM_P (src)
1696 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1697 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1698 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1699 && bytelen == GET_MODE_SIZE (mode))
1701 tmps[i] = gen_reg_rtx (mode);
1702 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1704 else if (COMPLEX_MODE_P (mode)
1705 && GET_MODE (src) == mode
1706 && bytelen == GET_MODE_SIZE (mode))
1707 /* Let emit_move_complex do the bulk of the work. */
1708 tmps[i] = src;
1709 else if (GET_CODE (src) == CONCAT)
1711 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1712 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1714 if ((bytepos == 0 && bytelen == slen0)
1715 || (bytepos != 0 && bytepos + bytelen <= slen))
1717 /* The following assumes that the concatenated objects all
1718 have the same size. In this case, a simple calculation
1719 can be used to determine the object and the bit field
1720 to be extracted. */
1721 tmps[i] = XEXP (src, bytepos / slen0);
1722 if (! CONSTANT_P (tmps[i])
1723 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1724 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1725 (bytepos % slen0) * BITS_PER_UNIT,
1726 1, NULL_RTX, mode, mode);
1728 else
1730 rtx mem;
1732 gcc_assert (!bytepos);
1733 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1734 emit_move_insn (mem, src);
1735 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1736 0, 1, NULL_RTX, mode, mode);
1739 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1740 SIMD register, which is currently broken. While we get GCC
1741 to emit proper RTL for these cases, let's dump to memory. */
1742 else if (VECTOR_MODE_P (GET_MODE (dst))
1743 && REG_P (src))
1745 int slen = GET_MODE_SIZE (GET_MODE (src));
1746 rtx mem;
1748 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1749 emit_move_insn (mem, src);
1750 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1752 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1753 && XVECLEN (dst, 0) > 1)
1754 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1755 else if (CONSTANT_P (src)
1756 || (REG_P (src) && GET_MODE (src) == mode))
1757 tmps[i] = src;
1758 else
1759 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1760 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1761 mode, mode);
1763 if (shift)
1764 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1765 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1769 /* Emit code to move a block SRC of type TYPE to a block DST,
1770 where DST is non-consecutive registers represented by a PARALLEL.
1771 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1772 if not known. */
1774 void
1775 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1777 rtx *tmps;
1778 int i;
1780 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1781 emit_group_load_1 (tmps, dst, src, type, ssize);
1783 /* Copy the extracted pieces into the proper (probable) hard regs. */
1784 for (i = 0; i < XVECLEN (dst, 0); i++)
1786 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1787 if (d == NULL)
1788 continue;
1789 emit_move_insn (d, tmps[i]);
1793 /* Similar, but load SRC into new pseudos in a format that looks like
1794 PARALLEL. This can later be fed to emit_group_move to get things
1795 in the right place. */
1798 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1800 rtvec vec;
1801 int i;
1803 vec = rtvec_alloc (XVECLEN (parallel, 0));
1804 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1806 /* Convert the vector to look just like the original PARALLEL, except
1807 with the computed values. */
1808 for (i = 0; i < XVECLEN (parallel, 0); i++)
1810 rtx e = XVECEXP (parallel, 0, i);
1811 rtx d = XEXP (e, 0);
1813 if (d)
1815 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1816 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1818 RTVEC_ELT (vec, i) = e;
1821 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1824 /* Emit code to move a block SRC to block DST, where SRC and DST are
1825 non-consecutive groups of registers, each represented by a PARALLEL. */
1827 void
1828 emit_group_move (rtx dst, rtx src)
1830 int i;
1832 gcc_assert (GET_CODE (src) == PARALLEL
1833 && GET_CODE (dst) == PARALLEL
1834 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1836 /* Skip first entry if NULL. */
1837 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1838 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1839 XEXP (XVECEXP (src, 0, i), 0));
1842 /* Move a group of registers represented by a PARALLEL into pseudos. */
1845 emit_group_move_into_temps (rtx src)
1847 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1848 int i;
1850 for (i = 0; i < XVECLEN (src, 0); i++)
1852 rtx e = XVECEXP (src, 0, i);
1853 rtx d = XEXP (e, 0);
1855 if (d)
1856 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1857 RTVEC_ELT (vec, i) = e;
1860 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1863 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1864 where SRC is non-consecutive registers represented by a PARALLEL.
1865 SSIZE represents the total size of block ORIG_DST, or -1 if not
1866 known. */
1868 void
1869 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1871 rtx *tmps, dst;
1872 int start, finish, i;
1873 enum machine_mode m = GET_MODE (orig_dst);
1875 gcc_assert (GET_CODE (src) == PARALLEL);
1877 if (!SCALAR_INT_MODE_P (m)
1878 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1880 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1881 if (imode == BLKmode)
1882 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1883 else
1884 dst = gen_reg_rtx (imode);
1885 emit_group_store (dst, src, type, ssize);
1886 if (imode != BLKmode)
1887 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1888 emit_move_insn (orig_dst, dst);
1889 return;
1892 /* Check for a NULL entry, used to indicate that the parameter goes
1893 both on the stack and in registers. */
1894 if (XEXP (XVECEXP (src, 0, 0), 0))
1895 start = 0;
1896 else
1897 start = 1;
1898 finish = XVECLEN (src, 0);
1900 tmps = alloca (sizeof (rtx) * finish);
1902 /* Copy the (probable) hard regs into pseudos. */
1903 for (i = start; i < finish; i++)
1905 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1906 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1908 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1909 emit_move_insn (tmps[i], reg);
1911 else
1912 tmps[i] = reg;
1915 /* If we won't be storing directly into memory, protect the real destination
1916 from strange tricks we might play. */
1917 dst = orig_dst;
1918 if (GET_CODE (dst) == PARALLEL)
1920 rtx temp;
1922 /* We can get a PARALLEL dst if there is a conditional expression in
1923 a return statement. In that case, the dst and src are the same,
1924 so no action is necessary. */
1925 if (rtx_equal_p (dst, src))
1926 return;
1928 /* It is unclear if we can ever reach here, but we may as well handle
1929 it. Allocate a temporary, and split this into a store/load to/from
1930 the temporary. */
1932 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1933 emit_group_store (temp, src, type, ssize);
1934 emit_group_load (dst, temp, type, ssize);
1935 return;
1937 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1939 enum machine_mode outer = GET_MODE (dst);
1940 enum machine_mode inner;
1941 HOST_WIDE_INT bytepos;
1942 bool done = false;
1943 rtx temp;
1945 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1946 dst = gen_reg_rtx (outer);
1948 /* Make life a bit easier for combine. */
1949 /* If the first element of the vector is the low part
1950 of the destination mode, use a paradoxical subreg to
1951 initialize the destination. */
1952 if (start < finish)
1954 inner = GET_MODE (tmps[start]);
1955 bytepos = subreg_lowpart_offset (inner, outer);
1956 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1958 temp = simplify_gen_subreg (outer, tmps[start],
1959 inner, 0);
1960 if (temp)
1962 emit_move_insn (dst, temp);
1963 done = true;
1964 start++;
1969 /* If the first element wasn't the low part, try the last. */
1970 if (!done
1971 && start < finish - 1)
1973 inner = GET_MODE (tmps[finish - 1]);
1974 bytepos = subreg_lowpart_offset (inner, outer);
1975 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1977 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1978 inner, 0);
1979 if (temp)
1981 emit_move_insn (dst, temp);
1982 done = true;
1983 finish--;
1988 /* Otherwise, simply initialize the result to zero. */
1989 if (!done)
1990 emit_move_insn (dst, CONST0_RTX (outer));
1993 /* Process the pieces. */
1994 for (i = start; i < finish; i++)
1996 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1997 enum machine_mode mode = GET_MODE (tmps[i]);
1998 unsigned int bytelen = GET_MODE_SIZE (mode);
1999 rtx dest = dst;
2001 /* Handle trailing fragments that run over the size of the struct. */
2002 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2004 /* store_bit_field always takes its value from the lsb.
2005 Move the fragment to the lsb if it's not already there. */
2006 if (
2007 #ifdef BLOCK_REG_PADDING
2008 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2009 == (BYTES_BIG_ENDIAN ? upward : downward)
2010 #else
2011 BYTES_BIG_ENDIAN
2012 #endif
2015 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2016 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2017 build_int_cst (NULL_TREE, shift),
2018 tmps[i], 0);
2020 bytelen = ssize - bytepos;
2023 if (GET_CODE (dst) == CONCAT)
2025 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2026 dest = XEXP (dst, 0);
2027 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2029 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2030 dest = XEXP (dst, 1);
2032 else
2034 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2035 dest = assign_stack_temp (GET_MODE (dest),
2036 GET_MODE_SIZE (GET_MODE (dest)), 0);
2037 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2038 tmps[i]);
2039 dst = dest;
2040 break;
2044 /* Optimize the access just a bit. */
2045 if (MEM_P (dest)
2046 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2047 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2048 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2049 && bytelen == GET_MODE_SIZE (mode))
2050 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2051 else
2052 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2053 mode, tmps[i]);
2056 /* Copy from the pseudo into the (probable) hard reg. */
2057 if (orig_dst != dst)
2058 emit_move_insn (orig_dst, dst);
2061 /* Generate code to copy a BLKmode object of TYPE out of a
2062 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2063 is null, a stack temporary is created. TGTBLK is returned.
2065 The purpose of this routine is to handle functions that return
2066 BLKmode structures in registers. Some machines (the PA for example)
2067 want to return all small structures in registers regardless of the
2068 structure's alignment. */
2071 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2073 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2074 rtx src = NULL, dst = NULL;
2075 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2076 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2078 if (tgtblk == 0)
2080 tgtblk = assign_temp (build_qualified_type (type,
2081 (TYPE_QUALS (type)
2082 | TYPE_QUAL_CONST)),
2083 0, 1, 1);
2084 preserve_temp_slots (tgtblk);
2087 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2088 into a new pseudo which is a full word. */
2090 if (GET_MODE (srcreg) != BLKmode
2091 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2092 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2094 /* If the structure doesn't take up a whole number of words, see whether
2095 SRCREG is padded on the left or on the right. If it's on the left,
2096 set PADDING_CORRECTION to the number of bits to skip.
2098 In most ABIs, the structure will be returned at the least end of
2099 the register, which translates to right padding on little-endian
2100 targets and left padding on big-endian targets. The opposite
2101 holds if the structure is returned at the most significant
2102 end of the register. */
2103 if (bytes % UNITS_PER_WORD != 0
2104 && (targetm.calls.return_in_msb (type)
2105 ? !BYTES_BIG_ENDIAN
2106 : BYTES_BIG_ENDIAN))
2107 padding_correction
2108 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2110 /* Copy the structure BITSIZE bites at a time.
2112 We could probably emit more efficient code for machines which do not use
2113 strict alignment, but it doesn't seem worth the effort at the current
2114 time. */
2115 for (bitpos = 0, xbitpos = padding_correction;
2116 bitpos < bytes * BITS_PER_UNIT;
2117 bitpos += bitsize, xbitpos += bitsize)
2119 /* We need a new source operand each time xbitpos is on a
2120 word boundary and when xbitpos == padding_correction
2121 (the first time through). */
2122 if (xbitpos % BITS_PER_WORD == 0
2123 || xbitpos == padding_correction)
2124 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2125 GET_MODE (srcreg));
2127 /* We need a new destination operand each time bitpos is on
2128 a word boundary. */
2129 if (bitpos % BITS_PER_WORD == 0)
2130 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2132 /* Use xbitpos for the source extraction (right justified) and
2133 xbitpos for the destination store (left justified). */
2134 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2135 extract_bit_field (src, bitsize,
2136 xbitpos % BITS_PER_WORD, 1,
2137 NULL_RTX, word_mode, word_mode));
2140 return tgtblk;
2143 /* Add a USE expression for REG to the (possibly empty) list pointed
2144 to by CALL_FUSAGE. REG must denote a hard register. */
2146 void
2147 use_reg (rtx *call_fusage, rtx reg)
2149 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2151 *call_fusage
2152 = gen_rtx_EXPR_LIST (VOIDmode,
2153 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2156 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2157 starting at REGNO. All of these registers must be hard registers. */
2159 void
2160 use_regs (rtx *call_fusage, int regno, int nregs)
2162 int i;
2164 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2166 for (i = 0; i < nregs; i++)
2167 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2170 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2171 PARALLEL REGS. This is for calls that pass values in multiple
2172 non-contiguous locations. The Irix 6 ABI has examples of this. */
2174 void
2175 use_group_regs (rtx *call_fusage, rtx regs)
2177 int i;
2179 for (i = 0; i < XVECLEN (regs, 0); i++)
2181 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2183 /* A NULL entry means the parameter goes both on the stack and in
2184 registers. This can also be a MEM for targets that pass values
2185 partially on the stack and partially in registers. */
2186 if (reg != 0 && REG_P (reg))
2187 use_reg (call_fusage, reg);
2192 /* Determine whether the LEN bytes generated by CONSTFUN can be
2193 stored to memory using several move instructions. CONSTFUNDATA is
2194 a pointer which will be passed as argument in every CONSTFUN call.
2195 ALIGN is maximum alignment we can assume. Return nonzero if a
2196 call to store_by_pieces should succeed. */
2199 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2200 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2201 void *constfundata, unsigned int align)
2203 unsigned HOST_WIDE_INT l;
2204 unsigned int max_size;
2205 HOST_WIDE_INT offset = 0;
2206 enum machine_mode mode, tmode;
2207 enum insn_code icode;
2208 int reverse;
2209 rtx cst;
2211 if (len == 0)
2212 return 1;
2214 if (! STORE_BY_PIECES_P (len, align))
2215 return 0;
2217 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2218 if (align >= GET_MODE_ALIGNMENT (tmode))
2219 align = GET_MODE_ALIGNMENT (tmode);
2220 else
2222 enum machine_mode xmode;
2224 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2225 tmode != VOIDmode;
2226 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2227 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2228 || SLOW_UNALIGNED_ACCESS (tmode, align))
2229 break;
2231 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2234 /* We would first store what we can in the largest integer mode, then go to
2235 successively smaller modes. */
2237 for (reverse = 0;
2238 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2239 reverse++)
2241 l = len;
2242 mode = VOIDmode;
2243 max_size = STORE_MAX_PIECES + 1;
2244 while (max_size > 1)
2246 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2247 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2248 if (GET_MODE_SIZE (tmode) < max_size)
2249 mode = tmode;
2251 if (mode == VOIDmode)
2252 break;
2254 icode = mov_optab->handlers[(int) mode].insn_code;
2255 if (icode != CODE_FOR_nothing
2256 && align >= GET_MODE_ALIGNMENT (mode))
2258 unsigned int size = GET_MODE_SIZE (mode);
2260 while (l >= size)
2262 if (reverse)
2263 offset -= size;
2265 cst = (*constfun) (constfundata, offset, mode);
2266 if (!LEGITIMATE_CONSTANT_P (cst))
2267 return 0;
2269 if (!reverse)
2270 offset += size;
2272 l -= size;
2276 max_size = GET_MODE_SIZE (mode);
2279 /* The code above should have handled everything. */
2280 gcc_assert (!l);
2283 return 1;
2286 /* Generate several move instructions to store LEN bytes generated by
2287 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2288 pointer which will be passed as argument in every CONSTFUN call.
2289 ALIGN is maximum alignment we can assume.
2290 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2291 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2292 stpcpy. */
2295 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2296 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2297 void *constfundata, unsigned int align, int endp)
2299 struct store_by_pieces data;
2301 if (len == 0)
2303 gcc_assert (endp != 2);
2304 return to;
2307 gcc_assert (STORE_BY_PIECES_P (len, align));
2308 data.constfun = constfun;
2309 data.constfundata = constfundata;
2310 data.len = len;
2311 data.to = to;
2312 store_by_pieces_1 (&data, align);
2313 if (endp)
2315 rtx to1;
2317 gcc_assert (!data.reverse);
2318 if (data.autinc_to)
2320 if (endp == 2)
2322 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2323 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2324 else
2325 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2326 -1));
2328 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2329 data.offset);
2331 else
2333 if (endp == 2)
2334 --data.offset;
2335 to1 = adjust_address (data.to, QImode, data.offset);
2337 return to1;
2339 else
2340 return data.to;
2343 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2344 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2346 static void
2347 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2349 struct store_by_pieces data;
2351 if (len == 0)
2352 return;
2354 data.constfun = clear_by_pieces_1;
2355 data.constfundata = NULL;
2356 data.len = len;
2357 data.to = to;
2358 store_by_pieces_1 (&data, align);
2361 /* Callback routine for clear_by_pieces.
2362 Return const0_rtx unconditionally. */
2364 static rtx
2365 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2366 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2367 enum machine_mode mode ATTRIBUTE_UNUSED)
2369 return const0_rtx;
2372 /* Subroutine of clear_by_pieces and store_by_pieces.
2373 Generate several move instructions to store LEN bytes of block TO. (A MEM
2374 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2376 static void
2377 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2378 unsigned int align ATTRIBUTE_UNUSED)
2380 rtx to_addr = XEXP (data->to, 0);
2381 unsigned int max_size = STORE_MAX_PIECES + 1;
2382 enum machine_mode mode = VOIDmode, tmode;
2383 enum insn_code icode;
2385 data->offset = 0;
2386 data->to_addr = to_addr;
2387 data->autinc_to
2388 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2389 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2391 data->explicit_inc_to = 0;
2392 data->reverse
2393 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2394 if (data->reverse)
2395 data->offset = data->len;
2397 /* If storing requires more than two move insns,
2398 copy addresses to registers (to make displacements shorter)
2399 and use post-increment if available. */
2400 if (!data->autinc_to
2401 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2403 /* Determine the main mode we'll be using. */
2404 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2405 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2406 if (GET_MODE_SIZE (tmode) < max_size)
2407 mode = tmode;
2409 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2411 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2412 data->autinc_to = 1;
2413 data->explicit_inc_to = -1;
2416 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2417 && ! data->autinc_to)
2419 data->to_addr = copy_addr_to_reg (to_addr);
2420 data->autinc_to = 1;
2421 data->explicit_inc_to = 1;
2424 if ( !data->autinc_to && CONSTANT_P (to_addr))
2425 data->to_addr = copy_addr_to_reg (to_addr);
2428 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2429 if (align >= GET_MODE_ALIGNMENT (tmode))
2430 align = GET_MODE_ALIGNMENT (tmode);
2431 else
2433 enum machine_mode xmode;
2435 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2436 tmode != VOIDmode;
2437 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2438 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2439 || SLOW_UNALIGNED_ACCESS (tmode, align))
2440 break;
2442 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2445 /* First store what we can in the largest integer mode, then go to
2446 successively smaller modes. */
2448 while (max_size > 1)
2450 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2451 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2452 if (GET_MODE_SIZE (tmode) < max_size)
2453 mode = tmode;
2455 if (mode == VOIDmode)
2456 break;
2458 icode = mov_optab->handlers[(int) mode].insn_code;
2459 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2460 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2462 max_size = GET_MODE_SIZE (mode);
2465 /* The code above should have handled everything. */
2466 gcc_assert (!data->len);
2469 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2470 with move instructions for mode MODE. GENFUN is the gen_... function
2471 to make a move insn for that mode. DATA has all the other info. */
2473 static void
2474 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2475 struct store_by_pieces *data)
2477 unsigned int size = GET_MODE_SIZE (mode);
2478 rtx to1, cst;
2480 while (data->len >= size)
2482 if (data->reverse)
2483 data->offset -= size;
2485 if (data->autinc_to)
2486 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2487 data->offset);
2488 else
2489 to1 = adjust_address (data->to, mode, data->offset);
2491 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2492 emit_insn (gen_add2_insn (data->to_addr,
2493 GEN_INT (-(HOST_WIDE_INT) size)));
2495 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2496 emit_insn ((*genfun) (to1, cst));
2498 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2499 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2501 if (! data->reverse)
2502 data->offset += size;
2504 data->len -= size;
2508 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2509 its length in bytes. */
2512 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2513 unsigned int expected_align, HOST_WIDE_INT expected_size)
2515 enum machine_mode mode = GET_MODE (object);
2516 unsigned int align;
2518 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2520 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2521 just move a zero. Otherwise, do this a piece at a time. */
2522 if (mode != BLKmode
2523 && GET_CODE (size) == CONST_INT
2524 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2526 rtx zero = CONST0_RTX (mode);
2527 if (zero != NULL)
2529 emit_move_insn (object, zero);
2530 return NULL;
2533 if (COMPLEX_MODE_P (mode))
2535 zero = CONST0_RTX (GET_MODE_INNER (mode));
2536 if (zero != NULL)
2538 write_complex_part (object, zero, 0);
2539 write_complex_part (object, zero, 1);
2540 return NULL;
2545 if (size == const0_rtx)
2546 return NULL;
2548 align = MEM_ALIGN (object);
2550 if (GET_CODE (size) == CONST_INT
2551 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2552 clear_by_pieces (object, INTVAL (size), align);
2553 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2554 expected_align, expected_size))
2556 else
2557 return set_storage_via_libcall (object, size, const0_rtx,
2558 method == BLOCK_OP_TAILCALL);
2560 return NULL;
2564 clear_storage (rtx object, rtx size, enum block_op_methods method)
2566 return clear_storage_hints (object, size, method, 0, -1);
2570 /* A subroutine of clear_storage. Expand a call to memset.
2571 Return the return value of memset, 0 otherwise. */
2574 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2576 tree call_expr, fn, object_tree, size_tree, val_tree;
2577 enum machine_mode size_mode;
2578 rtx retval;
2580 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2581 place those into new pseudos into a VAR_DECL and use them later. */
2583 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2585 size_mode = TYPE_MODE (sizetype);
2586 size = convert_to_mode (size_mode, size, 1);
2587 size = copy_to_mode_reg (size_mode, size);
2589 /* It is incorrect to use the libcall calling conventions to call
2590 memset in this context. This could be a user call to memset and
2591 the user may wish to examine the return value from memset. For
2592 targets where libcalls and normal calls have different conventions
2593 for returning pointers, we could end up generating incorrect code. */
2595 object_tree = make_tree (ptr_type_node, object);
2596 if (GET_CODE (val) != CONST_INT)
2597 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2598 size_tree = make_tree (sizetype, size);
2599 val_tree = make_tree (integer_type_node, val);
2601 fn = clear_storage_libcall_fn (true);
2602 call_expr = build_call_expr (fn, 3,
2603 object_tree, integer_zero_node, size_tree);
2604 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2606 retval = expand_normal (call_expr);
2608 return retval;
2611 /* A subroutine of set_storage_via_libcall. Create the tree node
2612 for the function we use for block clears. The first time FOR_CALL
2613 is true, we call assemble_external. */
2615 static GTY(()) tree block_clear_fn;
2617 void
2618 init_block_clear_fn (const char *asmspec)
2620 if (!block_clear_fn)
2622 tree fn, args;
2624 fn = get_identifier ("memset");
2625 args = build_function_type_list (ptr_type_node, ptr_type_node,
2626 integer_type_node, sizetype,
2627 NULL_TREE);
2629 fn = build_decl (FUNCTION_DECL, fn, args);
2630 DECL_EXTERNAL (fn) = 1;
2631 TREE_PUBLIC (fn) = 1;
2632 DECL_ARTIFICIAL (fn) = 1;
2633 TREE_NOTHROW (fn) = 1;
2634 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2635 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2637 block_clear_fn = fn;
2640 if (asmspec)
2641 set_user_assembler_name (block_clear_fn, asmspec);
2644 static tree
2645 clear_storage_libcall_fn (int for_call)
2647 static bool emitted_extern;
2649 if (!block_clear_fn)
2650 init_block_clear_fn (NULL);
2652 if (for_call && !emitted_extern)
2654 emitted_extern = true;
2655 make_decl_rtl (block_clear_fn);
2656 assemble_external (block_clear_fn);
2659 return block_clear_fn;
2662 /* Expand a setmem pattern; return true if successful. */
2664 bool
2665 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2666 unsigned int expected_align, HOST_WIDE_INT expected_size)
2668 /* Try the most limited insn first, because there's no point
2669 including more than one in the machine description unless
2670 the more limited one has some advantage. */
2672 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2673 enum machine_mode mode;
2675 if (expected_align < align)
2676 expected_align = align;
2678 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2679 mode = GET_MODE_WIDER_MODE (mode))
2681 enum insn_code code = setmem_optab[(int) mode];
2682 insn_operand_predicate_fn pred;
2684 if (code != CODE_FOR_nothing
2685 /* We don't need MODE to be narrower than
2686 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2687 the mode mask, as it is returned by the macro, it will
2688 definitely be less than the actual mode mask. */
2689 && ((GET_CODE (size) == CONST_INT
2690 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2691 <= (GET_MODE_MASK (mode) >> 1)))
2692 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2693 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2694 || (*pred) (object, BLKmode))
2695 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2696 || (*pred) (opalign, VOIDmode)))
2698 rtx opsize, opchar;
2699 enum machine_mode char_mode;
2700 rtx last = get_last_insn ();
2701 rtx pat;
2703 opsize = convert_to_mode (mode, size, 1);
2704 pred = insn_data[(int) code].operand[1].predicate;
2705 if (pred != 0 && ! (*pred) (opsize, mode))
2706 opsize = copy_to_mode_reg (mode, opsize);
2708 opchar = val;
2709 char_mode = insn_data[(int) code].operand[2].mode;
2710 if (char_mode != VOIDmode)
2712 opchar = convert_to_mode (char_mode, opchar, 1);
2713 pred = insn_data[(int) code].operand[2].predicate;
2714 if (pred != 0 && ! (*pred) (opchar, char_mode))
2715 opchar = copy_to_mode_reg (char_mode, opchar);
2718 if (insn_data[(int) code].n_operands == 4)
2719 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2720 else
2721 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2722 GEN_INT (expected_align),
2723 GEN_INT (expected_size));
2724 if (pat)
2726 emit_insn (pat);
2727 return true;
2729 else
2730 delete_insns_since (last);
2734 return false;
2738 /* Write to one of the components of the complex value CPLX. Write VAL to
2739 the real part if IMAG_P is false, and the imaginary part if its true. */
2741 static void
2742 write_complex_part (rtx cplx, rtx val, bool imag_p)
2744 enum machine_mode cmode;
2745 enum machine_mode imode;
2746 unsigned ibitsize;
2748 if (GET_CODE (cplx) == CONCAT)
2750 emit_move_insn (XEXP (cplx, imag_p), val);
2751 return;
2754 cmode = GET_MODE (cplx);
2755 imode = GET_MODE_INNER (cmode);
2756 ibitsize = GET_MODE_BITSIZE (imode);
2758 /* For MEMs simplify_gen_subreg may generate an invalid new address
2759 because, e.g., the original address is considered mode-dependent
2760 by the target, which restricts simplify_subreg from invoking
2761 adjust_address_nv. Instead of preparing fallback support for an
2762 invalid address, we call adjust_address_nv directly. */
2763 if (MEM_P (cplx))
2765 emit_move_insn (adjust_address_nv (cplx, imode,
2766 imag_p ? GET_MODE_SIZE (imode) : 0),
2767 val);
2768 return;
2771 /* If the sub-object is at least word sized, then we know that subregging
2772 will work. This special case is important, since store_bit_field
2773 wants to operate on integer modes, and there's rarely an OImode to
2774 correspond to TCmode. */
2775 if (ibitsize >= BITS_PER_WORD
2776 /* For hard regs we have exact predicates. Assume we can split
2777 the original object if it spans an even number of hard regs.
2778 This special case is important for SCmode on 64-bit platforms
2779 where the natural size of floating-point regs is 32-bit. */
2780 || (REG_P (cplx)
2781 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2782 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2784 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2785 imag_p ? GET_MODE_SIZE (imode) : 0);
2786 if (part)
2788 emit_move_insn (part, val);
2789 return;
2791 else
2792 /* simplify_gen_subreg may fail for sub-word MEMs. */
2793 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2796 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2799 /* Extract one of the components of the complex value CPLX. Extract the
2800 real part if IMAG_P is false, and the imaginary part if it's true. */
2802 static rtx
2803 read_complex_part (rtx cplx, bool imag_p)
2805 enum machine_mode cmode, imode;
2806 unsigned ibitsize;
2808 if (GET_CODE (cplx) == CONCAT)
2809 return XEXP (cplx, imag_p);
2811 cmode = GET_MODE (cplx);
2812 imode = GET_MODE_INNER (cmode);
2813 ibitsize = GET_MODE_BITSIZE (imode);
2815 /* Special case reads from complex constants that got spilled to memory. */
2816 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2818 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2819 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2821 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2822 if (CONSTANT_CLASS_P (part))
2823 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2827 /* For MEMs simplify_gen_subreg may generate an invalid new address
2828 because, e.g., the original address is considered mode-dependent
2829 by the target, which restricts simplify_subreg from invoking
2830 adjust_address_nv. Instead of preparing fallback support for an
2831 invalid address, we call adjust_address_nv directly. */
2832 if (MEM_P (cplx))
2833 return adjust_address_nv (cplx, imode,
2834 imag_p ? GET_MODE_SIZE (imode) : 0);
2836 /* If the sub-object is at least word sized, then we know that subregging
2837 will work. This special case is important, since extract_bit_field
2838 wants to operate on integer modes, and there's rarely an OImode to
2839 correspond to TCmode. */
2840 if (ibitsize >= BITS_PER_WORD
2841 /* For hard regs we have exact predicates. Assume we can split
2842 the original object if it spans an even number of hard regs.
2843 This special case is important for SCmode on 64-bit platforms
2844 where the natural size of floating-point regs is 32-bit. */
2845 || (REG_P (cplx)
2846 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2847 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2849 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2850 imag_p ? GET_MODE_SIZE (imode) : 0);
2851 if (ret)
2852 return ret;
2853 else
2854 /* simplify_gen_subreg may fail for sub-word MEMs. */
2855 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2858 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2859 true, NULL_RTX, imode, imode);
2862 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2863 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2864 represented in NEW_MODE. If FORCE is true, this will never happen, as
2865 we'll force-create a SUBREG if needed. */
2867 static rtx
2868 emit_move_change_mode (enum machine_mode new_mode,
2869 enum machine_mode old_mode, rtx x, bool force)
2871 rtx ret;
2873 if (push_operand (x, GET_MODE (x)))
2875 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2876 MEM_COPY_ATTRIBUTES (ret, x);
2878 else if (MEM_P (x))
2880 /* We don't have to worry about changing the address since the
2881 size in bytes is supposed to be the same. */
2882 if (reload_in_progress)
2884 /* Copy the MEM to change the mode and move any
2885 substitutions from the old MEM to the new one. */
2886 ret = adjust_address_nv (x, new_mode, 0);
2887 copy_replacements (x, ret);
2889 else
2890 ret = adjust_address (x, new_mode, 0);
2892 else
2894 /* Note that we do want simplify_subreg's behavior of validating
2895 that the new mode is ok for a hard register. If we were to use
2896 simplify_gen_subreg, we would create the subreg, but would
2897 probably run into the target not being able to implement it. */
2898 /* Except, of course, when FORCE is true, when this is exactly what
2899 we want. Which is needed for CCmodes on some targets. */
2900 if (force)
2901 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2902 else
2903 ret = simplify_subreg (new_mode, x, old_mode, 0);
2906 return ret;
2909 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2910 an integer mode of the same size as MODE. Returns the instruction
2911 emitted, or NULL if such a move could not be generated. */
2913 static rtx
2914 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2916 enum machine_mode imode;
2917 enum insn_code code;
2919 /* There must exist a mode of the exact size we require. */
2920 imode = int_mode_for_mode (mode);
2921 if (imode == BLKmode)
2922 return NULL_RTX;
2924 /* The target must support moves in this mode. */
2925 code = mov_optab->handlers[imode].insn_code;
2926 if (code == CODE_FOR_nothing)
2927 return NULL_RTX;
2929 x = emit_move_change_mode (imode, mode, x, force);
2930 if (x == NULL_RTX)
2931 return NULL_RTX;
2932 y = emit_move_change_mode (imode, mode, y, force);
2933 if (y == NULL_RTX)
2934 return NULL_RTX;
2935 return emit_insn (GEN_FCN (code) (x, y));
2938 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2939 Return an equivalent MEM that does not use an auto-increment. */
2941 static rtx
2942 emit_move_resolve_push (enum machine_mode mode, rtx x)
2944 enum rtx_code code = GET_CODE (XEXP (x, 0));
2945 HOST_WIDE_INT adjust;
2946 rtx temp;
2948 adjust = GET_MODE_SIZE (mode);
2949 #ifdef PUSH_ROUNDING
2950 adjust = PUSH_ROUNDING (adjust);
2951 #endif
2952 if (code == PRE_DEC || code == POST_DEC)
2953 adjust = -adjust;
2954 else if (code == PRE_MODIFY || code == POST_MODIFY)
2956 rtx expr = XEXP (XEXP (x, 0), 1);
2957 HOST_WIDE_INT val;
2959 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2960 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2961 val = INTVAL (XEXP (expr, 1));
2962 if (GET_CODE (expr) == MINUS)
2963 val = -val;
2964 gcc_assert (adjust == val || adjust == -val);
2965 adjust = val;
2968 /* Do not use anti_adjust_stack, since we don't want to update
2969 stack_pointer_delta. */
2970 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2971 GEN_INT (adjust), stack_pointer_rtx,
2972 0, OPTAB_LIB_WIDEN);
2973 if (temp != stack_pointer_rtx)
2974 emit_move_insn (stack_pointer_rtx, temp);
2976 switch (code)
2978 case PRE_INC:
2979 case PRE_DEC:
2980 case PRE_MODIFY:
2981 temp = stack_pointer_rtx;
2982 break;
2983 case POST_INC:
2984 case POST_DEC:
2985 case POST_MODIFY:
2986 temp = plus_constant (stack_pointer_rtx, -adjust);
2987 break;
2988 default:
2989 gcc_unreachable ();
2992 return replace_equiv_address (x, temp);
2995 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2996 X is known to satisfy push_operand, and MODE is known to be complex.
2997 Returns the last instruction emitted. */
3000 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3002 enum machine_mode submode = GET_MODE_INNER (mode);
3003 bool imag_first;
3005 #ifdef PUSH_ROUNDING
3006 unsigned int submodesize = GET_MODE_SIZE (submode);
3008 /* In case we output to the stack, but the size is smaller than the
3009 machine can push exactly, we need to use move instructions. */
3010 if (PUSH_ROUNDING (submodesize) != submodesize)
3012 x = emit_move_resolve_push (mode, x);
3013 return emit_move_insn (x, y);
3015 #endif
3017 /* Note that the real part always precedes the imag part in memory
3018 regardless of machine's endianness. */
3019 switch (GET_CODE (XEXP (x, 0)))
3021 case PRE_DEC:
3022 case POST_DEC:
3023 imag_first = true;
3024 break;
3025 case PRE_INC:
3026 case POST_INC:
3027 imag_first = false;
3028 break;
3029 default:
3030 gcc_unreachable ();
3033 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3034 read_complex_part (y, imag_first));
3035 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3036 read_complex_part (y, !imag_first));
3039 /* A subroutine of emit_move_complex. Perform the move from Y to X
3040 via two moves of the parts. Returns the last instruction emitted. */
3043 emit_move_complex_parts (rtx x, rtx y)
3045 /* Show the output dies here. This is necessary for SUBREGs
3046 of pseudos since we cannot track their lifetimes correctly;
3047 hard regs shouldn't appear here except as return values. */
3048 if (!reload_completed && !reload_in_progress
3049 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3050 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3052 write_complex_part (x, read_complex_part (y, false), false);
3053 write_complex_part (x, read_complex_part (y, true), true);
3055 return get_last_insn ();
3058 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3059 MODE is known to be complex. Returns the last instruction emitted. */
3061 static rtx
3062 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3064 bool try_int;
3066 /* Need to take special care for pushes, to maintain proper ordering
3067 of the data, and possibly extra padding. */
3068 if (push_operand (x, mode))
3069 return emit_move_complex_push (mode, x, y);
3071 /* See if we can coerce the target into moving both values at once. */
3073 /* Move floating point as parts. */
3074 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3075 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
3076 try_int = false;
3077 /* Not possible if the values are inherently not adjacent. */
3078 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3079 try_int = false;
3080 /* Is possible if both are registers (or subregs of registers). */
3081 else if (register_operand (x, mode) && register_operand (y, mode))
3082 try_int = true;
3083 /* If one of the operands is a memory, and alignment constraints
3084 are friendly enough, we may be able to do combined memory operations.
3085 We do not attempt this if Y is a constant because that combination is
3086 usually better with the by-parts thing below. */
3087 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3088 && (!STRICT_ALIGNMENT
3089 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3090 try_int = true;
3091 else
3092 try_int = false;
3094 if (try_int)
3096 rtx ret;
3098 /* For memory to memory moves, optimal behavior can be had with the
3099 existing block move logic. */
3100 if (MEM_P (x) && MEM_P (y))
3102 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3103 BLOCK_OP_NO_LIBCALL);
3104 return get_last_insn ();
3107 ret = emit_move_via_integer (mode, x, y, true);
3108 if (ret)
3109 return ret;
3112 return emit_move_complex_parts (x, y);
3115 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3116 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3118 static rtx
3119 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3121 rtx ret;
3123 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3124 if (mode != CCmode)
3126 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3127 if (code != CODE_FOR_nothing)
3129 x = emit_move_change_mode (CCmode, mode, x, true);
3130 y = emit_move_change_mode (CCmode, mode, y, true);
3131 return emit_insn (GEN_FCN (code) (x, y));
3135 /* Otherwise, find the MODE_INT mode of the same width. */
3136 ret = emit_move_via_integer (mode, x, y, false);
3137 gcc_assert (ret != NULL);
3138 return ret;
3141 /* Return true if word I of OP lies entirely in the
3142 undefined bits of a paradoxical subreg. */
3144 static bool
3145 undefined_operand_subword_p (rtx op, int i)
3147 enum machine_mode innermode, innermostmode;
3148 int offset;
3149 if (GET_CODE (op) != SUBREG)
3150 return false;
3151 innermode = GET_MODE (op);
3152 innermostmode = GET_MODE (SUBREG_REG (op));
3153 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3154 /* The SUBREG_BYTE represents offset, as if the value were stored in
3155 memory, except for a paradoxical subreg where we define
3156 SUBREG_BYTE to be 0; undo this exception as in
3157 simplify_subreg. */
3158 if (SUBREG_BYTE (op) == 0
3159 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3161 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3162 if (WORDS_BIG_ENDIAN)
3163 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3164 if (BYTES_BIG_ENDIAN)
3165 offset += difference % UNITS_PER_WORD;
3167 if (offset >= GET_MODE_SIZE (innermostmode)
3168 || offset <= -GET_MODE_SIZE (word_mode))
3169 return true;
3170 return false;
3173 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3174 MODE is any multi-word or full-word mode that lacks a move_insn
3175 pattern. Note that you will get better code if you define such
3176 patterns, even if they must turn into multiple assembler instructions. */
3178 static rtx
3179 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3181 rtx last_insn = 0;
3182 rtx seq, inner;
3183 bool need_clobber;
3184 int i;
3186 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3188 /* If X is a push on the stack, do the push now and replace
3189 X with a reference to the stack pointer. */
3190 if (push_operand (x, mode))
3191 x = emit_move_resolve_push (mode, x);
3193 /* If we are in reload, see if either operand is a MEM whose address
3194 is scheduled for replacement. */
3195 if (reload_in_progress && MEM_P (x)
3196 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3197 x = replace_equiv_address_nv (x, inner);
3198 if (reload_in_progress && MEM_P (y)
3199 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3200 y = replace_equiv_address_nv (y, inner);
3202 start_sequence ();
3204 need_clobber = false;
3205 for (i = 0;
3206 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3207 i++)
3209 rtx xpart = operand_subword (x, i, 1, mode);
3210 rtx ypart;
3212 /* Do not generate code for a move if it would come entirely
3213 from the undefined bits of a paradoxical subreg. */
3214 if (undefined_operand_subword_p (y, i))
3215 continue;
3217 ypart = operand_subword (y, i, 1, mode);
3219 /* If we can't get a part of Y, put Y into memory if it is a
3220 constant. Otherwise, force it into a register. Then we must
3221 be able to get a part of Y. */
3222 if (ypart == 0 && CONSTANT_P (y))
3224 y = use_anchored_address (force_const_mem (mode, y));
3225 ypart = operand_subword (y, i, 1, mode);
3227 else if (ypart == 0)
3228 ypart = operand_subword_force (y, i, mode);
3230 gcc_assert (xpart && ypart);
3232 need_clobber |= (GET_CODE (xpart) == SUBREG);
3234 last_insn = emit_move_insn (xpart, ypart);
3237 seq = get_insns ();
3238 end_sequence ();
3240 /* Show the output dies here. This is necessary for SUBREGs
3241 of pseudos since we cannot track their lifetimes correctly;
3242 hard regs shouldn't appear here except as return values.
3243 We never want to emit such a clobber after reload. */
3244 if (x != y
3245 && ! (reload_in_progress || reload_completed)
3246 && need_clobber != 0)
3247 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3249 emit_insn (seq);
3251 return last_insn;
3254 /* Low level part of emit_move_insn.
3255 Called just like emit_move_insn, but assumes X and Y
3256 are basically valid. */
3259 emit_move_insn_1 (rtx x, rtx y)
3261 enum machine_mode mode = GET_MODE (x);
3262 enum insn_code code;
3264 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3266 code = mov_optab->handlers[mode].insn_code;
3267 if (code != CODE_FOR_nothing)
3268 return emit_insn (GEN_FCN (code) (x, y));
3270 /* Expand complex moves by moving real part and imag part. */
3271 if (COMPLEX_MODE_P (mode))
3272 return emit_move_complex (mode, x, y);
3274 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3276 rtx result = emit_move_via_integer (mode, x, y, true);
3278 /* If we can't find an integer mode, use multi words. */
3279 if (result)
3280 return result;
3281 else
3282 return emit_move_multi_word (mode, x, y);
3285 if (GET_MODE_CLASS (mode) == MODE_CC)
3286 return emit_move_ccmode (mode, x, y);
3288 /* Try using a move pattern for the corresponding integer mode. This is
3289 only safe when simplify_subreg can convert MODE constants into integer
3290 constants. At present, it can only do this reliably if the value
3291 fits within a HOST_WIDE_INT. */
3292 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3294 rtx ret = emit_move_via_integer (mode, x, y, false);
3295 if (ret)
3296 return ret;
3299 return emit_move_multi_word (mode, x, y);
3302 /* Generate code to copy Y into X.
3303 Both Y and X must have the same mode, except that
3304 Y can be a constant with VOIDmode.
3305 This mode cannot be BLKmode; use emit_block_move for that.
3307 Return the last instruction emitted. */
3310 emit_move_insn (rtx x, rtx y)
3312 enum machine_mode mode = GET_MODE (x);
3313 rtx y_cst = NULL_RTX;
3314 rtx last_insn, set;
3316 gcc_assert (mode != BLKmode
3317 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3319 if (CONSTANT_P (y))
3321 if (optimize
3322 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3323 && (last_insn = compress_float_constant (x, y)))
3324 return last_insn;
3326 y_cst = y;
3328 if (!LEGITIMATE_CONSTANT_P (y))
3330 y = force_const_mem (mode, y);
3332 /* If the target's cannot_force_const_mem prevented the spill,
3333 assume that the target's move expanders will also take care
3334 of the non-legitimate constant. */
3335 if (!y)
3336 y = y_cst;
3337 else
3338 y = use_anchored_address (y);
3342 /* If X or Y are memory references, verify that their addresses are valid
3343 for the machine. */
3344 if (MEM_P (x)
3345 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3346 && ! push_operand (x, GET_MODE (x)))
3347 || (flag_force_addr
3348 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3349 x = validize_mem (x);
3351 if (MEM_P (y)
3352 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3353 || (flag_force_addr
3354 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3355 y = validize_mem (y);
3357 gcc_assert (mode != BLKmode);
3359 last_insn = emit_move_insn_1 (x, y);
3361 if (y_cst && REG_P (x)
3362 && (set = single_set (last_insn)) != NULL_RTX
3363 && SET_DEST (set) == x
3364 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3365 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3367 return last_insn;
3370 /* If Y is representable exactly in a narrower mode, and the target can
3371 perform the extension directly from constant or memory, then emit the
3372 move as an extension. */
3374 static rtx
3375 compress_float_constant (rtx x, rtx y)
3377 enum machine_mode dstmode = GET_MODE (x);
3378 enum machine_mode orig_srcmode = GET_MODE (y);
3379 enum machine_mode srcmode;
3380 REAL_VALUE_TYPE r;
3381 int oldcost, newcost;
3383 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3385 if (LEGITIMATE_CONSTANT_P (y))
3386 oldcost = rtx_cost (y, SET);
3387 else
3388 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3390 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3391 srcmode != orig_srcmode;
3392 srcmode = GET_MODE_WIDER_MODE (srcmode))
3394 enum insn_code ic;
3395 rtx trunc_y, last_insn;
3397 /* Skip if the target can't extend this way. */
3398 ic = can_extend_p (dstmode, srcmode, 0);
3399 if (ic == CODE_FOR_nothing)
3400 continue;
3402 /* Skip if the narrowed value isn't exact. */
3403 if (! exact_real_truncate (srcmode, &r))
3404 continue;
3406 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3408 if (LEGITIMATE_CONSTANT_P (trunc_y))
3410 /* Skip if the target needs extra instructions to perform
3411 the extension. */
3412 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3413 continue;
3414 /* This is valid, but may not be cheaper than the original. */
3415 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3416 if (oldcost < newcost)
3417 continue;
3419 else if (float_extend_from_mem[dstmode][srcmode])
3421 trunc_y = force_const_mem (srcmode, trunc_y);
3422 /* This is valid, but may not be cheaper than the original. */
3423 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3424 if (oldcost < newcost)
3425 continue;
3426 trunc_y = validize_mem (trunc_y);
3428 else
3429 continue;
3431 /* For CSE's benefit, force the compressed constant pool entry
3432 into a new pseudo. This constant may be used in different modes,
3433 and if not, combine will put things back together for us. */
3434 trunc_y = force_reg (srcmode, trunc_y);
3435 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3436 last_insn = get_last_insn ();
3438 if (REG_P (x))
3439 set_unique_reg_note (last_insn, REG_EQUAL, y);
3441 return last_insn;
3444 return NULL_RTX;
3447 /* Pushing data onto the stack. */
3449 /* Push a block of length SIZE (perhaps variable)
3450 and return an rtx to address the beginning of the block.
3451 The value may be virtual_outgoing_args_rtx.
3453 EXTRA is the number of bytes of padding to push in addition to SIZE.
3454 BELOW nonzero means this padding comes at low addresses;
3455 otherwise, the padding comes at high addresses. */
3458 push_block (rtx size, int extra, int below)
3460 rtx temp;
3462 size = convert_modes (Pmode, ptr_mode, size, 1);
3463 if (CONSTANT_P (size))
3464 anti_adjust_stack (plus_constant (size, extra));
3465 else if (REG_P (size) && extra == 0)
3466 anti_adjust_stack (size);
3467 else
3469 temp = copy_to_mode_reg (Pmode, size);
3470 if (extra != 0)
3471 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3472 temp, 0, OPTAB_LIB_WIDEN);
3473 anti_adjust_stack (temp);
3476 #ifndef STACK_GROWS_DOWNWARD
3477 if (0)
3478 #else
3479 if (1)
3480 #endif
3482 temp = virtual_outgoing_args_rtx;
3483 if (extra != 0 && below)
3484 temp = plus_constant (temp, extra);
3486 else
3488 if (GET_CODE (size) == CONST_INT)
3489 temp = plus_constant (virtual_outgoing_args_rtx,
3490 -INTVAL (size) - (below ? 0 : extra));
3491 else if (extra != 0 && !below)
3492 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3493 negate_rtx (Pmode, plus_constant (size, extra)));
3494 else
3495 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3496 negate_rtx (Pmode, size));
3499 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3502 #ifdef PUSH_ROUNDING
3504 /* Emit single push insn. */
3506 static void
3507 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3509 rtx dest_addr;
3510 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3511 rtx dest;
3512 enum insn_code icode;
3513 insn_operand_predicate_fn pred;
3515 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3516 /* If there is push pattern, use it. Otherwise try old way of throwing
3517 MEM representing push operation to move expander. */
3518 icode = push_optab->handlers[(int) mode].insn_code;
3519 if (icode != CODE_FOR_nothing)
3521 if (((pred = insn_data[(int) icode].operand[0].predicate)
3522 && !((*pred) (x, mode))))
3523 x = force_reg (mode, x);
3524 emit_insn (GEN_FCN (icode) (x));
3525 return;
3527 if (GET_MODE_SIZE (mode) == rounded_size)
3528 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3529 /* If we are to pad downward, adjust the stack pointer first and
3530 then store X into the stack location using an offset. This is
3531 because emit_move_insn does not know how to pad; it does not have
3532 access to type. */
3533 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3535 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3536 HOST_WIDE_INT offset;
3538 emit_move_insn (stack_pointer_rtx,
3539 expand_binop (Pmode,
3540 #ifdef STACK_GROWS_DOWNWARD
3541 sub_optab,
3542 #else
3543 add_optab,
3544 #endif
3545 stack_pointer_rtx,
3546 GEN_INT (rounded_size),
3547 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3549 offset = (HOST_WIDE_INT) padding_size;
3550 #ifdef STACK_GROWS_DOWNWARD
3551 if (STACK_PUSH_CODE == POST_DEC)
3552 /* We have already decremented the stack pointer, so get the
3553 previous value. */
3554 offset += (HOST_WIDE_INT) rounded_size;
3555 #else
3556 if (STACK_PUSH_CODE == POST_INC)
3557 /* We have already incremented the stack pointer, so get the
3558 previous value. */
3559 offset -= (HOST_WIDE_INT) rounded_size;
3560 #endif
3561 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3563 else
3565 #ifdef STACK_GROWS_DOWNWARD
3566 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3567 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3568 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3569 #else
3570 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3571 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3572 GEN_INT (rounded_size));
3573 #endif
3574 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3577 dest = gen_rtx_MEM (mode, dest_addr);
3579 if (type != 0)
3581 set_mem_attributes (dest, type, 1);
3583 if (flag_optimize_sibling_calls)
3584 /* Function incoming arguments may overlap with sibling call
3585 outgoing arguments and we cannot allow reordering of reads
3586 from function arguments with stores to outgoing arguments
3587 of sibling calls. */
3588 set_mem_alias_set (dest, 0);
3590 emit_move_insn (dest, x);
3592 #endif
3594 /* Generate code to push X onto the stack, assuming it has mode MODE and
3595 type TYPE.
3596 MODE is redundant except when X is a CONST_INT (since they don't
3597 carry mode info).
3598 SIZE is an rtx for the size of data to be copied (in bytes),
3599 needed only if X is BLKmode.
3601 ALIGN (in bits) is maximum alignment we can assume.
3603 If PARTIAL and REG are both nonzero, then copy that many of the first
3604 bytes of X into registers starting with REG, and push the rest of X.
3605 The amount of space pushed is decreased by PARTIAL bytes.
3606 REG must be a hard register in this case.
3607 If REG is zero but PARTIAL is not, take any all others actions for an
3608 argument partially in registers, but do not actually load any
3609 registers.
3611 EXTRA is the amount in bytes of extra space to leave next to this arg.
3612 This is ignored if an argument block has already been allocated.
3614 On a machine that lacks real push insns, ARGS_ADDR is the address of
3615 the bottom of the argument block for this call. We use indexing off there
3616 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3617 argument block has not been preallocated.
3619 ARGS_SO_FAR is the size of args previously pushed for this call.
3621 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3622 for arguments passed in registers. If nonzero, it will be the number
3623 of bytes required. */
3625 void
3626 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3627 unsigned int align, int partial, rtx reg, int extra,
3628 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3629 rtx alignment_pad)
3631 rtx xinner;
3632 enum direction stack_direction
3633 #ifdef STACK_GROWS_DOWNWARD
3634 = downward;
3635 #else
3636 = upward;
3637 #endif
3639 /* Decide where to pad the argument: `downward' for below,
3640 `upward' for above, or `none' for don't pad it.
3641 Default is below for small data on big-endian machines; else above. */
3642 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3644 /* Invert direction if stack is post-decrement.
3645 FIXME: why? */
3646 if (STACK_PUSH_CODE == POST_DEC)
3647 if (where_pad != none)
3648 where_pad = (where_pad == downward ? upward : downward);
3650 xinner = x;
3652 if (mode == BLKmode
3653 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3655 /* Copy a block into the stack, entirely or partially. */
3657 rtx temp;
3658 int used;
3659 int offset;
3660 int skip;
3662 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3663 used = partial - offset;
3665 if (mode != BLKmode)
3667 /* A value is to be stored in an insufficiently aligned
3668 stack slot; copy via a suitably aligned slot if
3669 necessary. */
3670 size = GEN_INT (GET_MODE_SIZE (mode));
3671 if (!MEM_P (xinner))
3673 temp = assign_temp (type, 0, 1, 1);
3674 emit_move_insn (temp, xinner);
3675 xinner = temp;
3679 gcc_assert (size);
3681 /* USED is now the # of bytes we need not copy to the stack
3682 because registers will take care of them. */
3684 if (partial != 0)
3685 xinner = adjust_address (xinner, BLKmode, used);
3687 /* If the partial register-part of the arg counts in its stack size,
3688 skip the part of stack space corresponding to the registers.
3689 Otherwise, start copying to the beginning of the stack space,
3690 by setting SKIP to 0. */
3691 skip = (reg_parm_stack_space == 0) ? 0 : used;
3693 #ifdef PUSH_ROUNDING
3694 /* Do it with several push insns if that doesn't take lots of insns
3695 and if there is no difficulty with push insns that skip bytes
3696 on the stack for alignment purposes. */
3697 if (args_addr == 0
3698 && PUSH_ARGS
3699 && GET_CODE (size) == CONST_INT
3700 && skip == 0
3701 && MEM_ALIGN (xinner) >= align
3702 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3703 /* Here we avoid the case of a structure whose weak alignment
3704 forces many pushes of a small amount of data,
3705 and such small pushes do rounding that causes trouble. */
3706 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3707 || align >= BIGGEST_ALIGNMENT
3708 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3709 == (align / BITS_PER_UNIT)))
3710 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3712 /* Push padding now if padding above and stack grows down,
3713 or if padding below and stack grows up.
3714 But if space already allocated, this has already been done. */
3715 if (extra && args_addr == 0
3716 && where_pad != none && where_pad != stack_direction)
3717 anti_adjust_stack (GEN_INT (extra));
3719 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3721 else
3722 #endif /* PUSH_ROUNDING */
3724 rtx target;
3726 /* Otherwise make space on the stack and copy the data
3727 to the address of that space. */
3729 /* Deduct words put into registers from the size we must copy. */
3730 if (partial != 0)
3732 if (GET_CODE (size) == CONST_INT)
3733 size = GEN_INT (INTVAL (size) - used);
3734 else
3735 size = expand_binop (GET_MODE (size), sub_optab, size,
3736 GEN_INT (used), NULL_RTX, 0,
3737 OPTAB_LIB_WIDEN);
3740 /* Get the address of the stack space.
3741 In this case, we do not deal with EXTRA separately.
3742 A single stack adjust will do. */
3743 if (! args_addr)
3745 temp = push_block (size, extra, where_pad == downward);
3746 extra = 0;
3748 else if (GET_CODE (args_so_far) == CONST_INT)
3749 temp = memory_address (BLKmode,
3750 plus_constant (args_addr,
3751 skip + INTVAL (args_so_far)));
3752 else
3753 temp = memory_address (BLKmode,
3754 plus_constant (gen_rtx_PLUS (Pmode,
3755 args_addr,
3756 args_so_far),
3757 skip));
3759 if (!ACCUMULATE_OUTGOING_ARGS)
3761 /* If the source is referenced relative to the stack pointer,
3762 copy it to another register to stabilize it. We do not need
3763 to do this if we know that we won't be changing sp. */
3765 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3766 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3767 temp = copy_to_reg (temp);
3770 target = gen_rtx_MEM (BLKmode, temp);
3772 /* We do *not* set_mem_attributes here, because incoming arguments
3773 may overlap with sibling call outgoing arguments and we cannot
3774 allow reordering of reads from function arguments with stores
3775 to outgoing arguments of sibling calls. We do, however, want
3776 to record the alignment of the stack slot. */
3777 /* ALIGN may well be better aligned than TYPE, e.g. due to
3778 PARM_BOUNDARY. Assume the caller isn't lying. */
3779 set_mem_align (target, align);
3781 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3784 else if (partial > 0)
3786 /* Scalar partly in registers. */
3788 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3789 int i;
3790 int not_stack;
3791 /* # bytes of start of argument
3792 that we must make space for but need not store. */
3793 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3794 int args_offset = INTVAL (args_so_far);
3795 int skip;
3797 /* Push padding now if padding above and stack grows down,
3798 or if padding below and stack grows up.
3799 But if space already allocated, this has already been done. */
3800 if (extra && args_addr == 0
3801 && where_pad != none && where_pad != stack_direction)
3802 anti_adjust_stack (GEN_INT (extra));
3804 /* If we make space by pushing it, we might as well push
3805 the real data. Otherwise, we can leave OFFSET nonzero
3806 and leave the space uninitialized. */
3807 if (args_addr == 0)
3808 offset = 0;
3810 /* Now NOT_STACK gets the number of words that we don't need to
3811 allocate on the stack. Convert OFFSET to words too. */
3812 not_stack = (partial - offset) / UNITS_PER_WORD;
3813 offset /= UNITS_PER_WORD;
3815 /* If the partial register-part of the arg counts in its stack size,
3816 skip the part of stack space corresponding to the registers.
3817 Otherwise, start copying to the beginning of the stack space,
3818 by setting SKIP to 0. */
3819 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3821 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3822 x = validize_mem (force_const_mem (mode, x));
3824 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3825 SUBREGs of such registers are not allowed. */
3826 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3827 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3828 x = copy_to_reg (x);
3830 /* Loop over all the words allocated on the stack for this arg. */
3831 /* We can do it by words, because any scalar bigger than a word
3832 has a size a multiple of a word. */
3833 #ifndef PUSH_ARGS_REVERSED
3834 for (i = not_stack; i < size; i++)
3835 #else
3836 for (i = size - 1; i >= not_stack; i--)
3837 #endif
3838 if (i >= not_stack + offset)
3839 emit_push_insn (operand_subword_force (x, i, mode),
3840 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3841 0, args_addr,
3842 GEN_INT (args_offset + ((i - not_stack + skip)
3843 * UNITS_PER_WORD)),
3844 reg_parm_stack_space, alignment_pad);
3846 else
3848 rtx addr;
3849 rtx dest;
3851 /* Push padding now if padding above and stack grows down,
3852 or if padding below and stack grows up.
3853 But if space already allocated, this has already been done. */
3854 if (extra && args_addr == 0
3855 && where_pad != none && where_pad != stack_direction)
3856 anti_adjust_stack (GEN_INT (extra));
3858 #ifdef PUSH_ROUNDING
3859 if (args_addr == 0 && PUSH_ARGS)
3860 emit_single_push_insn (mode, x, type);
3861 else
3862 #endif
3864 if (GET_CODE (args_so_far) == CONST_INT)
3865 addr
3866 = memory_address (mode,
3867 plus_constant (args_addr,
3868 INTVAL (args_so_far)));
3869 else
3870 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3871 args_so_far));
3872 dest = gen_rtx_MEM (mode, addr);
3874 /* We do *not* set_mem_attributes here, because incoming arguments
3875 may overlap with sibling call outgoing arguments and we cannot
3876 allow reordering of reads from function arguments with stores
3877 to outgoing arguments of sibling calls. We do, however, want
3878 to record the alignment of the stack slot. */
3879 /* ALIGN may well be better aligned than TYPE, e.g. due to
3880 PARM_BOUNDARY. Assume the caller isn't lying. */
3881 set_mem_align (dest, align);
3883 emit_move_insn (dest, x);
3887 /* If part should go in registers, copy that part
3888 into the appropriate registers. Do this now, at the end,
3889 since mem-to-mem copies above may do function calls. */
3890 if (partial > 0 && reg != 0)
3892 /* Handle calls that pass values in multiple non-contiguous locations.
3893 The Irix 6 ABI has examples of this. */
3894 if (GET_CODE (reg) == PARALLEL)
3895 emit_group_load (reg, x, type, -1);
3896 else
3898 gcc_assert (partial % UNITS_PER_WORD == 0);
3899 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3903 if (extra && args_addr == 0 && where_pad == stack_direction)
3904 anti_adjust_stack (GEN_INT (extra));
3906 if (alignment_pad && args_addr == 0)
3907 anti_adjust_stack (alignment_pad);
3910 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3911 operations. */
3913 static rtx
3914 get_subtarget (rtx x)
3916 return (optimize
3917 || x == 0
3918 /* Only registers can be subtargets. */
3919 || !REG_P (x)
3920 /* Don't use hard regs to avoid extending their life. */
3921 || REGNO (x) < FIRST_PSEUDO_REGISTER
3922 ? 0 : x);
3925 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3926 FIELD is a bitfield. Returns true if the optimization was successful,
3927 and there's nothing else to do. */
3929 static bool
3930 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3931 unsigned HOST_WIDE_INT bitpos,
3932 enum machine_mode mode1, rtx str_rtx,
3933 tree to, tree src)
3935 enum machine_mode str_mode = GET_MODE (str_rtx);
3936 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3937 tree op0, op1;
3938 rtx value, result;
3939 optab binop;
3941 if (mode1 != VOIDmode
3942 || bitsize >= BITS_PER_WORD
3943 || str_bitsize > BITS_PER_WORD
3944 || TREE_SIDE_EFFECTS (to)
3945 || TREE_THIS_VOLATILE (to))
3946 return false;
3948 STRIP_NOPS (src);
3949 if (!BINARY_CLASS_P (src)
3950 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3951 return false;
3953 op0 = TREE_OPERAND (src, 0);
3954 op1 = TREE_OPERAND (src, 1);
3955 STRIP_NOPS (op0);
3957 if (!operand_equal_p (to, op0, 0))
3958 return false;
3960 if (MEM_P (str_rtx))
3962 unsigned HOST_WIDE_INT offset1;
3964 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3965 str_mode = word_mode;
3966 str_mode = get_best_mode (bitsize, bitpos,
3967 MEM_ALIGN (str_rtx), str_mode, 0);
3968 if (str_mode == VOIDmode)
3969 return false;
3970 str_bitsize = GET_MODE_BITSIZE (str_mode);
3972 offset1 = bitpos;
3973 bitpos %= str_bitsize;
3974 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3975 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3977 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3978 return false;
3980 /* If the bit field covers the whole REG/MEM, store_field
3981 will likely generate better code. */
3982 if (bitsize >= str_bitsize)
3983 return false;
3985 /* We can't handle fields split across multiple entities. */
3986 if (bitpos + bitsize > str_bitsize)
3987 return false;
3989 if (BYTES_BIG_ENDIAN)
3990 bitpos = str_bitsize - bitpos - bitsize;
3992 switch (TREE_CODE (src))
3994 case PLUS_EXPR:
3995 case MINUS_EXPR:
3996 /* For now, just optimize the case of the topmost bitfield
3997 where we don't need to do any masking and also
3998 1 bit bitfields where xor can be used.
3999 We might win by one instruction for the other bitfields
4000 too if insv/extv instructions aren't used, so that
4001 can be added later. */
4002 if (bitpos + bitsize != str_bitsize
4003 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4004 break;
4006 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4007 value = convert_modes (str_mode,
4008 TYPE_MODE (TREE_TYPE (op1)), value,
4009 TYPE_UNSIGNED (TREE_TYPE (op1)));
4011 /* We may be accessing data outside the field, which means
4012 we can alias adjacent data. */
4013 if (MEM_P (str_rtx))
4015 str_rtx = shallow_copy_rtx (str_rtx);
4016 set_mem_alias_set (str_rtx, 0);
4017 set_mem_expr (str_rtx, 0);
4020 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4021 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4023 value = expand_and (str_mode, value, const1_rtx, NULL);
4024 binop = xor_optab;
4026 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4027 build_int_cst (NULL_TREE, bitpos),
4028 NULL_RTX, 1);
4029 result = expand_binop (str_mode, binop, str_rtx,
4030 value, str_rtx, 1, OPTAB_WIDEN);
4031 if (result != str_rtx)
4032 emit_move_insn (str_rtx, result);
4033 return true;
4035 case BIT_IOR_EXPR:
4036 case BIT_XOR_EXPR:
4037 if (TREE_CODE (op1) != INTEGER_CST)
4038 break;
4039 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4040 value = convert_modes (GET_MODE (str_rtx),
4041 TYPE_MODE (TREE_TYPE (op1)), value,
4042 TYPE_UNSIGNED (TREE_TYPE (op1)));
4044 /* We may be accessing data outside the field, which means
4045 we can alias adjacent data. */
4046 if (MEM_P (str_rtx))
4048 str_rtx = shallow_copy_rtx (str_rtx);
4049 set_mem_alias_set (str_rtx, 0);
4050 set_mem_expr (str_rtx, 0);
4053 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4054 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4056 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4057 - 1);
4058 value = expand_and (GET_MODE (str_rtx), value, mask,
4059 NULL_RTX);
4061 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4062 build_int_cst (NULL_TREE, bitpos),
4063 NULL_RTX, 1);
4064 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4065 value, str_rtx, 1, OPTAB_WIDEN);
4066 if (result != str_rtx)
4067 emit_move_insn (str_rtx, result);
4068 return true;
4070 default:
4071 break;
4074 return false;
4078 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4079 is true, try generating a nontemporal store. */
4081 void
4082 expand_assignment (tree to, tree from, bool nontemporal)
4084 rtx to_rtx = 0;
4085 rtx result;
4087 /* Don't crash if the lhs of the assignment was erroneous. */
4088 if (TREE_CODE (to) == ERROR_MARK)
4090 result = expand_normal (from);
4091 return;
4094 /* Optimize away no-op moves without side-effects. */
4095 if (operand_equal_p (to, from, 0))
4096 return;
4098 /* Assignment of a structure component needs special treatment
4099 if the structure component's rtx is not simply a MEM.
4100 Assignment of an array element at a constant index, and assignment of
4101 an array element in an unaligned packed structure field, has the same
4102 problem. */
4103 if (handled_component_p (to)
4104 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4106 enum machine_mode mode1;
4107 HOST_WIDE_INT bitsize, bitpos;
4108 tree offset;
4109 int unsignedp;
4110 int volatilep = 0;
4111 tree tem;
4113 push_temp_slots ();
4114 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4115 &unsignedp, &volatilep, true);
4117 /* If we are going to use store_bit_field and extract_bit_field,
4118 make sure to_rtx will be safe for multiple use. */
4120 to_rtx = expand_normal (tem);
4122 if (offset != 0)
4124 rtx offset_rtx;
4126 if (!MEM_P (to_rtx))
4128 /* We can get constant negative offsets into arrays with broken
4129 user code. Translate this to a trap instead of ICEing. */
4130 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4131 expand_builtin_trap ();
4132 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4135 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4136 #ifdef POINTERS_EXTEND_UNSIGNED
4137 if (GET_MODE (offset_rtx) != Pmode)
4138 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4139 #else
4140 if (GET_MODE (offset_rtx) != ptr_mode)
4141 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4142 #endif
4144 /* A constant address in TO_RTX can have VOIDmode, we must not try
4145 to call force_reg for that case. Avoid that case. */
4146 if (MEM_P (to_rtx)
4147 && GET_MODE (to_rtx) == BLKmode
4148 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4149 && bitsize > 0
4150 && (bitpos % bitsize) == 0
4151 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4152 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4154 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4155 bitpos = 0;
4158 to_rtx = offset_address (to_rtx, offset_rtx,
4159 highest_pow2_factor_for_target (to,
4160 offset));
4163 /* Handle expand_expr of a complex value returning a CONCAT. */
4164 if (GET_CODE (to_rtx) == CONCAT)
4166 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4168 gcc_assert (bitpos == 0);
4169 result = store_expr (from, to_rtx, false, nontemporal);
4171 else
4173 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4174 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4175 nontemporal);
4178 else
4180 if (MEM_P (to_rtx))
4182 /* If the field is at offset zero, we could have been given the
4183 DECL_RTX of the parent struct. Don't munge it. */
4184 to_rtx = shallow_copy_rtx (to_rtx);
4186 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4188 /* Deal with volatile and readonly fields. The former is only
4189 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4190 if (volatilep)
4191 MEM_VOLATILE_P (to_rtx) = 1;
4192 if (component_uses_parent_alias_set (to))
4193 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4196 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4197 to_rtx, to, from))
4198 result = NULL;
4199 else
4200 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4201 TREE_TYPE (tem), get_alias_set (to),
4202 nontemporal);
4205 if (result)
4206 preserve_temp_slots (result);
4207 free_temp_slots ();
4208 pop_temp_slots ();
4209 return;
4212 /* If the rhs is a function call and its value is not an aggregate,
4213 call the function before we start to compute the lhs.
4214 This is needed for correct code for cases such as
4215 val = setjmp (buf) on machines where reference to val
4216 requires loading up part of an address in a separate insn.
4218 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4219 since it might be a promoted variable where the zero- or sign- extension
4220 needs to be done. Handling this in the normal way is safe because no
4221 computation is done before the call. */
4222 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4223 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4224 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4225 && REG_P (DECL_RTL (to))))
4227 rtx value;
4229 push_temp_slots ();
4230 value = expand_normal (from);
4231 if (to_rtx == 0)
4232 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4234 /* Handle calls that return values in multiple non-contiguous locations.
4235 The Irix 6 ABI has examples of this. */
4236 if (GET_CODE (to_rtx) == PARALLEL)
4237 emit_group_load (to_rtx, value, TREE_TYPE (from),
4238 int_size_in_bytes (TREE_TYPE (from)));
4239 else if (GET_MODE (to_rtx) == BLKmode)
4240 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4241 else
4243 if (POINTER_TYPE_P (TREE_TYPE (to)))
4244 value = convert_memory_address (GET_MODE (to_rtx), value);
4245 emit_move_insn (to_rtx, value);
4247 preserve_temp_slots (to_rtx);
4248 free_temp_slots ();
4249 pop_temp_slots ();
4250 return;
4253 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4254 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4256 if (to_rtx == 0)
4257 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4259 /* Don't move directly into a return register. */
4260 if (TREE_CODE (to) == RESULT_DECL
4261 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4263 rtx temp;
4265 push_temp_slots ();
4266 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4268 if (GET_CODE (to_rtx) == PARALLEL)
4269 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4270 int_size_in_bytes (TREE_TYPE (from)));
4271 else
4272 emit_move_insn (to_rtx, temp);
4274 preserve_temp_slots (to_rtx);
4275 free_temp_slots ();
4276 pop_temp_slots ();
4277 return;
4280 /* In case we are returning the contents of an object which overlaps
4281 the place the value is being stored, use a safe function when copying
4282 a value through a pointer into a structure value return block. */
4283 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4284 && current_function_returns_struct
4285 && !current_function_returns_pcc_struct)
4287 rtx from_rtx, size;
4289 push_temp_slots ();
4290 size = expr_size (from);
4291 from_rtx = expand_normal (from);
4293 emit_library_call (memmove_libfunc, LCT_NORMAL,
4294 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4295 XEXP (from_rtx, 0), Pmode,
4296 convert_to_mode (TYPE_MODE (sizetype),
4297 size, TYPE_UNSIGNED (sizetype)),
4298 TYPE_MODE (sizetype));
4300 preserve_temp_slots (to_rtx);
4301 free_temp_slots ();
4302 pop_temp_slots ();
4303 return;
4306 /* Compute FROM and store the value in the rtx we got. */
4308 push_temp_slots ();
4309 result = store_expr (from, to_rtx, 0, nontemporal);
4310 preserve_temp_slots (result);
4311 free_temp_slots ();
4312 pop_temp_slots ();
4313 return;
4316 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4317 succeeded, false otherwise. */
4319 static bool
4320 emit_storent_insn (rtx to, rtx from)
4322 enum machine_mode mode = GET_MODE (to), imode;
4323 enum insn_code code = storent_optab->handlers[mode].insn_code;
4324 rtx pattern;
4326 if (code == CODE_FOR_nothing)
4327 return false;
4329 imode = insn_data[code].operand[0].mode;
4330 if (!insn_data[code].operand[0].predicate (to, imode))
4331 return false;
4333 imode = insn_data[code].operand[1].mode;
4334 if (!insn_data[code].operand[1].predicate (from, imode))
4336 from = copy_to_mode_reg (imode, from);
4337 if (!insn_data[code].operand[1].predicate (from, imode))
4338 return false;
4341 pattern = GEN_FCN (code) (to, from);
4342 if (pattern == NULL_RTX)
4343 return false;
4345 emit_insn (pattern);
4346 return true;
4349 /* Generate code for computing expression EXP,
4350 and storing the value into TARGET.
4352 If the mode is BLKmode then we may return TARGET itself.
4353 It turns out that in BLKmode it doesn't cause a problem.
4354 because C has no operators that could combine two different
4355 assignments into the same BLKmode object with different values
4356 with no sequence point. Will other languages need this to
4357 be more thorough?
4359 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4360 stack, and block moves may need to be treated specially.
4362 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4365 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4367 rtx temp;
4368 rtx alt_rtl = NULL_RTX;
4369 int dont_return_target = 0;
4371 if (VOID_TYPE_P (TREE_TYPE (exp)))
4373 /* C++ can generate ?: expressions with a throw expression in one
4374 branch and an rvalue in the other. Here, we resolve attempts to
4375 store the throw expression's nonexistent result. */
4376 gcc_assert (!call_param_p);
4377 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4378 return NULL_RTX;
4380 if (TREE_CODE (exp) == COMPOUND_EXPR)
4382 /* Perform first part of compound expression, then assign from second
4383 part. */
4384 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4385 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4386 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4387 nontemporal);
4389 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4391 /* For conditional expression, get safe form of the target. Then
4392 test the condition, doing the appropriate assignment on either
4393 side. This avoids the creation of unnecessary temporaries.
4394 For non-BLKmode, it is more efficient not to do this. */
4396 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4398 do_pending_stack_adjust ();
4399 NO_DEFER_POP;
4400 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4401 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4402 nontemporal);
4403 emit_jump_insn (gen_jump (lab2));
4404 emit_barrier ();
4405 emit_label (lab1);
4406 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4407 nontemporal);
4408 emit_label (lab2);
4409 OK_DEFER_POP;
4411 return NULL_RTX;
4413 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4414 /* If this is a scalar in a register that is stored in a wider mode
4415 than the declared mode, compute the result into its declared mode
4416 and then convert to the wider mode. Our value is the computed
4417 expression. */
4419 rtx inner_target = 0;
4421 /* We can do the conversion inside EXP, which will often result
4422 in some optimizations. Do the conversion in two steps: first
4423 change the signedness, if needed, then the extend. But don't
4424 do this if the type of EXP is a subtype of something else
4425 since then the conversion might involve more than just
4426 converting modes. */
4427 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4428 && TREE_TYPE (TREE_TYPE (exp)) == 0
4429 && (!lang_hooks.reduce_bit_field_operations
4430 || (GET_MODE_PRECISION (GET_MODE (target))
4431 == TYPE_PRECISION (TREE_TYPE (exp)))))
4433 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4434 != SUBREG_PROMOTED_UNSIGNED_P (target))
4436 /* Some types, e.g. Fortran's logical*4, won't have a signed
4437 version, so use the mode instead. */
4438 tree ntype
4439 = (signed_or_unsigned_type_for
4440 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4441 if (ntype == NULL)
4442 ntype = lang_hooks.types.type_for_mode
4443 (TYPE_MODE (TREE_TYPE (exp)),
4444 SUBREG_PROMOTED_UNSIGNED_P (target));
4446 exp = fold_convert (ntype, exp);
4449 exp = fold_convert (lang_hooks.types.type_for_mode
4450 (GET_MODE (SUBREG_REG (target)),
4451 SUBREG_PROMOTED_UNSIGNED_P (target)),
4452 exp);
4454 inner_target = SUBREG_REG (target);
4457 temp = expand_expr (exp, inner_target, VOIDmode,
4458 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4460 /* If TEMP is a VOIDmode constant, use convert_modes to make
4461 sure that we properly convert it. */
4462 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4464 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4465 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4466 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4467 GET_MODE (target), temp,
4468 SUBREG_PROMOTED_UNSIGNED_P (target));
4471 convert_move (SUBREG_REG (target), temp,
4472 SUBREG_PROMOTED_UNSIGNED_P (target));
4474 return NULL_RTX;
4476 else
4478 rtx tmp_target;
4480 /* If we want to use a nontemporal store, force the value to
4481 register first. */
4482 tmp_target = nontemporal ? NULL_RTX : target;
4483 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4484 (call_param_p
4485 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4486 &alt_rtl);
4487 /* Return TARGET if it's a specified hardware register.
4488 If TARGET is a volatile mem ref, either return TARGET
4489 or return a reg copied *from* TARGET; ANSI requires this.
4491 Otherwise, if TEMP is not TARGET, return TEMP
4492 if it is constant (for efficiency),
4493 or if we really want the correct value. */
4494 if (!(target && REG_P (target)
4495 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4496 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4497 && ! rtx_equal_p (temp, target)
4498 && CONSTANT_P (temp))
4499 dont_return_target = 1;
4502 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4503 the same as that of TARGET, adjust the constant. This is needed, for
4504 example, in case it is a CONST_DOUBLE and we want only a word-sized
4505 value. */
4506 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4507 && TREE_CODE (exp) != ERROR_MARK
4508 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4509 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4510 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4512 /* If value was not generated in the target, store it there.
4513 Convert the value to TARGET's type first if necessary and emit the
4514 pending incrementations that have been queued when expanding EXP.
4515 Note that we cannot emit the whole queue blindly because this will
4516 effectively disable the POST_INC optimization later.
4518 If TEMP and TARGET compare equal according to rtx_equal_p, but
4519 one or both of them are volatile memory refs, we have to distinguish
4520 two cases:
4521 - expand_expr has used TARGET. In this case, we must not generate
4522 another copy. This can be detected by TARGET being equal according
4523 to == .
4524 - expand_expr has not used TARGET - that means that the source just
4525 happens to have the same RTX form. Since temp will have been created
4526 by expand_expr, it will compare unequal according to == .
4527 We must generate a copy in this case, to reach the correct number
4528 of volatile memory references. */
4530 if ((! rtx_equal_p (temp, target)
4531 || (temp != target && (side_effects_p (temp)
4532 || side_effects_p (target))))
4533 && TREE_CODE (exp) != ERROR_MARK
4534 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4535 but TARGET is not valid memory reference, TEMP will differ
4536 from TARGET although it is really the same location. */
4537 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4538 /* If there's nothing to copy, don't bother. Don't call
4539 expr_size unless necessary, because some front-ends (C++)
4540 expr_size-hook must not be given objects that are not
4541 supposed to be bit-copied or bit-initialized. */
4542 && expr_size (exp) != const0_rtx)
4544 if (GET_MODE (temp) != GET_MODE (target)
4545 && GET_MODE (temp) != VOIDmode)
4547 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4548 if (dont_return_target)
4550 /* In this case, we will return TEMP,
4551 so make sure it has the proper mode.
4552 But don't forget to store the value into TARGET. */
4553 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4554 emit_move_insn (target, temp);
4556 else if (GET_MODE (target) == BLKmode)
4557 emit_block_move (target, temp, expr_size (exp),
4558 (call_param_p
4559 ? BLOCK_OP_CALL_PARM
4560 : BLOCK_OP_NORMAL));
4561 else
4562 convert_move (target, temp, unsignedp);
4565 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4567 /* Handle copying a string constant into an array. The string
4568 constant may be shorter than the array. So copy just the string's
4569 actual length, and clear the rest. First get the size of the data
4570 type of the string, which is actually the size of the target. */
4571 rtx size = expr_size (exp);
4573 if (GET_CODE (size) == CONST_INT
4574 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4575 emit_block_move (target, temp, size,
4576 (call_param_p
4577 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4578 else
4580 /* Compute the size of the data to copy from the string. */
4581 tree copy_size
4582 = size_binop (MIN_EXPR,
4583 make_tree (sizetype, size),
4584 size_int (TREE_STRING_LENGTH (exp)));
4585 rtx copy_size_rtx
4586 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4587 (call_param_p
4588 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4589 rtx label = 0;
4591 /* Copy that much. */
4592 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4593 TYPE_UNSIGNED (sizetype));
4594 emit_block_move (target, temp, copy_size_rtx,
4595 (call_param_p
4596 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4598 /* Figure out how much is left in TARGET that we have to clear.
4599 Do all calculations in ptr_mode. */
4600 if (GET_CODE (copy_size_rtx) == CONST_INT)
4602 size = plus_constant (size, -INTVAL (copy_size_rtx));
4603 target = adjust_address (target, BLKmode,
4604 INTVAL (copy_size_rtx));
4606 else
4608 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4609 copy_size_rtx, NULL_RTX, 0,
4610 OPTAB_LIB_WIDEN);
4612 #ifdef POINTERS_EXTEND_UNSIGNED
4613 if (GET_MODE (copy_size_rtx) != Pmode)
4614 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4615 TYPE_UNSIGNED (sizetype));
4616 #endif
4618 target = offset_address (target, copy_size_rtx,
4619 highest_pow2_factor (copy_size));
4620 label = gen_label_rtx ();
4621 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4622 GET_MODE (size), 0, label);
4625 if (size != const0_rtx)
4626 clear_storage (target, size, BLOCK_OP_NORMAL);
4628 if (label)
4629 emit_label (label);
4632 /* Handle calls that return values in multiple non-contiguous locations.
4633 The Irix 6 ABI has examples of this. */
4634 else if (GET_CODE (target) == PARALLEL)
4635 emit_group_load (target, temp, TREE_TYPE (exp),
4636 int_size_in_bytes (TREE_TYPE (exp)));
4637 else if (GET_MODE (temp) == BLKmode)
4638 emit_block_move (target, temp, expr_size (exp),
4639 (call_param_p
4640 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4641 else if (nontemporal
4642 && emit_storent_insn (target, temp))
4643 /* If we managed to emit a nontemporal store, there is nothing else to
4644 do. */
4646 else
4648 temp = force_operand (temp, target);
4649 if (temp != target)
4650 emit_move_insn (target, temp);
4654 return NULL_RTX;
4657 /* Helper for categorize_ctor_elements. Identical interface. */
4659 static bool
4660 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4661 HOST_WIDE_INT *p_elt_count,
4662 bool *p_must_clear)
4664 unsigned HOST_WIDE_INT idx;
4665 HOST_WIDE_INT nz_elts, elt_count;
4666 tree value, purpose;
4668 /* Whether CTOR is a valid constant initializer, in accordance with what
4669 initializer_constant_valid_p does. If inferred from the constructor
4670 elements, true until proven otherwise. */
4671 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4672 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4674 nz_elts = 0;
4675 elt_count = 0;
4677 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4679 HOST_WIDE_INT mult;
4681 mult = 1;
4682 if (TREE_CODE (purpose) == RANGE_EXPR)
4684 tree lo_index = TREE_OPERAND (purpose, 0);
4685 tree hi_index = TREE_OPERAND (purpose, 1);
4687 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4688 mult = (tree_low_cst (hi_index, 1)
4689 - tree_low_cst (lo_index, 1) + 1);
4692 switch (TREE_CODE (value))
4694 case CONSTRUCTOR:
4696 HOST_WIDE_INT nz = 0, ic = 0;
4698 bool const_elt_p
4699 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4701 nz_elts += mult * nz;
4702 elt_count += mult * ic;
4704 if (const_from_elts_p && const_p)
4705 const_p = const_elt_p;
4707 break;
4709 case INTEGER_CST:
4710 case REAL_CST:
4711 if (!initializer_zerop (value))
4712 nz_elts += mult;
4713 elt_count += mult;
4714 break;
4716 case STRING_CST:
4717 nz_elts += mult * TREE_STRING_LENGTH (value);
4718 elt_count += mult * TREE_STRING_LENGTH (value);
4719 break;
4721 case COMPLEX_CST:
4722 if (!initializer_zerop (TREE_REALPART (value)))
4723 nz_elts += mult;
4724 if (!initializer_zerop (TREE_IMAGPART (value)))
4725 nz_elts += mult;
4726 elt_count += mult;
4727 break;
4729 case VECTOR_CST:
4731 tree v;
4732 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4734 if (!initializer_zerop (TREE_VALUE (v)))
4735 nz_elts += mult;
4736 elt_count += mult;
4739 break;
4741 default:
4742 nz_elts += mult;
4743 elt_count += mult;
4745 if (const_from_elts_p && const_p)
4746 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4747 != NULL_TREE;
4748 break;
4752 if (!*p_must_clear
4753 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4754 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4756 tree init_sub_type;
4757 bool clear_this = true;
4759 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4761 /* We don't expect more than one element of the union to be
4762 initialized. Not sure what we should do otherwise... */
4763 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4764 == 1);
4766 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4767 CONSTRUCTOR_ELTS (ctor),
4768 0)->value);
4770 /* ??? We could look at each element of the union, and find the
4771 largest element. Which would avoid comparing the size of the
4772 initialized element against any tail padding in the union.
4773 Doesn't seem worth the effort... */
4774 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4775 TYPE_SIZE (init_sub_type)) == 1)
4777 /* And now we have to find out if the element itself is fully
4778 constructed. E.g. for union { struct { int a, b; } s; } u
4779 = { .s = { .a = 1 } }. */
4780 if (elt_count == count_type_elements (init_sub_type, false))
4781 clear_this = false;
4785 *p_must_clear = clear_this;
4788 *p_nz_elts += nz_elts;
4789 *p_elt_count += elt_count;
4791 return const_p;
4794 /* Examine CTOR to discover:
4795 * how many scalar fields are set to nonzero values,
4796 and place it in *P_NZ_ELTS;
4797 * how many scalar fields in total are in CTOR,
4798 and place it in *P_ELT_COUNT.
4799 * if a type is a union, and the initializer from the constructor
4800 is not the largest element in the union, then set *p_must_clear.
4802 Return whether or not CTOR is a valid static constant initializer, the same
4803 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4805 bool
4806 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4807 HOST_WIDE_INT *p_elt_count,
4808 bool *p_must_clear)
4810 *p_nz_elts = 0;
4811 *p_elt_count = 0;
4812 *p_must_clear = false;
4814 return
4815 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4818 /* Count the number of scalars in TYPE. Return -1 on overflow or
4819 variable-sized. If ALLOW_FLEXARR is true, don't count flexible
4820 array member at the end of the structure. */
4822 HOST_WIDE_INT
4823 count_type_elements (tree type, bool allow_flexarr)
4825 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4826 switch (TREE_CODE (type))
4828 case ARRAY_TYPE:
4830 tree telts = array_type_nelts (type);
4831 if (telts && host_integerp (telts, 1))
4833 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4834 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4835 if (n == 0)
4836 return 0;
4837 else if (max / n > m)
4838 return n * m;
4840 return -1;
4843 case RECORD_TYPE:
4845 HOST_WIDE_INT n = 0, t;
4846 tree f;
4848 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4849 if (TREE_CODE (f) == FIELD_DECL)
4851 t = count_type_elements (TREE_TYPE (f), false);
4852 if (t < 0)
4854 /* Check for structures with flexible array member. */
4855 tree tf = TREE_TYPE (f);
4856 if (allow_flexarr
4857 && TREE_CHAIN (f) == NULL
4858 && TREE_CODE (tf) == ARRAY_TYPE
4859 && TYPE_DOMAIN (tf)
4860 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4861 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4862 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4863 && int_size_in_bytes (type) >= 0)
4864 break;
4866 return -1;
4868 n += t;
4871 return n;
4874 case UNION_TYPE:
4875 case QUAL_UNION_TYPE:
4877 /* Ho hum. How in the world do we guess here? Clearly it isn't
4878 right to count the fields. Guess based on the number of words. */
4879 HOST_WIDE_INT n = int_size_in_bytes (type);
4880 if (n < 0)
4881 return -1;
4882 return n / UNITS_PER_WORD;
4885 case COMPLEX_TYPE:
4886 return 2;
4888 case VECTOR_TYPE:
4889 return TYPE_VECTOR_SUBPARTS (type);
4891 case INTEGER_TYPE:
4892 case REAL_TYPE:
4893 case ENUMERAL_TYPE:
4894 case BOOLEAN_TYPE:
4895 case POINTER_TYPE:
4896 case OFFSET_TYPE:
4897 case REFERENCE_TYPE:
4898 return 1;
4900 case VOID_TYPE:
4901 case METHOD_TYPE:
4902 case FUNCTION_TYPE:
4903 case LANG_TYPE:
4904 default:
4905 gcc_unreachable ();
4909 /* Return 1 if EXP contains mostly (3/4) zeros. */
4911 static int
4912 mostly_zeros_p (tree exp)
4914 if (TREE_CODE (exp) == CONSTRUCTOR)
4917 HOST_WIDE_INT nz_elts, count, elts;
4918 bool must_clear;
4920 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4921 if (must_clear)
4922 return 1;
4924 elts = count_type_elements (TREE_TYPE (exp), false);
4926 return nz_elts < elts / 4;
4929 return initializer_zerop (exp);
4932 /* Return 1 if EXP contains all zeros. */
4934 static int
4935 all_zeros_p (tree exp)
4937 if (TREE_CODE (exp) == CONSTRUCTOR)
4940 HOST_WIDE_INT nz_elts, count;
4941 bool must_clear;
4943 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4944 return nz_elts == 0;
4947 return initializer_zerop (exp);
4950 /* Helper function for store_constructor.
4951 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4952 TYPE is the type of the CONSTRUCTOR, not the element type.
4953 CLEARED is as for store_constructor.
4954 ALIAS_SET is the alias set to use for any stores.
4956 This provides a recursive shortcut back to store_constructor when it isn't
4957 necessary to go through store_field. This is so that we can pass through
4958 the cleared field to let store_constructor know that we may not have to
4959 clear a substructure if the outer structure has already been cleared. */
4961 static void
4962 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4963 HOST_WIDE_INT bitpos, enum machine_mode mode,
4964 tree exp, tree type, int cleared, int alias_set)
4966 if (TREE_CODE (exp) == CONSTRUCTOR
4967 /* We can only call store_constructor recursively if the size and
4968 bit position are on a byte boundary. */
4969 && bitpos % BITS_PER_UNIT == 0
4970 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4971 /* If we have a nonzero bitpos for a register target, then we just
4972 let store_field do the bitfield handling. This is unlikely to
4973 generate unnecessary clear instructions anyways. */
4974 && (bitpos == 0 || MEM_P (target)))
4976 if (MEM_P (target))
4977 target
4978 = adjust_address (target,
4979 GET_MODE (target) == BLKmode
4980 || 0 != (bitpos
4981 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4982 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4985 /* Update the alias set, if required. */
4986 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4987 && MEM_ALIAS_SET (target) != 0)
4989 target = copy_rtx (target);
4990 set_mem_alias_set (target, alias_set);
4993 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4995 else
4996 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
4999 /* Store the value of constructor EXP into the rtx TARGET.
5000 TARGET is either a REG or a MEM; we know it cannot conflict, since
5001 safe_from_p has been called.
5002 CLEARED is true if TARGET is known to have been zero'd.
5003 SIZE is the number of bytes of TARGET we are allowed to modify: this
5004 may not be the same as the size of EXP if we are assigning to a field
5005 which has been packed to exclude padding bits. */
5007 static void
5008 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5010 tree type = TREE_TYPE (exp);
5011 #ifdef WORD_REGISTER_OPERATIONS
5012 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5013 #endif
5015 switch (TREE_CODE (type))
5017 case RECORD_TYPE:
5018 case UNION_TYPE:
5019 case QUAL_UNION_TYPE:
5021 unsigned HOST_WIDE_INT idx;
5022 tree field, value;
5024 /* If size is zero or the target is already cleared, do nothing. */
5025 if (size == 0 || cleared)
5026 cleared = 1;
5027 /* We either clear the aggregate or indicate the value is dead. */
5028 else if ((TREE_CODE (type) == UNION_TYPE
5029 || TREE_CODE (type) == QUAL_UNION_TYPE)
5030 && ! CONSTRUCTOR_ELTS (exp))
5031 /* If the constructor is empty, clear the union. */
5033 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5034 cleared = 1;
5037 /* If we are building a static constructor into a register,
5038 set the initial value as zero so we can fold the value into
5039 a constant. But if more than one register is involved,
5040 this probably loses. */
5041 else if (REG_P (target) && TREE_STATIC (exp)
5042 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5044 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5045 cleared = 1;
5048 /* If the constructor has fewer fields than the structure or
5049 if we are initializing the structure to mostly zeros, clear
5050 the whole structure first. Don't do this if TARGET is a
5051 register whose mode size isn't equal to SIZE since
5052 clear_storage can't handle this case. */
5053 else if (size > 0
5054 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5055 != fields_length (type))
5056 || mostly_zeros_p (exp))
5057 && (!REG_P (target)
5058 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5059 == size)))
5061 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5062 cleared = 1;
5065 if (! cleared)
5066 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5068 /* Store each element of the constructor into the
5069 corresponding field of TARGET. */
5070 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5072 enum machine_mode mode;
5073 HOST_WIDE_INT bitsize;
5074 HOST_WIDE_INT bitpos = 0;
5075 tree offset;
5076 rtx to_rtx = target;
5078 /* Just ignore missing fields. We cleared the whole
5079 structure, above, if any fields are missing. */
5080 if (field == 0)
5081 continue;
5083 if (cleared && initializer_zerop (value))
5084 continue;
5086 if (host_integerp (DECL_SIZE (field), 1))
5087 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5088 else
5089 bitsize = -1;
5091 mode = DECL_MODE (field);
5092 if (DECL_BIT_FIELD (field))
5093 mode = VOIDmode;
5095 offset = DECL_FIELD_OFFSET (field);
5096 if (host_integerp (offset, 0)
5097 && host_integerp (bit_position (field), 0))
5099 bitpos = int_bit_position (field);
5100 offset = 0;
5102 else
5103 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5105 if (offset)
5107 rtx offset_rtx;
5109 offset
5110 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5111 make_tree (TREE_TYPE (exp),
5112 target));
5114 offset_rtx = expand_normal (offset);
5115 gcc_assert (MEM_P (to_rtx));
5117 #ifdef POINTERS_EXTEND_UNSIGNED
5118 if (GET_MODE (offset_rtx) != Pmode)
5119 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5120 #else
5121 if (GET_MODE (offset_rtx) != ptr_mode)
5122 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5123 #endif
5125 to_rtx = offset_address (to_rtx, offset_rtx,
5126 highest_pow2_factor (offset));
5129 #ifdef WORD_REGISTER_OPERATIONS
5130 /* If this initializes a field that is smaller than a
5131 word, at the start of a word, try to widen it to a full
5132 word. This special case allows us to output C++ member
5133 function initializations in a form that the optimizers
5134 can understand. */
5135 if (REG_P (target)
5136 && bitsize < BITS_PER_WORD
5137 && bitpos % BITS_PER_WORD == 0
5138 && GET_MODE_CLASS (mode) == MODE_INT
5139 && TREE_CODE (value) == INTEGER_CST
5140 && exp_size >= 0
5141 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5143 tree type = TREE_TYPE (value);
5145 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5147 type = lang_hooks.types.type_for_size
5148 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5149 value = fold_convert (type, value);
5152 if (BYTES_BIG_ENDIAN)
5153 value
5154 = fold_build2 (LSHIFT_EXPR, type, value,
5155 build_int_cst (type,
5156 BITS_PER_WORD - bitsize));
5157 bitsize = BITS_PER_WORD;
5158 mode = word_mode;
5160 #endif
5162 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5163 && DECL_NONADDRESSABLE_P (field))
5165 to_rtx = copy_rtx (to_rtx);
5166 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5169 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5170 value, type, cleared,
5171 get_alias_set (TREE_TYPE (field)));
5173 break;
5175 case ARRAY_TYPE:
5177 tree value, index;
5178 unsigned HOST_WIDE_INT i;
5179 int need_to_clear;
5180 tree domain;
5181 tree elttype = TREE_TYPE (type);
5182 int const_bounds_p;
5183 HOST_WIDE_INT minelt = 0;
5184 HOST_WIDE_INT maxelt = 0;
5186 domain = TYPE_DOMAIN (type);
5187 const_bounds_p = (TYPE_MIN_VALUE (domain)
5188 && TYPE_MAX_VALUE (domain)
5189 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5190 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5192 /* If we have constant bounds for the range of the type, get them. */
5193 if (const_bounds_p)
5195 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5196 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5199 /* If the constructor has fewer elements than the array, clear
5200 the whole array first. Similarly if this is static
5201 constructor of a non-BLKmode object. */
5202 if (cleared)
5203 need_to_clear = 0;
5204 else if (REG_P (target) && TREE_STATIC (exp))
5205 need_to_clear = 1;
5206 else
5208 unsigned HOST_WIDE_INT idx;
5209 tree index, value;
5210 HOST_WIDE_INT count = 0, zero_count = 0;
5211 need_to_clear = ! const_bounds_p;
5213 /* This loop is a more accurate version of the loop in
5214 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5215 is also needed to check for missing elements. */
5216 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5218 HOST_WIDE_INT this_node_count;
5220 if (need_to_clear)
5221 break;
5223 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5225 tree lo_index = TREE_OPERAND (index, 0);
5226 tree hi_index = TREE_OPERAND (index, 1);
5228 if (! host_integerp (lo_index, 1)
5229 || ! host_integerp (hi_index, 1))
5231 need_to_clear = 1;
5232 break;
5235 this_node_count = (tree_low_cst (hi_index, 1)
5236 - tree_low_cst (lo_index, 1) + 1);
5238 else
5239 this_node_count = 1;
5241 count += this_node_count;
5242 if (mostly_zeros_p (value))
5243 zero_count += this_node_count;
5246 /* Clear the entire array first if there are any missing
5247 elements, or if the incidence of zero elements is >=
5248 75%. */
5249 if (! need_to_clear
5250 && (count < maxelt - minelt + 1
5251 || 4 * zero_count >= 3 * count))
5252 need_to_clear = 1;
5255 if (need_to_clear && size > 0)
5257 if (REG_P (target))
5258 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5259 else
5260 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5261 cleared = 1;
5264 if (!cleared && REG_P (target))
5265 /* Inform later passes that the old value is dead. */
5266 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5268 /* Store each element of the constructor into the
5269 corresponding element of TARGET, determined by counting the
5270 elements. */
5271 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5273 enum machine_mode mode;
5274 HOST_WIDE_INT bitsize;
5275 HOST_WIDE_INT bitpos;
5276 int unsignedp;
5277 rtx xtarget = target;
5279 if (cleared && initializer_zerop (value))
5280 continue;
5282 unsignedp = TYPE_UNSIGNED (elttype);
5283 mode = TYPE_MODE (elttype);
5284 if (mode == BLKmode)
5285 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5286 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5287 : -1);
5288 else
5289 bitsize = GET_MODE_BITSIZE (mode);
5291 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5293 tree lo_index = TREE_OPERAND (index, 0);
5294 tree hi_index = TREE_OPERAND (index, 1);
5295 rtx index_r, pos_rtx;
5296 HOST_WIDE_INT lo, hi, count;
5297 tree position;
5299 /* If the range is constant and "small", unroll the loop. */
5300 if (const_bounds_p
5301 && host_integerp (lo_index, 0)
5302 && host_integerp (hi_index, 0)
5303 && (lo = tree_low_cst (lo_index, 0),
5304 hi = tree_low_cst (hi_index, 0),
5305 count = hi - lo + 1,
5306 (!MEM_P (target)
5307 || count <= 2
5308 || (host_integerp (TYPE_SIZE (elttype), 1)
5309 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5310 <= 40 * 8)))))
5312 lo -= minelt; hi -= minelt;
5313 for (; lo <= hi; lo++)
5315 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5317 if (MEM_P (target)
5318 && !MEM_KEEP_ALIAS_SET_P (target)
5319 && TREE_CODE (type) == ARRAY_TYPE
5320 && TYPE_NONALIASED_COMPONENT (type))
5322 target = copy_rtx (target);
5323 MEM_KEEP_ALIAS_SET_P (target) = 1;
5326 store_constructor_field
5327 (target, bitsize, bitpos, mode, value, type, cleared,
5328 get_alias_set (elttype));
5331 else
5333 rtx loop_start = gen_label_rtx ();
5334 rtx loop_end = gen_label_rtx ();
5335 tree exit_cond;
5337 expand_normal (hi_index);
5338 unsignedp = TYPE_UNSIGNED (domain);
5340 index = build_decl (VAR_DECL, NULL_TREE, domain);
5342 index_r
5343 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5344 &unsignedp, 0));
5345 SET_DECL_RTL (index, index_r);
5346 store_expr (lo_index, index_r, 0, false);
5348 /* Build the head of the loop. */
5349 do_pending_stack_adjust ();
5350 emit_label (loop_start);
5352 /* Assign value to element index. */
5353 position =
5354 fold_convert (ssizetype,
5355 fold_build2 (MINUS_EXPR,
5356 TREE_TYPE (index),
5357 index,
5358 TYPE_MIN_VALUE (domain)));
5360 position =
5361 size_binop (MULT_EXPR, position,
5362 fold_convert (ssizetype,
5363 TYPE_SIZE_UNIT (elttype)));
5365 pos_rtx = expand_normal (position);
5366 xtarget = offset_address (target, pos_rtx,
5367 highest_pow2_factor (position));
5368 xtarget = adjust_address (xtarget, mode, 0);
5369 if (TREE_CODE (value) == CONSTRUCTOR)
5370 store_constructor (value, xtarget, cleared,
5371 bitsize / BITS_PER_UNIT);
5372 else
5373 store_expr (value, xtarget, 0, false);
5375 /* Generate a conditional jump to exit the loop. */
5376 exit_cond = build2 (LT_EXPR, integer_type_node,
5377 index, hi_index);
5378 jumpif (exit_cond, loop_end);
5380 /* Update the loop counter, and jump to the head of
5381 the loop. */
5382 expand_assignment (index,
5383 build2 (PLUS_EXPR, TREE_TYPE (index),
5384 index, integer_one_node),
5385 false);
5387 emit_jump (loop_start);
5389 /* Build the end of the loop. */
5390 emit_label (loop_end);
5393 else if ((index != 0 && ! host_integerp (index, 0))
5394 || ! host_integerp (TYPE_SIZE (elttype), 1))
5396 tree position;
5398 if (index == 0)
5399 index = ssize_int (1);
5401 if (minelt)
5402 index = fold_convert (ssizetype,
5403 fold_build2 (MINUS_EXPR,
5404 TREE_TYPE (index),
5405 index,
5406 TYPE_MIN_VALUE (domain)));
5408 position =
5409 size_binop (MULT_EXPR, index,
5410 fold_convert (ssizetype,
5411 TYPE_SIZE_UNIT (elttype)));
5412 xtarget = offset_address (target,
5413 expand_normal (position),
5414 highest_pow2_factor (position));
5415 xtarget = adjust_address (xtarget, mode, 0);
5416 store_expr (value, xtarget, 0, false);
5418 else
5420 if (index != 0)
5421 bitpos = ((tree_low_cst (index, 0) - minelt)
5422 * tree_low_cst (TYPE_SIZE (elttype), 1));
5423 else
5424 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5426 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5427 && TREE_CODE (type) == ARRAY_TYPE
5428 && TYPE_NONALIASED_COMPONENT (type))
5430 target = copy_rtx (target);
5431 MEM_KEEP_ALIAS_SET_P (target) = 1;
5433 store_constructor_field (target, bitsize, bitpos, mode, value,
5434 type, cleared, get_alias_set (elttype));
5437 break;
5440 case VECTOR_TYPE:
5442 unsigned HOST_WIDE_INT idx;
5443 constructor_elt *ce;
5444 int i;
5445 int need_to_clear;
5446 int icode = 0;
5447 tree elttype = TREE_TYPE (type);
5448 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5449 enum machine_mode eltmode = TYPE_MODE (elttype);
5450 HOST_WIDE_INT bitsize;
5451 HOST_WIDE_INT bitpos;
5452 rtvec vector = NULL;
5453 unsigned n_elts;
5455 gcc_assert (eltmode != BLKmode);
5457 n_elts = TYPE_VECTOR_SUBPARTS (type);
5458 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5460 enum machine_mode mode = GET_MODE (target);
5462 icode = (int) vec_init_optab->handlers[mode].insn_code;
5463 if (icode != CODE_FOR_nothing)
5465 unsigned int i;
5467 vector = rtvec_alloc (n_elts);
5468 for (i = 0; i < n_elts; i++)
5469 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5473 /* If the constructor has fewer elements than the vector,
5474 clear the whole array first. Similarly if this is static
5475 constructor of a non-BLKmode object. */
5476 if (cleared)
5477 need_to_clear = 0;
5478 else if (REG_P (target) && TREE_STATIC (exp))
5479 need_to_clear = 1;
5480 else
5482 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5483 tree value;
5485 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5487 int n_elts_here = tree_low_cst
5488 (int_const_binop (TRUNC_DIV_EXPR,
5489 TYPE_SIZE (TREE_TYPE (value)),
5490 TYPE_SIZE (elttype), 0), 1);
5492 count += n_elts_here;
5493 if (mostly_zeros_p (value))
5494 zero_count += n_elts_here;
5497 /* Clear the entire vector first if there are any missing elements,
5498 or if the incidence of zero elements is >= 75%. */
5499 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5502 if (need_to_clear && size > 0 && !vector)
5504 if (REG_P (target))
5505 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5506 else
5507 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5508 cleared = 1;
5511 /* Inform later passes that the old value is dead. */
5512 if (!cleared && !vector && REG_P (target))
5513 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5515 /* Store each element of the constructor into the corresponding
5516 element of TARGET, determined by counting the elements. */
5517 for (idx = 0, i = 0;
5518 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5519 idx++, i += bitsize / elt_size)
5521 HOST_WIDE_INT eltpos;
5522 tree value = ce->value;
5524 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5525 if (cleared && initializer_zerop (value))
5526 continue;
5528 if (ce->index)
5529 eltpos = tree_low_cst (ce->index, 1);
5530 else
5531 eltpos = i;
5533 if (vector)
5535 /* Vector CONSTRUCTORs should only be built from smaller
5536 vectors in the case of BLKmode vectors. */
5537 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5538 RTVEC_ELT (vector, eltpos)
5539 = expand_normal (value);
5541 else
5543 enum machine_mode value_mode =
5544 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5545 ? TYPE_MODE (TREE_TYPE (value))
5546 : eltmode;
5547 bitpos = eltpos * elt_size;
5548 store_constructor_field (target, bitsize, bitpos,
5549 value_mode, value, type,
5550 cleared, get_alias_set (elttype));
5554 if (vector)
5555 emit_insn (GEN_FCN (icode)
5556 (target,
5557 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5558 break;
5561 default:
5562 gcc_unreachable ();
5566 /* Store the value of EXP (an expression tree)
5567 into a subfield of TARGET which has mode MODE and occupies
5568 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5569 If MODE is VOIDmode, it means that we are storing into a bit-field.
5571 Always return const0_rtx unless we have something particular to
5572 return.
5574 TYPE is the type of the underlying object,
5576 ALIAS_SET is the alias set for the destination. This value will
5577 (in general) be different from that for TARGET, since TARGET is a
5578 reference to the containing structure.
5580 If NONTEMPORAL is true, try generating a nontemporal store. */
5582 static rtx
5583 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5584 enum machine_mode mode, tree exp, tree type, int alias_set,
5585 bool nontemporal)
5587 HOST_WIDE_INT width_mask = 0;
5589 if (TREE_CODE (exp) == ERROR_MARK)
5590 return const0_rtx;
5592 /* If we have nothing to store, do nothing unless the expression has
5593 side-effects. */
5594 if (bitsize == 0)
5595 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5596 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5597 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5599 /* If we are storing into an unaligned field of an aligned union that is
5600 in a register, we may have the mode of TARGET being an integer mode but
5601 MODE == BLKmode. In that case, get an aligned object whose size and
5602 alignment are the same as TARGET and store TARGET into it (we can avoid
5603 the store if the field being stored is the entire width of TARGET). Then
5604 call ourselves recursively to store the field into a BLKmode version of
5605 that object. Finally, load from the object into TARGET. This is not
5606 very efficient in general, but should only be slightly more expensive
5607 than the otherwise-required unaligned accesses. Perhaps this can be
5608 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5609 twice, once with emit_move_insn and once via store_field. */
5611 if (mode == BLKmode
5612 && (REG_P (target) || GET_CODE (target) == SUBREG))
5614 rtx object = assign_temp (type, 0, 1, 1);
5615 rtx blk_object = adjust_address (object, BLKmode, 0);
5617 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5618 emit_move_insn (object, target);
5620 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5621 nontemporal);
5623 emit_move_insn (target, object);
5625 /* We want to return the BLKmode version of the data. */
5626 return blk_object;
5629 if (GET_CODE (target) == CONCAT)
5631 /* We're storing into a struct containing a single __complex. */
5633 gcc_assert (!bitpos);
5634 return store_expr (exp, target, 0, nontemporal);
5637 /* If the structure is in a register or if the component
5638 is a bit field, we cannot use addressing to access it.
5639 Use bit-field techniques or SUBREG to store in it. */
5641 if (mode == VOIDmode
5642 || (mode != BLKmode && ! direct_store[(int) mode]
5643 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5644 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5645 || REG_P (target)
5646 || GET_CODE (target) == SUBREG
5647 /* If the field isn't aligned enough to store as an ordinary memref,
5648 store it as a bit field. */
5649 || (mode != BLKmode
5650 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5651 || bitpos % GET_MODE_ALIGNMENT (mode))
5652 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5653 || (bitpos % BITS_PER_UNIT != 0)))
5654 /* If the RHS and field are a constant size and the size of the
5655 RHS isn't the same size as the bitfield, we must use bitfield
5656 operations. */
5657 || (bitsize >= 0
5658 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5659 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5661 rtx temp;
5663 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5664 implies a mask operation. If the precision is the same size as
5665 the field we're storing into, that mask is redundant. This is
5666 particularly common with bit field assignments generated by the
5667 C front end. */
5668 if (TREE_CODE (exp) == NOP_EXPR)
5670 tree type = TREE_TYPE (exp);
5671 if (INTEGRAL_TYPE_P (type)
5672 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5673 && bitsize == TYPE_PRECISION (type))
5675 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5676 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5677 exp = TREE_OPERAND (exp, 0);
5681 temp = expand_normal (exp);
5683 /* If BITSIZE is narrower than the size of the type of EXP
5684 we will be narrowing TEMP. Normally, what's wanted are the
5685 low-order bits. However, if EXP's type is a record and this is
5686 big-endian machine, we want the upper BITSIZE bits. */
5687 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5688 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5689 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5690 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5691 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5692 - bitsize),
5693 NULL_RTX, 1);
5695 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5696 MODE. */
5697 if (mode != VOIDmode && mode != BLKmode
5698 && mode != TYPE_MODE (TREE_TYPE (exp)))
5699 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5701 /* If the modes of TARGET and TEMP are both BLKmode, both
5702 must be in memory and BITPOS must be aligned on a byte
5703 boundary. If so, we simply do a block copy. */
5704 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5706 gcc_assert (MEM_P (target) && MEM_P (temp)
5707 && !(bitpos % BITS_PER_UNIT));
5709 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5710 emit_block_move (target, temp,
5711 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5712 / BITS_PER_UNIT),
5713 BLOCK_OP_NORMAL);
5715 return const0_rtx;
5718 /* Store the value in the bitfield. */
5719 store_bit_field (target, bitsize, bitpos, mode, temp);
5721 return const0_rtx;
5723 else
5725 /* Now build a reference to just the desired component. */
5726 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5728 if (to_rtx == target)
5729 to_rtx = copy_rtx (to_rtx);
5731 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5732 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5733 set_mem_alias_set (to_rtx, alias_set);
5735 return store_expr (exp, to_rtx, 0, nontemporal);
5739 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5740 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5741 codes and find the ultimate containing object, which we return.
5743 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5744 bit position, and *PUNSIGNEDP to the signedness of the field.
5745 If the position of the field is variable, we store a tree
5746 giving the variable offset (in units) in *POFFSET.
5747 This offset is in addition to the bit position.
5748 If the position is not variable, we store 0 in *POFFSET.
5750 If any of the extraction expressions is volatile,
5751 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5753 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5754 is a mode that can be used to access the field. In that case, *PBITSIZE
5755 is redundant.
5757 If the field describes a variable-sized object, *PMODE is set to
5758 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5759 this case, but the address of the object can be found.
5761 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5762 look through nodes that serve as markers of a greater alignment than
5763 the one that can be deduced from the expression. These nodes make it
5764 possible for front-ends to prevent temporaries from being created by
5765 the middle-end on alignment considerations. For that purpose, the
5766 normal operating mode at high-level is to always pass FALSE so that
5767 the ultimate containing object is really returned; moreover, the
5768 associated predicate handled_component_p will always return TRUE
5769 on these nodes, thus indicating that they are essentially handled
5770 by get_inner_reference. TRUE should only be passed when the caller
5771 is scanning the expression in order to build another representation
5772 and specifically knows how to handle these nodes; as such, this is
5773 the normal operating mode in the RTL expanders. */
5775 tree
5776 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5777 HOST_WIDE_INT *pbitpos, tree *poffset,
5778 enum machine_mode *pmode, int *punsignedp,
5779 int *pvolatilep, bool keep_aligning)
5781 tree size_tree = 0;
5782 enum machine_mode mode = VOIDmode;
5783 tree offset = size_zero_node;
5784 tree bit_offset = bitsize_zero_node;
5785 tree tem;
5787 /* First get the mode, signedness, and size. We do this from just the
5788 outermost expression. */
5789 if (TREE_CODE (exp) == COMPONENT_REF)
5791 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5792 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5793 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5795 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5797 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5799 size_tree = TREE_OPERAND (exp, 1);
5800 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5802 /* For vector types, with the correct size of access, use the mode of
5803 inner type. */
5804 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5805 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5806 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5807 mode = TYPE_MODE (TREE_TYPE (exp));
5809 else
5811 mode = TYPE_MODE (TREE_TYPE (exp));
5812 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5814 if (mode == BLKmode)
5815 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5816 else
5817 *pbitsize = GET_MODE_BITSIZE (mode);
5820 if (size_tree != 0)
5822 if (! host_integerp (size_tree, 1))
5823 mode = BLKmode, *pbitsize = -1;
5824 else
5825 *pbitsize = tree_low_cst (size_tree, 1);
5828 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5829 and find the ultimate containing object. */
5830 while (1)
5832 switch (TREE_CODE (exp))
5834 case BIT_FIELD_REF:
5835 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5836 TREE_OPERAND (exp, 2));
5837 break;
5839 case COMPONENT_REF:
5841 tree field = TREE_OPERAND (exp, 1);
5842 tree this_offset = component_ref_field_offset (exp);
5844 /* If this field hasn't been filled in yet, don't go past it.
5845 This should only happen when folding expressions made during
5846 type construction. */
5847 if (this_offset == 0)
5848 break;
5850 offset = size_binop (PLUS_EXPR, offset, this_offset);
5851 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5852 DECL_FIELD_BIT_OFFSET (field));
5854 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5856 break;
5858 case ARRAY_REF:
5859 case ARRAY_RANGE_REF:
5861 tree index = TREE_OPERAND (exp, 1);
5862 tree low_bound = array_ref_low_bound (exp);
5863 tree unit_size = array_ref_element_size (exp);
5865 /* We assume all arrays have sizes that are a multiple of a byte.
5866 First subtract the lower bound, if any, in the type of the
5867 index, then convert to sizetype and multiply by the size of
5868 the array element. */
5869 if (! integer_zerop (low_bound))
5870 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5871 index, low_bound);
5873 offset = size_binop (PLUS_EXPR, offset,
5874 size_binop (MULT_EXPR,
5875 fold_convert (sizetype, index),
5876 unit_size));
5878 break;
5880 case REALPART_EXPR:
5881 break;
5883 case IMAGPART_EXPR:
5884 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5885 bitsize_int (*pbitsize));
5886 break;
5888 case VIEW_CONVERT_EXPR:
5889 if (keep_aligning && STRICT_ALIGNMENT
5890 && (TYPE_ALIGN (TREE_TYPE (exp))
5891 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5892 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5893 < BIGGEST_ALIGNMENT)
5894 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5895 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5896 goto done;
5897 break;
5899 default:
5900 goto done;
5903 /* If any reference in the chain is volatile, the effect is volatile. */
5904 if (TREE_THIS_VOLATILE (exp))
5905 *pvolatilep = 1;
5907 exp = TREE_OPERAND (exp, 0);
5909 done:
5911 /* If OFFSET is constant, see if we can return the whole thing as a
5912 constant bit position. Otherwise, split it up. */
5913 if (host_integerp (offset, 0)
5914 && 0 != (tem = size_binop (MULT_EXPR,
5915 fold_convert (bitsizetype, offset),
5916 bitsize_unit_node))
5917 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5918 && host_integerp (tem, 0))
5919 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5920 else
5921 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5923 *pmode = mode;
5924 return exp;
5927 /* Return a tree of sizetype representing the size, in bytes, of the element
5928 of EXP, an ARRAY_REF. */
5930 tree
5931 array_ref_element_size (tree exp)
5933 tree aligned_size = TREE_OPERAND (exp, 3);
5934 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5936 /* If a size was specified in the ARRAY_REF, it's the size measured
5937 in alignment units of the element type. So multiply by that value. */
5938 if (aligned_size)
5940 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5941 sizetype from another type of the same width and signedness. */
5942 if (TREE_TYPE (aligned_size) != sizetype)
5943 aligned_size = fold_convert (sizetype, aligned_size);
5944 return size_binop (MULT_EXPR, aligned_size,
5945 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5948 /* Otherwise, take the size from that of the element type. Substitute
5949 any PLACEHOLDER_EXPR that we have. */
5950 else
5951 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5954 /* Return a tree representing the lower bound of the array mentioned in
5955 EXP, an ARRAY_REF. */
5957 tree
5958 array_ref_low_bound (tree exp)
5960 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5962 /* If a lower bound is specified in EXP, use it. */
5963 if (TREE_OPERAND (exp, 2))
5964 return TREE_OPERAND (exp, 2);
5966 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5967 substituting for a PLACEHOLDER_EXPR as needed. */
5968 if (domain_type && TYPE_MIN_VALUE (domain_type))
5969 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5971 /* Otherwise, return a zero of the appropriate type. */
5972 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5975 /* Return a tree representing the upper bound of the array mentioned in
5976 EXP, an ARRAY_REF. */
5978 tree
5979 array_ref_up_bound (tree exp)
5981 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5983 /* If there is a domain type and it has an upper bound, use it, substituting
5984 for a PLACEHOLDER_EXPR as needed. */
5985 if (domain_type && TYPE_MAX_VALUE (domain_type))
5986 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5988 /* Otherwise fail. */
5989 return NULL_TREE;
5992 /* Return a tree representing the offset, in bytes, of the field referenced
5993 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5995 tree
5996 component_ref_field_offset (tree exp)
5998 tree aligned_offset = TREE_OPERAND (exp, 2);
5999 tree field = TREE_OPERAND (exp, 1);
6001 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6002 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6003 value. */
6004 if (aligned_offset)
6006 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6007 sizetype from another type of the same width and signedness. */
6008 if (TREE_TYPE (aligned_offset) != sizetype)
6009 aligned_offset = fold_convert (sizetype, aligned_offset);
6010 return size_binop (MULT_EXPR, aligned_offset,
6011 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
6014 /* Otherwise, take the offset from that of the field. Substitute
6015 any PLACEHOLDER_EXPR that we have. */
6016 else
6017 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6020 /* Return 1 if T is an expression that get_inner_reference handles. */
6023 handled_component_p (tree t)
6025 switch (TREE_CODE (t))
6027 case BIT_FIELD_REF:
6028 case COMPONENT_REF:
6029 case ARRAY_REF:
6030 case ARRAY_RANGE_REF:
6031 case VIEW_CONVERT_EXPR:
6032 case REALPART_EXPR:
6033 case IMAGPART_EXPR:
6034 return 1;
6036 default:
6037 return 0;
6041 /* Given an rtx VALUE that may contain additions and multiplications, return
6042 an equivalent value that just refers to a register, memory, or constant.
6043 This is done by generating instructions to perform the arithmetic and
6044 returning a pseudo-register containing the value.
6046 The returned value may be a REG, SUBREG, MEM or constant. */
6049 force_operand (rtx value, rtx target)
6051 rtx op1, op2;
6052 /* Use subtarget as the target for operand 0 of a binary operation. */
6053 rtx subtarget = get_subtarget (target);
6054 enum rtx_code code = GET_CODE (value);
6056 /* Check for subreg applied to an expression produced by loop optimizer. */
6057 if (code == SUBREG
6058 && !REG_P (SUBREG_REG (value))
6059 && !MEM_P (SUBREG_REG (value)))
6061 value
6062 = simplify_gen_subreg (GET_MODE (value),
6063 force_reg (GET_MODE (SUBREG_REG (value)),
6064 force_operand (SUBREG_REG (value),
6065 NULL_RTX)),
6066 GET_MODE (SUBREG_REG (value)),
6067 SUBREG_BYTE (value));
6068 code = GET_CODE (value);
6071 /* Check for a PIC address load. */
6072 if ((code == PLUS || code == MINUS)
6073 && XEXP (value, 0) == pic_offset_table_rtx
6074 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6075 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6076 || GET_CODE (XEXP (value, 1)) == CONST))
6078 if (!subtarget)
6079 subtarget = gen_reg_rtx (GET_MODE (value));
6080 emit_move_insn (subtarget, value);
6081 return subtarget;
6084 if (ARITHMETIC_P (value))
6086 op2 = XEXP (value, 1);
6087 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6088 subtarget = 0;
6089 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6091 code = PLUS;
6092 op2 = negate_rtx (GET_MODE (value), op2);
6095 /* Check for an addition with OP2 a constant integer and our first
6096 operand a PLUS of a virtual register and something else. In that
6097 case, we want to emit the sum of the virtual register and the
6098 constant first and then add the other value. This allows virtual
6099 register instantiation to simply modify the constant rather than
6100 creating another one around this addition. */
6101 if (code == PLUS && GET_CODE (op2) == CONST_INT
6102 && GET_CODE (XEXP (value, 0)) == PLUS
6103 && REG_P (XEXP (XEXP (value, 0), 0))
6104 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6105 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6107 rtx temp = expand_simple_binop (GET_MODE (value), code,
6108 XEXP (XEXP (value, 0), 0), op2,
6109 subtarget, 0, OPTAB_LIB_WIDEN);
6110 return expand_simple_binop (GET_MODE (value), code, temp,
6111 force_operand (XEXP (XEXP (value,
6112 0), 1), 0),
6113 target, 0, OPTAB_LIB_WIDEN);
6116 op1 = force_operand (XEXP (value, 0), subtarget);
6117 op2 = force_operand (op2, NULL_RTX);
6118 switch (code)
6120 case MULT:
6121 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6122 case DIV:
6123 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6124 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6125 target, 1, OPTAB_LIB_WIDEN);
6126 else
6127 return expand_divmod (0,
6128 FLOAT_MODE_P (GET_MODE (value))
6129 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6130 GET_MODE (value), op1, op2, target, 0);
6131 case MOD:
6132 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6133 target, 0);
6134 case UDIV:
6135 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6136 target, 1);
6137 case UMOD:
6138 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6139 target, 1);
6140 case ASHIFTRT:
6141 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6142 target, 0, OPTAB_LIB_WIDEN);
6143 default:
6144 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6145 target, 1, OPTAB_LIB_WIDEN);
6148 if (UNARY_P (value))
6150 if (!target)
6151 target = gen_reg_rtx (GET_MODE (value));
6152 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6153 switch (code)
6155 case ZERO_EXTEND:
6156 case SIGN_EXTEND:
6157 case TRUNCATE:
6158 case FLOAT_EXTEND:
6159 case FLOAT_TRUNCATE:
6160 convert_move (target, op1, code == ZERO_EXTEND);
6161 return target;
6163 case FIX:
6164 case UNSIGNED_FIX:
6165 expand_fix (target, op1, code == UNSIGNED_FIX);
6166 return target;
6168 case FLOAT:
6169 case UNSIGNED_FLOAT:
6170 expand_float (target, op1, code == UNSIGNED_FLOAT);
6171 return target;
6173 default:
6174 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6178 #ifdef INSN_SCHEDULING
6179 /* On machines that have insn scheduling, we want all memory reference to be
6180 explicit, so we need to deal with such paradoxical SUBREGs. */
6181 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6182 && (GET_MODE_SIZE (GET_MODE (value))
6183 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6184 value
6185 = simplify_gen_subreg (GET_MODE (value),
6186 force_reg (GET_MODE (SUBREG_REG (value)),
6187 force_operand (SUBREG_REG (value),
6188 NULL_RTX)),
6189 GET_MODE (SUBREG_REG (value)),
6190 SUBREG_BYTE (value));
6191 #endif
6193 return value;
6196 /* Subroutine of expand_expr: return nonzero iff there is no way that
6197 EXP can reference X, which is being modified. TOP_P is nonzero if this
6198 call is going to be used to determine whether we need a temporary
6199 for EXP, as opposed to a recursive call to this function.
6201 It is always safe for this routine to return zero since it merely
6202 searches for optimization opportunities. */
6205 safe_from_p (rtx x, tree exp, int top_p)
6207 rtx exp_rtl = 0;
6208 int i, nops;
6210 if (x == 0
6211 /* If EXP has varying size, we MUST use a target since we currently
6212 have no way of allocating temporaries of variable size
6213 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6214 So we assume here that something at a higher level has prevented a
6215 clash. This is somewhat bogus, but the best we can do. Only
6216 do this when X is BLKmode and when we are at the top level. */
6217 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6218 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6219 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6220 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6221 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6222 != INTEGER_CST)
6223 && GET_MODE (x) == BLKmode)
6224 /* If X is in the outgoing argument area, it is always safe. */
6225 || (MEM_P (x)
6226 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6227 || (GET_CODE (XEXP (x, 0)) == PLUS
6228 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6229 return 1;
6231 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6232 find the underlying pseudo. */
6233 if (GET_CODE (x) == SUBREG)
6235 x = SUBREG_REG (x);
6236 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6237 return 0;
6240 /* Now look at our tree code and possibly recurse. */
6241 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6243 case tcc_declaration:
6244 exp_rtl = DECL_RTL_IF_SET (exp);
6245 break;
6247 case tcc_constant:
6248 return 1;
6250 case tcc_exceptional:
6251 if (TREE_CODE (exp) == TREE_LIST)
6253 while (1)
6255 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6256 return 0;
6257 exp = TREE_CHAIN (exp);
6258 if (!exp)
6259 return 1;
6260 if (TREE_CODE (exp) != TREE_LIST)
6261 return safe_from_p (x, exp, 0);
6264 else if (TREE_CODE (exp) == CONSTRUCTOR)
6266 constructor_elt *ce;
6267 unsigned HOST_WIDE_INT idx;
6269 for (idx = 0;
6270 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6271 idx++)
6272 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6273 || !safe_from_p (x, ce->value, 0))
6274 return 0;
6275 return 1;
6277 else if (TREE_CODE (exp) == ERROR_MARK)
6278 return 1; /* An already-visited SAVE_EXPR? */
6279 else
6280 return 0;
6282 case tcc_statement:
6283 /* The only case we look at here is the DECL_INITIAL inside a
6284 DECL_EXPR. */
6285 return (TREE_CODE (exp) != DECL_EXPR
6286 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6287 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6288 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6290 case tcc_binary:
6291 case tcc_comparison:
6292 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6293 return 0;
6294 /* Fall through. */
6296 case tcc_unary:
6297 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6299 case tcc_expression:
6300 case tcc_reference:
6301 case tcc_vl_exp:
6302 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6303 the expression. If it is set, we conflict iff we are that rtx or
6304 both are in memory. Otherwise, we check all operands of the
6305 expression recursively. */
6307 switch (TREE_CODE (exp))
6309 case ADDR_EXPR:
6310 /* If the operand is static or we are static, we can't conflict.
6311 Likewise if we don't conflict with the operand at all. */
6312 if (staticp (TREE_OPERAND (exp, 0))
6313 || TREE_STATIC (exp)
6314 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6315 return 1;
6317 /* Otherwise, the only way this can conflict is if we are taking
6318 the address of a DECL a that address if part of X, which is
6319 very rare. */
6320 exp = TREE_OPERAND (exp, 0);
6321 if (DECL_P (exp))
6323 if (!DECL_RTL_SET_P (exp)
6324 || !MEM_P (DECL_RTL (exp)))
6325 return 0;
6326 else
6327 exp_rtl = XEXP (DECL_RTL (exp), 0);
6329 break;
6331 case MISALIGNED_INDIRECT_REF:
6332 case ALIGN_INDIRECT_REF:
6333 case INDIRECT_REF:
6334 if (MEM_P (x)
6335 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6336 get_alias_set (exp)))
6337 return 0;
6338 break;
6340 case CALL_EXPR:
6341 /* Assume that the call will clobber all hard registers and
6342 all of memory. */
6343 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6344 || MEM_P (x))
6345 return 0;
6346 break;
6348 case WITH_CLEANUP_EXPR:
6349 case CLEANUP_POINT_EXPR:
6350 /* Lowered by gimplify.c. */
6351 gcc_unreachable ();
6353 case SAVE_EXPR:
6354 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6356 default:
6357 break;
6360 /* If we have an rtx, we do not need to scan our operands. */
6361 if (exp_rtl)
6362 break;
6364 nops = TREE_OPERAND_LENGTH (exp);
6365 for (i = 0; i < nops; i++)
6366 if (TREE_OPERAND (exp, i) != 0
6367 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6368 return 0;
6370 break;
6372 case tcc_type:
6373 /* Should never get a type here. */
6374 gcc_unreachable ();
6376 case tcc_gimple_stmt:
6377 gcc_unreachable ();
6380 /* If we have an rtl, find any enclosed object. Then see if we conflict
6381 with it. */
6382 if (exp_rtl)
6384 if (GET_CODE (exp_rtl) == SUBREG)
6386 exp_rtl = SUBREG_REG (exp_rtl);
6387 if (REG_P (exp_rtl)
6388 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6389 return 0;
6392 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6393 are memory and they conflict. */
6394 return ! (rtx_equal_p (x, exp_rtl)
6395 || (MEM_P (x) && MEM_P (exp_rtl)
6396 && true_dependence (exp_rtl, VOIDmode, x,
6397 rtx_addr_varies_p)));
6400 /* If we reach here, it is safe. */
6401 return 1;
6405 /* Return the highest power of two that EXP is known to be a multiple of.
6406 This is used in updating alignment of MEMs in array references. */
6408 unsigned HOST_WIDE_INT
6409 highest_pow2_factor (tree exp)
6411 unsigned HOST_WIDE_INT c0, c1;
6413 switch (TREE_CODE (exp))
6415 case INTEGER_CST:
6416 /* We can find the lowest bit that's a one. If the low
6417 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6418 We need to handle this case since we can find it in a COND_EXPR,
6419 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6420 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6421 later ICE. */
6422 if (TREE_OVERFLOW (exp))
6423 return BIGGEST_ALIGNMENT;
6424 else
6426 /* Note: tree_low_cst is intentionally not used here,
6427 we don't care about the upper bits. */
6428 c0 = TREE_INT_CST_LOW (exp);
6429 c0 &= -c0;
6430 return c0 ? c0 : BIGGEST_ALIGNMENT;
6432 break;
6434 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6435 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6436 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6437 return MIN (c0, c1);
6439 case MULT_EXPR:
6440 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6441 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6442 return c0 * c1;
6444 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6445 case CEIL_DIV_EXPR:
6446 if (integer_pow2p (TREE_OPERAND (exp, 1))
6447 && host_integerp (TREE_OPERAND (exp, 1), 1))
6449 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6450 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6451 return MAX (1, c0 / c1);
6453 break;
6455 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6456 case SAVE_EXPR:
6457 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6459 case COMPOUND_EXPR:
6460 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6462 case COND_EXPR:
6463 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6464 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6465 return MIN (c0, c1);
6467 default:
6468 break;
6471 return 1;
6474 /* Similar, except that the alignment requirements of TARGET are
6475 taken into account. Assume it is at least as aligned as its
6476 type, unless it is a COMPONENT_REF in which case the layout of
6477 the structure gives the alignment. */
6479 static unsigned HOST_WIDE_INT
6480 highest_pow2_factor_for_target (tree target, tree exp)
6482 unsigned HOST_WIDE_INT target_align, factor;
6484 factor = highest_pow2_factor (exp);
6485 if (TREE_CODE (target) == COMPONENT_REF)
6486 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6487 else
6488 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6489 return MAX (factor, target_align);
6492 /* Return &VAR expression for emulated thread local VAR. */
6494 static tree
6495 emutls_var_address (tree var)
6497 tree emuvar = emutls_decl (var);
6498 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6499 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6500 tree arglist = build_tree_list (NULL_TREE, arg);
6501 tree call = build_function_call_expr (fn, arglist);
6502 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
6505 /* Expands variable VAR. */
6507 void
6508 expand_var (tree var)
6510 if (DECL_EXTERNAL (var))
6511 return;
6513 if (TREE_STATIC (var))
6514 /* If this is an inlined copy of a static local variable,
6515 look up the original decl. */
6516 var = DECL_ORIGIN (var);
6518 if (TREE_STATIC (var)
6519 ? !TREE_ASM_WRITTEN (var)
6520 : !DECL_RTL_SET_P (var))
6522 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6523 /* Should be ignored. */;
6524 else if (lang_hooks.expand_decl (var))
6525 /* OK. */;
6526 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6527 expand_decl (var);
6528 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6529 rest_of_decl_compilation (var, 0, 0);
6530 else
6531 /* No expansion needed. */
6532 gcc_assert (TREE_CODE (var) == TYPE_DECL
6533 || TREE_CODE (var) == CONST_DECL
6534 || TREE_CODE (var) == FUNCTION_DECL
6535 || TREE_CODE (var) == LABEL_DECL);
6539 /* Subroutine of expand_expr. Expand the two operands of a binary
6540 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6541 The value may be stored in TARGET if TARGET is nonzero. The
6542 MODIFIER argument is as documented by expand_expr. */
6544 static void
6545 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6546 enum expand_modifier modifier)
6548 if (! safe_from_p (target, exp1, 1))
6549 target = 0;
6550 if (operand_equal_p (exp0, exp1, 0))
6552 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6553 *op1 = copy_rtx (*op0);
6555 else
6557 /* If we need to preserve evaluation order, copy exp0 into its own
6558 temporary variable so that it can't be clobbered by exp1. */
6559 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6560 exp0 = save_expr (exp0);
6561 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6562 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6567 /* Return a MEM that contains constant EXP. DEFER is as for
6568 output_constant_def and MODIFIER is as for expand_expr. */
6570 static rtx
6571 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6573 rtx mem;
6575 mem = output_constant_def (exp, defer);
6576 if (modifier != EXPAND_INITIALIZER)
6577 mem = use_anchored_address (mem);
6578 return mem;
6581 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6582 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6584 static rtx
6585 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6586 enum expand_modifier modifier)
6588 rtx result, subtarget;
6589 tree inner, offset;
6590 HOST_WIDE_INT bitsize, bitpos;
6591 int volatilep, unsignedp;
6592 enum machine_mode mode1;
6594 /* If we are taking the address of a constant and are at the top level,
6595 we have to use output_constant_def since we can't call force_const_mem
6596 at top level. */
6597 /* ??? This should be considered a front-end bug. We should not be
6598 generating ADDR_EXPR of something that isn't an LVALUE. The only
6599 exception here is STRING_CST. */
6600 if (TREE_CODE (exp) == CONSTRUCTOR
6601 || CONSTANT_CLASS_P (exp))
6602 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6604 /* Everything must be something allowed by is_gimple_addressable. */
6605 switch (TREE_CODE (exp))
6607 case INDIRECT_REF:
6608 /* This case will happen via recursion for &a->b. */
6609 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6611 case CONST_DECL:
6612 /* Recurse and make the output_constant_def clause above handle this. */
6613 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6614 tmode, modifier);
6616 case REALPART_EXPR:
6617 /* The real part of the complex number is always first, therefore
6618 the address is the same as the address of the parent object. */
6619 offset = 0;
6620 bitpos = 0;
6621 inner = TREE_OPERAND (exp, 0);
6622 break;
6624 case IMAGPART_EXPR:
6625 /* The imaginary part of the complex number is always second.
6626 The expression is therefore always offset by the size of the
6627 scalar type. */
6628 offset = 0;
6629 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6630 inner = TREE_OPERAND (exp, 0);
6631 break;
6633 case VAR_DECL:
6634 /* TLS emulation hook - replace __thread VAR's &VAR with
6635 __emutls_get_address (&_emutls.VAR). */
6636 if (! targetm.have_tls
6637 && TREE_CODE (exp) == VAR_DECL
6638 && DECL_THREAD_LOCAL_P (exp))
6640 exp = emutls_var_address (exp);
6641 return expand_expr (exp, target, tmode, modifier);
6643 /* Fall through. */
6645 default:
6646 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6647 expand_expr, as that can have various side effects; LABEL_DECLs for
6648 example, may not have their DECL_RTL set yet. Assume language
6649 specific tree nodes can be expanded in some interesting way. */
6650 if (DECL_P (exp)
6651 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6653 result = expand_expr (exp, target, tmode,
6654 modifier == EXPAND_INITIALIZER
6655 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6657 /* If the DECL isn't in memory, then the DECL wasn't properly
6658 marked TREE_ADDRESSABLE, which will be either a front-end
6659 or a tree optimizer bug. */
6660 gcc_assert (MEM_P (result));
6661 result = XEXP (result, 0);
6663 /* ??? Is this needed anymore? */
6664 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6666 assemble_external (exp);
6667 TREE_USED (exp) = 1;
6670 if (modifier != EXPAND_INITIALIZER
6671 && modifier != EXPAND_CONST_ADDRESS)
6672 result = force_operand (result, target);
6673 return result;
6676 /* Pass FALSE as the last argument to get_inner_reference although
6677 we are expanding to RTL. The rationale is that we know how to
6678 handle "aligning nodes" here: we can just bypass them because
6679 they won't change the final object whose address will be returned
6680 (they actually exist only for that purpose). */
6681 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6682 &mode1, &unsignedp, &volatilep, false);
6683 break;
6686 /* We must have made progress. */
6687 gcc_assert (inner != exp);
6689 subtarget = offset || bitpos ? NULL_RTX : target;
6690 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6692 if (offset)
6694 rtx tmp;
6696 if (modifier != EXPAND_NORMAL)
6697 result = force_operand (result, NULL);
6698 tmp = expand_expr (offset, NULL_RTX, tmode,
6699 modifier == EXPAND_INITIALIZER
6700 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6702 result = convert_memory_address (tmode, result);
6703 tmp = convert_memory_address (tmode, tmp);
6705 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6706 result = gen_rtx_PLUS (tmode, result, tmp);
6707 else
6709 subtarget = bitpos ? NULL_RTX : target;
6710 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6711 1, OPTAB_LIB_WIDEN);
6715 if (bitpos)
6717 /* Someone beforehand should have rejected taking the address
6718 of such an object. */
6719 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6721 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6722 if (modifier < EXPAND_SUM)
6723 result = force_operand (result, target);
6726 return result;
6729 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6730 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6732 static rtx
6733 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6734 enum expand_modifier modifier)
6736 enum machine_mode rmode;
6737 rtx result;
6739 /* Target mode of VOIDmode says "whatever's natural". */
6740 if (tmode == VOIDmode)
6741 tmode = TYPE_MODE (TREE_TYPE (exp));
6743 /* We can get called with some Weird Things if the user does silliness
6744 like "(short) &a". In that case, convert_memory_address won't do
6745 the right thing, so ignore the given target mode. */
6746 if (tmode != Pmode && tmode != ptr_mode)
6747 tmode = Pmode;
6749 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6750 tmode, modifier);
6752 /* Despite expand_expr claims concerning ignoring TMODE when not
6753 strictly convenient, stuff breaks if we don't honor it. Note
6754 that combined with the above, we only do this for pointer modes. */
6755 rmode = GET_MODE (result);
6756 if (rmode == VOIDmode)
6757 rmode = tmode;
6758 if (rmode != tmode)
6759 result = convert_memory_address (tmode, result);
6761 return result;
6765 /* expand_expr: generate code for computing expression EXP.
6766 An rtx for the computed value is returned. The value is never null.
6767 In the case of a void EXP, const0_rtx is returned.
6769 The value may be stored in TARGET if TARGET is nonzero.
6770 TARGET is just a suggestion; callers must assume that
6771 the rtx returned may not be the same as TARGET.
6773 If TARGET is CONST0_RTX, it means that the value will be ignored.
6775 If TMODE is not VOIDmode, it suggests generating the
6776 result in mode TMODE. But this is done only when convenient.
6777 Otherwise, TMODE is ignored and the value generated in its natural mode.
6778 TMODE is just a suggestion; callers must assume that
6779 the rtx returned may not have mode TMODE.
6781 Note that TARGET may have neither TMODE nor MODE. In that case, it
6782 probably will not be used.
6784 If MODIFIER is EXPAND_SUM then when EXP is an addition
6785 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6786 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6787 products as above, or REG or MEM, or constant.
6788 Ordinarily in such cases we would output mul or add instructions
6789 and then return a pseudo reg containing the sum.
6791 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6792 it also marks a label as absolutely required (it can't be dead).
6793 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6794 This is used for outputting expressions used in initializers.
6796 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6797 with a constant address even if that address is not normally legitimate.
6798 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6800 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6801 a call parameter. Such targets require special care as we haven't yet
6802 marked TARGET so that it's safe from being trashed by libcalls. We
6803 don't want to use TARGET for anything but the final result;
6804 Intermediate values must go elsewhere. Additionally, calls to
6805 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6807 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6808 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6809 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6810 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6811 recursively. */
6813 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6814 enum expand_modifier, rtx *);
6817 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6818 enum expand_modifier modifier, rtx *alt_rtl)
6820 int rn = -1;
6821 rtx ret, last = NULL;
6823 /* Handle ERROR_MARK before anybody tries to access its type. */
6824 if (TREE_CODE (exp) == ERROR_MARK
6825 || (!GIMPLE_TUPLE_P (exp) && TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
6827 ret = CONST0_RTX (tmode);
6828 return ret ? ret : const0_rtx;
6831 if (flag_non_call_exceptions)
6833 rn = lookup_stmt_eh_region (exp);
6834 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6835 if (rn >= 0)
6836 last = get_last_insn ();
6839 /* If this is an expression of some kind and it has an associated line
6840 number, then emit the line number before expanding the expression.
6842 We need to save and restore the file and line information so that
6843 errors discovered during expansion are emitted with the right
6844 information. It would be better of the diagnostic routines
6845 used the file/line information embedded in the tree nodes rather
6846 than globals. */
6847 if (cfun && EXPR_HAS_LOCATION (exp))
6849 location_t saved_location = input_location;
6850 input_location = EXPR_LOCATION (exp);
6851 set_curr_insn_source_location (input_location);
6853 /* Record where the insns produced belong. */
6854 set_curr_insn_block (TREE_BLOCK (exp));
6856 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6858 input_location = saved_location;
6860 else
6862 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6865 /* If using non-call exceptions, mark all insns that may trap.
6866 expand_call() will mark CALL_INSNs before we get to this code,
6867 but it doesn't handle libcalls, and these may trap. */
6868 if (rn >= 0)
6870 rtx insn;
6871 for (insn = next_real_insn (last); insn;
6872 insn = next_real_insn (insn))
6874 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6875 /* If we want exceptions for non-call insns, any
6876 may_trap_p instruction may throw. */
6877 && GET_CODE (PATTERN (insn)) != CLOBBER
6878 && GET_CODE (PATTERN (insn)) != USE
6879 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6881 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6882 REG_NOTES (insn));
6887 return ret;
6890 static rtx
6891 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6892 enum expand_modifier modifier, rtx *alt_rtl)
6894 rtx op0, op1, op2, temp, decl_rtl;
6895 tree type;
6896 int unsignedp;
6897 enum machine_mode mode;
6898 enum tree_code code = TREE_CODE (exp);
6899 optab this_optab;
6900 rtx subtarget, original_target;
6901 int ignore;
6902 tree context, subexp0, subexp1;
6903 bool reduce_bit_field = false;
6904 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6905 ? reduce_to_bit_field_precision ((expr), \
6906 target, \
6907 type) \
6908 : (expr))
6910 if (GIMPLE_STMT_P (exp))
6912 type = void_type_node;
6913 mode = VOIDmode;
6914 unsignedp = 0;
6916 else
6918 type = TREE_TYPE (exp);
6919 mode = TYPE_MODE (type);
6920 unsignedp = TYPE_UNSIGNED (type);
6922 if (lang_hooks.reduce_bit_field_operations
6923 && TREE_CODE (type) == INTEGER_TYPE
6924 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6926 /* An operation in what may be a bit-field type needs the
6927 result to be reduced to the precision of the bit-field type,
6928 which is narrower than that of the type's mode. */
6929 reduce_bit_field = true;
6930 if (modifier == EXPAND_STACK_PARM)
6931 target = 0;
6934 /* Use subtarget as the target for operand 0 of a binary operation. */
6935 subtarget = get_subtarget (target);
6936 original_target = target;
6937 ignore = (target == const0_rtx
6938 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6939 || code == CONVERT_EXPR || code == COND_EXPR
6940 || code == VIEW_CONVERT_EXPR)
6941 && TREE_CODE (type) == VOID_TYPE));
6943 /* If we are going to ignore this result, we need only do something
6944 if there is a side-effect somewhere in the expression. If there
6945 is, short-circuit the most common cases here. Note that we must
6946 not call expand_expr with anything but const0_rtx in case this
6947 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6949 if (ignore)
6951 if (! TREE_SIDE_EFFECTS (exp))
6952 return const0_rtx;
6954 /* Ensure we reference a volatile object even if value is ignored, but
6955 don't do this if all we are doing is taking its address. */
6956 if (TREE_THIS_VOLATILE (exp)
6957 && TREE_CODE (exp) != FUNCTION_DECL
6958 && mode != VOIDmode && mode != BLKmode
6959 && modifier != EXPAND_CONST_ADDRESS)
6961 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6962 if (MEM_P (temp))
6963 temp = copy_to_reg (temp);
6964 return const0_rtx;
6967 if (TREE_CODE_CLASS (code) == tcc_unary
6968 || code == COMPONENT_REF || code == INDIRECT_REF)
6969 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6970 modifier);
6972 else if (TREE_CODE_CLASS (code) == tcc_binary
6973 || TREE_CODE_CLASS (code) == tcc_comparison
6974 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6976 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6977 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6978 return const0_rtx;
6980 else if (code == BIT_FIELD_REF)
6982 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6983 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6984 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6985 return const0_rtx;
6988 target = 0;
6992 switch (code)
6994 case LABEL_DECL:
6996 tree function = decl_function_context (exp);
6998 temp = label_rtx (exp);
6999 temp = gen_rtx_LABEL_REF (Pmode, temp);
7001 if (function != current_function_decl
7002 && function != 0)
7003 LABEL_REF_NONLOCAL_P (temp) = 1;
7005 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
7006 return temp;
7009 case SSA_NAME:
7010 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
7011 NULL);
7013 case PARM_DECL:
7014 case VAR_DECL:
7015 /* If a static var's type was incomplete when the decl was written,
7016 but the type is complete now, lay out the decl now. */
7017 if (DECL_SIZE (exp) == 0
7018 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
7019 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
7020 layout_decl (exp, 0);
7022 /* TLS emulation hook - replace __thread vars with
7023 *__emutls_get_address (&_emutls.var). */
7024 if (! targetm.have_tls
7025 && TREE_CODE (exp) == VAR_DECL
7026 && DECL_THREAD_LOCAL_P (exp))
7028 exp = build_fold_indirect_ref (emutls_var_address (exp));
7029 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
7032 /* ... fall through ... */
7034 case FUNCTION_DECL:
7035 case RESULT_DECL:
7036 decl_rtl = DECL_RTL (exp);
7037 gcc_assert (decl_rtl);
7038 decl_rtl = copy_rtx (decl_rtl);
7040 /* Ensure variable marked as used even if it doesn't go through
7041 a parser. If it hasn't be used yet, write out an external
7042 definition. */
7043 if (! TREE_USED (exp))
7045 assemble_external (exp);
7046 TREE_USED (exp) = 1;
7049 /* Show we haven't gotten RTL for this yet. */
7050 temp = 0;
7052 /* Variables inherited from containing functions should have
7053 been lowered by this point. */
7054 context = decl_function_context (exp);
7055 gcc_assert (!context
7056 || context == current_function_decl
7057 || TREE_STATIC (exp)
7058 /* ??? C++ creates functions that are not TREE_STATIC. */
7059 || TREE_CODE (exp) == FUNCTION_DECL);
7061 /* This is the case of an array whose size is to be determined
7062 from its initializer, while the initializer is still being parsed.
7063 See expand_decl. */
7065 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
7066 temp = validize_mem (decl_rtl);
7068 /* If DECL_RTL is memory, we are in the normal case and either
7069 the address is not valid or it is not a register and -fforce-addr
7070 is specified, get the address into a register. */
7072 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
7074 if (alt_rtl)
7075 *alt_rtl = decl_rtl;
7076 decl_rtl = use_anchored_address (decl_rtl);
7077 if (modifier != EXPAND_CONST_ADDRESS
7078 && modifier != EXPAND_SUM
7079 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
7080 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
7081 temp = replace_equiv_address (decl_rtl,
7082 copy_rtx (XEXP (decl_rtl, 0)));
7085 /* If we got something, return it. But first, set the alignment
7086 if the address is a register. */
7087 if (temp != 0)
7089 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
7090 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
7092 return temp;
7095 /* If the mode of DECL_RTL does not match that of the decl, it
7096 must be a promoted value. We return a SUBREG of the wanted mode,
7097 but mark it so that we know that it was already extended. */
7099 if (REG_P (decl_rtl)
7100 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7102 enum machine_mode pmode;
7104 /* Get the signedness used for this variable. Ensure we get the
7105 same mode we got when the variable was declared. */
7106 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7107 (TREE_CODE (exp) == RESULT_DECL
7108 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7109 gcc_assert (GET_MODE (decl_rtl) == pmode);
7111 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7112 SUBREG_PROMOTED_VAR_P (temp) = 1;
7113 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7114 return temp;
7117 return decl_rtl;
7119 case INTEGER_CST:
7120 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7121 TREE_INT_CST_HIGH (exp), mode);
7123 return temp;
7125 case VECTOR_CST:
7127 tree tmp = NULL_TREE;
7128 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7129 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
7130 return const_vector_from_tree (exp);
7131 if (GET_MODE_CLASS (mode) == MODE_INT)
7133 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7134 if (type_for_mode)
7135 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7137 if (!tmp)
7138 tmp = build_constructor_from_list (type,
7139 TREE_VECTOR_CST_ELTS (exp));
7140 return expand_expr (tmp, ignore ? const0_rtx : target,
7141 tmode, modifier);
7144 case CONST_DECL:
7145 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7147 case REAL_CST:
7148 /* If optimized, generate immediate CONST_DOUBLE
7149 which will be turned into memory by reload if necessary.
7151 We used to force a register so that loop.c could see it. But
7152 this does not allow gen_* patterns to perform optimizations with
7153 the constants. It also produces two insns in cases like "x = 1.0;".
7154 On most machines, floating-point constants are not permitted in
7155 many insns, so we'd end up copying it to a register in any case.
7157 Now, we do the copying in expand_binop, if appropriate. */
7158 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7159 TYPE_MODE (TREE_TYPE (exp)));
7161 case COMPLEX_CST:
7162 /* Handle evaluating a complex constant in a CONCAT target. */
7163 if (original_target && GET_CODE (original_target) == CONCAT)
7165 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7166 rtx rtarg, itarg;
7168 rtarg = XEXP (original_target, 0);
7169 itarg = XEXP (original_target, 1);
7171 /* Move the real and imaginary parts separately. */
7172 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
7173 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
7175 if (op0 != rtarg)
7176 emit_move_insn (rtarg, op0);
7177 if (op1 != itarg)
7178 emit_move_insn (itarg, op1);
7180 return original_target;
7183 /* ... fall through ... */
7185 case STRING_CST:
7186 temp = expand_expr_constant (exp, 1, modifier);
7188 /* temp contains a constant address.
7189 On RISC machines where a constant address isn't valid,
7190 make some insns to get that address into a register. */
7191 if (modifier != EXPAND_CONST_ADDRESS
7192 && modifier != EXPAND_INITIALIZER
7193 && modifier != EXPAND_SUM
7194 && (! memory_address_p (mode, XEXP (temp, 0))
7195 || flag_force_addr))
7196 return replace_equiv_address (temp,
7197 copy_rtx (XEXP (temp, 0)));
7198 return temp;
7200 case SAVE_EXPR:
7202 tree val = TREE_OPERAND (exp, 0);
7203 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7205 if (!SAVE_EXPR_RESOLVED_P (exp))
7207 /* We can indeed still hit this case, typically via builtin
7208 expanders calling save_expr immediately before expanding
7209 something. Assume this means that we only have to deal
7210 with non-BLKmode values. */
7211 gcc_assert (GET_MODE (ret) != BLKmode);
7213 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7214 DECL_ARTIFICIAL (val) = 1;
7215 DECL_IGNORED_P (val) = 1;
7216 TREE_OPERAND (exp, 0) = val;
7217 SAVE_EXPR_RESOLVED_P (exp) = 1;
7219 if (!CONSTANT_P (ret))
7220 ret = copy_to_reg (ret);
7221 SET_DECL_RTL (val, ret);
7224 return ret;
7227 case GOTO_EXPR:
7228 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7229 expand_goto (TREE_OPERAND (exp, 0));
7230 else
7231 expand_computed_goto (TREE_OPERAND (exp, 0));
7232 return const0_rtx;
7234 case CONSTRUCTOR:
7235 /* If we don't need the result, just ensure we evaluate any
7236 subexpressions. */
7237 if (ignore)
7239 unsigned HOST_WIDE_INT idx;
7240 tree value;
7242 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7243 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
7245 return const0_rtx;
7248 /* Try to avoid creating a temporary at all. This is possible
7249 if all of the initializer is zero.
7250 FIXME: try to handle all [0..255] initializers we can handle
7251 with memset. */
7252 else if (TREE_STATIC (exp)
7253 && !TREE_ADDRESSABLE (exp)
7254 && target != 0 && mode == BLKmode
7255 && all_zeros_p (exp))
7257 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7258 return target;
7261 /* All elts simple constants => refer to a constant in memory. But
7262 if this is a non-BLKmode mode, let it store a field at a time
7263 since that should make a CONST_INT or CONST_DOUBLE when we
7264 fold. Likewise, if we have a target we can use, it is best to
7265 store directly into the target unless the type is large enough
7266 that memcpy will be used. If we are making an initializer and
7267 all operands are constant, put it in memory as well.
7269 FIXME: Avoid trying to fill vector constructors piece-meal.
7270 Output them with output_constant_def below unless we're sure
7271 they're zeros. This should go away when vector initializers
7272 are treated like VECTOR_CST instead of arrays.
7274 else if ((TREE_STATIC (exp)
7275 && ((mode == BLKmode
7276 && ! (target != 0 && safe_from_p (target, exp, 1)))
7277 || TREE_ADDRESSABLE (exp)
7278 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7279 && (! MOVE_BY_PIECES_P
7280 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7281 TYPE_ALIGN (type)))
7282 && ! mostly_zeros_p (exp))))
7283 || ((modifier == EXPAND_INITIALIZER
7284 || modifier == EXPAND_CONST_ADDRESS)
7285 && TREE_CONSTANT (exp)))
7287 rtx constructor = expand_expr_constant (exp, 1, modifier);
7289 if (modifier != EXPAND_CONST_ADDRESS
7290 && modifier != EXPAND_INITIALIZER
7291 && modifier != EXPAND_SUM)
7292 constructor = validize_mem (constructor);
7294 return constructor;
7296 else
7298 /* Handle calls that pass values in multiple non-contiguous
7299 locations. The Irix 6 ABI has examples of this. */
7300 if (target == 0 || ! safe_from_p (target, exp, 1)
7301 || GET_CODE (target) == PARALLEL
7302 || modifier == EXPAND_STACK_PARM)
7303 target
7304 = assign_temp (build_qualified_type (type,
7305 (TYPE_QUALS (type)
7306 | (TREE_READONLY (exp)
7307 * TYPE_QUAL_CONST))),
7308 0, TREE_ADDRESSABLE (exp), 1);
7310 store_constructor (exp, target, 0, int_expr_size (exp));
7311 return target;
7314 case MISALIGNED_INDIRECT_REF:
7315 case ALIGN_INDIRECT_REF:
7316 case INDIRECT_REF:
7318 tree exp1 = TREE_OPERAND (exp, 0);
7320 if (modifier != EXPAND_WRITE)
7322 tree t;
7324 t = fold_read_from_constant_string (exp);
7325 if (t)
7326 return expand_expr (t, target, tmode, modifier);
7329 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7330 op0 = memory_address (mode, op0);
7332 if (code == ALIGN_INDIRECT_REF)
7334 int align = TYPE_ALIGN_UNIT (type);
7335 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7336 op0 = memory_address (mode, op0);
7339 temp = gen_rtx_MEM (mode, op0);
7341 set_mem_attributes (temp, exp, 0);
7343 /* Resolve the misalignment now, so that we don't have to remember
7344 to resolve it later. Of course, this only works for reads. */
7345 /* ??? When we get around to supporting writes, we'll have to handle
7346 this in store_expr directly. The vectorizer isn't generating
7347 those yet, however. */
7348 if (code == MISALIGNED_INDIRECT_REF)
7350 int icode;
7351 rtx reg, insn;
7353 gcc_assert (modifier == EXPAND_NORMAL
7354 || modifier == EXPAND_STACK_PARM);
7356 /* The vectorizer should have already checked the mode. */
7357 icode = movmisalign_optab->handlers[mode].insn_code;
7358 gcc_assert (icode != CODE_FOR_nothing);
7360 /* We've already validated the memory, and we're creating a
7361 new pseudo destination. The predicates really can't fail. */
7362 reg = gen_reg_rtx (mode);
7364 /* Nor can the insn generator. */
7365 insn = GEN_FCN (icode) (reg, temp);
7366 emit_insn (insn);
7368 return reg;
7371 return temp;
7374 case TARGET_MEM_REF:
7376 struct mem_address addr;
7378 get_address_description (exp, &addr);
7379 op0 = addr_for_mem_ref (&addr, true);
7380 op0 = memory_address (mode, op0);
7381 temp = gen_rtx_MEM (mode, op0);
7382 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7384 return temp;
7386 case ARRAY_REF:
7389 tree array = TREE_OPERAND (exp, 0);
7390 tree index = TREE_OPERAND (exp, 1);
7392 /* Fold an expression like: "foo"[2].
7393 This is not done in fold so it won't happen inside &.
7394 Don't fold if this is for wide characters since it's too
7395 difficult to do correctly and this is a very rare case. */
7397 if (modifier != EXPAND_CONST_ADDRESS
7398 && modifier != EXPAND_INITIALIZER
7399 && modifier != EXPAND_MEMORY)
7401 tree t = fold_read_from_constant_string (exp);
7403 if (t)
7404 return expand_expr (t, target, tmode, modifier);
7407 /* If this is a constant index into a constant array,
7408 just get the value from the array. Handle both the cases when
7409 we have an explicit constructor and when our operand is a variable
7410 that was declared const. */
7412 if (modifier != EXPAND_CONST_ADDRESS
7413 && modifier != EXPAND_INITIALIZER
7414 && modifier != EXPAND_MEMORY
7415 && TREE_CODE (array) == CONSTRUCTOR
7416 && ! TREE_SIDE_EFFECTS (array)
7417 && TREE_CODE (index) == INTEGER_CST)
7419 unsigned HOST_WIDE_INT ix;
7420 tree field, value;
7422 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7423 field, value)
7424 if (tree_int_cst_equal (field, index))
7426 if (!TREE_SIDE_EFFECTS (value))
7427 return expand_expr (fold (value), target, tmode, modifier);
7428 break;
7432 else if (optimize >= 1
7433 && modifier != EXPAND_CONST_ADDRESS
7434 && modifier != EXPAND_INITIALIZER
7435 && modifier != EXPAND_MEMORY
7436 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7437 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7438 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7439 && targetm.binds_local_p (array))
7441 if (TREE_CODE (index) == INTEGER_CST)
7443 tree init = DECL_INITIAL (array);
7445 if (TREE_CODE (init) == CONSTRUCTOR)
7447 unsigned HOST_WIDE_INT ix;
7448 tree field, value;
7450 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7451 field, value)
7452 if (tree_int_cst_equal (field, index))
7454 if (!TREE_SIDE_EFFECTS (value))
7455 return expand_expr (fold (value), target, tmode,
7456 modifier);
7457 break;
7460 else if(TREE_CODE (init) == STRING_CST)
7462 tree index1 = index;
7463 tree low_bound = array_ref_low_bound (exp);
7464 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7466 /* Optimize the special-case of a zero lower bound.
7468 We convert the low_bound to sizetype to avoid some problems
7469 with constant folding. (E.g. suppose the lower bound is 1,
7470 and its mode is QI. Without the conversion,l (ARRAY
7471 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7472 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
7474 if (! integer_zerop (low_bound))
7475 index1 = size_diffop (index1, fold_convert (sizetype,
7476 low_bound));
7478 if (0 > compare_tree_int (index1,
7479 TREE_STRING_LENGTH (init)))
7481 tree type = TREE_TYPE (TREE_TYPE (init));
7482 enum machine_mode mode = TYPE_MODE (type);
7484 if (GET_MODE_CLASS (mode) == MODE_INT
7485 && GET_MODE_SIZE (mode) == 1)
7486 return gen_int_mode (TREE_STRING_POINTER (init)
7487 [TREE_INT_CST_LOW (index1)],
7488 mode);
7494 goto normal_inner_ref;
7496 case COMPONENT_REF:
7497 /* If the operand is a CONSTRUCTOR, we can just extract the
7498 appropriate field if it is present. */
7499 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7501 unsigned HOST_WIDE_INT idx;
7502 tree field, value;
7504 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7505 idx, field, value)
7506 if (field == TREE_OPERAND (exp, 1)
7507 /* We can normally use the value of the field in the
7508 CONSTRUCTOR. However, if this is a bitfield in
7509 an integral mode that we can fit in a HOST_WIDE_INT,
7510 we must mask only the number of bits in the bitfield,
7511 since this is done implicitly by the constructor. If
7512 the bitfield does not meet either of those conditions,
7513 we can't do this optimization. */
7514 && (! DECL_BIT_FIELD (field)
7515 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7516 && (GET_MODE_BITSIZE (DECL_MODE (field))
7517 <= HOST_BITS_PER_WIDE_INT))))
7519 if (DECL_BIT_FIELD (field)
7520 && modifier == EXPAND_STACK_PARM)
7521 target = 0;
7522 op0 = expand_expr (value, target, tmode, modifier);
7523 if (DECL_BIT_FIELD (field))
7525 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7526 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7528 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7530 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7531 op0 = expand_and (imode, op0, op1, target);
7533 else
7535 tree count
7536 = build_int_cst (NULL_TREE,
7537 GET_MODE_BITSIZE (imode) - bitsize);
7539 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7540 target, 0);
7541 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7542 target, 0);
7546 return op0;
7549 goto normal_inner_ref;
7551 case BIT_FIELD_REF:
7552 case ARRAY_RANGE_REF:
7553 normal_inner_ref:
7555 enum machine_mode mode1;
7556 HOST_WIDE_INT bitsize, bitpos;
7557 tree offset;
7558 int volatilep = 0;
7559 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7560 &mode1, &unsignedp, &volatilep, true);
7561 rtx orig_op0;
7563 /* If we got back the original object, something is wrong. Perhaps
7564 we are evaluating an expression too early. In any event, don't
7565 infinitely recurse. */
7566 gcc_assert (tem != exp);
7568 /* If TEM's type is a union of variable size, pass TARGET to the inner
7569 computation, since it will need a temporary and TARGET is known
7570 to have to do. This occurs in unchecked conversion in Ada. */
7572 orig_op0 = op0
7573 = expand_expr (tem,
7574 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7575 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7576 != INTEGER_CST)
7577 && modifier != EXPAND_STACK_PARM
7578 ? target : NULL_RTX),
7579 VOIDmode,
7580 (modifier == EXPAND_INITIALIZER
7581 || modifier == EXPAND_CONST_ADDRESS
7582 || modifier == EXPAND_STACK_PARM)
7583 ? modifier : EXPAND_NORMAL);
7585 /* If this is a constant, put it into a register if it is a legitimate
7586 constant, OFFSET is 0, and we won't try to extract outside the
7587 register (in case we were passed a partially uninitialized object
7588 or a view_conversion to a larger size). Force the constant to
7589 memory otherwise. */
7590 if (CONSTANT_P (op0))
7592 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7593 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7594 && offset == 0
7595 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7596 op0 = force_reg (mode, op0);
7597 else
7598 op0 = validize_mem (force_const_mem (mode, op0));
7601 /* Otherwise, if this object not in memory and we either have an
7602 offset, a BLKmode result, or a reference outside the object, put it
7603 there. Such cases can occur in Ada if we have unchecked conversion
7604 of an expression from a scalar type to an array or record type or
7605 for an ARRAY_RANGE_REF whose type is BLKmode. */
7606 else if (!MEM_P (op0)
7607 && (offset != 0
7608 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7609 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7611 tree nt = build_qualified_type (TREE_TYPE (tem),
7612 (TYPE_QUALS (TREE_TYPE (tem))
7613 | TYPE_QUAL_CONST));
7614 rtx memloc = assign_temp (nt, 1, 1, 1);
7616 emit_move_insn (memloc, op0);
7617 op0 = memloc;
7620 if (offset != 0)
7622 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7623 EXPAND_SUM);
7625 gcc_assert (MEM_P (op0));
7627 #ifdef POINTERS_EXTEND_UNSIGNED
7628 if (GET_MODE (offset_rtx) != Pmode)
7629 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7630 #else
7631 if (GET_MODE (offset_rtx) != ptr_mode)
7632 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7633 #endif
7635 if (GET_MODE (op0) == BLKmode
7636 /* A constant address in OP0 can have VOIDmode, we must
7637 not try to call force_reg in that case. */
7638 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7639 && bitsize != 0
7640 && (bitpos % bitsize) == 0
7641 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7642 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7644 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7645 bitpos = 0;
7648 op0 = offset_address (op0, offset_rtx,
7649 highest_pow2_factor (offset));
7652 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7653 record its alignment as BIGGEST_ALIGNMENT. */
7654 if (MEM_P (op0) && bitpos == 0 && offset != 0
7655 && is_aligning_offset (offset, tem))
7656 set_mem_align (op0, BIGGEST_ALIGNMENT);
7658 /* Don't forget about volatility even if this is a bitfield. */
7659 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7661 if (op0 == orig_op0)
7662 op0 = copy_rtx (op0);
7664 MEM_VOLATILE_P (op0) = 1;
7667 /* The following code doesn't handle CONCAT.
7668 Assume only bitpos == 0 can be used for CONCAT, due to
7669 one element arrays having the same mode as its element. */
7670 if (GET_CODE (op0) == CONCAT)
7672 gcc_assert (bitpos == 0
7673 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7674 return op0;
7677 /* In cases where an aligned union has an unaligned object
7678 as a field, we might be extracting a BLKmode value from
7679 an integer-mode (e.g., SImode) object. Handle this case
7680 by doing the extract into an object as wide as the field
7681 (which we know to be the width of a basic mode), then
7682 storing into memory, and changing the mode to BLKmode. */
7683 if (mode1 == VOIDmode
7684 || REG_P (op0) || GET_CODE (op0) == SUBREG
7685 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7686 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7687 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7688 && modifier != EXPAND_CONST_ADDRESS
7689 && modifier != EXPAND_INITIALIZER)
7690 /* If the field isn't aligned enough to fetch as a memref,
7691 fetch it as a bit field. */
7692 || (mode1 != BLKmode
7693 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7694 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7695 || (MEM_P (op0)
7696 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7697 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7698 && ((modifier == EXPAND_CONST_ADDRESS
7699 || modifier == EXPAND_INITIALIZER)
7700 ? STRICT_ALIGNMENT
7701 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7702 || (bitpos % BITS_PER_UNIT != 0)))
7703 /* If the type and the field are a constant size and the
7704 size of the type isn't the same size as the bitfield,
7705 we must use bitfield operations. */
7706 || (bitsize >= 0
7707 && TYPE_SIZE (TREE_TYPE (exp))
7708 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7709 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7710 bitsize)))
7712 enum machine_mode ext_mode = mode;
7714 if (ext_mode == BLKmode
7715 && ! (target != 0 && MEM_P (op0)
7716 && MEM_P (target)
7717 && bitpos % BITS_PER_UNIT == 0))
7718 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7720 if (ext_mode == BLKmode)
7722 if (target == 0)
7723 target = assign_temp (type, 0, 1, 1);
7725 if (bitsize == 0)
7726 return target;
7728 /* In this case, BITPOS must start at a byte boundary and
7729 TARGET, if specified, must be a MEM. */
7730 gcc_assert (MEM_P (op0)
7731 && (!target || MEM_P (target))
7732 && !(bitpos % BITS_PER_UNIT));
7734 emit_block_move (target,
7735 adjust_address (op0, VOIDmode,
7736 bitpos / BITS_PER_UNIT),
7737 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7738 / BITS_PER_UNIT),
7739 (modifier == EXPAND_STACK_PARM
7740 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7742 return target;
7745 op0 = validize_mem (op0);
7747 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7748 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7750 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7751 (modifier == EXPAND_STACK_PARM
7752 ? NULL_RTX : target),
7753 ext_mode, ext_mode);
7755 /* If the result is a record type and BITSIZE is narrower than
7756 the mode of OP0, an integral mode, and this is a big endian
7757 machine, we must put the field into the high-order bits. */
7758 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7759 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7760 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7761 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7762 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7763 - bitsize),
7764 op0, 1);
7766 /* If the result type is BLKmode, store the data into a temporary
7767 of the appropriate type, but with the mode corresponding to the
7768 mode for the data we have (op0's mode). It's tempting to make
7769 this a constant type, since we know it's only being stored once,
7770 but that can cause problems if we are taking the address of this
7771 COMPONENT_REF because the MEM of any reference via that address
7772 will have flags corresponding to the type, which will not
7773 necessarily be constant. */
7774 if (mode == BLKmode)
7776 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
7777 rtx new;
7779 /* If the reference doesn't use the alias set of its type,
7780 we cannot create the temporary using that type. */
7781 if (component_uses_parent_alias_set (exp))
7783 new = assign_stack_local (ext_mode, size, 0);
7784 set_mem_alias_set (new, get_alias_set (exp));
7786 else
7787 new = assign_stack_temp_for_type (ext_mode, size, 0, type);
7789 emit_move_insn (new, op0);
7790 op0 = copy_rtx (new);
7791 PUT_MODE (op0, BLKmode);
7792 set_mem_attributes (op0, exp, 1);
7795 return op0;
7798 /* If the result is BLKmode, use that to access the object
7799 now as well. */
7800 if (mode == BLKmode)
7801 mode1 = BLKmode;
7803 /* Get a reference to just this component. */
7804 if (modifier == EXPAND_CONST_ADDRESS
7805 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7806 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7807 else
7808 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7810 if (op0 == orig_op0)
7811 op0 = copy_rtx (op0);
7813 set_mem_attributes (op0, exp, 0);
7814 if (REG_P (XEXP (op0, 0)))
7815 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7817 MEM_VOLATILE_P (op0) |= volatilep;
7818 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7819 || modifier == EXPAND_CONST_ADDRESS
7820 || modifier == EXPAND_INITIALIZER)
7821 return op0;
7822 else if (target == 0)
7823 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7825 convert_move (target, op0, unsignedp);
7826 return target;
7829 case OBJ_TYPE_REF:
7830 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7832 case CALL_EXPR:
7833 /* Check for a built-in function. */
7834 if (TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
7835 && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
7836 == FUNCTION_DECL)
7837 && DECL_BUILT_IN (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
7839 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
7840 == BUILT_IN_FRONTEND)
7841 return lang_hooks.expand_expr (exp, original_target,
7842 tmode, modifier,
7843 alt_rtl);
7844 else
7845 return expand_builtin (exp, target, subtarget, tmode, ignore);
7848 return expand_call (exp, target, ignore);
7850 case NON_LVALUE_EXPR:
7851 case NOP_EXPR:
7852 case CONVERT_EXPR:
7853 if (TREE_OPERAND (exp, 0) == error_mark_node)
7854 return const0_rtx;
7856 if (TREE_CODE (type) == UNION_TYPE)
7858 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7860 /* If both input and output are BLKmode, this conversion isn't doing
7861 anything except possibly changing memory attribute. */
7862 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7864 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7865 modifier);
7867 result = copy_rtx (result);
7868 set_mem_attributes (result, exp, 0);
7869 return result;
7872 if (target == 0)
7874 if (TYPE_MODE (type) != BLKmode)
7875 target = gen_reg_rtx (TYPE_MODE (type));
7876 else
7877 target = assign_temp (type, 0, 1, 1);
7880 if (MEM_P (target))
7881 /* Store data into beginning of memory target. */
7882 store_expr (TREE_OPERAND (exp, 0),
7883 adjust_address (target, TYPE_MODE (valtype), 0),
7884 modifier == EXPAND_STACK_PARM,
7885 false);
7887 else
7889 gcc_assert (REG_P (target));
7891 /* Store this field into a union of the proper type. */
7892 store_field (target,
7893 MIN ((int_size_in_bytes (TREE_TYPE
7894 (TREE_OPERAND (exp, 0)))
7895 * BITS_PER_UNIT),
7896 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7897 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7898 type, 0, false);
7901 /* Return the entire union. */
7902 return target;
7905 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7907 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7908 modifier);
7910 /* If the signedness of the conversion differs and OP0 is
7911 a promoted SUBREG, clear that indication since we now
7912 have to do the proper extension. */
7913 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7914 && GET_CODE (op0) == SUBREG)
7915 SUBREG_PROMOTED_VAR_P (op0) = 0;
7917 return REDUCE_BIT_FIELD (op0);
7920 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
7921 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7922 if (GET_MODE (op0) == mode)
7925 /* If OP0 is a constant, just convert it into the proper mode. */
7926 else if (CONSTANT_P (op0))
7928 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7929 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7931 if (modifier == EXPAND_INITIALIZER)
7932 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7933 subreg_lowpart_offset (mode,
7934 inner_mode));
7935 else
7936 op0= convert_modes (mode, inner_mode, op0,
7937 TYPE_UNSIGNED (inner_type));
7940 else if (modifier == EXPAND_INITIALIZER)
7941 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7943 else if (target == 0)
7944 op0 = convert_to_mode (mode, op0,
7945 TYPE_UNSIGNED (TREE_TYPE
7946 (TREE_OPERAND (exp, 0))));
7947 else
7949 convert_move (target, op0,
7950 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7951 op0 = target;
7954 return REDUCE_BIT_FIELD (op0);
7956 case VIEW_CONVERT_EXPR:
7957 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7959 /* If the input and output modes are both the same, we are done. */
7960 if (TYPE_MODE (type) == GET_MODE (op0))
7962 /* If neither mode is BLKmode, and both modes are the same size
7963 then we can use gen_lowpart. */
7964 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7965 && GET_MODE_SIZE (TYPE_MODE (type))
7966 == GET_MODE_SIZE (GET_MODE (op0)))
7968 if (GET_CODE (op0) == SUBREG)
7969 op0 = force_reg (GET_MODE (op0), op0);
7970 op0 = gen_lowpart (TYPE_MODE (type), op0);
7972 /* If both modes are integral, then we can convert from one to the
7973 other. */
7974 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7975 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7976 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7977 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7978 /* As a last resort, spill op0 to memory, and reload it in a
7979 different mode. */
7980 else if (!MEM_P (op0))
7982 /* If the operand is not a MEM, force it into memory. Since we
7983 are going to be changing the mode of the MEM, don't call
7984 force_const_mem for constants because we don't allow pool
7985 constants to change mode. */
7986 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7988 gcc_assert (!TREE_ADDRESSABLE (exp));
7990 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7991 target
7992 = assign_stack_temp_for_type
7993 (TYPE_MODE (inner_type),
7994 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7996 emit_move_insn (target, op0);
7997 op0 = target;
8000 /* At this point, OP0 is in the correct mode. If the output type is such
8001 that the operand is known to be aligned, indicate that it is.
8002 Otherwise, we need only be concerned about alignment for non-BLKmode
8003 results. */
8004 if (MEM_P (op0))
8006 op0 = copy_rtx (op0);
8008 if (TYPE_ALIGN_OK (type))
8009 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8010 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8011 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8013 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8014 HOST_WIDE_INT temp_size
8015 = MAX (int_size_in_bytes (inner_type),
8016 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8017 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8018 temp_size, 0, type);
8019 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8021 gcc_assert (!TREE_ADDRESSABLE (exp));
8023 if (GET_MODE (op0) == BLKmode)
8024 emit_block_move (new_with_op0_mode, op0,
8025 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8026 (modifier == EXPAND_STACK_PARM
8027 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8028 else
8029 emit_move_insn (new_with_op0_mode, op0);
8031 op0 = new;
8034 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8037 return op0;
8039 case PLUS_EXPR:
8040 /* Check if this is a case for multiplication and addition. */
8041 if (TREE_CODE (type) == INTEGER_TYPE
8042 && TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR)
8044 tree subsubexp0, subsubexp1;
8045 enum tree_code code0, code1;
8047 subexp0 = TREE_OPERAND (exp, 0);
8048 subsubexp0 = TREE_OPERAND (subexp0, 0);
8049 subsubexp1 = TREE_OPERAND (subexp0, 1);
8050 code0 = TREE_CODE (subsubexp0);
8051 code1 = TREE_CODE (subsubexp1);
8052 if (code0 == NOP_EXPR && code1 == NOP_EXPR
8053 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8054 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8055 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8056 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8057 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8058 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8060 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8061 enum machine_mode innermode = TYPE_MODE (op0type);
8062 bool zextend_p = TYPE_UNSIGNED (op0type);
8063 this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
8064 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8065 && (this_optab->handlers[(int) mode].insn_code
8066 != CODE_FOR_nothing))
8068 expand_operands (TREE_OPERAND (subsubexp0, 0),
8069 TREE_OPERAND (subsubexp1, 0),
8070 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8071 op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget,
8072 VOIDmode, EXPAND_NORMAL);
8073 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8074 target, unsignedp);
8075 gcc_assert (temp);
8076 return REDUCE_BIT_FIELD (temp);
8081 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8082 something else, make sure we add the register to the constant and
8083 then to the other thing. This case can occur during strength
8084 reduction and doing it this way will produce better code if the
8085 frame pointer or argument pointer is eliminated.
8087 fold-const.c will ensure that the constant is always in the inner
8088 PLUS_EXPR, so the only case we need to do anything about is if
8089 sp, ap, or fp is our second argument, in which case we must swap
8090 the innermost first argument and our second argument. */
8092 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8093 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8094 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
8095 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8096 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8097 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8099 tree t = TREE_OPERAND (exp, 1);
8101 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8102 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8105 /* If the result is to be ptr_mode and we are adding an integer to
8106 something, we might be forming a constant. So try to use
8107 plus_constant. If it produces a sum and we can't accept it,
8108 use force_operand. This allows P = &ARR[const] to generate
8109 efficient code on machines where a SYMBOL_REF is not a valid
8110 address.
8112 If this is an EXPAND_SUM call, always return the sum. */
8113 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8114 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8116 if (modifier == EXPAND_STACK_PARM)
8117 target = 0;
8118 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8119 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8120 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8122 rtx constant_part;
8124 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8125 EXPAND_SUM);
8126 /* Use immed_double_const to ensure that the constant is
8127 truncated according to the mode of OP1, then sign extended
8128 to a HOST_WIDE_INT. Using the constant directly can result
8129 in non-canonical RTL in a 64x32 cross compile. */
8130 constant_part
8131 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8132 (HOST_WIDE_INT) 0,
8133 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8134 op1 = plus_constant (op1, INTVAL (constant_part));
8135 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8136 op1 = force_operand (op1, target);
8137 return REDUCE_BIT_FIELD (op1);
8140 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8141 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8142 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8144 rtx constant_part;
8146 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8147 (modifier == EXPAND_INITIALIZER
8148 ? EXPAND_INITIALIZER : EXPAND_SUM));
8149 if (! CONSTANT_P (op0))
8151 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8152 VOIDmode, modifier);
8153 /* Return a PLUS if modifier says it's OK. */
8154 if (modifier == EXPAND_SUM
8155 || modifier == EXPAND_INITIALIZER)
8156 return simplify_gen_binary (PLUS, mode, op0, op1);
8157 goto binop2;
8159 /* Use immed_double_const to ensure that the constant is
8160 truncated according to the mode of OP1, then sign extended
8161 to a HOST_WIDE_INT. Using the constant directly can result
8162 in non-canonical RTL in a 64x32 cross compile. */
8163 constant_part
8164 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8165 (HOST_WIDE_INT) 0,
8166 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8167 op0 = plus_constant (op0, INTVAL (constant_part));
8168 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8169 op0 = force_operand (op0, target);
8170 return REDUCE_BIT_FIELD (op0);
8174 /* No sense saving up arithmetic to be done
8175 if it's all in the wrong mode to form part of an address.
8176 And force_operand won't know whether to sign-extend or
8177 zero-extend. */
8178 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8179 || mode != ptr_mode)
8181 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8182 subtarget, &op0, &op1, 0);
8183 if (op0 == const0_rtx)
8184 return op1;
8185 if (op1 == const0_rtx)
8186 return op0;
8187 goto binop2;
8190 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8191 subtarget, &op0, &op1, modifier);
8192 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8194 case MINUS_EXPR:
8195 /* Check if this is a case for multiplication and subtraction. */
8196 if (TREE_CODE (type) == INTEGER_TYPE
8197 && TREE_CODE (TREE_OPERAND (exp, 1)) == MULT_EXPR)
8199 tree subsubexp0, subsubexp1;
8200 enum tree_code code0, code1;
8202 subexp1 = TREE_OPERAND (exp, 1);
8203 subsubexp0 = TREE_OPERAND (subexp1, 0);
8204 subsubexp1 = TREE_OPERAND (subexp1, 1);
8205 code0 = TREE_CODE (subsubexp0);
8206 code1 = TREE_CODE (subsubexp1);
8207 if (code0 == NOP_EXPR && code1 == NOP_EXPR
8208 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8209 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8210 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8211 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8212 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8213 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8215 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8216 enum machine_mode innermode = TYPE_MODE (op0type);
8217 bool zextend_p = TYPE_UNSIGNED (op0type);
8218 this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
8219 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8220 && (this_optab->handlers[(int) mode].insn_code
8221 != CODE_FOR_nothing))
8223 expand_operands (TREE_OPERAND (subsubexp0, 0),
8224 TREE_OPERAND (subsubexp1, 0),
8225 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8226 op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8227 VOIDmode, EXPAND_NORMAL);
8228 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8229 target, unsignedp);
8230 gcc_assert (temp);
8231 return REDUCE_BIT_FIELD (temp);
8236 /* For initializers, we are allowed to return a MINUS of two
8237 symbolic constants. Here we handle all cases when both operands
8238 are constant. */
8239 /* Handle difference of two symbolic constants,
8240 for the sake of an initializer. */
8241 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8242 && really_constant_p (TREE_OPERAND (exp, 0))
8243 && really_constant_p (TREE_OPERAND (exp, 1)))
8245 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8246 NULL_RTX, &op0, &op1, modifier);
8248 /* If the last operand is a CONST_INT, use plus_constant of
8249 the negated constant. Else make the MINUS. */
8250 if (GET_CODE (op1) == CONST_INT)
8251 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8252 else
8253 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8256 /* No sense saving up arithmetic to be done
8257 if it's all in the wrong mode to form part of an address.
8258 And force_operand won't know whether to sign-extend or
8259 zero-extend. */
8260 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8261 || mode != ptr_mode)
8262 goto binop;
8264 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8265 subtarget, &op0, &op1, modifier);
8267 /* Convert A - const to A + (-const). */
8268 if (GET_CODE (op1) == CONST_INT)
8270 op1 = negate_rtx (mode, op1);
8271 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8274 goto binop2;
8276 case MULT_EXPR:
8277 /* If first operand is constant, swap them.
8278 Thus the following special case checks need only
8279 check the second operand. */
8280 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8282 tree t1 = TREE_OPERAND (exp, 0);
8283 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8284 TREE_OPERAND (exp, 1) = t1;
8287 /* Attempt to return something suitable for generating an
8288 indexed address, for machines that support that. */
8290 if (modifier == EXPAND_SUM && mode == ptr_mode
8291 && host_integerp (TREE_OPERAND (exp, 1), 0))
8293 tree exp1 = TREE_OPERAND (exp, 1);
8295 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8296 EXPAND_SUM);
8298 if (!REG_P (op0))
8299 op0 = force_operand (op0, NULL_RTX);
8300 if (!REG_P (op0))
8301 op0 = copy_to_mode_reg (mode, op0);
8303 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8304 gen_int_mode (tree_low_cst (exp1, 0),
8305 TYPE_MODE (TREE_TYPE (exp1)))));
8308 if (modifier == EXPAND_STACK_PARM)
8309 target = 0;
8311 /* Check for multiplying things that have been extended
8312 from a narrower type. If this machine supports multiplying
8313 in that narrower type with a result in the desired type,
8314 do it that way, and avoid the explicit type-conversion. */
8316 subexp0 = TREE_OPERAND (exp, 0);
8317 subexp1 = TREE_OPERAND (exp, 1);
8318 /* First, check if we have a multiplication of one signed and one
8319 unsigned operand. */
8320 if (TREE_CODE (subexp0) == NOP_EXPR
8321 && TREE_CODE (subexp1) == NOP_EXPR
8322 && TREE_CODE (type) == INTEGER_TYPE
8323 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8324 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8325 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8326 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8327 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8328 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8330 enum machine_mode innermode
8331 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8332 this_optab = usmul_widen_optab;
8333 if (mode == GET_MODE_WIDER_MODE (innermode))
8335 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8337 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8338 expand_operands (TREE_OPERAND (subexp0, 0),
8339 TREE_OPERAND (subexp1, 0),
8340 NULL_RTX, &op0, &op1, 0);
8341 else
8342 expand_operands (TREE_OPERAND (subexp0, 0),
8343 TREE_OPERAND (subexp1, 0),
8344 NULL_RTX, &op1, &op0, 0);
8346 goto binop3;
8350 /* Check for a multiplication with matching signedness. */
8351 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8352 && TREE_CODE (type) == INTEGER_TYPE
8353 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8354 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8355 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8356 && int_fits_type_p (TREE_OPERAND (exp, 1),
8357 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8358 /* Don't use a widening multiply if a shift will do. */
8359 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8360 > HOST_BITS_PER_WIDE_INT)
8361 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8363 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8364 && (TYPE_PRECISION (TREE_TYPE
8365 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8366 == TYPE_PRECISION (TREE_TYPE
8367 (TREE_OPERAND
8368 (TREE_OPERAND (exp, 0), 0))))
8369 /* If both operands are extended, they must either both
8370 be zero-extended or both be sign-extended. */
8371 && (TYPE_UNSIGNED (TREE_TYPE
8372 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8373 == TYPE_UNSIGNED (TREE_TYPE
8374 (TREE_OPERAND
8375 (TREE_OPERAND (exp, 0), 0)))))))
8377 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8378 enum machine_mode innermode = TYPE_MODE (op0type);
8379 bool zextend_p = TYPE_UNSIGNED (op0type);
8380 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8381 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8383 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8385 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8387 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8388 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8389 TREE_OPERAND (exp, 1),
8390 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8391 else
8392 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8393 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8394 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8395 goto binop3;
8397 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8398 && innermode == word_mode)
8400 rtx htem, hipart;
8401 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8402 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8403 op1 = convert_modes (innermode, mode,
8404 expand_normal (TREE_OPERAND (exp, 1)),
8405 unsignedp);
8406 else
8407 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8408 temp = expand_binop (mode, other_optab, op0, op1, target,
8409 unsignedp, OPTAB_LIB_WIDEN);
8410 hipart = gen_highpart (innermode, temp);
8411 htem = expand_mult_highpart_adjust (innermode, hipart,
8412 op0, op1, hipart,
8413 zextend_p);
8414 if (htem != hipart)
8415 emit_move_insn (hipart, htem);
8416 return REDUCE_BIT_FIELD (temp);
8420 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8421 subtarget, &op0, &op1, 0);
8422 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8424 case TRUNC_DIV_EXPR:
8425 case FLOOR_DIV_EXPR:
8426 case CEIL_DIV_EXPR:
8427 case ROUND_DIV_EXPR:
8428 case EXACT_DIV_EXPR:
8429 if (modifier == EXPAND_STACK_PARM)
8430 target = 0;
8431 /* Possible optimization: compute the dividend with EXPAND_SUM
8432 then if the divisor is constant can optimize the case
8433 where some terms of the dividend have coeffs divisible by it. */
8434 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8435 subtarget, &op0, &op1, 0);
8436 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8438 case RDIV_EXPR:
8439 goto binop;
8441 case TRUNC_MOD_EXPR:
8442 case FLOOR_MOD_EXPR:
8443 case CEIL_MOD_EXPR:
8444 case ROUND_MOD_EXPR:
8445 if (modifier == EXPAND_STACK_PARM)
8446 target = 0;
8447 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8448 subtarget, &op0, &op1, 0);
8449 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8451 case FIX_TRUNC_EXPR:
8452 op0 = expand_normal (TREE_OPERAND (exp, 0));
8453 if (target == 0 || modifier == EXPAND_STACK_PARM)
8454 target = gen_reg_rtx (mode);
8455 expand_fix (target, op0, unsignedp);
8456 return target;
8458 case FLOAT_EXPR:
8459 op0 = expand_normal (TREE_OPERAND (exp, 0));
8460 if (target == 0 || modifier == EXPAND_STACK_PARM)
8461 target = gen_reg_rtx (mode);
8462 /* expand_float can't figure out what to do if FROM has VOIDmode.
8463 So give it the correct mode. With -O, cse will optimize this. */
8464 if (GET_MODE (op0) == VOIDmode)
8465 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8466 op0);
8467 expand_float (target, op0,
8468 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8469 return target;
8471 case NEGATE_EXPR:
8472 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8473 VOIDmode, EXPAND_NORMAL);
8474 if (modifier == EXPAND_STACK_PARM)
8475 target = 0;
8476 temp = expand_unop (mode,
8477 optab_for_tree_code (NEGATE_EXPR, type),
8478 op0, target, 0);
8479 gcc_assert (temp);
8480 return REDUCE_BIT_FIELD (temp);
8482 case ABS_EXPR:
8483 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8484 VOIDmode, EXPAND_NORMAL);
8485 if (modifier == EXPAND_STACK_PARM)
8486 target = 0;
8488 /* ABS_EXPR is not valid for complex arguments. */
8489 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8490 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8492 /* Unsigned abs is simply the operand. Testing here means we don't
8493 risk generating incorrect code below. */
8494 if (TYPE_UNSIGNED (type))
8495 return op0;
8497 return expand_abs (mode, op0, target, unsignedp,
8498 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8500 case MAX_EXPR:
8501 case MIN_EXPR:
8502 target = original_target;
8503 if (target == 0
8504 || modifier == EXPAND_STACK_PARM
8505 || (MEM_P (target) && MEM_VOLATILE_P (target))
8506 || GET_MODE (target) != mode
8507 || (REG_P (target)
8508 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8509 target = gen_reg_rtx (mode);
8510 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8511 target, &op0, &op1, 0);
8513 /* First try to do it with a special MIN or MAX instruction.
8514 If that does not win, use a conditional jump to select the proper
8515 value. */
8516 this_optab = optab_for_tree_code (code, type);
8517 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8518 OPTAB_WIDEN);
8519 if (temp != 0)
8520 return temp;
8522 /* At this point, a MEM target is no longer useful; we will get better
8523 code without it. */
8525 if (! REG_P (target))
8526 target = gen_reg_rtx (mode);
8528 /* If op1 was placed in target, swap op0 and op1. */
8529 if (target != op0 && target == op1)
8531 temp = op0;
8532 op0 = op1;
8533 op1 = temp;
8536 /* We generate better code and avoid problems with op1 mentioning
8537 target by forcing op1 into a pseudo if it isn't a constant. */
8538 if (! CONSTANT_P (op1))
8539 op1 = force_reg (mode, op1);
8542 enum rtx_code comparison_code;
8543 rtx cmpop1 = op1;
8545 if (code == MAX_EXPR)
8546 comparison_code = unsignedp ? GEU : GE;
8547 else
8548 comparison_code = unsignedp ? LEU : LE;
8550 /* Canonicalize to comparisons against 0. */
8551 if (op1 == const1_rtx)
8553 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8554 or (a != 0 ? a : 1) for unsigned.
8555 For MIN we are safe converting (a <= 1 ? a : 1)
8556 into (a <= 0 ? a : 1) */
8557 cmpop1 = const0_rtx;
8558 if (code == MAX_EXPR)
8559 comparison_code = unsignedp ? NE : GT;
8561 if (op1 == constm1_rtx && !unsignedp)
8563 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8564 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8565 cmpop1 = const0_rtx;
8566 if (code == MIN_EXPR)
8567 comparison_code = LT;
8569 #ifdef HAVE_conditional_move
8570 /* Use a conditional move if possible. */
8571 if (can_conditionally_move_p (mode))
8573 rtx insn;
8575 /* ??? Same problem as in expmed.c: emit_conditional_move
8576 forces a stack adjustment via compare_from_rtx, and we
8577 lose the stack adjustment if the sequence we are about
8578 to create is discarded. */
8579 do_pending_stack_adjust ();
8581 start_sequence ();
8583 /* Try to emit the conditional move. */
8584 insn = emit_conditional_move (target, comparison_code,
8585 op0, cmpop1, mode,
8586 op0, op1, mode,
8587 unsignedp);
8589 /* If we could do the conditional move, emit the sequence,
8590 and return. */
8591 if (insn)
8593 rtx seq = get_insns ();
8594 end_sequence ();
8595 emit_insn (seq);
8596 return target;
8599 /* Otherwise discard the sequence and fall back to code with
8600 branches. */
8601 end_sequence ();
8603 #endif
8604 if (target != op0)
8605 emit_move_insn (target, op0);
8607 temp = gen_label_rtx ();
8608 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8609 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8611 emit_move_insn (target, op1);
8612 emit_label (temp);
8613 return target;
8615 case BIT_NOT_EXPR:
8616 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8617 VOIDmode, EXPAND_NORMAL);
8618 if (modifier == EXPAND_STACK_PARM)
8619 target = 0;
8620 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8621 gcc_assert (temp);
8622 return temp;
8624 /* ??? Can optimize bitwise operations with one arg constant.
8625 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8626 and (a bitwise1 b) bitwise2 b (etc)
8627 but that is probably not worth while. */
8629 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8630 boolean values when we want in all cases to compute both of them. In
8631 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8632 as actual zero-or-1 values and then bitwise anding. In cases where
8633 there cannot be any side effects, better code would be made by
8634 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8635 how to recognize those cases. */
8637 case TRUTH_AND_EXPR:
8638 code = BIT_AND_EXPR;
8639 case BIT_AND_EXPR:
8640 goto binop;
8642 case TRUTH_OR_EXPR:
8643 code = BIT_IOR_EXPR;
8644 case BIT_IOR_EXPR:
8645 goto binop;
8647 case TRUTH_XOR_EXPR:
8648 code = BIT_XOR_EXPR;
8649 case BIT_XOR_EXPR:
8650 goto binop;
8652 case LSHIFT_EXPR:
8653 case RSHIFT_EXPR:
8654 case LROTATE_EXPR:
8655 case RROTATE_EXPR:
8656 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8657 subtarget = 0;
8658 if (modifier == EXPAND_STACK_PARM)
8659 target = 0;
8660 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8661 VOIDmode, EXPAND_NORMAL);
8662 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8663 unsignedp);
8665 /* Could determine the answer when only additive constants differ. Also,
8666 the addition of one can be handled by changing the condition. */
8667 case LT_EXPR:
8668 case LE_EXPR:
8669 case GT_EXPR:
8670 case GE_EXPR:
8671 case EQ_EXPR:
8672 case NE_EXPR:
8673 case UNORDERED_EXPR:
8674 case ORDERED_EXPR:
8675 case UNLT_EXPR:
8676 case UNLE_EXPR:
8677 case UNGT_EXPR:
8678 case UNGE_EXPR:
8679 case UNEQ_EXPR:
8680 case LTGT_EXPR:
8681 temp = do_store_flag (exp,
8682 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8683 tmode != VOIDmode ? tmode : mode, 0);
8684 if (temp != 0)
8685 return temp;
8687 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8688 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8689 && original_target
8690 && REG_P (original_target)
8691 && (GET_MODE (original_target)
8692 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8694 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8695 VOIDmode, EXPAND_NORMAL);
8697 /* If temp is constant, we can just compute the result. */
8698 if (GET_CODE (temp) == CONST_INT)
8700 if (INTVAL (temp) != 0)
8701 emit_move_insn (target, const1_rtx);
8702 else
8703 emit_move_insn (target, const0_rtx);
8705 return target;
8708 if (temp != original_target)
8710 enum machine_mode mode1 = GET_MODE (temp);
8711 if (mode1 == VOIDmode)
8712 mode1 = tmode != VOIDmode ? tmode : mode;
8714 temp = copy_to_mode_reg (mode1, temp);
8717 op1 = gen_label_rtx ();
8718 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8719 GET_MODE (temp), unsignedp, op1);
8720 emit_move_insn (temp, const1_rtx);
8721 emit_label (op1);
8722 return temp;
8725 /* If no set-flag instruction, must generate a conditional store
8726 into a temporary variable. Drop through and handle this
8727 like && and ||. */
8729 if (! ignore
8730 && (target == 0
8731 || modifier == EXPAND_STACK_PARM
8732 || ! safe_from_p (target, exp, 1)
8733 /* Make sure we don't have a hard reg (such as function's return
8734 value) live across basic blocks, if not optimizing. */
8735 || (!optimize && REG_P (target)
8736 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8737 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8739 if (target)
8740 emit_move_insn (target, const0_rtx);
8742 op1 = gen_label_rtx ();
8743 jumpifnot (exp, op1);
8745 if (target)
8746 emit_move_insn (target, const1_rtx);
8748 emit_label (op1);
8749 return ignore ? const0_rtx : target;
8751 case TRUTH_NOT_EXPR:
8752 if (modifier == EXPAND_STACK_PARM)
8753 target = 0;
8754 op0 = expand_expr (TREE_OPERAND (exp, 0), target,
8755 VOIDmode, EXPAND_NORMAL);
8756 /* The parser is careful to generate TRUTH_NOT_EXPR
8757 only with operands that are always zero or one. */
8758 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8759 target, 1, OPTAB_LIB_WIDEN);
8760 gcc_assert (temp);
8761 return temp;
8763 case STATEMENT_LIST:
8765 tree_stmt_iterator iter;
8767 gcc_assert (ignore);
8769 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8770 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8772 return const0_rtx;
8774 case COND_EXPR:
8775 /* A COND_EXPR with its type being VOID_TYPE represents a
8776 conditional jump and is handled in
8777 expand_gimple_cond_expr. */
8778 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8780 /* Note that COND_EXPRs whose type is a structure or union
8781 are required to be constructed to contain assignments of
8782 a temporary variable, so that we can evaluate them here
8783 for side effect only. If type is void, we must do likewise. */
8785 gcc_assert (!TREE_ADDRESSABLE (type)
8786 && !ignore
8787 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8788 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8790 /* If we are not to produce a result, we have no target. Otherwise,
8791 if a target was specified use it; it will not be used as an
8792 intermediate target unless it is safe. If no target, use a
8793 temporary. */
8795 if (modifier != EXPAND_STACK_PARM
8796 && original_target
8797 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8798 && GET_MODE (original_target) == mode
8799 #ifdef HAVE_conditional_move
8800 && (! can_conditionally_move_p (mode)
8801 || REG_P (original_target))
8802 #endif
8803 && !MEM_P (original_target))
8804 temp = original_target;
8805 else
8806 temp = assign_temp (type, 0, 0, 1);
8808 do_pending_stack_adjust ();
8809 NO_DEFER_POP;
8810 op0 = gen_label_rtx ();
8811 op1 = gen_label_rtx ();
8812 jumpifnot (TREE_OPERAND (exp, 0), op0);
8813 store_expr (TREE_OPERAND (exp, 1), temp,
8814 modifier == EXPAND_STACK_PARM,
8815 false);
8817 emit_jump_insn (gen_jump (op1));
8818 emit_barrier ();
8819 emit_label (op0);
8820 store_expr (TREE_OPERAND (exp, 2), temp,
8821 modifier == EXPAND_STACK_PARM,
8822 false);
8824 emit_label (op1);
8825 OK_DEFER_POP;
8826 return temp;
8828 case VEC_COND_EXPR:
8829 target = expand_vec_cond_expr (exp, target);
8830 return target;
8832 case MODIFY_EXPR:
8834 tree lhs = TREE_OPERAND (exp, 0);
8835 tree rhs = TREE_OPERAND (exp, 1);
8836 gcc_assert (ignore);
8837 expand_assignment (lhs, rhs, false);
8838 return const0_rtx;
8841 case GIMPLE_MODIFY_STMT:
8843 tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
8844 tree rhs = GIMPLE_STMT_OPERAND (exp, 1);
8846 gcc_assert (ignore);
8848 /* Check for |= or &= of a bitfield of size one into another bitfield
8849 of size 1. In this case, (unless we need the result of the
8850 assignment) we can do this more efficiently with a
8851 test followed by an assignment, if necessary.
8853 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8854 things change so we do, this code should be enhanced to
8855 support it. */
8856 if (TREE_CODE (lhs) == COMPONENT_REF
8857 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8858 || TREE_CODE (rhs) == BIT_AND_EXPR)
8859 && TREE_OPERAND (rhs, 0) == lhs
8860 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8861 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8862 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8864 rtx label = gen_label_rtx ();
8865 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
8866 do_jump (TREE_OPERAND (rhs, 1),
8867 value ? label : 0,
8868 value ? 0 : label);
8869 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
8870 MOVE_NONTEMPORAL (exp));
8871 do_pending_stack_adjust ();
8872 emit_label (label);
8873 return const0_rtx;
8876 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
8877 return const0_rtx;
8880 case RETURN_EXPR:
8881 if (!TREE_OPERAND (exp, 0))
8882 expand_null_return ();
8883 else
8884 expand_return (TREE_OPERAND (exp, 0));
8885 return const0_rtx;
8887 case ADDR_EXPR:
8888 return expand_expr_addr_expr (exp, target, tmode, modifier);
8890 case COMPLEX_EXPR:
8891 /* Get the rtx code of the operands. */
8892 op0 = expand_normal (TREE_OPERAND (exp, 0));
8893 op1 = expand_normal (TREE_OPERAND (exp, 1));
8895 if (!target)
8896 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8898 /* Move the real (op0) and imaginary (op1) parts to their location. */
8899 write_complex_part (target, op0, false);
8900 write_complex_part (target, op1, true);
8902 return target;
8904 case REALPART_EXPR:
8905 op0 = expand_normal (TREE_OPERAND (exp, 0));
8906 return read_complex_part (op0, false);
8908 case IMAGPART_EXPR:
8909 op0 = expand_normal (TREE_OPERAND (exp, 0));
8910 return read_complex_part (op0, true);
8912 case RESX_EXPR:
8913 expand_resx_expr (exp);
8914 return const0_rtx;
8916 case TRY_CATCH_EXPR:
8917 case CATCH_EXPR:
8918 case EH_FILTER_EXPR:
8919 case TRY_FINALLY_EXPR:
8920 /* Lowered by tree-eh.c. */
8921 gcc_unreachable ();
8923 case WITH_CLEANUP_EXPR:
8924 case CLEANUP_POINT_EXPR:
8925 case TARGET_EXPR:
8926 case CASE_LABEL_EXPR:
8927 case VA_ARG_EXPR:
8928 case BIND_EXPR:
8929 case INIT_EXPR:
8930 case CONJ_EXPR:
8931 case COMPOUND_EXPR:
8932 case PREINCREMENT_EXPR:
8933 case PREDECREMENT_EXPR:
8934 case POSTINCREMENT_EXPR:
8935 case POSTDECREMENT_EXPR:
8936 case LOOP_EXPR:
8937 case EXIT_EXPR:
8938 case TRUTH_ANDIF_EXPR:
8939 case TRUTH_ORIF_EXPR:
8940 /* Lowered by gimplify.c. */
8941 gcc_unreachable ();
8943 case CHANGE_DYNAMIC_TYPE_EXPR:
8944 /* This is ignored at the RTL level. The tree level set
8945 DECL_POINTER_ALIAS_SET of any variable to be 0, which is
8946 overkill for the RTL layer but is all that we can
8947 represent. */
8948 return const0_rtx;
8950 case EXC_PTR_EXPR:
8951 return get_exception_pointer (cfun);
8953 case FILTER_EXPR:
8954 return get_exception_filter (cfun);
8956 case FDESC_EXPR:
8957 /* Function descriptors are not valid except for as
8958 initialization constants, and should not be expanded. */
8959 gcc_unreachable ();
8961 case SWITCH_EXPR:
8962 expand_case (exp);
8963 return const0_rtx;
8965 case LABEL_EXPR:
8966 expand_label (TREE_OPERAND (exp, 0));
8967 return const0_rtx;
8969 case ASM_EXPR:
8970 expand_asm_expr (exp);
8971 return const0_rtx;
8973 case WITH_SIZE_EXPR:
8974 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8975 have pulled out the size to use in whatever context it needed. */
8976 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8977 modifier, alt_rtl);
8979 case REALIGN_LOAD_EXPR:
8981 tree oprnd0 = TREE_OPERAND (exp, 0);
8982 tree oprnd1 = TREE_OPERAND (exp, 1);
8983 tree oprnd2 = TREE_OPERAND (exp, 2);
8984 rtx op2;
8986 this_optab = optab_for_tree_code (code, type);
8987 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8988 op2 = expand_normal (oprnd2);
8989 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8990 target, unsignedp);
8991 gcc_assert (temp);
8992 return temp;
8995 case DOT_PROD_EXPR:
8997 tree oprnd0 = TREE_OPERAND (exp, 0);
8998 tree oprnd1 = TREE_OPERAND (exp, 1);
8999 tree oprnd2 = TREE_OPERAND (exp, 2);
9000 rtx op2;
9002 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9003 op2 = expand_normal (oprnd2);
9004 target = expand_widen_pattern_expr (exp, op0, op1, op2,
9005 target, unsignedp);
9006 return target;
9009 case WIDEN_SUM_EXPR:
9011 tree oprnd0 = TREE_OPERAND (exp, 0);
9012 tree oprnd1 = TREE_OPERAND (exp, 1);
9014 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9015 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
9016 target, unsignedp);
9017 return target;
9020 case REDUC_MAX_EXPR:
9021 case REDUC_MIN_EXPR:
9022 case REDUC_PLUS_EXPR:
9024 op0 = expand_normal (TREE_OPERAND (exp, 0));
9025 this_optab = optab_for_tree_code (code, type);
9026 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9027 gcc_assert (temp);
9028 return temp;
9031 case VEC_EXTRACT_EVEN_EXPR:
9032 case VEC_EXTRACT_ODD_EXPR:
9034 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9035 NULL_RTX, &op0, &op1, 0);
9036 this_optab = optab_for_tree_code (code, type);
9037 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9038 OPTAB_WIDEN);
9039 gcc_assert (temp);
9040 return temp;
9043 case VEC_INTERLEAVE_HIGH_EXPR:
9044 case VEC_INTERLEAVE_LOW_EXPR:
9046 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9047 NULL_RTX, &op0, &op1, 0);
9048 this_optab = optab_for_tree_code (code, type);
9049 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9050 OPTAB_WIDEN);
9051 gcc_assert (temp);
9052 return temp;
9055 case VEC_LSHIFT_EXPR:
9056 case VEC_RSHIFT_EXPR:
9058 target = expand_vec_shift_expr (exp, target);
9059 return target;
9062 case VEC_UNPACK_HI_EXPR:
9063 case VEC_UNPACK_LO_EXPR:
9065 op0 = expand_normal (TREE_OPERAND (exp, 0));
9066 this_optab = optab_for_tree_code (code, type);
9067 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
9068 target, unsignedp);
9069 gcc_assert (temp);
9070 return temp;
9073 case VEC_UNPACK_FLOAT_HI_EXPR:
9074 case VEC_UNPACK_FLOAT_LO_EXPR:
9076 op0 = expand_normal (TREE_OPERAND (exp, 0));
9077 /* The signedness is determined from input operand. */
9078 this_optab = optab_for_tree_code (code,
9079 TREE_TYPE (TREE_OPERAND (exp, 0)));
9080 temp = expand_widen_pattern_expr
9081 (exp, op0, NULL_RTX, NULL_RTX,
9082 target, TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
9084 gcc_assert (temp);
9085 return temp;
9088 case VEC_WIDEN_MULT_HI_EXPR:
9089 case VEC_WIDEN_MULT_LO_EXPR:
9091 tree oprnd0 = TREE_OPERAND (exp, 0);
9092 tree oprnd1 = TREE_OPERAND (exp, 1);
9094 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9095 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
9096 target, unsignedp);
9097 gcc_assert (target);
9098 return target;
9101 case VEC_PACK_TRUNC_EXPR:
9102 case VEC_PACK_SAT_EXPR:
9103 case VEC_PACK_FIX_TRUNC_EXPR:
9105 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9106 goto binop;
9109 default:
9110 return lang_hooks.expand_expr (exp, original_target, tmode,
9111 modifier, alt_rtl);
9114 /* Here to do an ordinary binary operator. */
9115 binop:
9116 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9117 subtarget, &op0, &op1, 0);
9118 binop2:
9119 this_optab = optab_for_tree_code (code, type);
9120 binop3:
9121 if (modifier == EXPAND_STACK_PARM)
9122 target = 0;
9123 temp = expand_binop (mode, this_optab, op0, op1, target,
9124 unsignedp, OPTAB_LIB_WIDEN);
9125 gcc_assert (temp);
9126 return REDUCE_BIT_FIELD (temp);
9128 #undef REDUCE_BIT_FIELD
9130 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9131 signedness of TYPE), possibly returning the result in TARGET. */
9132 static rtx
9133 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9135 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9136 if (target && GET_MODE (target) != GET_MODE (exp))
9137 target = 0;
9138 /* For constant values, reduce using build_int_cst_type. */
9139 if (GET_CODE (exp) == CONST_INT)
9141 HOST_WIDE_INT value = INTVAL (exp);
9142 tree t = build_int_cst_type (type, value);
9143 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9145 else if (TYPE_UNSIGNED (type))
9147 rtx mask;
9148 if (prec < HOST_BITS_PER_WIDE_INT)
9149 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
9150 GET_MODE (exp));
9151 else
9152 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
9153 ((unsigned HOST_WIDE_INT) 1
9154 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
9155 GET_MODE (exp));
9156 return expand_and (GET_MODE (exp), exp, mask, target);
9158 else
9160 tree count = build_int_cst (NULL_TREE,
9161 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9162 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9163 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9167 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9168 when applied to the address of EXP produces an address known to be
9169 aligned more than BIGGEST_ALIGNMENT. */
9171 static int
9172 is_aligning_offset (tree offset, tree exp)
9174 /* Strip off any conversions. */
9175 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9176 || TREE_CODE (offset) == NOP_EXPR
9177 || TREE_CODE (offset) == CONVERT_EXPR)
9178 offset = TREE_OPERAND (offset, 0);
9180 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9181 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9182 if (TREE_CODE (offset) != BIT_AND_EXPR
9183 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9184 || compare_tree_int (TREE_OPERAND (offset, 1),
9185 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9186 || !exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9187 return 0;
9189 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9190 It must be NEGATE_EXPR. Then strip any more conversions. */
9191 offset = TREE_OPERAND (offset, 0);
9192 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9193 || TREE_CODE (offset) == NOP_EXPR
9194 || TREE_CODE (offset) == CONVERT_EXPR)
9195 offset = TREE_OPERAND (offset, 0);
9197 if (TREE_CODE (offset) != NEGATE_EXPR)
9198 return 0;
9200 offset = TREE_OPERAND (offset, 0);
9201 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9202 || TREE_CODE (offset) == NOP_EXPR
9203 || TREE_CODE (offset) == CONVERT_EXPR)
9204 offset = TREE_OPERAND (offset, 0);
9206 /* This must now be the address of EXP. */
9207 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9210 /* Return the tree node if an ARG corresponds to a string constant or zero
9211 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9212 in bytes within the string that ARG is accessing. The type of the
9213 offset will be `sizetype'. */
9215 tree
9216 string_constant (tree arg, tree *ptr_offset)
9218 tree array, offset, lower_bound;
9219 STRIP_NOPS (arg);
9221 if (TREE_CODE (arg) == ADDR_EXPR)
9223 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9225 *ptr_offset = size_zero_node;
9226 return TREE_OPERAND (arg, 0);
9228 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9230 array = TREE_OPERAND (arg, 0);
9231 offset = size_zero_node;
9233 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9235 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9236 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9237 if (TREE_CODE (array) != STRING_CST
9238 && TREE_CODE (array) != VAR_DECL)
9239 return 0;
9241 /* Check if the array has a nonzero lower bound. */
9242 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9243 if (!integer_zerop (lower_bound))
9245 /* If the offset and base aren't both constants, return 0. */
9246 if (TREE_CODE (lower_bound) != INTEGER_CST)
9247 return 0;
9248 if (TREE_CODE (offset) != INTEGER_CST)
9249 return 0;
9250 /* Adjust offset by the lower bound. */
9251 offset = size_diffop (fold_convert (sizetype, offset),
9252 fold_convert (sizetype, lower_bound));
9255 else
9256 return 0;
9258 else if (TREE_CODE (arg) == PLUS_EXPR)
9260 tree arg0 = TREE_OPERAND (arg, 0);
9261 tree arg1 = TREE_OPERAND (arg, 1);
9263 STRIP_NOPS (arg0);
9264 STRIP_NOPS (arg1);
9266 if (TREE_CODE (arg0) == ADDR_EXPR
9267 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9268 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9270 array = TREE_OPERAND (arg0, 0);
9271 offset = arg1;
9273 else if (TREE_CODE (arg1) == ADDR_EXPR
9274 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9275 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9277 array = TREE_OPERAND (arg1, 0);
9278 offset = arg0;
9280 else
9281 return 0;
9283 else
9284 return 0;
9286 if (TREE_CODE (array) == STRING_CST)
9288 *ptr_offset = fold_convert (sizetype, offset);
9289 return array;
9291 else if (TREE_CODE (array) == VAR_DECL)
9293 int length;
9295 /* Variables initialized to string literals can be handled too. */
9296 if (DECL_INITIAL (array) == NULL_TREE
9297 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9298 return 0;
9300 /* If they are read-only, non-volatile and bind locally. */
9301 if (! TREE_READONLY (array)
9302 || TREE_SIDE_EFFECTS (array)
9303 || ! targetm.binds_local_p (array))
9304 return 0;
9306 /* Avoid const char foo[4] = "abcde"; */
9307 if (DECL_SIZE_UNIT (array) == NULL_TREE
9308 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9309 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9310 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9311 return 0;
9313 /* If variable is bigger than the string literal, OFFSET must be constant
9314 and inside of the bounds of the string literal. */
9315 offset = fold_convert (sizetype, offset);
9316 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9317 && (! host_integerp (offset, 1)
9318 || compare_tree_int (offset, length) >= 0))
9319 return 0;
9321 *ptr_offset = offset;
9322 return DECL_INITIAL (array);
9325 return 0;
9328 /* Generate code to calculate EXP using a store-flag instruction
9329 and return an rtx for the result. EXP is either a comparison
9330 or a TRUTH_NOT_EXPR whose operand is a comparison.
9332 If TARGET is nonzero, store the result there if convenient.
9334 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9335 cheap.
9337 Return zero if there is no suitable set-flag instruction
9338 available on this machine.
9340 Once expand_expr has been called on the arguments of the comparison,
9341 we are committed to doing the store flag, since it is not safe to
9342 re-evaluate the expression. We emit the store-flag insn by calling
9343 emit_store_flag, but only expand the arguments if we have a reason
9344 to believe that emit_store_flag will be successful. If we think that
9345 it will, but it isn't, we have to simulate the store-flag with a
9346 set/jump/set sequence. */
9348 static rtx
9349 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9351 enum rtx_code code;
9352 tree arg0, arg1, type;
9353 tree tem;
9354 enum machine_mode operand_mode;
9355 int invert = 0;
9356 int unsignedp;
9357 rtx op0, op1;
9358 enum insn_code icode;
9359 rtx subtarget = target;
9360 rtx result, label;
9362 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9363 result at the end. We can't simply invert the test since it would
9364 have already been inverted if it were valid. This case occurs for
9365 some floating-point comparisons. */
9367 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9368 invert = 1, exp = TREE_OPERAND (exp, 0);
9370 arg0 = TREE_OPERAND (exp, 0);
9371 arg1 = TREE_OPERAND (exp, 1);
9373 /* Don't crash if the comparison was erroneous. */
9374 if (arg0 == error_mark_node || arg1 == error_mark_node)
9375 return const0_rtx;
9377 type = TREE_TYPE (arg0);
9378 operand_mode = TYPE_MODE (type);
9379 unsignedp = TYPE_UNSIGNED (type);
9381 /* We won't bother with BLKmode store-flag operations because it would mean
9382 passing a lot of information to emit_store_flag. */
9383 if (operand_mode == BLKmode)
9384 return 0;
9386 /* We won't bother with store-flag operations involving function pointers
9387 when function pointers must be canonicalized before comparisons. */
9388 #ifdef HAVE_canonicalize_funcptr_for_compare
9389 if (HAVE_canonicalize_funcptr_for_compare
9390 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9391 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9392 == FUNCTION_TYPE))
9393 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9394 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9395 == FUNCTION_TYPE))))
9396 return 0;
9397 #endif
9399 STRIP_NOPS (arg0);
9400 STRIP_NOPS (arg1);
9402 /* Get the rtx comparison code to use. We know that EXP is a comparison
9403 operation of some type. Some comparisons against 1 and -1 can be
9404 converted to comparisons with zero. Do so here so that the tests
9405 below will be aware that we have a comparison with zero. These
9406 tests will not catch constants in the first operand, but constants
9407 are rarely passed as the first operand. */
9409 switch (TREE_CODE (exp))
9411 case EQ_EXPR:
9412 code = EQ;
9413 break;
9414 case NE_EXPR:
9415 code = NE;
9416 break;
9417 case LT_EXPR:
9418 if (integer_onep (arg1))
9419 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9420 else
9421 code = unsignedp ? LTU : LT;
9422 break;
9423 case LE_EXPR:
9424 if (! unsignedp && integer_all_onesp (arg1))
9425 arg1 = integer_zero_node, code = LT;
9426 else
9427 code = unsignedp ? LEU : LE;
9428 break;
9429 case GT_EXPR:
9430 if (! unsignedp && integer_all_onesp (arg1))
9431 arg1 = integer_zero_node, code = GE;
9432 else
9433 code = unsignedp ? GTU : GT;
9434 break;
9435 case GE_EXPR:
9436 if (integer_onep (arg1))
9437 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9438 else
9439 code = unsignedp ? GEU : GE;
9440 break;
9442 case UNORDERED_EXPR:
9443 code = UNORDERED;
9444 break;
9445 case ORDERED_EXPR:
9446 code = ORDERED;
9447 break;
9448 case UNLT_EXPR:
9449 code = UNLT;
9450 break;
9451 case UNLE_EXPR:
9452 code = UNLE;
9453 break;
9454 case UNGT_EXPR:
9455 code = UNGT;
9456 break;
9457 case UNGE_EXPR:
9458 code = UNGE;
9459 break;
9460 case UNEQ_EXPR:
9461 code = UNEQ;
9462 break;
9463 case LTGT_EXPR:
9464 code = LTGT;
9465 break;
9467 default:
9468 gcc_unreachable ();
9471 /* Put a constant second. */
9472 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9474 tem = arg0; arg0 = arg1; arg1 = tem;
9475 code = swap_condition (code);
9478 /* If this is an equality or inequality test of a single bit, we can
9479 do this by shifting the bit being tested to the low-order bit and
9480 masking the result with the constant 1. If the condition was EQ,
9481 we xor it with 1. This does not require an scc insn and is faster
9482 than an scc insn even if we have it.
9484 The code to make this transformation was moved into fold_single_bit_test,
9485 so we just call into the folder and expand its result. */
9487 if ((code == NE || code == EQ)
9488 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9489 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9491 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9492 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9493 arg0, arg1, type),
9494 target, VOIDmode, EXPAND_NORMAL);
9497 /* Now see if we are likely to be able to do this. Return if not. */
9498 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9499 return 0;
9501 icode = setcc_gen_code[(int) code];
9503 if (icode == CODE_FOR_nothing)
9505 enum machine_mode wmode;
9507 for (wmode = operand_mode;
9508 icode == CODE_FOR_nothing && wmode != VOIDmode;
9509 wmode = GET_MODE_WIDER_MODE (wmode))
9510 icode = cstore_optab->handlers[(int) wmode].insn_code;
9513 if (icode == CODE_FOR_nothing
9514 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9516 /* We can only do this if it is one of the special cases that
9517 can be handled without an scc insn. */
9518 if ((code == LT && integer_zerop (arg1))
9519 || (! only_cheap && code == GE && integer_zerop (arg1)))
9521 else if (! only_cheap && (code == NE || code == EQ)
9522 && TREE_CODE (type) != REAL_TYPE
9523 && ((abs_optab->handlers[(int) operand_mode].insn_code
9524 != CODE_FOR_nothing)
9525 || (ffs_optab->handlers[(int) operand_mode].insn_code
9526 != CODE_FOR_nothing)))
9528 else
9529 return 0;
9532 if (! get_subtarget (target)
9533 || GET_MODE (subtarget) != operand_mode)
9534 subtarget = 0;
9536 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9538 if (target == 0)
9539 target = gen_reg_rtx (mode);
9541 result = emit_store_flag (target, code, op0, op1,
9542 operand_mode, unsignedp, 1);
9544 if (result)
9546 if (invert)
9547 result = expand_binop (mode, xor_optab, result, const1_rtx,
9548 result, 0, OPTAB_LIB_WIDEN);
9549 return result;
9552 /* If this failed, we have to do this with set/compare/jump/set code. */
9553 if (!REG_P (target)
9554 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9555 target = gen_reg_rtx (GET_MODE (target));
9557 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9558 label = gen_label_rtx ();
9559 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9560 NULL_RTX, label);
9562 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9563 emit_label (label);
9565 return target;
9569 /* Stubs in case we haven't got a casesi insn. */
9570 #ifndef HAVE_casesi
9571 # define HAVE_casesi 0
9572 # define gen_casesi(a, b, c, d, e) (0)
9573 # define CODE_FOR_casesi CODE_FOR_nothing
9574 #endif
9576 /* If the machine does not have a case insn that compares the bounds,
9577 this means extra overhead for dispatch tables, which raises the
9578 threshold for using them. */
9579 #ifndef CASE_VALUES_THRESHOLD
9580 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9581 #endif /* CASE_VALUES_THRESHOLD */
9583 unsigned int
9584 case_values_threshold (void)
9586 return CASE_VALUES_THRESHOLD;
9589 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9590 0 otherwise (i.e. if there is no casesi instruction). */
9592 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9593 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9595 enum machine_mode index_mode = SImode;
9596 int index_bits = GET_MODE_BITSIZE (index_mode);
9597 rtx op1, op2, index;
9598 enum machine_mode op_mode;
9600 if (! HAVE_casesi)
9601 return 0;
9603 /* Convert the index to SImode. */
9604 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9606 enum machine_mode omode = TYPE_MODE (index_type);
9607 rtx rangertx = expand_normal (range);
9609 /* We must handle the endpoints in the original mode. */
9610 index_expr = build2 (MINUS_EXPR, index_type,
9611 index_expr, minval);
9612 minval = integer_zero_node;
9613 index = expand_normal (index_expr);
9614 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9615 omode, 1, default_label);
9616 /* Now we can safely truncate. */
9617 index = convert_to_mode (index_mode, index, 0);
9619 else
9621 if (TYPE_MODE (index_type) != index_mode)
9623 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9624 index_expr = fold_convert (index_type, index_expr);
9627 index = expand_normal (index_expr);
9630 do_pending_stack_adjust ();
9632 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9633 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9634 (index, op_mode))
9635 index = copy_to_mode_reg (op_mode, index);
9637 op1 = expand_normal (minval);
9639 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9640 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9641 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9642 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9643 (op1, op_mode))
9644 op1 = copy_to_mode_reg (op_mode, op1);
9646 op2 = expand_normal (range);
9648 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9649 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9650 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9651 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9652 (op2, op_mode))
9653 op2 = copy_to_mode_reg (op_mode, op2);
9655 emit_jump_insn (gen_casesi (index, op1, op2,
9656 table_label, default_label));
9657 return 1;
9660 /* Attempt to generate a tablejump instruction; same concept. */
9661 #ifndef HAVE_tablejump
9662 #define HAVE_tablejump 0
9663 #define gen_tablejump(x, y) (0)
9664 #endif
9666 /* Subroutine of the next function.
9668 INDEX is the value being switched on, with the lowest value
9669 in the table already subtracted.
9670 MODE is its expected mode (needed if INDEX is constant).
9671 RANGE is the length of the jump table.
9672 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9674 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9675 index value is out of range. */
9677 static void
9678 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9679 rtx default_label)
9681 rtx temp, vector;
9683 if (INTVAL (range) > cfun->max_jumptable_ents)
9684 cfun->max_jumptable_ents = INTVAL (range);
9686 /* Do an unsigned comparison (in the proper mode) between the index
9687 expression and the value which represents the length of the range.
9688 Since we just finished subtracting the lower bound of the range
9689 from the index expression, this comparison allows us to simultaneously
9690 check that the original index expression value is both greater than
9691 or equal to the minimum value of the range and less than or equal to
9692 the maximum value of the range. */
9694 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9695 default_label);
9697 /* If index is in range, it must fit in Pmode.
9698 Convert to Pmode so we can index with it. */
9699 if (mode != Pmode)
9700 index = convert_to_mode (Pmode, index, 1);
9702 /* Don't let a MEM slip through, because then INDEX that comes
9703 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9704 and break_out_memory_refs will go to work on it and mess it up. */
9705 #ifdef PIC_CASE_VECTOR_ADDRESS
9706 if (flag_pic && !REG_P (index))
9707 index = copy_to_mode_reg (Pmode, index);
9708 #endif
9710 /* If flag_force_addr were to affect this address
9711 it could interfere with the tricky assumptions made
9712 about addresses that contain label-refs,
9713 which may be valid only very near the tablejump itself. */
9714 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9715 GET_MODE_SIZE, because this indicates how large insns are. The other
9716 uses should all be Pmode, because they are addresses. This code
9717 could fail if addresses and insns are not the same size. */
9718 index = gen_rtx_PLUS (Pmode,
9719 gen_rtx_MULT (Pmode, index,
9720 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9721 gen_rtx_LABEL_REF (Pmode, table_label));
9722 #ifdef PIC_CASE_VECTOR_ADDRESS
9723 if (flag_pic)
9724 index = PIC_CASE_VECTOR_ADDRESS (index);
9725 else
9726 #endif
9727 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9728 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9729 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9730 convert_move (temp, vector, 0);
9732 emit_jump_insn (gen_tablejump (temp, table_label));
9734 /* If we are generating PIC code or if the table is PC-relative, the
9735 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9736 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9737 emit_barrier ();
9741 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9742 rtx table_label, rtx default_label)
9744 rtx index;
9746 if (! HAVE_tablejump)
9747 return 0;
9749 index_expr = fold_build2 (MINUS_EXPR, index_type,
9750 fold_convert (index_type, index_expr),
9751 fold_convert (index_type, minval));
9752 index = expand_normal (index_expr);
9753 do_pending_stack_adjust ();
9755 do_tablejump (index, TYPE_MODE (index_type),
9756 convert_modes (TYPE_MODE (index_type),
9757 TYPE_MODE (TREE_TYPE (range)),
9758 expand_normal (range),
9759 TYPE_UNSIGNED (TREE_TYPE (range))),
9760 table_label, default_label);
9761 return 1;
9764 /* Nonzero if the mode is a valid vector mode for this architecture.
9765 This returns nonzero even if there is no hardware support for the
9766 vector mode, but we can emulate with narrower modes. */
9769 vector_mode_valid_p (enum machine_mode mode)
9771 enum mode_class class = GET_MODE_CLASS (mode);
9772 enum machine_mode innermode;
9774 /* Doh! What's going on? */
9775 if (class != MODE_VECTOR_INT
9776 && class != MODE_VECTOR_FLOAT)
9777 return 0;
9779 /* Hardware support. Woo hoo! */
9780 if (targetm.vector_mode_supported_p (mode))
9781 return 1;
9783 innermode = GET_MODE_INNER (mode);
9785 /* We should probably return 1 if requesting V4DI and we have no DI,
9786 but we have V2DI, but this is probably very unlikely. */
9788 /* If we have support for the inner mode, we can safely emulate it.
9789 We may not have V2DI, but me can emulate with a pair of DIs. */
9790 return targetm.scalar_mode_supported_p (innermode);
9793 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9794 static rtx
9795 const_vector_from_tree (tree exp)
9797 rtvec v;
9798 int units, i;
9799 tree link, elt;
9800 enum machine_mode inner, mode;
9802 mode = TYPE_MODE (TREE_TYPE (exp));
9804 if (initializer_zerop (exp))
9805 return CONST0_RTX (mode);
9807 units = GET_MODE_NUNITS (mode);
9808 inner = GET_MODE_INNER (mode);
9810 v = rtvec_alloc (units);
9812 link = TREE_VECTOR_CST_ELTS (exp);
9813 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9815 elt = TREE_VALUE (link);
9817 if (TREE_CODE (elt) == REAL_CST)
9818 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9819 inner);
9820 else
9821 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9822 TREE_INT_CST_HIGH (elt),
9823 inner);
9826 /* Initialize remaining elements to 0. */
9827 for (; i < units; ++i)
9828 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9830 return gen_rtx_CONST_VECTOR (mode, v);
9832 #include "gt-expr.h"