[official-gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
62 #ifdef PUSH_ROUNDING
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED /* If it's last to first. */
67 #endif
68 #endif
70 #endif
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
75 #else
76 #define STACK_PUSH_CODE PRE_INC
77 #endif
78 #endif
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
89 /* This structure is used by move_by_pieces to describe the move to
90 be performed. */
91 struct move_by_pieces
93 rtx to;
94 rtx to_addr;
95 int autinc_to;
96 int explicit_inc_to;
97 rtx from;
98 rtx from_addr;
99 int autinc_from;
100 int explicit_inc_from;
101 unsigned HOST_WIDE_INT len;
102 HOST_WIDE_INT offset;
103 int reverse;
106 /* This structure is used by store_by_pieces to describe the clear to
107 be performed. */
109 struct store_by_pieces
111 rtx to;
112 rtx to_addr;
113 int autinc_to;
114 int explicit_inc_to;
115 unsigned HOST_WIDE_INT len;
116 HOST_WIDE_INT offset;
117 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
118 void *constfundata;
119 int reverse;
122 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
123 unsigned int,
124 unsigned int);
125 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
126 struct move_by_pieces *);
127 static bool block_move_libcall_safe_for_call_parm (void);
128 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
129 static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static rtx clear_storage_via_libcall (rtx, rtx, bool);
138 static tree clear_storage_libcall_fn (int);
139 static rtx compress_float_constant (rtx, rtx);
140 static rtx get_subtarget (rtx);
141 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
142 HOST_WIDE_INT, enum machine_mode,
143 tree, tree, int, int);
144 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
145 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
146 tree, tree, int);
148 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
150 static int is_aligning_offset (tree, tree);
151 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
152 enum expand_modifier);
153 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
154 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
155 #ifdef PUSH_ROUNDING
156 static void emit_single_push_insn (enum machine_mode, rtx, tree);
157 #endif
158 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
159 static rtx const_vector_from_tree (tree);
160 static void write_complex_part (rtx, rtx, bool);
162 /* Record for each mode whether we can move a register directly to or
163 from an object of that mode in memory. If we can't, we won't try
164 to use that mode directly when accessing a field of that mode. */
166 static char direct_load[NUM_MACHINE_MODES];
167 static char direct_store[NUM_MACHINE_MODES];
169 /* Record for each mode whether we can float-extend from memory. */
171 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
173 /* This macro is used to determine whether move_by_pieces should be called
174 to perform a structure copy. */
175 #ifndef MOVE_BY_PIECES_P
176 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
177 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
178 < (unsigned int) MOVE_RATIO)
179 #endif
181 /* This macro is used to determine whether clear_by_pieces should be
182 called to clear storage. */
183 #ifndef CLEAR_BY_PIECES_P
184 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
185 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
186 < (unsigned int) CLEAR_RATIO)
187 #endif
189 /* This macro is used to determine whether store_by_pieces should be
190 called to "memset" storage with byte values other than zero, or
191 to "memcpy" storage when the source is a constant string. */
192 #ifndef STORE_BY_PIECES_P
193 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
194 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
195 < (unsigned int) MOVE_RATIO)
196 #endif
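/* Illustrative sketch, not part of the original file: the by-pieces
   heuristics above are consulted before choosing an inline expansion.
   A copy is only expanded as individual moves when the estimated number
   of move insns stays below MOVE_RATIO; otherwise callers fall back to a
   movmem pattern or a library call (see emit_block_move below).  The
   helper name is hypothetical.  */
static bool ATTRIBUTE_UNUSED
example_profitable_inline_copy_p (unsigned HOST_WIDE_INT size,
				  unsigned int align)
{
  /* Same test that emit_block_move applies to constant-size copies.  */
  return MOVE_BY_PIECES_P (size, align);
}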
198 /* This array records the insn_code of insns to perform block moves. */
199 enum insn_code movmem_optab[NUM_MACHINE_MODES];
201 /* This array records the insn_code of insns to perform block sets. */
202 enum insn_code setmem_optab[NUM_MACHINE_MODES];
204 /* These arrays record the insn_code of three different kinds of insns
205 to perform block compares. */
206 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
207 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
208 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
210 /* Synchronization primitives. */
211 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
212 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
213 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
214 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
215 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
216 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
217 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
229 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
230 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
231 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
232 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
234 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
236 #ifndef SLOW_UNALIGNED_ACCESS
237 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
238 #endif
240 /* This is run once per compilation to set up which modes can be used
241 directly in memory and to initialize the block move optab. */
243 void
244 init_expr_once (void)
246 rtx insn, pat;
247 enum machine_mode mode;
248 int num_clobbers;
249 rtx mem, mem1;
250 rtx reg;
252 /* Try indexing by frame ptr and try by stack ptr.
253 It is known that on the Convex the stack ptr isn't a valid index.
254 With luck, one or the other is valid on any machine. */
255 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
256 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
258 /* A scratch register we can modify in-place below to avoid
259 useless RTL allocations. */
260 reg = gen_rtx_REG (VOIDmode, -1);
262 insn = rtx_alloc (INSN);
263 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
264 PATTERN (insn) = pat;
266 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
267 mode = (enum machine_mode) ((int) mode + 1))
269 int regno;
271 direct_load[(int) mode] = direct_store[(int) mode] = 0;
272 PUT_MODE (mem, mode);
273 PUT_MODE (mem1, mode);
274 PUT_MODE (reg, mode);
276 /* See if there is some register that can be used in this mode and
277 directly loaded or stored from memory. */
279 if (mode != VOIDmode && mode != BLKmode)
280 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
281 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
282 regno++)
284 if (! HARD_REGNO_MODE_OK (regno, mode))
285 continue;
287 REGNO (reg) = regno;
289 SET_SRC (pat) = mem;
290 SET_DEST (pat) = reg;
291 if (recog (pat, insn, &num_clobbers) >= 0)
292 direct_load[(int) mode] = 1;
294 SET_SRC (pat) = mem1;
295 SET_DEST (pat) = reg;
296 if (recog (pat, insn, &num_clobbers) >= 0)
297 direct_load[(int) mode] = 1;
299 SET_SRC (pat) = reg;
300 SET_DEST (pat) = mem;
301 if (recog (pat, insn, &num_clobbers) >= 0)
302 direct_store[(int) mode] = 1;
304 SET_SRC (pat) = reg;
305 SET_DEST (pat) = mem1;
306 if (recog (pat, insn, &num_clobbers) >= 0)
307 direct_store[(int) mode] = 1;
311 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
313 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
314 mode = GET_MODE_WIDER_MODE (mode))
316 enum machine_mode srcmode;
317 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
318 srcmode = GET_MODE_WIDER_MODE (srcmode))
320 enum insn_code ic;
322 ic = can_extend_p (mode, srcmode, 0);
323 if (ic == CODE_FOR_nothing)
324 continue;
326 PUT_MODE (mem, srcmode);
328 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
329 float_extend_from_mem[mode][srcmode] = true;
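/* Illustrative sketch, not part of the original file: the direct_load /
   direct_store tables filled in above are consulted elsewhere in this
   file (see convert_move and convert_modes) with a test of roughly this
   shape, to decide whether a MEM may safely be referenced in mode MODE
   without first copying it into a register.  The helper name is
   hypothetical.  */
static bool ATTRIBUTE_UNUSED
example_mem_direct_load_p (rtx x, enum machine_mode mode)
{
  return (MEM_P (x)
	  && ! MEM_VOLATILE_P (x)
	  && direct_load[(int) mode]
	  && ! mode_dependent_address_p (XEXP (x, 0)));
}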
334 /* This is run at the start of compiling a function. */
336 void
337 init_expr (void)
339 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
342 /* Copy data from FROM to TO, where the machine modes are not the same.
343 Both modes may be integer, or both may be floating.
344 UNSIGNEDP should be nonzero if FROM is an unsigned type.
345 This causes zero-extension instead of sign-extension. */
347 void
348 convert_move (rtx to, rtx from, int unsignedp)
350 enum machine_mode to_mode = GET_MODE (to);
351 enum machine_mode from_mode = GET_MODE (from);
352 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
353 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
354 enum insn_code code;
355 rtx libcall;
357 /* rtx code for making an equivalent value. */
358 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
359 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
362 gcc_assert (to_real == from_real);
364 /* If the source and destination are already the same, then there's
365 nothing to do. */
366 if (to == from)
367 return;
369 /* If FROM is a SUBREG that indicates that we have already done at least
370 the required extension, strip it. We don't handle such SUBREGs as
371 TO here. */
373 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
374 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
375 >= GET_MODE_SIZE (to_mode))
376 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
377 from = gen_lowpart (to_mode, from), from_mode = to_mode;
379 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
381 if (to_mode == from_mode
382 || (from_mode == VOIDmode && CONSTANT_P (from)))
384 emit_move_insn (to, from);
385 return;
388 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
390 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
392 if (VECTOR_MODE_P (to_mode))
393 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
394 else
395 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
397 emit_move_insn (to, from);
398 return;
401 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
403 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
404 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
405 return;
408 if (to_real)
410 rtx value, insns;
411 convert_optab tab;
413 gcc_assert ((GET_MODE_PRECISION (from_mode)
414 != GET_MODE_PRECISION (to_mode))
415 || (DECIMAL_FLOAT_MODE_P (from_mode)
416 != DECIMAL_FLOAT_MODE_P (to_mode)));
418 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
419 /* Conversion between decimal float and binary float, same size. */
420 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
421 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
422 tab = sext_optab;
423 else
424 tab = trunc_optab;
426 /* Try converting directly if the insn is supported. */
428 code = tab->handlers[to_mode][from_mode].insn_code;
429 if (code != CODE_FOR_nothing)
431 emit_unop_insn (code, to, from,
432 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
433 return;
436 /* Otherwise use a libcall. */
437 libcall = tab->handlers[to_mode][from_mode].libfunc;
439 /* Is this conversion implemented yet? */
440 gcc_assert (libcall);
442 start_sequence ();
443 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
444 1, from, from_mode);
445 insns = get_insns ();
446 end_sequence ();
447 emit_libcall_block (insns, to, value,
448 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
449 from)
450 : gen_rtx_FLOAT_EXTEND (to_mode, from));
451 return;
454 /* Handle pointer conversion. */ /* SPEE 900220. */
455 /* Targets are expected to provide conversion insns between PxImode and
456 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
457 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
459 enum machine_mode full_mode
460 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
462 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
463 != CODE_FOR_nothing);
465 if (full_mode != from_mode)
466 from = convert_to_mode (full_mode, from, unsignedp);
467 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
468 to, from, UNKNOWN);
469 return;
471 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
473 rtx new_from;
474 enum machine_mode full_mode
475 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
477 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
478 != CODE_FOR_nothing);
480 if (to_mode == full_mode)
482 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
483 to, from, UNKNOWN);
484 return;
487 new_from = gen_reg_rtx (full_mode);
488 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
489 new_from, from, UNKNOWN);
491 /* else proceed to integer conversions below. */
492 from_mode = full_mode;
493 from = new_from;
496 /* Now both modes are integers. */
498 /* Handle expanding beyond a word. */
499 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
500 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
502 rtx insns;
503 rtx lowpart;
504 rtx fill_value;
505 rtx lowfrom;
506 int i;
507 enum machine_mode lowpart_mode;
508 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
510 /* Try converting directly if the insn is supported. */
511 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
512 != CODE_FOR_nothing)
514 /* If FROM is a SUBREG, put it into a register. Do this
515 so that we always generate the same set of insns for
516 better cse'ing; if an intermediate assignment occurred,
517 we won't be doing the operation directly on the SUBREG. */
518 if (optimize > 0 && GET_CODE (from) == SUBREG)
519 from = force_reg (from_mode, from);
520 emit_unop_insn (code, to, from, equiv_code);
521 return;
523 /* Next, try converting via full word. */
524 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
525 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
526 != CODE_FOR_nothing))
528 if (REG_P (to))
530 if (reg_overlap_mentioned_p (to, from))
531 from = force_reg (from_mode, from);
532 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
534 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
535 emit_unop_insn (code, to,
536 gen_lowpart (word_mode, to), equiv_code);
537 return;
540 /* No special multiword conversion insn; do it by hand. */
541 start_sequence ();
543 /* Since we will turn this into a no conflict block, we must ensure
544 that the source does not overlap the target. */
546 if (reg_overlap_mentioned_p (to, from))
547 from = force_reg (from_mode, from);
549 /* Get a copy of FROM widened to a word, if necessary. */
550 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
551 lowpart_mode = word_mode;
552 else
553 lowpart_mode = from_mode;
555 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
557 lowpart = gen_lowpart (lowpart_mode, to);
558 emit_move_insn (lowpart, lowfrom);
560 /* Compute the value to put in each remaining word. */
561 if (unsignedp)
562 fill_value = const0_rtx;
563 else
565 #ifdef HAVE_slt
566 if (HAVE_slt
567 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
568 && STORE_FLAG_VALUE == -1)
570 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
571 lowpart_mode, 0);
572 fill_value = gen_reg_rtx (word_mode);
573 emit_insn (gen_slt (fill_value));
575 else
576 #endif
578 fill_value
579 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
580 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
581 NULL_RTX, 0);
582 fill_value = convert_to_mode (word_mode, fill_value, 1);
586 /* Fill the remaining words. */
587 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
589 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
590 rtx subword = operand_subword (to, index, 1, to_mode);
592 gcc_assert (subword);
594 if (fill_value != subword)
595 emit_move_insn (subword, fill_value);
598 insns = get_insns ();
599 end_sequence ();
601 emit_no_conflict_block (insns, to, from, NULL_RTX,
602 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
603 return;
606 /* Truncating multi-word to a word or less. */
607 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
608 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
610 if (!((MEM_P (from)
611 && ! MEM_VOLATILE_P (from)
612 && direct_load[(int) to_mode]
613 && ! mode_dependent_address_p (XEXP (from, 0)))
614 || REG_P (from)
615 || GET_CODE (from) == SUBREG))
616 from = force_reg (from_mode, from);
617 convert_move (to, gen_lowpart (word_mode, from), 0);
618 return;
621 /* Now follow all the conversions between integers
622 no more than a word long. */
624 /* For truncation, usually we can just refer to FROM in a narrower mode. */
625 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
626 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
627 GET_MODE_BITSIZE (from_mode)))
629 if (!((MEM_P (from)
630 && ! MEM_VOLATILE_P (from)
631 && direct_load[(int) to_mode]
632 && ! mode_dependent_address_p (XEXP (from, 0)))
633 || REG_P (from)
634 || GET_CODE (from) == SUBREG))
635 from = force_reg (from_mode, from);
636 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
637 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
638 from = copy_to_reg (from);
639 emit_move_insn (to, gen_lowpart (to_mode, from));
640 return;
643 /* Handle extension. */
644 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
646 /* Convert directly if that works. */
647 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
648 != CODE_FOR_nothing)
650 emit_unop_insn (code, to, from, equiv_code);
651 return;
653 else
655 enum machine_mode intermediate;
656 rtx tmp;
657 tree shift_amount;
659 /* Search for a mode to convert via. */
660 for (intermediate = from_mode; intermediate != VOIDmode;
661 intermediate = GET_MODE_WIDER_MODE (intermediate))
662 if (((can_extend_p (to_mode, intermediate, unsignedp)
663 != CODE_FOR_nothing)
664 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
665 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
666 GET_MODE_BITSIZE (intermediate))))
667 && (can_extend_p (intermediate, from_mode, unsignedp)
668 != CODE_FOR_nothing))
670 convert_move (to, convert_to_mode (intermediate, from,
671 unsignedp), unsignedp);
672 return;
675 /* No suitable intermediate mode.
676 Generate what we need with shifts. */
677 shift_amount = build_int_cst (NULL_TREE,
678 GET_MODE_BITSIZE (to_mode)
679 - GET_MODE_BITSIZE (from_mode));
680 from = gen_lowpart (to_mode, force_reg (from_mode, from));
681 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
682 to, unsignedp);
683 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
684 to, unsignedp);
685 if (tmp != to)
686 emit_move_insn (to, tmp);
687 return;
691 /* Support special truncate insns for certain modes. */
692 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
694 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
695 to, from, UNKNOWN);
696 return;
699 /* Handle truncation of volatile memrefs, and so on;
700 the things that couldn't be truncated directly,
701 and for which there was no special instruction.
703 ??? Code above formerly short-circuited this, for most integer
704 mode pairs, with a force_reg in from_mode followed by a recursive
705 call to this routine. Appears always to have been wrong. */
706 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
708 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
709 emit_move_insn (to, temp);
710 return;
713 /* Mode combination is not recognized. */
714 gcc_unreachable ();
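/* Illustrative sketch, not part of the original file: a minimal use of
   convert_move, widening a SImode value into a fresh DImode pseudo with
   zero-extension.  The helper name is hypothetical and assumes it runs
   during RTL expansion, when new pseudos may still be created.  */
static rtx ATTRIBUTE_UNUSED
example_zero_extend_si_to_di (rtx src_si)
{
  rtx dst = gen_reg_rtx (DImode);

  /* UNSIGNEDP == 1 requests zero-extension rather than sign-extension.  */
  convert_move (dst, src_si, 1);
  return dst;
}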
717 /* Return an rtx for a value that would result
718 from converting X to mode MODE.
719 Both X and MODE may be floating, or both integer.
720 UNSIGNEDP is nonzero if X is an unsigned value.
721 This can be done by referring to a part of X in place
722 or by copying to a new temporary with conversion. */
725 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
727 return convert_modes (mode, VOIDmode, x, unsignedp);
730 /* Return an rtx for a value that would result
731 from converting X from mode OLDMODE to mode MODE.
732 Both modes may be floating, or both integer.
733 UNSIGNEDP is nonzero if X is an unsigned value.
735 This can be done by referring to a part of X in place
736 or by copying to a new temporary with conversion.
738 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
741 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
743 rtx temp;
745 /* If FROM is a SUBREG that indicates that we have already done at least
746 the required extension, strip it. */
748 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
749 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
750 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
751 x = gen_lowpart (mode, x);
753 if (GET_MODE (x) != VOIDmode)
754 oldmode = GET_MODE (x);
756 if (mode == oldmode)
757 return x;
759 /* There is one case that we must handle specially: If we are converting
760 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
761 we are to interpret the constant as unsigned, gen_lowpart will do
 762 the wrong thing if the constant appears negative. What we want to do is
763 make the high-order word of the constant zero, not all ones. */
765 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
766 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
767 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
769 HOST_WIDE_INT val = INTVAL (x);
771 if (oldmode != VOIDmode
772 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
774 int width = GET_MODE_BITSIZE (oldmode);
776 /* We need to zero extend VAL. */
777 val &= ((HOST_WIDE_INT) 1 << width) - 1;
780 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
783 /* We can do this with a gen_lowpart if both desired and current modes
784 are integer, and this is either a constant integer, a register, or a
785 non-volatile MEM. Except for the constant case where MODE is no
786 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
788 if ((GET_CODE (x) == CONST_INT
789 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
790 || (GET_MODE_CLASS (mode) == MODE_INT
791 && GET_MODE_CLASS (oldmode) == MODE_INT
792 && (GET_CODE (x) == CONST_DOUBLE
793 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
794 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
795 && direct_load[(int) mode])
796 || (REG_P (x)
797 && (! HARD_REGISTER_P (x)
798 || HARD_REGNO_MODE_OK (REGNO (x), mode))
799 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
800 GET_MODE_BITSIZE (GET_MODE (x)))))))))
802 /* ?? If we don't know OLDMODE, we have to assume here that
803 X does not need sign- or zero-extension. This may not be
804 the case, but it's the best we can do. */
805 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
806 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
808 HOST_WIDE_INT val = INTVAL (x);
809 int width = GET_MODE_BITSIZE (oldmode);
811 /* We must sign or zero-extend in this case. Start by
812 zero-extending, then sign extend if we need to. */
813 val &= ((HOST_WIDE_INT) 1 << width) - 1;
814 if (! unsignedp
815 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
816 val |= (HOST_WIDE_INT) (-1) << width;
818 return gen_int_mode (val, mode);
821 return gen_lowpart (mode, x);
 824 /* Converting from integer constant into mode is always equivalent to a
825 subreg operation. */
826 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
828 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
829 return simplify_gen_subreg (mode, x, oldmode, 0);
832 temp = gen_reg_rtx (mode);
833 convert_move (temp, x, unsignedp);
834 return temp;
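/* Illustrative sketch, not part of the original file: convert_to_mode is
   the usual entry point when X carries its own mode; convert_modes is
   needed when X may be a VOIDmode constant and only the caller knows the
   mode it should be interpreted in.  The helper name is hypothetical.  */
static rtx ATTRIBUTE_UNUSED
example_reinterpret_in_qimode (rtx x, enum machine_mode oldmode)
{
  /* Treat X (possibly a bare CONST_INT of mode OLDMODE) as unsigned and
     produce a QImode value.  */
  return convert_modes (QImode, oldmode, x, 1);
}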
837 /* STORE_MAX_PIECES is the number of bytes at a time that we can
838 store efficiently. Due to internal GCC limitations, this is
839 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
840 for an immediate constant. */
842 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
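/* Illustrative note, not part of the original file: on a target where
   MOVE_MAX_PIECES is 8 and the host's HOST_WIDE_INT is 64 bits wide,
   this works out to MIN (8, 2 * 8) == 8 bytes per constant store.  */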
844 /* Determine whether the LEN bytes can be moved by using several move
845 instructions. Return nonzero if a call to move_by_pieces should
846 succeed. */
849 can_move_by_pieces (unsigned HOST_WIDE_INT len,
850 unsigned int align ATTRIBUTE_UNUSED)
852 return MOVE_BY_PIECES_P (len, align);
855 /* Generate several move instructions to copy LEN bytes from block FROM to
856 block TO. (These are MEM rtx's with BLKmode).
858 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
859 used to push FROM to the stack.
861 ALIGN is maximum stack alignment we can assume.
863 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
864 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
865 stpcpy. */
868 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
869 unsigned int align, int endp)
871 struct move_by_pieces data;
872 rtx to_addr, from_addr = XEXP (from, 0);
873 unsigned int max_size = MOVE_MAX_PIECES + 1;
874 enum machine_mode mode = VOIDmode, tmode;
875 enum insn_code icode;
877 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
879 data.offset = 0;
880 data.from_addr = from_addr;
881 if (to)
883 to_addr = XEXP (to, 0);
884 data.to = to;
885 data.autinc_to
886 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
887 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
888 data.reverse
889 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
891 else
893 to_addr = NULL_RTX;
894 data.to = NULL_RTX;
895 data.autinc_to = 1;
896 #ifdef STACK_GROWS_DOWNWARD
897 data.reverse = 1;
898 #else
899 data.reverse = 0;
900 #endif
902 data.to_addr = to_addr;
903 data.from = from;
904 data.autinc_from
905 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
906 || GET_CODE (from_addr) == POST_INC
907 || GET_CODE (from_addr) == POST_DEC);
909 data.explicit_inc_from = 0;
910 data.explicit_inc_to = 0;
911 if (data.reverse) data.offset = len;
912 data.len = len;
914 /* If copying requires more than two move insns,
915 copy addresses to registers (to make displacements shorter)
916 and use post-increment if available. */
917 if (!(data.autinc_from && data.autinc_to)
918 && move_by_pieces_ninsns (len, align, max_size) > 2)
920 /* Find the mode of the largest move... */
921 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
922 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
923 if (GET_MODE_SIZE (tmode) < max_size)
924 mode = tmode;
926 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
928 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
929 data.autinc_from = 1;
930 data.explicit_inc_from = -1;
932 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
934 data.from_addr = copy_addr_to_reg (from_addr);
935 data.autinc_from = 1;
936 data.explicit_inc_from = 1;
938 if (!data.autinc_from && CONSTANT_P (from_addr))
939 data.from_addr = copy_addr_to_reg (from_addr);
940 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
942 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
943 data.autinc_to = 1;
944 data.explicit_inc_to = -1;
946 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
948 data.to_addr = copy_addr_to_reg (to_addr);
949 data.autinc_to = 1;
950 data.explicit_inc_to = 1;
952 if (!data.autinc_to && CONSTANT_P (to_addr))
953 data.to_addr = copy_addr_to_reg (to_addr);
956 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
957 if (align >= GET_MODE_ALIGNMENT (tmode))
958 align = GET_MODE_ALIGNMENT (tmode);
959 else
961 enum machine_mode xmode;
963 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
964 tmode != VOIDmode;
965 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
966 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
967 || SLOW_UNALIGNED_ACCESS (tmode, align))
968 break;
970 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
973 /* First move what we can in the largest integer mode, then go to
974 successively smaller modes. */
976 while (max_size > 1)
978 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
979 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
980 if (GET_MODE_SIZE (tmode) < max_size)
981 mode = tmode;
983 if (mode == VOIDmode)
984 break;
986 icode = mov_optab->handlers[(int) mode].insn_code;
987 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
988 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
990 max_size = GET_MODE_SIZE (mode);
993 /* The code above should have handled everything. */
994 gcc_assert (!data.len);
996 if (endp)
998 rtx to1;
1000 gcc_assert (!data.reverse);
1001 if (data.autinc_to)
1003 if (endp == 2)
1005 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1006 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1007 else
1008 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1009 -1));
1011 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1012 data.offset);
1014 else
1016 if (endp == 2)
1017 --data.offset;
1018 to1 = adjust_address (data.to, QImode, data.offset);
1020 return to1;
1022 else
1023 return data.to;
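/* Illustrative sketch, not part of the original file: the usual calling
   pattern for the routines above.  A caller first asks can_move_by_pieces
   whether inline expansion is profitable for this size and alignment, and
   only then calls move_by_pieces; compare the constant-size path in
   emit_block_move below.  The helper name is hypothetical.  */
static rtx ATTRIBUTE_UNUSED
example_copy_block_by_pieces (rtx dst, rtx src, unsigned HOST_WIDE_INT len,
			      unsigned int align)
{
  if (can_move_by_pieces (len, align))
    /* ENDP == 0: return the destination block itself.  */
    return move_by_pieces (dst, src, len, align, 0);

  return NULL_RTX;
}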
1026 /* Return number of insns required to move L bytes by pieces.
1027 ALIGN (in bits) is maximum alignment we can assume. */
1029 static unsigned HOST_WIDE_INT
1030 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1031 unsigned int max_size)
1033 unsigned HOST_WIDE_INT n_insns = 0;
1034 enum machine_mode tmode;
1036 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1037 if (align >= GET_MODE_ALIGNMENT (tmode))
1038 align = GET_MODE_ALIGNMENT (tmode);
1039 else
1041 enum machine_mode tmode, xmode;
1043 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1044 tmode != VOIDmode;
1045 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1046 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1047 || SLOW_UNALIGNED_ACCESS (tmode, align))
1048 break;
1050 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1053 while (max_size > 1)
1055 enum machine_mode mode = VOIDmode;
1056 enum insn_code icode;
1058 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1059 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1060 if (GET_MODE_SIZE (tmode) < max_size)
1061 mode = tmode;
1063 if (mode == VOIDmode)
1064 break;
1066 icode = mov_optab->handlers[(int) mode].insn_code;
1067 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1068 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1070 max_size = GET_MODE_SIZE (mode);
1073 gcc_assert (!l);
1074 return n_insns;
1077 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1078 with move instructions for mode MODE. GENFUN is the gen_... function
1079 to make a move insn for that mode. DATA has all the other info. */
1081 static void
1082 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1083 struct move_by_pieces *data)
1085 unsigned int size = GET_MODE_SIZE (mode);
1086 rtx to1 = NULL_RTX, from1;
1088 while (data->len >= size)
1090 if (data->reverse)
1091 data->offset -= size;
1093 if (data->to)
1095 if (data->autinc_to)
1096 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1097 data->offset);
1098 else
1099 to1 = adjust_address (data->to, mode, data->offset);
1102 if (data->autinc_from)
1103 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1104 data->offset);
1105 else
1106 from1 = adjust_address (data->from, mode, data->offset);
1108 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1109 emit_insn (gen_add2_insn (data->to_addr,
1110 GEN_INT (-(HOST_WIDE_INT)size)));
1111 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1112 emit_insn (gen_add2_insn (data->from_addr,
1113 GEN_INT (-(HOST_WIDE_INT)size)));
1115 if (data->to)
1116 emit_insn ((*genfun) (to1, from1));
1117 else
1119 #ifdef PUSH_ROUNDING
1120 emit_single_push_insn (mode, from1, NULL);
1121 #else
1122 gcc_unreachable ();
1123 #endif
1126 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1127 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1128 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1129 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1131 if (! data->reverse)
1132 data->offset += size;
1134 data->len -= size;
1138 /* Emit code to move a block Y to a block X. This may be done with
1139 string-move instructions, with multiple scalar move instructions,
1140 or with a library call.
1142 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1143 SIZE is an rtx that says how long they are.
1144 ALIGN is the maximum alignment we can assume they have.
1145 METHOD describes what kind of copy this is, and what mechanisms may be used.
1147 Return the address of the new block, if memcpy is called and returns it,
1148 0 otherwise. */
1151 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1153 bool may_use_call;
1154 rtx retval = 0;
1155 unsigned int align;
1157 switch (method)
1159 case BLOCK_OP_NORMAL:
1160 case BLOCK_OP_TAILCALL:
1161 may_use_call = true;
1162 break;
1164 case BLOCK_OP_CALL_PARM:
1165 may_use_call = block_move_libcall_safe_for_call_parm ();
1167 /* Make inhibit_defer_pop nonzero around the library call
1168 to force it to pop the arguments right away. */
1169 NO_DEFER_POP;
1170 break;
1172 case BLOCK_OP_NO_LIBCALL:
1173 may_use_call = false;
1174 break;
1176 default:
1177 gcc_unreachable ();
1180 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1182 gcc_assert (MEM_P (x));
1183 gcc_assert (MEM_P (y));
1184 gcc_assert (size);
1186 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1187 block copy is more efficient for other large modes, e.g. DCmode. */
1188 x = adjust_address (x, BLKmode, 0);
1189 y = adjust_address (y, BLKmode, 0);
1191 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1192 can be incorrect is coming from __builtin_memcpy. */
1193 if (GET_CODE (size) == CONST_INT)
1195 if (INTVAL (size) == 0)
1196 return 0;
1198 x = shallow_copy_rtx (x);
1199 y = shallow_copy_rtx (y);
1200 set_mem_size (x, size);
1201 set_mem_size (y, size);
1204 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1205 move_by_pieces (x, y, INTVAL (size), align, 0);
1206 else if (emit_block_move_via_movmem (x, y, size, align))
1208 else if (may_use_call)
1209 retval = emit_block_move_via_libcall (x, y, size,
1210 method == BLOCK_OP_TAILCALL);
1211 else
1212 emit_block_move_via_loop (x, y, size, align);
1214 if (method == BLOCK_OP_CALL_PARM)
1215 OK_DEFER_POP;
1217 return retval;
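/* Illustrative sketch, not part of the original file: emitting an
   ordinary copy of LEN bytes between two BLKmode MEMs.  BLOCK_OP_NORMAL
   allows a memcpy libcall, and the return value is memcpy's result when
   such a call was emitted, 0 otherwise.  The helper name is
   hypothetical.  */
static rtx ATTRIBUTE_UNUSED
example_emit_block_copy (rtx dst, rtx src, HOST_WIDE_INT len)
{
  return emit_block_move (dst, src, GEN_INT (len), BLOCK_OP_NORMAL);
}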
1220 /* A subroutine of emit_block_move. Returns true if calling the
1221 block move libcall will not clobber any parameters which may have
1222 already been placed on the stack. */
1224 static bool
1225 block_move_libcall_safe_for_call_parm (void)
1227 /* If arguments are pushed on the stack, then they're safe. */
1228 if (PUSH_ARGS)
1229 return true;
1231 /* If registers go on the stack anyway, any argument is sure to clobber
1232 an outgoing argument. */
1233 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1235 tree fn = emit_block_move_libcall_fn (false);
1236 (void) fn;
1237 if (REG_PARM_STACK_SPACE (fn) != 0)
1238 return false;
1240 #endif
1242 /* If any argument goes in memory, then it might clobber an outgoing
1243 argument. */
1245 CUMULATIVE_ARGS args_so_far;
1246 tree fn, arg;
1248 fn = emit_block_move_libcall_fn (false);
1249 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1251 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1252 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1254 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1255 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1256 if (!tmp || !REG_P (tmp))
1257 return false;
1258 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1259 return false;
1260 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1263 return true;
1266 /* A subroutine of emit_block_move. Expand a movmem pattern;
1267 return true if successful. */
1269 static bool
1270 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1272 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1273 int save_volatile_ok = volatile_ok;
1274 enum machine_mode mode;
1276 /* Since this is a move insn, we don't care about volatility. */
1277 volatile_ok = 1;
1279 /* Try the most limited insn first, because there's no point
1280 including more than one in the machine description unless
1281 the more limited one has some advantage. */
1283 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1284 mode = GET_MODE_WIDER_MODE (mode))
1286 enum insn_code code = movmem_optab[(int) mode];
1287 insn_operand_predicate_fn pred;
1289 if (code != CODE_FOR_nothing
1290 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1291 here because if SIZE is less than the mode mask, as it is
1292 returned by the macro, it will definitely be less than the
1293 actual mode mask. */
1294 && ((GET_CODE (size) == CONST_INT
1295 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1296 <= (GET_MODE_MASK (mode) >> 1)))
1297 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1298 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1299 || (*pred) (x, BLKmode))
1300 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1301 || (*pred) (y, BLKmode))
1302 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1303 || (*pred) (opalign, VOIDmode)))
1305 rtx op2;
1306 rtx last = get_last_insn ();
1307 rtx pat;
1309 op2 = convert_to_mode (mode, size, 1);
1310 pred = insn_data[(int) code].operand[2].predicate;
1311 if (pred != 0 && ! (*pred) (op2, mode))
1312 op2 = copy_to_mode_reg (mode, op2);
1314 /* ??? When called via emit_block_move_for_call, it'd be
1315 nice if there were some way to inform the backend, so
1316 that it doesn't fail the expansion because it thinks
1317 emitting the libcall would be more efficient. */
1319 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1320 if (pat)
1322 emit_insn (pat);
1323 volatile_ok = save_volatile_ok;
1324 return true;
1326 else
1327 delete_insns_since (last);
1331 volatile_ok = save_volatile_ok;
1332 return false;
1335 /* A subroutine of emit_block_move. Expand a call to memcpy.
1336 Return the return value from memcpy, 0 otherwise. */
1338 static rtx
1339 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1341 rtx dst_addr, src_addr;
1342 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1343 enum machine_mode size_mode;
1344 rtx retval;
1346 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1347 pseudos. We can then place those new pseudos into a VAR_DECL and
1348 use them later. */
1350 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1351 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1353 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1354 src_addr = convert_memory_address (ptr_mode, src_addr);
1356 dst_tree = make_tree (ptr_type_node, dst_addr);
1357 src_tree = make_tree (ptr_type_node, src_addr);
1359 size_mode = TYPE_MODE (sizetype);
1361 size = convert_to_mode (size_mode, size, 1);
1362 size = copy_to_mode_reg (size_mode, size);
1364 /* It is incorrect to use the libcall calling conventions to call
1365 memcpy in this context. This could be a user call to memcpy and
1366 the user may wish to examine the return value from memcpy. For
1367 targets where libcalls and normal calls have different conventions
1368 for returning pointers, we could end up generating incorrect code. */
1370 size_tree = make_tree (sizetype, size);
1372 fn = emit_block_move_libcall_fn (true);
1373 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1374 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1375 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1377 /* Now we have to build up the CALL_EXPR itself. */
1378 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1379 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1380 call_expr, arg_list, NULL_TREE);
1381 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1383 retval = expand_normal (call_expr);
1385 return retval;
1388 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1389 for the function we use for block copies. The first time FOR_CALL
1390 is true, we call assemble_external. */
1392 static GTY(()) tree block_move_fn;
1394 void
1395 init_block_move_fn (const char *asmspec)
1397 if (!block_move_fn)
1399 tree args, fn;
1401 fn = get_identifier ("memcpy");
1402 args = build_function_type_list (ptr_type_node, ptr_type_node,
1403 const_ptr_type_node, sizetype,
1404 NULL_TREE);
1406 fn = build_decl (FUNCTION_DECL, fn, args);
1407 DECL_EXTERNAL (fn) = 1;
1408 TREE_PUBLIC (fn) = 1;
1409 DECL_ARTIFICIAL (fn) = 1;
1410 TREE_NOTHROW (fn) = 1;
1411 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1412 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1414 block_move_fn = fn;
1417 if (asmspec)
1418 set_user_assembler_name (block_move_fn, asmspec);
1421 static tree
1422 emit_block_move_libcall_fn (int for_call)
1424 static bool emitted_extern;
1426 if (!block_move_fn)
1427 init_block_move_fn (NULL);
1429 if (for_call && !emitted_extern)
1431 emitted_extern = true;
1432 make_decl_rtl (block_move_fn);
1433 assemble_external (block_move_fn);
1436 return block_move_fn;
1439 /* A subroutine of emit_block_move. Copy the data via an explicit
1440 loop. This is used only when libcalls are forbidden. */
1441 /* ??? It'd be nice to copy in hunks larger than QImode. */
1443 static void
1444 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1445 unsigned int align ATTRIBUTE_UNUSED)
1447 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1448 enum machine_mode iter_mode;
1450 iter_mode = GET_MODE (size);
1451 if (iter_mode == VOIDmode)
1452 iter_mode = word_mode;
1454 top_label = gen_label_rtx ();
1455 cmp_label = gen_label_rtx ();
1456 iter = gen_reg_rtx (iter_mode);
1458 emit_move_insn (iter, const0_rtx);
1460 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1461 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1462 do_pending_stack_adjust ();
1464 emit_jump (cmp_label);
1465 emit_label (top_label);
1467 tmp = convert_modes (Pmode, iter_mode, iter, true);
1468 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1469 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1470 x = change_address (x, QImode, x_addr);
1471 y = change_address (y, QImode, y_addr);
1473 emit_move_insn (x, y);
1475 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1476 true, OPTAB_LIB_WIDEN);
1477 if (tmp != iter)
1478 emit_move_insn (iter, tmp);
1480 emit_label (cmp_label);
1482 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1483 true, top_label);
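/* Illustrative sketch, not part of the original file: the RTL emitted by
   emit_block_move_via_loop corresponds roughly to the byte-at-a-time
   loop below, which is why the comment above it wishes for copies in
   hunks larger than QImode.  */
static void ATTRIBUTE_UNUSED
example_byte_copy_loop (unsigned char *x, const unsigned char *y,
			unsigned long size)
{
  unsigned long iter;

  /* Jump to the comparison first, then copy one byte per iteration,
     mirroring the cmp_label / top_label structure above.  */
  for (iter = 0; iter < size; iter++)
    x[iter] = y[iter];
}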
1486 /* Copy all or part of a value X into registers starting at REGNO.
1487 The number of registers to be filled is NREGS. */
1489 void
1490 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1492 int i;
1493 #ifdef HAVE_load_multiple
1494 rtx pat;
1495 rtx last;
1496 #endif
1498 if (nregs == 0)
1499 return;
1501 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1502 x = validize_mem (force_const_mem (mode, x));
1504 /* See if the machine can do this with a load multiple insn. */
1505 #ifdef HAVE_load_multiple
1506 if (HAVE_load_multiple)
1508 last = get_last_insn ();
1509 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1510 GEN_INT (nregs));
1511 if (pat)
1513 emit_insn (pat);
1514 return;
1516 else
1517 delete_insns_since (last);
1519 #endif
1521 for (i = 0; i < nregs; i++)
1522 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1523 operand_subword_force (x, i, mode));
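/* Illustrative sketch, not part of the original file: filling a run of
   consecutive hard registers from a value, as done when passing a
   multi-word argument partly or wholly in registers.  The helper name
   and register count are hypothetical.  */
static void ATTRIBUTE_UNUSED
example_fill_two_arg_regs (int first_regno, rtx value, enum machine_mode mode)
{
  /* Load FIRST_REGNO and FIRST_REGNO + 1, one word apiece, from VALUE.  */
  move_block_to_reg (first_regno, value, 2, mode);
}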
1526 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1527 The number of registers to be filled is NREGS. */
1529 void
1530 move_block_from_reg (int regno, rtx x, int nregs)
1532 int i;
1534 if (nregs == 0)
1535 return;
1537 /* See if the machine can do this with a store multiple insn. */
1538 #ifdef HAVE_store_multiple
1539 if (HAVE_store_multiple)
1541 rtx last = get_last_insn ();
1542 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1543 GEN_INT (nregs));
1544 if (pat)
1546 emit_insn (pat);
1547 return;
1549 else
1550 delete_insns_since (last);
1552 #endif
1554 for (i = 0; i < nregs; i++)
1556 rtx tem = operand_subword (x, i, 1, BLKmode);
1558 gcc_assert (tem);
1560 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1564 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1565 ORIG, where ORIG is a non-consecutive group of registers represented by
1566 a PARALLEL. The clone is identical to the original except in that the
1567 original set of registers is replaced by a new set of pseudo registers.
1568 The new set has the same modes as the original set. */
1571 gen_group_rtx (rtx orig)
1573 int i, length;
1574 rtx *tmps;
1576 gcc_assert (GET_CODE (orig) == PARALLEL);
1578 length = XVECLEN (orig, 0);
1579 tmps = alloca (sizeof (rtx) * length);
1581 /* Skip a NULL entry in first slot. */
1582 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1584 if (i)
1585 tmps[0] = 0;
1587 for (; i < length; i++)
1589 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1590 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1592 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1595 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1598 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1599 except that values are placed in TMPS[i], and must later be moved
1600 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1602 static void
1603 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1605 rtx src;
1606 int start, i;
1607 enum machine_mode m = GET_MODE (orig_src);
1609 gcc_assert (GET_CODE (dst) == PARALLEL);
1611 if (m != VOIDmode
1612 && !SCALAR_INT_MODE_P (m)
1613 && !MEM_P (orig_src)
1614 && GET_CODE (orig_src) != CONCAT)
1616 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1617 if (imode == BLKmode)
1618 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1619 else
1620 src = gen_reg_rtx (imode);
1621 if (imode != BLKmode)
1622 src = gen_lowpart (GET_MODE (orig_src), src);
1623 emit_move_insn (src, orig_src);
1624 /* ...and back again. */
1625 if (imode != BLKmode)
1626 src = gen_lowpart (imode, src);
1627 emit_group_load_1 (tmps, dst, src, type, ssize);
1628 return;
1631 /* Check for a NULL entry, used to indicate that the parameter goes
1632 both on the stack and in registers. */
1633 if (XEXP (XVECEXP (dst, 0, 0), 0))
1634 start = 0;
1635 else
1636 start = 1;
1638 /* Process the pieces. */
1639 for (i = start; i < XVECLEN (dst, 0); i++)
1641 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1642 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1643 unsigned int bytelen = GET_MODE_SIZE (mode);
1644 int shift = 0;
1646 /* Handle trailing fragments that run over the size of the struct. */
1647 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1649 /* Arrange to shift the fragment to where it belongs.
1650 extract_bit_field loads to the lsb of the reg. */
1651 if (
1652 #ifdef BLOCK_REG_PADDING
1653 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1654 == (BYTES_BIG_ENDIAN ? upward : downward)
1655 #else
1656 BYTES_BIG_ENDIAN
1657 #endif
1659 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1660 bytelen = ssize - bytepos;
1661 gcc_assert (bytelen > 0);
1664 /* If we won't be loading directly from memory, protect the real source
1665 from strange tricks we might play; but make sure that the source can
1666 be loaded directly into the destination. */
1667 src = orig_src;
1668 if (!MEM_P (orig_src)
1669 && (!CONSTANT_P (orig_src)
1670 || (GET_MODE (orig_src) != mode
1671 && GET_MODE (orig_src) != VOIDmode)))
1673 if (GET_MODE (orig_src) == VOIDmode)
1674 src = gen_reg_rtx (mode);
1675 else
1676 src = gen_reg_rtx (GET_MODE (orig_src));
1678 emit_move_insn (src, orig_src);
1681 /* Optimize the access just a bit. */
1682 if (MEM_P (src)
1683 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1684 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1685 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1686 && bytelen == GET_MODE_SIZE (mode))
1688 tmps[i] = gen_reg_rtx (mode);
1689 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1691 else if (COMPLEX_MODE_P (mode)
1692 && GET_MODE (src) == mode
1693 && bytelen == GET_MODE_SIZE (mode))
1694 /* Let emit_move_complex do the bulk of the work. */
1695 tmps[i] = src;
1696 else if (GET_CODE (src) == CONCAT)
1698 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1699 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1701 if ((bytepos == 0 && bytelen == slen0)
1702 || (bytepos != 0 && bytepos + bytelen <= slen))
1704 /* The following assumes that the concatenated objects all
1705 have the same size. In this case, a simple calculation
1706 can be used to determine the object and the bit field
1707 to be extracted. */
1708 tmps[i] = XEXP (src, bytepos / slen0);
1709 if (! CONSTANT_P (tmps[i])
1710 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1711 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1712 (bytepos % slen0) * BITS_PER_UNIT,
1713 1, NULL_RTX, mode, mode);
1715 else
1717 rtx mem;
1719 gcc_assert (!bytepos);
1720 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1721 emit_move_insn (mem, src);
1722 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1723 0, 1, NULL_RTX, mode, mode);
1726 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1727 SIMD register, which is currently broken. While we get GCC
1728 to emit proper RTL for these cases, let's dump to memory. */
1729 else if (VECTOR_MODE_P (GET_MODE (dst))
1730 && REG_P (src))
1732 int slen = GET_MODE_SIZE (GET_MODE (src));
1733 rtx mem;
1735 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1736 emit_move_insn (mem, src);
1737 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1739 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1740 && XVECLEN (dst, 0) > 1)
1741 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1742 else if (CONSTANT_P (src))
1744 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1746 if (len == ssize)
1747 tmps[i] = src;
1748 else
1750 rtx first, second;
1752 gcc_assert (2 * len == ssize);
1753 split_double (src, &first, &second);
1754 if (i)
1755 tmps[i] = second;
1756 else
1757 tmps[i] = first;
1760 else if (REG_P (src) && GET_MODE (src) == mode)
1761 tmps[i] = src;
1762 else
1763 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1764 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1765 mode, mode);
1767 if (shift)
1768 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1769 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1773 /* Emit code to move a block SRC of type TYPE to a block DST,
1774 where DST is non-consecutive registers represented by a PARALLEL.
1775 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1776 if not known. */
1778 void
1779 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1781 rtx *tmps;
1782 int i;
1784 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1785 emit_group_load_1 (tmps, dst, src, type, ssize);
1787 /* Copy the extracted pieces into the proper (probable) hard regs. */
1788 for (i = 0; i < XVECLEN (dst, 0); i++)
1790 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1791 if (d == NULL)
1792 continue;
1793 emit_move_insn (d, tmps[i]);
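/* Illustrative sketch, not part of the original file: a typical use of
   emit_group_load.  PARALLEL_DST describes where the pieces of an
   aggregate value are to live (for instance the registers of a
   multi-register return value); SRC_MEM is the in-memory block being
   scattered into them.  The helper name is hypothetical.  */
static void ATTRIBUTE_UNUSED
example_scatter_aggregate_to_regs (rtx parallel_dst, rtx src_mem, tree type)
{
  emit_group_load (parallel_dst, src_mem, type,
		   int_size_in_bytes (type));
}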
1797 /* Similar, but load SRC into new pseudos in a format that looks like
1798 PARALLEL. This can later be fed to emit_group_move to get things
1799 in the right place. */
1802 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1804 rtvec vec;
1805 int i;
1807 vec = rtvec_alloc (XVECLEN (parallel, 0));
1808 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1810 /* Convert the vector to look just like the original PARALLEL, except
1811 with the computed values. */
1812 for (i = 0; i < XVECLEN (parallel, 0); i++)
1814 rtx e = XVECEXP (parallel, 0, i);
1815 rtx d = XEXP (e, 0);
1817 if (d)
1819 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1820 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1822 RTVEC_ELT (vec, i) = e;
1825 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1828 /* Emit code to move a block SRC to block DST, where SRC and DST are
1829 non-consecutive groups of registers, each represented by a PARALLEL. */
1831 void
1832 emit_group_move (rtx dst, rtx src)
1834 int i;
1836 gcc_assert (GET_CODE (src) == PARALLEL
1837 && GET_CODE (dst) == PARALLEL
1838 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1840 /* Skip first entry if NULL. */
1841 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1842 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1843 XEXP (XVECEXP (src, 0, i), 0));
1846 /* Move a group of registers represented by a PARALLEL into pseudos. */
1849 emit_group_move_into_temps (rtx src)
1851 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1852 int i;
1854 for (i = 0; i < XVECLEN (src, 0); i++)
1856 rtx e = XVECEXP (src, 0, i);
1857 rtx d = XEXP (e, 0);
1859 if (d)
1860 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1861 RTVEC_ELT (vec, i) = e;
1864 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1867 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1868 where SRC is non-consecutive registers represented by a PARALLEL.
1869 SSIZE represents the total size of block ORIG_DST, or -1 if not
1870 known. */
1872 void
1873 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1875 rtx *tmps, dst;
1876 int start, finish, i;
1877 enum machine_mode m = GET_MODE (orig_dst);
1879 gcc_assert (GET_CODE (src) == PARALLEL);
1881 if (!SCALAR_INT_MODE_P (m)
1882 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1884 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1885 if (imode == BLKmode)
1886 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1887 else
1888 dst = gen_reg_rtx (imode);
1889 emit_group_store (dst, src, type, ssize);
1890 if (imode != BLKmode)
1891 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1892 emit_move_insn (orig_dst, dst);
1893 return;
1896 /* Check for a NULL entry, used to indicate that the parameter goes
1897 both on the stack and in registers. */
1898 if (XEXP (XVECEXP (src, 0, 0), 0))
1899 start = 0;
1900 else
1901 start = 1;
1902 finish = XVECLEN (src, 0);
1904 tmps = alloca (sizeof (rtx) * finish);
1906 /* Copy the (probable) hard regs into pseudos. */
1907 for (i = start; i < finish; i++)
1909 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1910 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1912 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1913 emit_move_insn (tmps[i], reg);
1915 else
1916 tmps[i] = reg;
1919 /* If we won't be storing directly into memory, protect the real destination
1920 from strange tricks we might play. */
1921 dst = orig_dst;
1922 if (GET_CODE (dst) == PARALLEL)
1924 rtx temp;
1926 /* We can get a PARALLEL dst if there is a conditional expression in
1927 a return statement. In that case, the dst and src are the same,
1928 so no action is necessary. */
1929 if (rtx_equal_p (dst, src))
1930 return;
1932 /* It is unclear if we can ever reach here, but we may as well handle
1933 it. Allocate a temporary, and split this into a store/load to/from
1934 the temporary. */
1936 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1937 emit_group_store (temp, src, type, ssize);
1938 emit_group_load (dst, temp, type, ssize);
1939 return;
1941 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1943 enum machine_mode outer = GET_MODE (dst);
1944 enum machine_mode inner;
1945 HOST_WIDE_INT bytepos;
1946 bool done = false;
1947 rtx temp;
1949 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1950 dst = gen_reg_rtx (outer);
1952 /* Make life a bit easier for combine. */
1953 /* If the first element of the vector is the low part
1954 of the destination mode, use a paradoxical subreg to
1955 initialize the destination. */
1956 if (start < finish)
1958 inner = GET_MODE (tmps[start]);
1959 bytepos = subreg_lowpart_offset (inner, outer);
1960 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1962 temp = simplify_gen_subreg (outer, tmps[start],
1963 inner, 0);
1964 if (temp)
1966 emit_move_insn (dst, temp);
1967 done = true;
1968 start++;
1973 /* If the first element wasn't the low part, try the last. */
1974 if (!done
1975 && start < finish - 1)
1977 inner = GET_MODE (tmps[finish - 1]);
1978 bytepos = subreg_lowpart_offset (inner, outer);
1979 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1981 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1982 inner, 0);
1983 if (temp)
1985 emit_move_insn (dst, temp);
1986 done = true;
1987 finish--;
1992 /* Otherwise, simply initialize the result to zero. */
1993 if (!done)
1994 emit_move_insn (dst, CONST0_RTX (outer));
1997 /* Process the pieces. */
1998 for (i = start; i < finish; i++)
2000 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2001 enum machine_mode mode = GET_MODE (tmps[i]);
2002 unsigned int bytelen = GET_MODE_SIZE (mode);
2003 rtx dest = dst;
2005 /* Handle trailing fragments that run over the size of the struct. */
2006 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2008 /* store_bit_field always takes its value from the lsb.
2009 Move the fragment to the lsb if it's not already there. */
2010 if (
2011 #ifdef BLOCK_REG_PADDING
2012 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2013 == (BYTES_BIG_ENDIAN ? upward : downward)
2014 #else
2015 BYTES_BIG_ENDIAN
2016 #endif
2019 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2020 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2021 build_int_cst (NULL_TREE, shift),
2022 tmps[i], 0);
2024 bytelen = ssize - bytepos;
2027 if (GET_CODE (dst) == CONCAT)
2029 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2030 dest = XEXP (dst, 0);
2031 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2033 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2034 dest = XEXP (dst, 1);
2036 else
2038 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2039 dest = assign_stack_temp (GET_MODE (dest),
2040 GET_MODE_SIZE (GET_MODE (dest)), 0);
2041 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2042 tmps[i]);
2043 dst = dest;
2044 break;
2048 /* Optimize the access just a bit. */
2049 if (MEM_P (dest)
2050 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2051 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2052 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2053 && bytelen == GET_MODE_SIZE (mode))
2054 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2055 else
2056 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2057 mode, tmps[i]);
2060 /* Copy from the pseudo into the (probable) hard reg. */
2061 if (orig_dst != dst)
2062 emit_move_insn (orig_dst, dst);
2065 /* Generate code to copy a BLKmode object of TYPE out of a
2066 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2067 is null, a stack temporary is created. TGTBLK is returned.
2069 The purpose of this routine is to handle functions that return
2070 BLKmode structures in registers. Some machines (the PA for example)
2071 want to return all small structures in registers regardless of the
2072 structure's alignment. */
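/* A minimal usage sketch: a caller handling a BLKmode return value can copy
   the returned register(s) into a stack temporary like this, where VALREG
   and TYPE are placeholders and a NULL_RTX target asks this routine to
   allocate the temporary itself:

       rtx blk = copy_blkmode_from_reg (NULL_RTX, valreg, type);  */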
2074 rtx
2075 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2077 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2078 rtx src = NULL, dst = NULL;
2079 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2080 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2081 enum machine_mode copy_mode;
2083 if (tgtblk == 0)
2085 tgtblk = assign_temp (build_qualified_type (type,
2086 (TYPE_QUALS (type)
2087 | TYPE_QUAL_CONST)),
2088 0, 1, 1);
2089 preserve_temp_slots (tgtblk);
2092 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2093 into a new pseudo which is a full word. */
2095 if (GET_MODE (srcreg) != BLKmode
2096 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2097 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2099 /* If the structure doesn't take up a whole number of words, see whether
2100 SRCREG is padded on the left or on the right. If it's on the left,
2101 set PADDING_CORRECTION to the number of bits to skip.
2103 In most ABIs, the structure will be returned at the least significant end of
2104 the register, which translates to right padding on little-endian
2105 targets and left padding on big-endian targets. The opposite
2106 holds if the structure is returned at the most significant
2107 end of the register. */
2108 if (bytes % UNITS_PER_WORD != 0
2109 && (targetm.calls.return_in_msb (type)
2110 ? !BYTES_BIG_ENDIAN
2111 : BYTES_BIG_ENDIAN))
2112 padding_correction
2113 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2115 /* Copy the structure BITSIZE bits at a time. If the target lives in
2116 memory, take care of not reading/writing past its end by selecting
2117 a copy mode suited to BITSIZE. This should always be possible given
2118 how it is computed.
2120 We could probably emit more efficient code for machines which do not use
2121 strict alignment, but it doesn't seem worth the effort at the current
2122 time. */
2124 copy_mode = word_mode;
2125 if (MEM_P (tgtblk))
2127 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2128 if (mem_mode != BLKmode)
2129 copy_mode = mem_mode;
2132 for (bitpos = 0, xbitpos = padding_correction;
2133 bitpos < bytes * BITS_PER_UNIT;
2134 bitpos += bitsize, xbitpos += bitsize)
2136 /* We need a new source operand each time xbitpos is on a
2137 word boundary and when xbitpos == padding_correction
2138 (the first time through). */
2139 if (xbitpos % BITS_PER_WORD == 0
2140 || xbitpos == padding_correction)
2141 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2142 GET_MODE (srcreg));
2144 /* We need a new destination operand each time bitpos is on
2145 a word boundary. */
2146 if (bitpos % BITS_PER_WORD == 0)
2147 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2149 /* Use xbitpos for the source extraction (right justified) and
2150 bitpos for the destination store (left justified). */
2151 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2152 extract_bit_field (src, bitsize,
2153 xbitpos % BITS_PER_WORD, 1,
2154 NULL_RTX, copy_mode, copy_mode));
2157 return tgtblk;
2160 /* Add a USE expression for REG to the (possibly empty) list pointed
2161 to by CALL_FUSAGE. REG must denote a hard register. */
2163 void
2164 use_reg (rtx *call_fusage, rtx reg)
2166 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2168 *call_fusage
2169 = gen_rtx_EXPR_LIST (VOIDmode,
2170 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2173 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2174 starting at REGNO. All of these registers must be hard registers. */
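/* A minimal sketch, with a hypothetical starting register number: a value
   passed in two consecutive hard registers would be recorded as

       rtx call_fusage = NULL_RTX;
       use_regs (&call_fusage, 4, 2);

   and CALL_FUSAGE is typically attached to the call insn as its
   CALL_INSN_FUNCTION_USAGE afterwards.  */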
2176 void
2177 use_regs (rtx *call_fusage, int regno, int nregs)
2179 int i;
2181 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2183 for (i = 0; i < nregs; i++)
2184 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2187 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2188 PARALLEL REGS. This is for calls that pass values in multiple
2189 non-contiguous locations. The Irix 6 ABI has examples of this. */
2191 void
2192 use_group_regs (rtx *call_fusage, rtx regs)
2194 int i;
2196 for (i = 0; i < XVECLEN (regs, 0); i++)
2198 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2200 /* A NULL entry means the parameter goes both on the stack and in
2201 registers. This can also be a MEM for targets that pass values
2202 partially on the stack and partially in registers. */
2203 if (reg != 0 && REG_P (reg))
2204 use_reg (call_fusage, reg);
2209 /* Determine whether the LEN bytes generated by CONSTFUN can be
2210 stored to memory using several move instructions. CONSTFUNDATA is
2211 a pointer which will be passed as argument in every CONSTFUN call.
2212 ALIGN is maximum alignment we can assume. Return nonzero if a
2213 call to store_by_pieces should succeed. */
2215 int
2216 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2217 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2218 void *constfundata, unsigned int align)
2220 unsigned HOST_WIDE_INT l;
2221 unsigned int max_size;
2222 HOST_WIDE_INT offset = 0;
2223 enum machine_mode mode, tmode;
2224 enum insn_code icode;
2225 int reverse;
2226 rtx cst;
2228 if (len == 0)
2229 return 1;
2231 if (! STORE_BY_PIECES_P (len, align))
2232 return 0;
2234 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2235 if (align >= GET_MODE_ALIGNMENT (tmode))
2236 align = GET_MODE_ALIGNMENT (tmode);
2237 else
2239 enum machine_mode xmode;
2241 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2242 tmode != VOIDmode;
2243 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2244 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2245 || SLOW_UNALIGNED_ACCESS (tmode, align))
2246 break;
2248 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2251 /* We would first store what we can in the largest integer mode, then go to
2252 successively smaller modes. */
2254 for (reverse = 0;
2255 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2256 reverse++)
2258 l = len;
2259 mode = VOIDmode;
2260 max_size = STORE_MAX_PIECES + 1;
2261 while (max_size > 1)
2263 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2264 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2265 if (GET_MODE_SIZE (tmode) < max_size)
2266 mode = tmode;
2268 if (mode == VOIDmode)
2269 break;
2271 icode = mov_optab->handlers[(int) mode].insn_code;
2272 if (icode != CODE_FOR_nothing
2273 && align >= GET_MODE_ALIGNMENT (mode))
2275 unsigned int size = GET_MODE_SIZE (mode);
2277 while (l >= size)
2279 if (reverse)
2280 offset -= size;
2282 cst = (*constfun) (constfundata, offset, mode);
2283 if (!LEGITIMATE_CONSTANT_P (cst))
2284 return 0;
2286 if (!reverse)
2287 offset += size;
2289 l -= size;
2293 max_size = GET_MODE_SIZE (mode);
2296 /* The code above should have handled everything. */
2297 gcc_assert (!l);
2300 return 1;
2303 /* Generate several move instructions to store LEN bytes generated by
2304 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2305 pointer which will be passed as argument in every CONSTFUN call.
2306 ALIGN is maximum alignment we can assume.
2307 If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
2308 mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
2309 stpcpy. */
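/* A minimal sketch of pairing can_store_by_pieces with store_by_pieces.
   The callback and the DEST/LEN variables are placeholders; builtins.c
   pairs the two routines in much the same way when expanding memset:

       static rtx
       all_ones_constfun (void *data ATTRIBUTE_UNUSED,
                          HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                          enum machine_mode mode)
       {
         return gen_int_mode (-1, mode);
       }

       if (can_store_by_pieces (len, all_ones_constfun, NULL,
                                MEM_ALIGN (dest)))
         dest = store_by_pieces (dest, len, all_ones_constfun, NULL,
                                 MEM_ALIGN (dest), 0);  */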
2311 rtx
2312 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2313 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2314 void *constfundata, unsigned int align, int endp)
2316 struct store_by_pieces data;
2318 if (len == 0)
2320 gcc_assert (endp != 2);
2321 return to;
2324 gcc_assert (STORE_BY_PIECES_P (len, align));
2325 data.constfun = constfun;
2326 data.constfundata = constfundata;
2327 data.len = len;
2328 data.to = to;
2329 store_by_pieces_1 (&data, align);
2330 if (endp)
2332 rtx to1;
2334 gcc_assert (!data.reverse);
2335 if (data.autinc_to)
2337 if (endp == 2)
2339 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2340 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2341 else
2342 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2343 -1));
2345 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2346 data.offset);
2348 else
2350 if (endp == 2)
2351 --data.offset;
2352 to1 = adjust_address (data.to, QImode, data.offset);
2354 return to1;
2356 else
2357 return data.to;
2360 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2361 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2363 static void
2364 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2366 struct store_by_pieces data;
2368 if (len == 0)
2369 return;
2371 data.constfun = clear_by_pieces_1;
2372 data.constfundata = NULL;
2373 data.len = len;
2374 data.to = to;
2375 store_by_pieces_1 (&data, align);
2378 /* Callback routine for clear_by_pieces.
2379 Return const0_rtx unconditionally. */
2381 static rtx
2382 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2383 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2384 enum machine_mode mode ATTRIBUTE_UNUSED)
2386 return const0_rtx;
2389 /* Subroutine of clear_by_pieces and store_by_pieces.
2390 Generate several move instructions to store LEN bytes of block TO. (A MEM
2391 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2393 static void
2394 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2395 unsigned int align ATTRIBUTE_UNUSED)
2397 rtx to_addr = XEXP (data->to, 0);
2398 unsigned int max_size = STORE_MAX_PIECES + 1;
2399 enum machine_mode mode = VOIDmode, tmode;
2400 enum insn_code icode;
2402 data->offset = 0;
2403 data->to_addr = to_addr;
2404 data->autinc_to
2405 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2406 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2408 data->explicit_inc_to = 0;
2409 data->reverse
2410 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2411 if (data->reverse)
2412 data->offset = data->len;
2414 /* If storing requires more than two move insns,
2415 copy addresses to registers (to make displacements shorter)
2416 and use post-increment if available. */
2417 if (!data->autinc_to
2418 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2420 /* Determine the main mode we'll be using. */
2421 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2422 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2423 if (GET_MODE_SIZE (tmode) < max_size)
2424 mode = tmode;
2426 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2428 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2429 data->autinc_to = 1;
2430 data->explicit_inc_to = -1;
2433 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2434 && ! data->autinc_to)
2436 data->to_addr = copy_addr_to_reg (to_addr);
2437 data->autinc_to = 1;
2438 data->explicit_inc_to = 1;
2441 if ( !data->autinc_to && CONSTANT_P (to_addr))
2442 data->to_addr = copy_addr_to_reg (to_addr);
2445 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2446 if (align >= GET_MODE_ALIGNMENT (tmode))
2447 align = GET_MODE_ALIGNMENT (tmode);
2448 else
2450 enum machine_mode xmode;
2452 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2453 tmode != VOIDmode;
2454 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2455 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2456 || SLOW_UNALIGNED_ACCESS (tmode, align))
2457 break;
2459 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2462 /* First store what we can in the largest integer mode, then go to
2463 successively smaller modes. */
2465 while (max_size > 1)
2467 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2468 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2469 if (GET_MODE_SIZE (tmode) < max_size)
2470 mode = tmode;
2472 if (mode == VOIDmode)
2473 break;
2475 icode = mov_optab->handlers[(int) mode].insn_code;
2476 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2477 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2479 max_size = GET_MODE_SIZE (mode);
2482 /* The code above should have handled everything. */
2483 gcc_assert (!data->len);
2486 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2487 with move instructions for mode MODE. GENFUN is the gen_... function
2488 to make a move insn for that mode. DATA has all the other info. */
2490 static void
2491 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2492 struct store_by_pieces *data)
2494 unsigned int size = GET_MODE_SIZE (mode);
2495 rtx to1, cst;
2497 while (data->len >= size)
2499 if (data->reverse)
2500 data->offset -= size;
2502 if (data->autinc_to)
2503 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2504 data->offset);
2505 else
2506 to1 = adjust_address (data->to, mode, data->offset);
2508 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2509 emit_insn (gen_add2_insn (data->to_addr,
2510 GEN_INT (-(HOST_WIDE_INT) size)));
2512 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2513 emit_insn ((*genfun) (to1, cst));
2515 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2516 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2518 if (! data->reverse)
2519 data->offset += size;
2521 data->len -= size;
2525 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2526 its length in bytes. */
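/* A minimal usage sketch, with TARGET a placeholder BLKmode MEM and a
   compile-time constant size:

       clear_storage (target, GEN_INT (32), BLOCK_OP_NORMAL);

   Depending on the size and alignment this becomes clear_by_pieces, a
   setmem pattern, or a memset libcall, as the body below shows.  */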
2528 rtx
2529 clear_storage (rtx object, rtx size, enum block_op_methods method)
2531 enum machine_mode mode = GET_MODE (object);
2532 unsigned int align;
2534 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2536 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2537 just move a zero. Otherwise, do this a piece at a time. */
2538 if (mode != BLKmode
2539 && GET_CODE (size) == CONST_INT
2540 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2542 rtx zero = CONST0_RTX (mode);
2543 if (zero != NULL)
2545 emit_move_insn (object, zero);
2546 return NULL;
2549 if (COMPLEX_MODE_P (mode))
2551 zero = CONST0_RTX (GET_MODE_INNER (mode));
2552 if (zero != NULL)
2554 write_complex_part (object, zero, 0);
2555 write_complex_part (object, zero, 1);
2556 return NULL;
2561 if (size == const0_rtx)
2562 return NULL;
2564 align = MEM_ALIGN (object);
2566 if (GET_CODE (size) == CONST_INT
2567 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2568 clear_by_pieces (object, INTVAL (size), align);
2569 else if (set_storage_via_setmem (object, size, const0_rtx, align))
2571 else
2572 return clear_storage_via_libcall (object, size,
2573 method == BLOCK_OP_TAILCALL);
2575 return NULL;
2578 /* A subroutine of clear_storage. Expand a call to memset.
2579 Return the return value of memset, 0 otherwise. */
2581 static rtx
2582 clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
2584 tree call_expr, arg_list, fn, object_tree, size_tree;
2585 enum machine_mode size_mode;
2586 rtx retval;
2588 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2589 place those pseudos into a VAR_DECL and use them later. */
2591 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2593 size_mode = TYPE_MODE (sizetype);
2594 size = convert_to_mode (size_mode, size, 1);
2595 size = copy_to_mode_reg (size_mode, size);
2597 /* It is incorrect to use the libcall calling conventions to call
2598 memset in this context. This could be a user call to memset and
2599 the user may wish to examine the return value from memset. For
2600 targets where libcalls and normal calls have different conventions
2601 for returning pointers, we could end up generating incorrect code. */
2603 object_tree = make_tree (ptr_type_node, object);
2604 size_tree = make_tree (sizetype, size);
2606 fn = clear_storage_libcall_fn (true);
2607 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2608 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2609 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2611 /* Now we have to build up the CALL_EXPR itself. */
2612 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2613 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2614 call_expr, arg_list, NULL_TREE);
2615 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2617 retval = expand_normal (call_expr);
2619 return retval;
2622 /* A subroutine of clear_storage_via_libcall. Create the tree node
2623 for the function we use for block clears. The first time FOR_CALL
2624 is true, we call assemble_external. */
2626 static GTY(()) tree block_clear_fn;
2628 void
2629 init_block_clear_fn (const char *asmspec)
2631 if (!block_clear_fn)
2633 tree fn, args;
2635 fn = get_identifier ("memset");
2636 args = build_function_type_list (ptr_type_node, ptr_type_node,
2637 integer_type_node, sizetype,
2638 NULL_TREE);
2640 fn = build_decl (FUNCTION_DECL, fn, args);
2641 DECL_EXTERNAL (fn) = 1;
2642 TREE_PUBLIC (fn) = 1;
2643 DECL_ARTIFICIAL (fn) = 1;
2644 TREE_NOTHROW (fn) = 1;
2645 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2646 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2648 block_clear_fn = fn;
2651 if (asmspec)
2652 set_user_assembler_name (block_clear_fn, asmspec);
2655 static tree
2656 clear_storage_libcall_fn (int for_call)
2658 static bool emitted_extern;
2660 if (!block_clear_fn)
2661 init_block_clear_fn (NULL);
2663 if (for_call && !emitted_extern)
2665 emitted_extern = true;
2666 make_decl_rtl (block_clear_fn);
2667 assemble_external (block_clear_fn);
2670 return block_clear_fn;
2673 /* Expand a setmem pattern; return true if successful. */
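/* Callers treat this as an optional fast path and fall back when it returns
   false; a minimal sketch mirroring clear_storage above:

       if (!set_storage_via_setmem (object, size, const0_rtx, align))
         ... fall back to a libcall or by-pieces code ...  */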
2675 bool
2676 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2678 /* Try the most limited insn first, because there's no point
2679 including more than one in the machine description unless
2680 the more limited one has some advantage. */
2682 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2683 enum machine_mode mode;
2685 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2686 mode = GET_MODE_WIDER_MODE (mode))
2688 enum insn_code code = setmem_optab[(int) mode];
2689 insn_operand_predicate_fn pred;
2691 if (code != CODE_FOR_nothing
2692 /* We don't need MODE to be narrower than
2693 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2694 the mode mask, as it is returned by the macro, it will
2695 definitely be less than the actual mode mask. */
2696 && ((GET_CODE (size) == CONST_INT
2697 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2698 <= (GET_MODE_MASK (mode) >> 1)))
2699 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2700 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2701 || (*pred) (object, BLKmode))
2702 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2703 || (*pred) (opalign, VOIDmode)))
2705 rtx opsize, opchar;
2706 enum machine_mode char_mode;
2707 rtx last = get_last_insn ();
2708 rtx pat;
2710 opsize = convert_to_mode (mode, size, 1);
2711 pred = insn_data[(int) code].operand[1].predicate;
2712 if (pred != 0 && ! (*pred) (opsize, mode))
2713 opsize = copy_to_mode_reg (mode, opsize);
2715 opchar = val;
2716 char_mode = insn_data[(int) code].operand[2].mode;
2717 if (char_mode != VOIDmode)
2719 opchar = convert_to_mode (char_mode, opchar, 1);
2720 pred = insn_data[(int) code].operand[2].predicate;
2721 if (pred != 0 && ! (*pred) (opchar, char_mode))
2722 opchar = copy_to_mode_reg (char_mode, opchar);
2725 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2726 if (pat)
2728 emit_insn (pat);
2729 return true;
2731 else
2732 delete_insns_since (last);
2736 return false;
2740 /* Write to one of the components of the complex value CPLX. Write VAL to
2741 the real part if IMAG_P is false, and the imaginary part if it's true. */
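/* A minimal sketch: copying a complex value part by part, as
   emit_move_complex does further down, where X and Y are placeholder rtxen
   of the same complex mode:

       write_complex_part (x, read_complex_part (y, false), false);
       write_complex_part (x, read_complex_part (y, true), true);  */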
2743 static void
2744 write_complex_part (rtx cplx, rtx val, bool imag_p)
2746 enum machine_mode cmode;
2747 enum machine_mode imode;
2748 unsigned ibitsize;
2750 if (GET_CODE (cplx) == CONCAT)
2752 emit_move_insn (XEXP (cplx, imag_p), val);
2753 return;
2756 cmode = GET_MODE (cplx);
2757 imode = GET_MODE_INNER (cmode);
2758 ibitsize = GET_MODE_BITSIZE (imode);
2760 /* For MEMs simplify_gen_subreg may generate an invalid new address
2761 because, e.g., the original address is considered mode-dependent
2762 by the target, which restricts simplify_subreg from invoking
2763 adjust_address_nv. Instead of preparing fallback support for an
2764 invalid address, we call adjust_address_nv directly. */
2765 if (MEM_P (cplx))
2767 emit_move_insn (adjust_address_nv (cplx, imode,
2768 imag_p ? GET_MODE_SIZE (imode) : 0),
2769 val);
2770 return;
2773 /* If the sub-object is at least word sized, then we know that subregging
2774 will work. This special case is important, since store_bit_field
2775 wants to operate on integer modes, and there's rarely an OImode to
2776 correspond to TCmode. */
2777 if (ibitsize >= BITS_PER_WORD
2778 /* For hard regs we have exact predicates. Assume we can split
2779 the original object if it spans an even number of hard regs.
2780 This special case is important for SCmode on 64-bit platforms
2781 where the natural size of floating-point regs is 32-bit. */
2782 || (REG_P (cplx)
2783 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2784 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2786 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2787 imag_p ? GET_MODE_SIZE (imode) : 0);
2788 if (part)
2790 emit_move_insn (part, val);
2791 return;
2793 else
2794 /* simplify_gen_subreg may fail for sub-word MEMs. */
2795 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2798 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2801 /* Extract one of the components of the complex value CPLX. Extract the
2802 real part if IMAG_P is false, and the imaginary part if it's true. */
2804 static rtx
2805 read_complex_part (rtx cplx, bool imag_p)
2807 enum machine_mode cmode, imode;
2808 unsigned ibitsize;
2810 if (GET_CODE (cplx) == CONCAT)
2811 return XEXP (cplx, imag_p);
2813 cmode = GET_MODE (cplx);
2814 imode = GET_MODE_INNER (cmode);
2815 ibitsize = GET_MODE_BITSIZE (imode);
2817 /* Special case reads from complex constants that got spilled to memory. */
2818 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2820 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2821 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2823 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2824 if (CONSTANT_CLASS_P (part))
2825 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2829 /* For MEMs simplify_gen_subreg may generate an invalid new address
2830 because, e.g., the original address is considered mode-dependent
2831 by the target, which restricts simplify_subreg from invoking
2832 adjust_address_nv. Instead of preparing fallback support for an
2833 invalid address, we call adjust_address_nv directly. */
2834 if (MEM_P (cplx))
2835 return adjust_address_nv (cplx, imode,
2836 imag_p ? GET_MODE_SIZE (imode) : 0);
2838 /* If the sub-object is at least word sized, then we know that subregging
2839 will work. This special case is important, since extract_bit_field
2840 wants to operate on integer modes, and there's rarely an OImode to
2841 correspond to TCmode. */
2842 if (ibitsize >= BITS_PER_WORD
2843 /* For hard regs we have exact predicates. Assume we can split
2844 the original object if it spans an even number of hard regs.
2845 This special case is important for SCmode on 64-bit platforms
2846 where the natural size of floating-point regs is 32-bit. */
2847 || (REG_P (cplx)
2848 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2849 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2851 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2852 imag_p ? GET_MODE_SIZE (imode) : 0);
2853 if (ret)
2854 return ret;
2855 else
2856 /* simplify_gen_subreg may fail for sub-word MEMs. */
2857 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2860 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2861 true, NULL_RTX, imode, imode);
2864 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2865 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2866 represented in NEW_MODE. If FORCE is true, this will never happen, as
2867 we'll force-create a SUBREG if needed. */
2869 static rtx
2870 emit_move_change_mode (enum machine_mode new_mode,
2871 enum machine_mode old_mode, rtx x, bool force)
2873 rtx ret;
2875 if (MEM_P (x))
2877 /* We don't have to worry about changing the address since the
2878 size in bytes is supposed to be the same. */
2879 if (reload_in_progress)
2881 /* Copy the MEM to change the mode and move any
2882 substitutions from the old MEM to the new one. */
2883 ret = adjust_address_nv (x, new_mode, 0);
2884 copy_replacements (x, ret);
2886 else
2887 ret = adjust_address (x, new_mode, 0);
2889 else
2891 /* Note that we do want simplify_subreg's behavior of validating
2892 that the new mode is ok for a hard register. If we were to use
2893 simplify_gen_subreg, we would create the subreg, but would
2894 probably run into the target not being able to implement it. */
2895 /* Except, of course, when FORCE is true, when this is exactly what
2896 we want. Which is needed for CCmodes on some targets. */
2897 if (force)
2898 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2899 else
2900 ret = simplify_subreg (new_mode, x, old_mode, 0);
2903 return ret;
2906 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2907 an integer mode of the same size as MODE. Returns the instruction
2908 emitted, or NULL if such a move could not be generated. */
2910 static rtx
2911 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2913 enum machine_mode imode;
2914 enum insn_code code;
2916 /* There must exist a mode of the exact size we require. */
2917 imode = int_mode_for_mode (mode);
2918 if (imode == BLKmode)
2919 return NULL_RTX;
2921 /* The target must support moves in this mode. */
2922 code = mov_optab->handlers[imode].insn_code;
2923 if (code == CODE_FOR_nothing)
2924 return NULL_RTX;
2926 x = emit_move_change_mode (imode, mode, x, force);
2927 if (x == NULL_RTX)
2928 return NULL_RTX;
2929 y = emit_move_change_mode (imode, mode, y, force);
2930 if (y == NULL_RTX)
2931 return NULL_RTX;
2932 return emit_insn (GEN_FCN (code) (x, y));
2935 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2936 Return an equivalent MEM that does not use an auto-increment. */
2938 static rtx
2939 emit_move_resolve_push (enum machine_mode mode, rtx x)
2941 enum rtx_code code = GET_CODE (XEXP (x, 0));
2942 HOST_WIDE_INT adjust;
2943 rtx temp;
2945 adjust = GET_MODE_SIZE (mode);
2946 #ifdef PUSH_ROUNDING
2947 adjust = PUSH_ROUNDING (adjust);
2948 #endif
2949 if (code == PRE_DEC || code == POST_DEC)
2950 adjust = -adjust;
2951 else if (code == PRE_MODIFY || code == POST_MODIFY)
2953 rtx expr = XEXP (XEXP (x, 0), 1);
2954 HOST_WIDE_INT val;
2956 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2957 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2958 val = INTVAL (XEXP (expr, 1));
2959 if (GET_CODE (expr) == MINUS)
2960 val = -val;
2961 gcc_assert (adjust == val || adjust == -val);
2962 adjust = val;
2965 /* Do not use anti_adjust_stack, since we don't want to update
2966 stack_pointer_delta. */
2967 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2968 GEN_INT (adjust), stack_pointer_rtx,
2969 0, OPTAB_LIB_WIDEN);
2970 if (temp != stack_pointer_rtx)
2971 emit_move_insn (stack_pointer_rtx, temp);
2973 switch (code)
2975 case PRE_INC:
2976 case PRE_DEC:
2977 case PRE_MODIFY:
2978 temp = stack_pointer_rtx;
2979 break;
2980 case POST_INC:
2981 case POST_DEC:
2982 case POST_MODIFY:
2983 temp = plus_constant (stack_pointer_rtx, -adjust);
2984 break;
2985 default:
2986 gcc_unreachable ();
2989 return replace_equiv_address (x, temp);
2992 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2993 X is known to satisfy push_operand, and MODE is known to be complex.
2994 Returns the last instruction emitted. */
2996 static rtx
2997 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2999 enum machine_mode submode = GET_MODE_INNER (mode);
3000 bool imag_first;
3002 #ifdef PUSH_ROUNDING
3003 unsigned int submodesize = GET_MODE_SIZE (submode);
3005 /* In case we output to the stack, but the size is smaller than what the
3006 machine can push exactly, we need to use move instructions. */
3007 if (PUSH_ROUNDING (submodesize) != submodesize)
3009 x = emit_move_resolve_push (mode, x);
3010 return emit_move_insn (x, y);
3012 #endif
3014 /* Note that the real part always precedes the imag part in memory
3015 regardless of the machine's endianness. */
3016 switch (GET_CODE (XEXP (x, 0)))
3018 case PRE_DEC:
3019 case POST_DEC:
3020 imag_first = true;
3021 break;
3022 case PRE_INC:
3023 case POST_INC:
3024 imag_first = false;
3025 break;
3026 default:
3027 gcc_unreachable ();
3030 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3031 read_complex_part (y, imag_first));
3032 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3033 read_complex_part (y, !imag_first));
3036 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3037 MODE is known to be complex. Returns the last instruction emitted. */
3039 static rtx
3040 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3042 bool try_int;
3044 /* Need to take special care for pushes, to maintain proper ordering
3045 of the data, and possibly extra padding. */
3046 if (push_operand (x, mode))
3047 return emit_move_complex_push (mode, x, y);
3049 /* See if we can coerce the target into moving both values at once. */
3051 /* Move floating point as parts. */
3052 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3053 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
3054 try_int = false;
3055 /* Not possible if the values are inherently not adjacent. */
3056 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3057 try_int = false;
3058 /* Is possible if both are registers (or subregs of registers). */
3059 else if (register_operand (x, mode) && register_operand (y, mode))
3060 try_int = true;
3061 /* If one of the operands is a memory, and alignment constraints
3062 are friendly enough, we may be able to do combined memory operations.
3063 We do not attempt this if Y is a constant because that combination is
3064 usually better with the by-parts thing below. */
3065 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3066 && (!STRICT_ALIGNMENT
3067 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3068 try_int = true;
3069 else
3070 try_int = false;
3072 if (try_int)
3074 rtx ret;
3076 /* For memory to memory moves, optimal behavior can be had with the
3077 existing block move logic. */
3078 if (MEM_P (x) && MEM_P (y))
3080 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3081 BLOCK_OP_NO_LIBCALL);
3082 return get_last_insn ();
3085 ret = emit_move_via_integer (mode, x, y, true);
3086 if (ret)
3087 return ret;
3090 /* Show the output dies here. This is necessary for SUBREGs
3091 of pseudos since we cannot track their lifetimes correctly;
3092 hard regs shouldn't appear here except as return values. */
3093 if (!reload_completed && !reload_in_progress
3094 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3095 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3097 write_complex_part (x, read_complex_part (y, false), false);
3098 write_complex_part (x, read_complex_part (y, true), true);
3099 return get_last_insn ();
3102 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3103 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3105 static rtx
3106 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3108 rtx ret;
3110 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3111 if (mode != CCmode)
3113 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3114 if (code != CODE_FOR_nothing)
3116 x = emit_move_change_mode (CCmode, mode, x, true);
3117 y = emit_move_change_mode (CCmode, mode, y, true);
3118 return emit_insn (GEN_FCN (code) (x, y));
3122 /* Otherwise, find the MODE_INT mode of the same width. */
3123 ret = emit_move_via_integer (mode, x, y, false);
3124 gcc_assert (ret != NULL);
3125 return ret;
3128 /* Return true if word I of OP lies entirely in the
3129 undefined bits of a paradoxical subreg. */
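/* A worked example, assuming a 64-bit little-endian target where
   UNITS_PER_WORD is 8: for OP = (subreg:TI (reg:DI d) 0) and I = 1, the
   offset is 1 * UNITS_PER_WORD + SUBREG_BYTE (op) = 8, which is not below
   GET_MODE_SIZE (DImode), so word 1 lies entirely in the undefined bits
   and the function returns true.  */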
3131 static bool
3132 undefined_operand_subword_p (rtx op, int i)
3134 enum machine_mode innermode, innermostmode;
3135 int offset;
3136 if (GET_CODE (op) != SUBREG)
3137 return false;
3138 innermode = GET_MODE (op);
3139 innermostmode = GET_MODE (SUBREG_REG (op));
3140 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3141 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3142 memory, except for a paradoxical subreg where we define
3143 SUBREG_BYTE to be 0; undo this exception as in
3144 simplify_subreg. */
3145 if (SUBREG_BYTE (op) == 0
3146 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3148 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3149 if (WORDS_BIG_ENDIAN)
3150 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3151 if (BYTES_BIG_ENDIAN)
3152 offset += difference % UNITS_PER_WORD;
3154 if (offset >= GET_MODE_SIZE (innermostmode)
3155 || offset <= -GET_MODE_SIZE (word_mode))
3156 return true;
3157 return false;
3160 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3161 MODE is any multi-word or full-word mode that lacks a move_insn
3162 pattern. Note that you will get better code if you define such
3163 patterns, even if they must turn into multiple assembler instructions. */
3165 static rtx
3166 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3168 rtx last_insn = 0;
3169 rtx seq, inner;
3170 bool need_clobber;
3171 int i;
3173 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3175 /* If X is a push on the stack, do the push now and replace
3176 X with a reference to the stack pointer. */
3177 if (push_operand (x, mode))
3178 x = emit_move_resolve_push (mode, x);
3180 /* If we are in reload, see if either operand is a MEM whose address
3181 is scheduled for replacement. */
3182 if (reload_in_progress && MEM_P (x)
3183 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3184 x = replace_equiv_address_nv (x, inner);
3185 if (reload_in_progress && MEM_P (y)
3186 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3187 y = replace_equiv_address_nv (y, inner);
3189 start_sequence ();
3191 need_clobber = false;
3192 for (i = 0;
3193 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3194 i++)
3196 rtx xpart = operand_subword (x, i, 1, mode);
3197 rtx ypart;
3199 /* Do not generate code for a move if it would come entirely
3200 from the undefined bits of a paradoxical subreg. */
3201 if (undefined_operand_subword_p (y, i))
3202 continue;
3204 ypart = operand_subword (y, i, 1, mode);
3206 /* If we can't get a part of Y, put Y into memory if it is a
3207 constant. Otherwise, force it into a register. Then we must
3208 be able to get a part of Y. */
3209 if (ypart == 0 && CONSTANT_P (y))
3211 y = use_anchored_address (force_const_mem (mode, y));
3212 ypart = operand_subword (y, i, 1, mode);
3214 else if (ypart == 0)
3215 ypart = operand_subword_force (y, i, mode);
3217 gcc_assert (xpart && ypart);
3219 need_clobber |= (GET_CODE (xpart) == SUBREG);
3221 last_insn = emit_move_insn (xpart, ypart);
3224 seq = get_insns ();
3225 end_sequence ();
3227 /* Show the output dies here. This is necessary for SUBREGs
3228 of pseudos since we cannot track their lifetimes correctly;
3229 hard regs shouldn't appear here except as return values.
3230 We never want to emit such a clobber after reload. */
3231 if (x != y
3232 && ! (reload_in_progress || reload_completed)
3233 && need_clobber != 0)
3234 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3236 emit_insn (seq);
3238 return last_insn;
3241 /* Low level part of emit_move_insn.
3242 Called just like emit_move_insn, but assumes X and Y
3243 are basically valid. */
3245 rtx
3246 emit_move_insn_1 (rtx x, rtx y)
3248 enum machine_mode mode = GET_MODE (x);
3249 enum insn_code code;
3251 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3253 code = mov_optab->handlers[mode].insn_code;
3254 if (code != CODE_FOR_nothing)
3255 return emit_insn (GEN_FCN (code) (x, y));
3257 /* Expand complex moves by moving real part and imag part. */
3258 if (COMPLEX_MODE_P (mode))
3259 return emit_move_complex (mode, x, y);
3261 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3263 rtx result = emit_move_via_integer (mode, x, y, true);
3265 /* If we can't find an integer mode, use multi words. */
3266 if (result)
3267 return result;
3268 else
3269 return emit_move_multi_word (mode, x, y);
3272 if (GET_MODE_CLASS (mode) == MODE_CC)
3273 return emit_move_ccmode (mode, x, y);
3275 /* Try using a move pattern for the corresponding integer mode. This is
3276 only safe when simplify_subreg can convert MODE constants into integer
3277 constants. At present, it can only do this reliably if the value
3278 fits within a HOST_WIDE_INT. */
3279 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3281 rtx ret = emit_move_via_integer (mode, x, y, false);
3282 if (ret)
3283 return ret;
3286 return emit_move_multi_word (mode, x, y);
3289 /* Generate code to copy Y into X.
3290 Both Y and X must have the same mode, except that
3291 Y can be a constant with VOIDmode.
3292 This mode cannot be BLKmode; use emit_block_move for that.
3294 Return the last instruction emitted. */
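/* A minimal sketch of the common copy-into-a-fresh-pseudo idiom built on
   this routine (essentially what copy_to_reg does), where VAL is a
   placeholder:

       rtx tmp = gen_reg_rtx (GET_MODE (val));
       emit_move_insn (tmp, val);  */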
3296 rtx
3297 emit_move_insn (rtx x, rtx y)
3299 enum machine_mode mode = GET_MODE (x);
3300 rtx y_cst = NULL_RTX;
3301 rtx last_insn, set;
3303 gcc_assert (mode != BLKmode
3304 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3306 if (CONSTANT_P (y))
3308 if (optimize
3309 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3310 && (last_insn = compress_float_constant (x, y)))
3311 return last_insn;
3313 y_cst = y;
3315 if (!LEGITIMATE_CONSTANT_P (y))
3317 y = force_const_mem (mode, y);
3319 /* If the target's cannot_force_const_mem prevented the spill,
3320 assume that the target's move expanders will also take care
3321 of the non-legitimate constant. */
3322 if (!y)
3323 y = y_cst;
3324 else
3325 y = use_anchored_address (y);
3329 /* If X or Y are memory references, verify that their addresses are valid
3330 for the machine. */
3331 if (MEM_P (x)
3332 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3333 && ! push_operand (x, GET_MODE (x)))
3334 || (flag_force_addr
3335 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3336 x = validize_mem (x);
3338 if (MEM_P (y)
3339 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3340 || (flag_force_addr
3341 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3342 y = validize_mem (y);
3344 gcc_assert (mode != BLKmode);
3346 last_insn = emit_move_insn_1 (x, y);
3348 if (y_cst && REG_P (x)
3349 && (set = single_set (last_insn)) != NULL_RTX
3350 && SET_DEST (set) == x
3351 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3352 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3354 return last_insn;
3357 /* If Y is representable exactly in a narrower mode, and the target can
3358 perform the extension directly from constant or memory, then emit the
3359 move as an extension. */
3361 static rtx
3362 compress_float_constant (rtx x, rtx y)
3364 enum machine_mode dstmode = GET_MODE (x);
3365 enum machine_mode orig_srcmode = GET_MODE (y);
3366 enum machine_mode srcmode;
3367 REAL_VALUE_TYPE r;
3368 int oldcost, newcost;
3370 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3372 if (LEGITIMATE_CONSTANT_P (y))
3373 oldcost = rtx_cost (y, SET);
3374 else
3375 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3377 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3378 srcmode != orig_srcmode;
3379 srcmode = GET_MODE_WIDER_MODE (srcmode))
3381 enum insn_code ic;
3382 rtx trunc_y, last_insn;
3384 /* Skip if the target can't extend this way. */
3385 ic = can_extend_p (dstmode, srcmode, 0);
3386 if (ic == CODE_FOR_nothing)
3387 continue;
3389 /* Skip if the narrowed value isn't exact. */
3390 if (! exact_real_truncate (srcmode, &r))
3391 continue;
3393 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3395 if (LEGITIMATE_CONSTANT_P (trunc_y))
3397 /* Skip if the target needs extra instructions to perform
3398 the extension. */
3399 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3400 continue;
3401 /* This is valid, but may not be cheaper than the original. */
3402 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3403 if (oldcost < newcost)
3404 continue;
3406 else if (float_extend_from_mem[dstmode][srcmode])
3408 trunc_y = force_const_mem (srcmode, trunc_y);
3409 /* This is valid, but may not be cheaper than the original. */
3410 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3411 if (oldcost < newcost)
3412 continue;
3413 trunc_y = validize_mem (trunc_y);
3415 else
3416 continue;
3418 /* For CSE's benefit, force the compressed constant pool entry
3419 into a new pseudo. This constant may be used in different modes,
3420 and if not, combine will put things back together for us. */
3421 trunc_y = force_reg (srcmode, trunc_y);
3422 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3423 last_insn = get_last_insn ();
3425 if (REG_P (x))
3426 set_unique_reg_note (last_insn, REG_EQUAL, y);
3428 return last_insn;
3431 return NULL_RTX;
3434 /* Pushing data onto the stack. */
3436 /* Push a block of length SIZE (perhaps variable)
3437 and return an rtx to address the beginning of the block.
3438 The value may be virtual_outgoing_args_rtx.
3440 EXTRA is the number of bytes of padding to push in addition to SIZE.
3441 BELOW nonzero means this padding comes at low addresses;
3442 otherwise, the padding comes at high addresses. */
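/* For example, emit_push_insn below obtains the address of a freshly
   allocated block with

       temp = push_block (size, extra, where_pad == downward);

   and then block-copies the argument to that address.  */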
3444 rtx
3445 push_block (rtx size, int extra, int below)
3447 rtx temp;
3449 size = convert_modes (Pmode, ptr_mode, size, 1);
3450 if (CONSTANT_P (size))
3451 anti_adjust_stack (plus_constant (size, extra));
3452 else if (REG_P (size) && extra == 0)
3453 anti_adjust_stack (size);
3454 else
3456 temp = copy_to_mode_reg (Pmode, size);
3457 if (extra != 0)
3458 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3459 temp, 0, OPTAB_LIB_WIDEN);
3460 anti_adjust_stack (temp);
3463 #ifndef STACK_GROWS_DOWNWARD
3464 if (0)
3465 #else
3466 if (1)
3467 #endif
3469 temp = virtual_outgoing_args_rtx;
3470 if (extra != 0 && below)
3471 temp = plus_constant (temp, extra);
3473 else
3475 if (GET_CODE (size) == CONST_INT)
3476 temp = plus_constant (virtual_outgoing_args_rtx,
3477 -INTVAL (size) - (below ? 0 : extra));
3478 else if (extra != 0 && !below)
3479 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3480 negate_rtx (Pmode, plus_constant (size, extra)));
3481 else
3482 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3483 negate_rtx (Pmode, size));
3486 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3489 #ifdef PUSH_ROUNDING
3491 /* Emit single push insn. */
3493 static void
3494 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3496 rtx dest_addr;
3497 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3498 rtx dest;
3499 enum insn_code icode;
3500 insn_operand_predicate_fn pred;
3502 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3503 /* If there is a push pattern, use it. Otherwise try the old way of
3504 throwing a MEM representing the push operation to the move expander. */
3505 icode = push_optab->handlers[(int) mode].insn_code;
3506 if (icode != CODE_FOR_nothing)
3508 if (((pred = insn_data[(int) icode].operand[0].predicate)
3509 && !((*pred) (x, mode))))
3510 x = force_reg (mode, x);
3511 emit_insn (GEN_FCN (icode) (x));
3512 return;
3514 if (GET_MODE_SIZE (mode) == rounded_size)
3515 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3516 /* If we are to pad downward, adjust the stack pointer first and
3517 then store X into the stack location using an offset. This is
3518 because emit_move_insn does not know how to pad; it does not have
3519 access to type. */
3520 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3522 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3523 HOST_WIDE_INT offset;
3525 emit_move_insn (stack_pointer_rtx,
3526 expand_binop (Pmode,
3527 #ifdef STACK_GROWS_DOWNWARD
3528 sub_optab,
3529 #else
3530 add_optab,
3531 #endif
3532 stack_pointer_rtx,
3533 GEN_INT (rounded_size),
3534 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3536 offset = (HOST_WIDE_INT) padding_size;
3537 #ifdef STACK_GROWS_DOWNWARD
3538 if (STACK_PUSH_CODE == POST_DEC)
3539 /* We have already decremented the stack pointer, so get the
3540 previous value. */
3541 offset += (HOST_WIDE_INT) rounded_size;
3542 #else
3543 if (STACK_PUSH_CODE == POST_INC)
3544 /* We have already incremented the stack pointer, so get the
3545 previous value. */
3546 offset -= (HOST_WIDE_INT) rounded_size;
3547 #endif
3548 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3550 else
3552 #ifdef STACK_GROWS_DOWNWARD
3553 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3554 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3555 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3556 #else
3557 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3558 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3559 GEN_INT (rounded_size));
3560 #endif
3561 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3564 dest = gen_rtx_MEM (mode, dest_addr);
3566 if (type != 0)
3568 set_mem_attributes (dest, type, 1);
3570 if (flag_optimize_sibling_calls)
3571 /* Function incoming arguments may overlap with sibling call
3572 outgoing arguments and we cannot allow reordering of reads
3573 from function arguments with stores to outgoing arguments
3574 of sibling calls. */
3575 set_mem_alias_set (dest, 0);
3577 emit_move_insn (dest, x);
3579 #endif
3581 /* Generate code to push X onto the stack, assuming it has mode MODE and
3582 type TYPE.
3583 MODE is redundant except when X is a CONST_INT (since they don't
3584 carry mode info).
3585 SIZE is an rtx for the size of data to be copied (in bytes),
3586 needed only if X is BLKmode.
3588 ALIGN (in bits) is maximum alignment we can assume.
3590 If PARTIAL and REG are both nonzero, then copy that many of the first
3591 bytes of X into registers starting with REG, and push the rest of X.
3592 The amount of space pushed is decreased by PARTIAL bytes.
3593 REG must be a hard register in this case.
3594 If REG is zero but PARTIAL is not, take all other actions for an
3595 argument partially in registers, but do not actually load any
3596 registers.
3598 EXTRA is the amount in bytes of extra space to leave next to this arg.
3599 This is ignored if an argument block has already been allocated.
3601 On a machine that lacks real push insns, ARGS_ADDR is the address of
3602 the bottom of the argument block for this call. We use indexing off there
3603 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3604 argument block has not been preallocated.
3606 ARGS_SO_FAR is the size of args previously pushed for this call.
3608 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3609 for arguments passed in registers. If nonzero, it will be the number
3610 of bytes required. */
3612 void
3613 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3614 unsigned int align, int partial, rtx reg, int extra,
3615 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3616 rtx alignment_pad)
3618 rtx xinner;
3619 enum direction stack_direction
3620 #ifdef STACK_GROWS_DOWNWARD
3621 = downward;
3622 #else
3623 = upward;
3624 #endif
3626 /* Decide where to pad the argument: `downward' for below,
3627 `upward' for above, or `none' for don't pad it.
3628 Default is below for small data on big-endian machines; else above. */
3629 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3631 /* Invert direction if stack is post-decrement.
3632 FIXME: why? */
3633 if (STACK_PUSH_CODE == POST_DEC)
3634 if (where_pad != none)
3635 where_pad = (where_pad == downward ? upward : downward);
3637 xinner = x;
3639 if (mode == BLKmode)
3641 /* Copy a block into the stack, entirely or partially. */
3643 rtx temp;
3644 int used;
3645 int offset;
3646 int skip;
3648 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3649 used = partial - offset;
3651 gcc_assert (size);
3653 /* USED is now the # of bytes we need not copy to the stack
3654 because registers will take care of them. */
3656 if (partial != 0)
3657 xinner = adjust_address (xinner, BLKmode, used);
3659 /* If the partial register-part of the arg counts in its stack size,
3660 skip the part of stack space corresponding to the registers.
3661 Otherwise, start copying to the beginning of the stack space,
3662 by setting SKIP to 0. */
3663 skip = (reg_parm_stack_space == 0) ? 0 : used;
3665 #ifdef PUSH_ROUNDING
3666 /* Do it with several push insns if that doesn't take lots of insns
3667 and if there is no difficulty with push insns that skip bytes
3668 on the stack for alignment purposes. */
3669 if (args_addr == 0
3670 && PUSH_ARGS
3671 && GET_CODE (size) == CONST_INT
3672 && skip == 0
3673 && MEM_ALIGN (xinner) >= align
3674 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3675 /* Here we avoid the case of a structure whose weak alignment
3676 forces many pushes of a small amount of data,
3677 and such small pushes do rounding that causes trouble. */
3678 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3679 || align >= BIGGEST_ALIGNMENT
3680 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3681 == (align / BITS_PER_UNIT)))
3682 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3684 /* Push padding now if padding above and stack grows down,
3685 or if padding below and stack grows up.
3686 But if space already allocated, this has already been done. */
3687 if (extra && args_addr == 0
3688 && where_pad != none && where_pad != stack_direction)
3689 anti_adjust_stack (GEN_INT (extra));
3691 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3693 else
3694 #endif /* PUSH_ROUNDING */
3696 rtx target;
3698 /* Otherwise make space on the stack and copy the data
3699 to the address of that space. */
3701 /* Deduct the bytes put into registers from the size we must copy. */
3702 if (partial != 0)
3704 if (GET_CODE (size) == CONST_INT)
3705 size = GEN_INT (INTVAL (size) - used);
3706 else
3707 size = expand_binop (GET_MODE (size), sub_optab, size,
3708 GEN_INT (used), NULL_RTX, 0,
3709 OPTAB_LIB_WIDEN);
3712 /* Get the address of the stack space.
3713 In this case, we do not deal with EXTRA separately.
3714 A single stack adjust will do. */
3715 if (! args_addr)
3717 temp = push_block (size, extra, where_pad == downward);
3718 extra = 0;
3720 else if (GET_CODE (args_so_far) == CONST_INT)
3721 temp = memory_address (BLKmode,
3722 plus_constant (args_addr,
3723 skip + INTVAL (args_so_far)));
3724 else
3725 temp = memory_address (BLKmode,
3726 plus_constant (gen_rtx_PLUS (Pmode,
3727 args_addr,
3728 args_so_far),
3729 skip));
3731 if (!ACCUMULATE_OUTGOING_ARGS)
3733 /* If the source is referenced relative to the stack pointer,
3734 copy it to another register to stabilize it. We do not need
3735 to do this if we know that we won't be changing sp. */
3737 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3738 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3739 temp = copy_to_reg (temp);
3742 target = gen_rtx_MEM (BLKmode, temp);
3744 /* We do *not* set_mem_attributes here, because incoming arguments
3745 may overlap with sibling call outgoing arguments and we cannot
3746 allow reordering of reads from function arguments with stores
3747 to outgoing arguments of sibling calls. We do, however, want
3748 to record the alignment of the stack slot. */
3749 /* ALIGN may well be better aligned than TYPE, e.g. due to
3750 PARM_BOUNDARY. Assume the caller isn't lying. */
3751 set_mem_align (target, align);
3753 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3756 else if (partial > 0)
3758 /* Scalar partly in registers. */
3760 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3761 int i;
3762 int not_stack;
3763 /* Number of bytes at the start of the argument
3764 that we must make space for but need not store. */
3765 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3766 int args_offset = INTVAL (args_so_far);
3767 int skip;
3769 /* Push padding now if padding above and stack grows down,
3770 or if padding below and stack grows up.
3771 But if space already allocated, this has already been done. */
3772 if (extra && args_addr == 0
3773 && where_pad != none && where_pad != stack_direction)
3774 anti_adjust_stack (GEN_INT (extra));
3776 /* If we make space by pushing it, we might as well push
3777 the real data. Otherwise, we can leave OFFSET nonzero
3778 and leave the space uninitialized. */
3779 if (args_addr == 0)
3780 offset = 0;
3782 /* Now NOT_STACK gets the number of words that we don't need to
3783 allocate on the stack. Convert OFFSET to words too. */
3784 not_stack = (partial - offset) / UNITS_PER_WORD;
3785 offset /= UNITS_PER_WORD;
3787 /* If the partial register-part of the arg counts in its stack size,
3788 skip the part of stack space corresponding to the registers.
3789 Otherwise, start copying to the beginning of the stack space,
3790 by setting SKIP to 0. */
3791 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3793 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3794 x = validize_mem (force_const_mem (mode, x));
3796 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3797 SUBREGs of such registers are not allowed. */
3798 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3799 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3800 x = copy_to_reg (x);
3802 /* Loop over all the words allocated on the stack for this arg. */
3803 /* We can do it by words, because any scalar bigger than a word
3804 has a size that is a multiple of a word. */
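/* As a hypothetical example, assuming PARM_BOUNDARY matches the word
   size: for a scalar occupying three words of which the first two
   are passed in registers, NOT_STACK is 2 and only the third word is
   pushed by the loop below.  */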
3805 #ifndef PUSH_ARGS_REVERSED
3806 for (i = not_stack; i < size; i++)
3807 #else
3808 for (i = size - 1; i >= not_stack; i--)
3809 #endif
3810 if (i >= not_stack + offset)
3811 emit_push_insn (operand_subword_force (x, i, mode),
3812 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3813 0, args_addr,
3814 GEN_INT (args_offset + ((i - not_stack + skip)
3815 * UNITS_PER_WORD)),
3816 reg_parm_stack_space, alignment_pad);
3818 else
3820 rtx addr;
3821 rtx dest;
3823 /* Push padding now if padding above and stack grows down,
3824 or if padding below and stack grows up.
3825 But if space already allocated, this has already been done. */
3826 if (extra && args_addr == 0
3827 && where_pad != none && where_pad != stack_direction)
3828 anti_adjust_stack (GEN_INT (extra));
3830 #ifdef PUSH_ROUNDING
3831 if (args_addr == 0 && PUSH_ARGS)
3832 emit_single_push_insn (mode, x, type);
3833 else
3834 #endif
3836 if (GET_CODE (args_so_far) == CONST_INT)
3837 addr
3838 = memory_address (mode,
3839 plus_constant (args_addr,
3840 INTVAL (args_so_far)));
3841 else
3842 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3843 args_so_far));
3844 dest = gen_rtx_MEM (mode, addr);
3846 /* We do *not* set_mem_attributes here, because incoming arguments
3847 may overlap with sibling call outgoing arguments and we cannot
3848 allow reordering of reads from function arguments with stores
3849 to outgoing arguments of sibling calls. We do, however, want
3850 to record the alignment of the stack slot. */
3851 /* ALIGN may well be better aligned than TYPE, e.g. due to
3852 PARM_BOUNDARY. Assume the caller isn't lying. */
3853 set_mem_align (dest, align);
3855 emit_move_insn (dest, x);
3859 /* If part should go in registers, copy that part
3860 into the appropriate registers. Do this now, at the end,
3861 since mem-to-mem copies above may do function calls. */
3862 if (partial > 0 && reg != 0)
3864 /* Handle calls that pass values in multiple non-contiguous locations.
3865 The Irix 6 ABI has examples of this. */
3866 if (GET_CODE (reg) == PARALLEL)
3867 emit_group_load (reg, x, type, -1);
3868 else
3870 gcc_assert (partial % UNITS_PER_WORD == 0);
3871 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3875 if (extra && args_addr == 0 && where_pad == stack_direction)
3876 anti_adjust_stack (GEN_INT (extra));
3878 if (alignment_pad && args_addr == 0)
3879 anti_adjust_stack (alignment_pad);
3882 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3883 operations. */
3885 static rtx
3886 get_subtarget (rtx x)
3888 return (optimize
3889 || x == 0
3890 /* Only registers can be subtargets. */
3891 || !REG_P (x)
3892 /* Don't use hard regs to avoid extending their life. */
3893 || REGNO (x) < FIRST_PSEUDO_REGISTER
3894 ? 0 : x);
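/* In other words, X is returned only when not optimizing and X is a
   pseudo register; in all other cases 0 is returned so that the
   caller will use a fresh target.  */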
3897 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3898 FIELD is a bitfield. Returns true if the optimization was successful,
3899 and there's nothing else to do. */
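/* As an illustrative example: on a little-endian target, given
   struct { unsigned lo : 3; unsigned hi : 29; } s; the increment
   s.hi += 1 hits the "topmost bitfield" case below and becomes a
   single add on the containing word (the carry out of the top bit is
   simply lost), while a 1-bit field can be flipped with xor.  */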
3901 static bool
3902 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3903 unsigned HOST_WIDE_INT bitpos,
3904 enum machine_mode mode1, rtx str_rtx,
3905 tree to, tree src)
3907 enum machine_mode str_mode = GET_MODE (str_rtx);
3908 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3909 tree op0, op1;
3910 rtx value, result;
3911 optab binop;
3913 if (mode1 != VOIDmode
3914 || bitsize >= BITS_PER_WORD
3915 || str_bitsize > BITS_PER_WORD
3916 || TREE_SIDE_EFFECTS (to)
3917 || TREE_THIS_VOLATILE (to))
3918 return false;
3920 STRIP_NOPS (src);
3921 if (!BINARY_CLASS_P (src)
3922 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3923 return false;
3925 op0 = TREE_OPERAND (src, 0);
3926 op1 = TREE_OPERAND (src, 1);
3927 STRIP_NOPS (op0);
3929 if (!operand_equal_p (to, op0, 0))
3930 return false;
3932 if (MEM_P (str_rtx))
3934 unsigned HOST_WIDE_INT offset1;
3936 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3937 str_mode = word_mode;
3938 str_mode = get_best_mode (bitsize, bitpos,
3939 MEM_ALIGN (str_rtx), str_mode, 0);
3940 if (str_mode == VOIDmode)
3941 return false;
3942 str_bitsize = GET_MODE_BITSIZE (str_mode);
3944 offset1 = bitpos;
3945 bitpos %= str_bitsize;
3946 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3947 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3949 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3950 return false;
3952 /* If the bit field covers the whole REG/MEM, store_field
3953 will likely generate better code. */
3954 if (bitsize >= str_bitsize)
3955 return false;
3957 /* We can't handle fields split across multiple entities. */
3958 if (bitpos + bitsize > str_bitsize)
3959 return false;
3961 if (BYTES_BIG_ENDIAN)
3962 bitpos = str_bitsize - bitpos - bitsize;
3964 switch (TREE_CODE (src))
3966 case PLUS_EXPR:
3967 case MINUS_EXPR:
3968 /* For now, just optimize the case of the topmost bitfield,
3969 where we don't need to do any masking, and of 1-bit
3970 bitfields, where xor can be used.
3971 We might win by one instruction for the other bitfields
3972 too if insv/extv instructions aren't used; that
3973 can be added later. */
3974 if (bitpos + bitsize != str_bitsize
3975 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3976 break;
3978 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3979 value = convert_modes (str_mode,
3980 TYPE_MODE (TREE_TYPE (op1)), value,
3981 TYPE_UNSIGNED (TREE_TYPE (op1)));
3983 /* We may be accessing data outside the field, which means
3984 we can alias adjacent data. */
3985 if (MEM_P (str_rtx))
3987 str_rtx = shallow_copy_rtx (str_rtx);
3988 set_mem_alias_set (str_rtx, 0);
3989 set_mem_expr (str_rtx, 0);
3992 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3993 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3995 value = expand_and (str_mode, value, const1_rtx, NULL);
3996 binop = xor_optab;
3998 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3999 build_int_cst (NULL_TREE, bitpos),
4000 NULL_RTX, 1);
4001 result = expand_binop (str_mode, binop, str_rtx,
4002 value, str_rtx, 1, OPTAB_WIDEN);
4003 if (result != str_rtx)
4004 emit_move_insn (str_rtx, result);
4005 return true;
4007 case BIT_IOR_EXPR:
4008 case BIT_XOR_EXPR:
4009 if (TREE_CODE (op1) != INTEGER_CST)
4010 break;
4011 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
4012 value = convert_modes (GET_MODE (str_rtx),
4013 TYPE_MODE (TREE_TYPE (op1)), value,
4014 TYPE_UNSIGNED (TREE_TYPE (op1)));
4016 /* We may be accessing data outside the field, which means
4017 we can alias adjacent data. */
4018 if (MEM_P (str_rtx))
4020 str_rtx = shallow_copy_rtx (str_rtx);
4021 set_mem_alias_set (str_rtx, 0);
4022 set_mem_expr (str_rtx, 0);
4025 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4026 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4028 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4029 - 1);
4030 value = expand_and (GET_MODE (str_rtx), value, mask,
4031 NULL_RTX);
4033 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4034 build_int_cst (NULL_TREE, bitpos),
4035 NULL_RTX, 1);
4036 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4037 value, str_rtx, 1, OPTAB_WIDEN);
4038 if (result != str_rtx)
4039 emit_move_insn (str_rtx, result);
4040 return true;
4042 default:
4043 break;
4046 return false;
4050 /* Expand an assignment that stores the value of FROM into TO. */
4052 void
4053 expand_assignment (tree to, tree from)
4055 rtx to_rtx = 0;
4056 rtx result;
4058 /* Don't crash if the lhs of the assignment was erroneous. */
4059 if (TREE_CODE (to) == ERROR_MARK)
4061 result = expand_normal (from);
4062 return;
4065 /* Optimize away no-op moves without side-effects. */
4066 if (operand_equal_p (to, from, 0))
4067 return;
4069 /* Assignment of a structure component needs special treatment
4070 if the structure component's rtx is not simply a MEM.
4071 Assignment of an array element at a constant index, and assignment of
4072 an array element in an unaligned packed structure field, has the same
4073 problem. */
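/* E.g., stores such as x.f = v where F is a bit-field, or a[i] = v
   into a packed structure, take this path so that get_inner_reference
   can work out the enclosing object and the store can then go through
   store_field or the bit-field routines.  */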
4074 if (handled_component_p (to)
4075 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4077 enum machine_mode mode1;
4078 HOST_WIDE_INT bitsize, bitpos;
4079 tree offset;
4080 int unsignedp;
4081 int volatilep = 0;
4082 tree tem;
4084 push_temp_slots ();
4085 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4086 &unsignedp, &volatilep, true);
4088 /* If we are going to use store_bit_field and extract_bit_field,
4089 make sure to_rtx will be safe for multiple use. */
4091 to_rtx = expand_normal (tem);
4093 if (offset != 0)
4095 rtx offset_rtx;
4097 if (!MEM_P (to_rtx))
4099 /* We can get constant negative offsets into arrays with broken
4100 user code. Translate this to a trap instead of ICEing. */
4101 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4102 expand_builtin_trap ();
4103 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4106 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4107 #ifdef POINTERS_EXTEND_UNSIGNED
4108 if (GET_MODE (offset_rtx) != Pmode)
4109 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4110 #else
4111 if (GET_MODE (offset_rtx) != ptr_mode)
4112 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4113 #endif
4115 /* A constant address in TO_RTX can have VOIDmode; we must not
4116 call force_reg in that case, so avoid it. */
4117 if (MEM_P (to_rtx)
4118 && GET_MODE (to_rtx) == BLKmode
4119 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4120 && bitsize > 0
4121 && (bitpos % bitsize) == 0
4122 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4123 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4125 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4126 bitpos = 0;
4129 to_rtx = offset_address (to_rtx, offset_rtx,
4130 highest_pow2_factor_for_target (to,
4131 offset));
4134 /* Handle expand_expr of a complex value returning a CONCAT. */
4135 if (GET_CODE (to_rtx) == CONCAT)
4137 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4139 gcc_assert (bitpos == 0);
4140 result = store_expr (from, to_rtx, false);
4142 else
4144 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4145 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
4148 else
4150 if (MEM_P (to_rtx))
4152 /* If the field is at offset zero, we could have been given the
4153 DECL_RTX of the parent struct. Don't munge it. */
4154 to_rtx = shallow_copy_rtx (to_rtx);
4156 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4158 /* Deal with volatile and readonly fields. The former is only
4159 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4160 if (volatilep)
4161 MEM_VOLATILE_P (to_rtx) = 1;
4162 if (component_uses_parent_alias_set (to))
4163 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4166 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4167 to_rtx, to, from))
4168 result = NULL;
4169 else
4170 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4171 TREE_TYPE (tem), get_alias_set (to));
4174 if (result)
4175 preserve_temp_slots (result);
4176 free_temp_slots ();
4177 pop_temp_slots ();
4178 return;
4181 /* If the rhs is a function call and its value is not an aggregate,
4182 call the function before we start to compute the lhs.
4183 This is needed for correct code for cases such as
4184 val = setjmp (buf) on machines where reference to val
4185 requires loading up part of an address in a separate insn.
4187 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is a REG,
4188 since it might be a promoted variable where the zero- or sign-extension
4189 needs to be done. Handling this in the normal way is safe because no
4190 computation is done before the call. */
4191 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4192 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4193 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4194 && REG_P (DECL_RTL (to))))
4196 rtx value;
4198 push_temp_slots ();
4199 value = expand_normal (from);
4200 if (to_rtx == 0)
4201 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4203 /* Handle calls that return values in multiple non-contiguous locations.
4204 The Irix 6 ABI has examples of this. */
4205 if (GET_CODE (to_rtx) == PARALLEL)
4206 emit_group_load (to_rtx, value, TREE_TYPE (from),
4207 int_size_in_bytes (TREE_TYPE (from)));
4208 else if (GET_MODE (to_rtx) == BLKmode)
4209 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4210 else
4212 if (POINTER_TYPE_P (TREE_TYPE (to)))
4213 value = convert_memory_address (GET_MODE (to_rtx), value);
4214 emit_move_insn (to_rtx, value);
4216 preserve_temp_slots (to_rtx);
4217 free_temp_slots ();
4218 pop_temp_slots ();
4219 return;
4222 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4223 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4225 if (to_rtx == 0)
4226 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4228 /* Don't move directly into a return register. */
4229 if (TREE_CODE (to) == RESULT_DECL
4230 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4232 rtx temp;
4234 push_temp_slots ();
4235 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4237 if (GET_CODE (to_rtx) == PARALLEL)
4238 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4239 int_size_in_bytes (TREE_TYPE (from)));
4240 else
4241 emit_move_insn (to_rtx, temp);
4243 preserve_temp_slots (to_rtx);
4244 free_temp_slots ();
4245 pop_temp_slots ();
4246 return;
4249 /* In case we are returning the contents of an object which overlaps
4250 the place the value is being stored, use a safe function when copying
4251 a value through a pointer into a structure value return block. */
4252 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4253 && current_function_returns_struct
4254 && !current_function_returns_pcc_struct)
4256 rtx from_rtx, size;
4258 push_temp_slots ();
4259 size = expr_size (from);
4260 from_rtx = expand_normal (from);
4262 emit_library_call (memmove_libfunc, LCT_NORMAL,
4263 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4264 XEXP (from_rtx, 0), Pmode,
4265 convert_to_mode (TYPE_MODE (sizetype),
4266 size, TYPE_UNSIGNED (sizetype)),
4267 TYPE_MODE (sizetype));
4269 preserve_temp_slots (to_rtx);
4270 free_temp_slots ();
4271 pop_temp_slots ();
4272 return;
4275 /* Compute FROM and store the value in the rtx we got. */
4277 push_temp_slots ();
4278 result = store_expr (from, to_rtx, 0);
4279 preserve_temp_slots (result);
4280 free_temp_slots ();
4281 pop_temp_slots ();
4282 return;
4285 /* Generate code for computing expression EXP,
4286 and storing the value into TARGET.
4288 If the mode is BLKmode then we may return TARGET itself.
4289 It turns out that in BLKmode this doesn't cause a problem,
4290 because C has no operators that could combine two different
4291 assignments into the same BLKmode object with different values
4292 and no intervening sequence point. Will other languages need
4293 this to be more thorough?
4295 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4296 stack, and block moves may need to be treated specially. */
4298 rtx
4299 store_expr (tree exp, rtx target, int call_param_p)
4301 rtx temp;
4302 rtx alt_rtl = NULL_RTX;
4303 int dont_return_target = 0;
4305 if (VOID_TYPE_P (TREE_TYPE (exp)))
4307 /* C++ can generate ?: expressions with a throw expression in one
4308 branch and an rvalue in the other. Here, we resolve attempts to
4309 store the throw expression's nonexistent result. */
4310 gcc_assert (!call_param_p);
4311 expand_expr (exp, const0_rtx, VOIDmode, 0);
4312 return NULL_RTX;
4314 if (TREE_CODE (exp) == COMPOUND_EXPR)
4316 /* Perform first part of compound expression, then assign from second
4317 part. */
4318 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4319 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4320 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4322 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4324 /* For conditional expression, get safe form of the target. Then
4325 test the condition, doing the appropriate assignment on either
4326 side. This avoids the creation of unnecessary temporaries.
4327 For non-BLKmode, it is more efficient not to do this. */
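/* E.g., for dst = cond ? a : b where DST has BLKmode (a large
   struct, say), this expands to a test and a direct block store into
   DST on each branch, rather than building the value in a temporary
   and copying it afterwards.  */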
4329 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4331 do_pending_stack_adjust ();
4332 NO_DEFER_POP;
4333 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4334 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4335 emit_jump_insn (gen_jump (lab2));
4336 emit_barrier ();
4337 emit_label (lab1);
4338 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4339 emit_label (lab2);
4340 OK_DEFER_POP;
4342 return NULL_RTX;
4344 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4345 /* If this is a scalar in a register that is stored in a wider mode
4346 than the declared mode, compute the result into its declared mode
4347 and then convert to the wider mode. Our value is the computed
4348 expression. */
4350 rtx inner_target = 0;
4352 /* We can do the conversion inside EXP, which will often result
4353 in some optimizations. Do the conversion in two steps: first
4354 change the signedness, if needed, then the extend. But don't
4355 do this if the type of EXP is a subtype of something else
4356 since then the conversion might involve more than just
4357 converting modes. */
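/* For instance, for a signed short variable promoted to an SImode
   pseudo, the RHS is first converted to match the signedness that
   the promoted register expects and then to the wider SImode type,
   so the extension can usually be folded into the expansion of EXP
   itself.  */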
4358 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4359 && TREE_TYPE (TREE_TYPE (exp)) == 0
4360 && (!lang_hooks.reduce_bit_field_operations
4361 || (GET_MODE_PRECISION (GET_MODE (target))
4362 == TYPE_PRECISION (TREE_TYPE (exp)))))
4364 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4365 != SUBREG_PROMOTED_UNSIGNED_P (target))
4366 exp = fold_convert
4367 (lang_hooks.types.signed_or_unsigned_type
4368 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4370 exp = fold_convert (lang_hooks.types.type_for_mode
4371 (GET_MODE (SUBREG_REG (target)),
4372 SUBREG_PROMOTED_UNSIGNED_P (target)),
4373 exp);
4375 inner_target = SUBREG_REG (target);
4378 temp = expand_expr (exp, inner_target, VOIDmode,
4379 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4381 /* If TEMP is a VOIDmode constant, use convert_modes to make
4382 sure that we properly convert it. */
4383 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4385 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4386 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4387 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4388 GET_MODE (target), temp,
4389 SUBREG_PROMOTED_UNSIGNED_P (target));
4392 convert_move (SUBREG_REG (target), temp,
4393 SUBREG_PROMOTED_UNSIGNED_P (target));
4395 return NULL_RTX;
4397 else
4399 temp = expand_expr_real (exp, target, GET_MODE (target),
4400 (call_param_p
4401 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4402 &alt_rtl);
4403 /* Return TARGET if it's a specified hardware register.
4404 If TARGET is a volatile mem ref, either return TARGET
4405 or return a reg copied *from* TARGET; ANSI requires this.
4407 Otherwise, if TEMP is not TARGET, return TEMP
4408 if it is constant (for efficiency),
4409 or if we really want the correct value. */
4410 if (!(target && REG_P (target)
4411 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4412 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4413 && ! rtx_equal_p (temp, target)
4414 && CONSTANT_P (temp))
4415 dont_return_target = 1;
4418 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4419 the same as that of TARGET, adjust the constant. This is needed, for
4420 example, in case it is a CONST_DOUBLE and we want only a word-sized
4421 value. */
4422 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4423 && TREE_CODE (exp) != ERROR_MARK
4424 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4425 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4426 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4428 /* If value was not generated in the target, store it there.
4429 Convert the value to TARGET's type first if necessary and emit the
4430 pending incrementations that have been queued when expanding EXP.
4431 Note that we cannot emit the whole queue blindly because this will
4432 effectively disable the POST_INC optimization later.
4434 If TEMP and TARGET compare equal according to rtx_equal_p, but
4435 one or both of them are volatile memory refs, we have to distinguish
4436 two cases:
4437 - expand_expr has used TARGET. In this case, we must not generate
4438 another copy. This can be detected by TARGET being equal according
4439 to == .
4440 - expand_expr has not used TARGET - that means that the source just
4441 happens to have the same RTX form. Since temp will have been created
4442 by expand_expr, it will compare unequal according to == .
4443 We must generate a copy in this case, to reach the correct number
4444 of volatile memory references. */
4446 if ((! rtx_equal_p (temp, target)
4447 || (temp != target && (side_effects_p (temp)
4448 || side_effects_p (target))))
4449 && TREE_CODE (exp) != ERROR_MARK
4450 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4451 but TARGET is not a valid memory reference, TEMP will differ
4452 from TARGET although it is really the same location. */
4453 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4454 /* If there's nothing to copy, don't bother. Don't call
4455 expr_size unless necessary, because the expr_size hook of some
4456 front-ends (C++) must not be given objects that are not
4457 supposed to be bit-copied or bit-initialized. */
4458 && expr_size (exp) != const0_rtx)
4460 if (GET_MODE (temp) != GET_MODE (target)
4461 && GET_MODE (temp) != VOIDmode)
4463 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4464 if (dont_return_target)
4466 /* In this case, we will return TEMP,
4467 so make sure it has the proper mode.
4468 But don't forget to store the value into TARGET. */
4469 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4470 emit_move_insn (target, temp);
4472 else
4473 convert_move (target, temp, unsignedp);
4476 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4478 /* Handle copying a string constant into an array. The string
4479 constant may be shorter than the array. So copy just the string's
4480 actual length, and clear the rest. First get the size of the data
4481 type of the string, which is actually the size of the target. */
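/* E.g., for char buf[8] = "hi"; only the 3 string bytes (including
   the terminating NUL) are copied and the remaining 5 bytes of BUF
   are cleared.  */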
4482 rtx size = expr_size (exp);
4484 if (GET_CODE (size) == CONST_INT
4485 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4486 emit_block_move (target, temp, size,
4487 (call_param_p
4488 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4489 else
4491 /* Compute the size of the data to copy from the string. */
4492 tree copy_size
4493 = size_binop (MIN_EXPR,
4494 make_tree (sizetype, size),
4495 size_int (TREE_STRING_LENGTH (exp)));
4496 rtx copy_size_rtx
4497 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4498 (call_param_p
4499 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4500 rtx label = 0;
4502 /* Copy that much. */
4503 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4504 TYPE_UNSIGNED (sizetype));
4505 emit_block_move (target, temp, copy_size_rtx,
4506 (call_param_p
4507 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4509 /* Figure out how much is left in TARGET that we have to clear.
4510 Do all calculations in ptr_mode. */
4511 if (GET_CODE (copy_size_rtx) == CONST_INT)
4513 size = plus_constant (size, -INTVAL (copy_size_rtx));
4514 target = adjust_address (target, BLKmode,
4515 INTVAL (copy_size_rtx));
4517 else
4519 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4520 copy_size_rtx, NULL_RTX, 0,
4521 OPTAB_LIB_WIDEN);
4523 #ifdef POINTERS_EXTEND_UNSIGNED
4524 if (GET_MODE (copy_size_rtx) != Pmode)
4525 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4526 TYPE_UNSIGNED (sizetype));
4527 #endif
4529 target = offset_address (target, copy_size_rtx,
4530 highest_pow2_factor (copy_size));
4531 label = gen_label_rtx ();
4532 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4533 GET_MODE (size), 0, label);
4536 if (size != const0_rtx)
4537 clear_storage (target, size, BLOCK_OP_NORMAL);
4539 if (label)
4540 emit_label (label);
4543 /* Handle calls that return values in multiple non-contiguous locations.
4544 The Irix 6 ABI has examples of this. */
4545 else if (GET_CODE (target) == PARALLEL)
4546 emit_group_load (target, temp, TREE_TYPE (exp),
4547 int_size_in_bytes (TREE_TYPE (exp)));
4548 else if (GET_MODE (temp) == BLKmode)
4549 emit_block_move (target, temp, expr_size (exp),
4550 (call_param_p
4551 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4552 else
4554 temp = force_operand (temp, target);
4555 if (temp != target)
4556 emit_move_insn (target, temp);
4560 return NULL_RTX;
4563 /* Helper for categorize_ctor_elements. Identical interface. */
4565 static bool
4566 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4567 HOST_WIDE_INT *p_elt_count,
4568 bool *p_must_clear)
4570 unsigned HOST_WIDE_INT idx;
4571 HOST_WIDE_INT nz_elts, elt_count;
4572 tree value, purpose;
4574 /* Whether CTOR is a valid constant initializer, in accordance with what
4575 initializer_constant_valid_p does. If inferred from the constructor
4576 elements, true until proven otherwise. */
4577 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4578 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4580 nz_elts = 0;
4581 elt_count = 0;
4583 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4585 HOST_WIDE_INT mult;
4587 mult = 1;
4588 if (TREE_CODE (purpose) == RANGE_EXPR)
4590 tree lo_index = TREE_OPERAND (purpose, 0);
4591 tree hi_index = TREE_OPERAND (purpose, 1);
4593 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4594 mult = (tree_low_cst (hi_index, 1)
4595 - tree_low_cst (lo_index, 1) + 1);
4598 switch (TREE_CODE (value))
4600 case CONSTRUCTOR:
4602 HOST_WIDE_INT nz = 0, ic = 0;
4604 bool const_elt_p
4605 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4607 nz_elts += mult * nz;
4608 elt_count += mult * ic;
4610 if (const_from_elts_p && const_p)
4611 const_p = const_elt_p;
4613 break;
4615 case INTEGER_CST:
4616 case REAL_CST:
4617 if (!initializer_zerop (value))
4618 nz_elts += mult;
4619 elt_count += mult;
4620 break;
4622 case STRING_CST:
4623 nz_elts += mult * TREE_STRING_LENGTH (value);
4624 elt_count += mult * TREE_STRING_LENGTH (value);
4625 break;
4627 case COMPLEX_CST:
4628 if (!initializer_zerop (TREE_REALPART (value)))
4629 nz_elts += mult;
4630 if (!initializer_zerop (TREE_IMAGPART (value)))
4631 nz_elts += mult;
4632 elt_count += mult;
4633 break;
4635 case VECTOR_CST:
4637 tree v;
4638 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4640 if (!initializer_zerop (TREE_VALUE (v)))
4641 nz_elts += mult;
4642 elt_count += mult;
4645 break;
4647 default:
4648 nz_elts += mult;
4649 elt_count += mult;
4651 if (const_from_elts_p && const_p)
4652 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4653 != NULL_TREE;
4654 break;
4658 if (!*p_must_clear
4659 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4660 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4662 tree init_sub_type;
4663 bool clear_this = true;
4665 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4667 /* We don't expect more than one element of the union to be
4668 initialized. Not sure what we should do otherwise... */
4669 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4670 == 1);
4672 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4673 CONSTRUCTOR_ELTS (ctor),
4674 0)->value);
4676 /* ??? We could look at each element of the union and find the
4677 largest element, which would avoid comparing the size of the
4678 initialized element against any tail padding in the union.
4679 Doesn't seem worth the effort... */
4680 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4681 TYPE_SIZE (init_sub_type)) == 1)
4683 /* And now we have to find out if the element itself is fully
4684 constructed. E.g. for union { struct { int a, b; } s; } u
4685 = { .s = { .a = 1 } }. */
4686 if (elt_count == count_type_elements (init_sub_type, false))
4687 clear_this = false;
4691 *p_must_clear = clear_this;
4694 *p_nz_elts += nz_elts;
4695 *p_elt_count += elt_count;
4697 return const_p;
4700 /* Examine CTOR to discover:
4701 * how many scalar fields are set to nonzero values,
4702 and place that count in *P_NZ_ELTS;
4703 * how many scalar fields in total are in CTOR,
4704 and place that count in *P_ELT_COUNT;
4705 * whether the constructor is for a union whose initializer is not
4706 the largest element in the union, in which case set *P_MUST_CLEAR.
4708 Return whether or not CTOR is a valid static constant initializer, the same
4709 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4711 bool
4712 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4713 HOST_WIDE_INT *p_elt_count,
4714 bool *p_must_clear)
4716 *p_nz_elts = 0;
4717 *p_elt_count = 0;
4718 *p_must_clear = false;
4720 return
4721 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4724 /* Count the number of scalars in TYPE. Return -1 on overflow or
4725 if TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count
4726 a flexible array member at the end of the structure. */
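/* For example, struct { int x; int y[3]; } counts as 4 scalars and a
   COMPLEX_TYPE counts as 2; unions are only estimated from their
   size in words.  */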
4728 HOST_WIDE_INT
4729 count_type_elements (tree type, bool allow_flexarr)
4731 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4732 switch (TREE_CODE (type))
4734 case ARRAY_TYPE:
4736 tree telts = array_type_nelts (type);
4737 if (telts && host_integerp (telts, 1))
4739 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4740 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4741 if (n == 0)
4742 return 0;
4743 else if (max / n > m)
4744 return n * m;
4746 return -1;
4749 case RECORD_TYPE:
4751 HOST_WIDE_INT n = 0, t;
4752 tree f;
4754 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4755 if (TREE_CODE (f) == FIELD_DECL)
4757 t = count_type_elements (TREE_TYPE (f), false);
4758 if (t < 0)
4760 /* Check for structures with flexible array member. */
4761 tree tf = TREE_TYPE (f);
4762 if (allow_flexarr
4763 && TREE_CHAIN (f) == NULL
4764 && TREE_CODE (tf) == ARRAY_TYPE
4765 && TYPE_DOMAIN (tf)
4766 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4767 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4768 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4769 && int_size_in_bytes (type) >= 0)
4770 break;
4772 return -1;
4774 n += t;
4777 return n;
4780 case UNION_TYPE:
4781 case QUAL_UNION_TYPE:
4783 /* Ho hum. How in the world do we guess here? Clearly it isn't
4784 right to count the fields. Guess based on the number of words. */
4785 HOST_WIDE_INT n = int_size_in_bytes (type);
4786 if (n < 0)
4787 return -1;
4788 return n / UNITS_PER_WORD;
4791 case COMPLEX_TYPE:
4792 return 2;
4794 case VECTOR_TYPE:
4795 return TYPE_VECTOR_SUBPARTS (type);
4797 case INTEGER_TYPE:
4798 case REAL_TYPE:
4799 case ENUMERAL_TYPE:
4800 case BOOLEAN_TYPE:
4801 case POINTER_TYPE:
4802 case OFFSET_TYPE:
4803 case REFERENCE_TYPE:
4804 return 1;
4806 case VOID_TYPE:
4807 case METHOD_TYPE:
4808 case FUNCTION_TYPE:
4809 case LANG_TYPE:
4810 default:
4811 gcc_unreachable ();
4815 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
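/* E.g., int a[8] = { 1 }; has one nonzero element out of eight, so
   it qualifies and store_constructor will clear the whole array
   before storing the single element.  */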
4817 static int
4818 mostly_zeros_p (tree exp)
4820 if (TREE_CODE (exp) == CONSTRUCTOR)
4823 HOST_WIDE_INT nz_elts, count, elts;
4824 bool must_clear;
4826 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4827 if (must_clear)
4828 return 1;
4830 elts = count_type_elements (TREE_TYPE (exp), false);
4832 return nz_elts < elts / 4;
4835 return initializer_zerop (exp);
4838 /* Return 1 if EXP contains all zeros. */
4840 static int
4841 all_zeros_p (tree exp)
4843 if (TREE_CODE (exp) == CONSTRUCTOR)
4846 HOST_WIDE_INT nz_elts, count;
4847 bool must_clear;
4849 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4850 return nz_elts == 0;
4853 return initializer_zerop (exp);
4856 /* Helper function for store_constructor.
4857 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4858 TYPE is the type of the CONSTRUCTOR, not the element type.
4859 CLEARED is as for store_constructor.
4860 ALIAS_SET is the alias set to use for any stores.
4862 This provides a recursive shortcut back to store_constructor when it isn't
4863 necessary to go through store_field. This is so that we can pass through
4864 the cleared field to let store_constructor know that we may not have to
4865 clear a substructure if the outer structure has already been cleared. */
4867 static void
4868 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4869 HOST_WIDE_INT bitpos, enum machine_mode mode,
4870 tree exp, tree type, int cleared, int alias_set)
4872 if (TREE_CODE (exp) == CONSTRUCTOR
4873 /* We can only call store_constructor recursively if the size and
4874 bit position are on a byte boundary. */
4875 && bitpos % BITS_PER_UNIT == 0
4876 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4877 /* If we have a nonzero bitpos for a register target, then we just
4878 let store_field do the bitfield handling. This is unlikely to
4879 generate unnecessary clear instructions anyway. */
4880 && (bitpos == 0 || MEM_P (target)))
4882 if (MEM_P (target))
4883 target
4884 = adjust_address (target,
4885 GET_MODE (target) == BLKmode
4886 || 0 != (bitpos
4887 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4888 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4891 /* Update the alias set, if required. */
4892 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4893 && MEM_ALIAS_SET (target) != 0)
4895 target = copy_rtx (target);
4896 set_mem_alias_set (target, alias_set);
4899 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4901 else
4902 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4905 /* Store the value of constructor EXP into the rtx TARGET.
4906 TARGET is either a REG or a MEM; we know it cannot conflict, since
4907 safe_from_p has been called.
4908 CLEARED is true if TARGET is known to have been zero'd.
4909 SIZE is the number of bytes of TARGET we are allowed to modify: this
4910 may not be the same as the size of EXP if we are assigning to a field
4911 which has been packed to exclude padding bits. */
4913 static void
4914 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4916 tree type = TREE_TYPE (exp);
4917 #ifdef WORD_REGISTER_OPERATIONS
4918 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4919 #endif
4921 switch (TREE_CODE (type))
4923 case RECORD_TYPE:
4924 case UNION_TYPE:
4925 case QUAL_UNION_TYPE:
4927 unsigned HOST_WIDE_INT idx;
4928 tree field, value;
4930 /* If size is zero or the target is already cleared, do nothing. */
4931 if (size == 0 || cleared)
4932 cleared = 1;
4933 /* We either clear the aggregate or indicate the value is dead. */
4934 else if ((TREE_CODE (type) == UNION_TYPE
4935 || TREE_CODE (type) == QUAL_UNION_TYPE)
4936 && ! CONSTRUCTOR_ELTS (exp))
4937 /* If the constructor is empty, clear the union. */
4939 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4940 cleared = 1;
4943 /* If we are building a static constructor into a register,
4944 set the initial value as zero so we can fold the value into
4945 a constant. But if more than one register is involved,
4946 this probably loses. */
4947 else if (REG_P (target) && TREE_STATIC (exp)
4948 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4950 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4951 cleared = 1;
4954 /* If the constructor has fewer fields than the structure or
4955 if we are initializing the structure to mostly zeros, clear
4956 the whole structure first. Don't do this if TARGET is a
4957 register whose mode size isn't equal to SIZE since
4958 clear_storage can't handle this case. */
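/* For instance, struct S { int a, b, c, d; } s = { 1 }; has fewer
   initializers than fields, so the whole object is cleared first and
   then only A is stored.  */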
4959 else if (size > 0
4960 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4961 != fields_length (type))
4962 || mostly_zeros_p (exp))
4963 && (!REG_P (target)
4964 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4965 == size)))
4967 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4968 cleared = 1;
4971 if (! cleared)
4972 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4974 /* Store each element of the constructor into the
4975 corresponding field of TARGET. */
4976 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4978 enum machine_mode mode;
4979 HOST_WIDE_INT bitsize;
4980 HOST_WIDE_INT bitpos = 0;
4981 tree offset;
4982 rtx to_rtx = target;
4984 /* Just ignore missing fields. We cleared the whole
4985 structure, above, if any fields are missing. */
4986 if (field == 0)
4987 continue;
4989 if (cleared && initializer_zerop (value))
4990 continue;
4992 if (host_integerp (DECL_SIZE (field), 1))
4993 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4994 else
4995 bitsize = -1;
4997 mode = DECL_MODE (field);
4998 if (DECL_BIT_FIELD (field))
4999 mode = VOIDmode;
5001 offset = DECL_FIELD_OFFSET (field);
5002 if (host_integerp (offset, 0)
5003 && host_integerp (bit_position (field), 0))
5005 bitpos = int_bit_position (field);
5006 offset = 0;
5008 else
5009 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5011 if (offset)
5013 rtx offset_rtx;
5015 offset
5016 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5017 make_tree (TREE_TYPE (exp),
5018 target));
5020 offset_rtx = expand_normal (offset);
5021 gcc_assert (MEM_P (to_rtx));
5023 #ifdef POINTERS_EXTEND_UNSIGNED
5024 if (GET_MODE (offset_rtx) != Pmode)
5025 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5026 #else
5027 if (GET_MODE (offset_rtx) != ptr_mode)
5028 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5029 #endif
5031 to_rtx = offset_address (to_rtx, offset_rtx,
5032 highest_pow2_factor (offset));
5035 #ifdef WORD_REGISTER_OPERATIONS
5036 /* If this initializes a field that is smaller than a
5037 word, at the start of a word, try to widen it to a full
5038 word. This special case allows us to output C++ member
5039 function initializations in a form that the optimizers
5040 can understand. */
5041 if (REG_P (target)
5042 && bitsize < BITS_PER_WORD
5043 && bitpos % BITS_PER_WORD == 0
5044 && GET_MODE_CLASS (mode) == MODE_INT
5045 && TREE_CODE (value) == INTEGER_CST
5046 && exp_size >= 0
5047 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5049 tree type = TREE_TYPE (value);
5051 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5053 type = lang_hooks.types.type_for_size
5054 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5055 value = fold_convert (type, value);
5058 if (BYTES_BIG_ENDIAN)
5059 value
5060 = fold_build2 (LSHIFT_EXPR, type, value,
5061 build_int_cst (type,
5062 BITS_PER_WORD - bitsize));
5063 bitsize = BITS_PER_WORD;
5064 mode = word_mode;
5066 #endif
5068 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5069 && DECL_NONADDRESSABLE_P (field))
5071 to_rtx = copy_rtx (to_rtx);
5072 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5075 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5076 value, type, cleared,
5077 get_alias_set (TREE_TYPE (field)));
5079 break;
5081 case ARRAY_TYPE:
5083 tree value, index;
5084 unsigned HOST_WIDE_INT i;
5085 int need_to_clear;
5086 tree domain;
5087 tree elttype = TREE_TYPE (type);
5088 int const_bounds_p;
5089 HOST_WIDE_INT minelt = 0;
5090 HOST_WIDE_INT maxelt = 0;
5092 domain = TYPE_DOMAIN (type);
5093 const_bounds_p = (TYPE_MIN_VALUE (domain)
5094 && TYPE_MAX_VALUE (domain)
5095 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5096 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5098 /* If we have constant bounds for the range of the type, get them. */
5099 if (const_bounds_p)
5101 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5102 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5105 /* If the constructor has fewer elements than the array, clear
5106 the whole array first. Similarly if this is a static
5107 constructor of a non-BLKmode object. */
5108 if (cleared)
5109 need_to_clear = 0;
5110 else if (REG_P (target) && TREE_STATIC (exp))
5111 need_to_clear = 1;
5112 else
5114 unsigned HOST_WIDE_INT idx;
5115 tree index, value;
5116 HOST_WIDE_INT count = 0, zero_count = 0;
5117 need_to_clear = ! const_bounds_p;
5119 /* This loop is a more accurate version of the loop in
5120 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5121 is also needed to check for missing elements. */
5122 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5124 HOST_WIDE_INT this_node_count;
5126 if (need_to_clear)
5127 break;
5129 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5131 tree lo_index = TREE_OPERAND (index, 0);
5132 tree hi_index = TREE_OPERAND (index, 1);
5134 if (! host_integerp (lo_index, 1)
5135 || ! host_integerp (hi_index, 1))
5137 need_to_clear = 1;
5138 break;
5141 this_node_count = (tree_low_cst (hi_index, 1)
5142 - tree_low_cst (lo_index, 1) + 1);
5144 else
5145 this_node_count = 1;
5147 count += this_node_count;
5148 if (mostly_zeros_p (value))
5149 zero_count += this_node_count;
5152 /* Clear the entire array first if there are any missing
5153 elements, or if the incidence of zero elements is >=
5154 75%. */
5155 if (! need_to_clear
5156 && (count < maxelt - minelt + 1
5157 || 4 * zero_count >= 3 * count))
5158 need_to_clear = 1;
5161 if (need_to_clear && size > 0)
5163 if (REG_P (target))
5164 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5165 else
5166 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5167 cleared = 1;
5170 if (!cleared && REG_P (target))
5171 /* Inform later passes that the old value is dead. */
5172 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5174 /* Store each element of the constructor into the
5175 corresponding element of TARGET, determined by counting the
5176 elements. */
5177 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5179 enum machine_mode mode;
5180 HOST_WIDE_INT bitsize;
5181 HOST_WIDE_INT bitpos;
5182 int unsignedp;
5183 rtx xtarget = target;
5185 if (cleared && initializer_zerop (value))
5186 continue;
5188 unsignedp = TYPE_UNSIGNED (elttype);
5189 mode = TYPE_MODE (elttype);
5190 if (mode == BLKmode)
5191 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5192 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5193 : -1);
5194 else
5195 bitsize = GET_MODE_BITSIZE (mode);
5197 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5199 tree lo_index = TREE_OPERAND (index, 0);
5200 tree hi_index = TREE_OPERAND (index, 1);
5201 rtx index_r, pos_rtx;
5202 HOST_WIDE_INT lo, hi, count;
5203 tree position;
5205 /* If the range is constant and "small", unroll the loop. */
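/* E.g., a constant RANGE_EXPR initializer such as
   int a[4] = { [0 ... 3] = 7 }; is small enough to be expanded as
   four individual stores instead of a runtime loop.  */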
5206 if (const_bounds_p
5207 && host_integerp (lo_index, 0)
5208 && host_integerp (hi_index, 0)
5209 && (lo = tree_low_cst (lo_index, 0),
5210 hi = tree_low_cst (hi_index, 0),
5211 count = hi - lo + 1,
5212 (!MEM_P (target)
5213 || count <= 2
5214 || (host_integerp (TYPE_SIZE (elttype), 1)
5215 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5216 <= 40 * 8)))))
5218 lo -= minelt; hi -= minelt;
5219 for (; lo <= hi; lo++)
5221 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5223 if (MEM_P (target)
5224 && !MEM_KEEP_ALIAS_SET_P (target)
5225 && TREE_CODE (type) == ARRAY_TYPE
5226 && TYPE_NONALIASED_COMPONENT (type))
5228 target = copy_rtx (target);
5229 MEM_KEEP_ALIAS_SET_P (target) = 1;
5232 store_constructor_field
5233 (target, bitsize, bitpos, mode, value, type, cleared,
5234 get_alias_set (elttype));
5237 else
5239 rtx loop_start = gen_label_rtx ();
5240 rtx loop_end = gen_label_rtx ();
5241 tree exit_cond;
5243 expand_normal (hi_index);
5244 unsignedp = TYPE_UNSIGNED (domain);
5246 index = build_decl (VAR_DECL, NULL_TREE, domain);
5248 index_r
5249 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5250 &unsignedp, 0));
5251 SET_DECL_RTL (index, index_r);
5252 store_expr (lo_index, index_r, 0);
5254 /* Build the head of the loop. */
5255 do_pending_stack_adjust ();
5256 emit_label (loop_start);
5258 /* Assign value to element index. */
5259 position =
5260 fold_convert (ssizetype,
5261 fold_build2 (MINUS_EXPR,
5262 TREE_TYPE (index),
5263 index,
5264 TYPE_MIN_VALUE (domain)));
5266 position =
5267 size_binop (MULT_EXPR, position,
5268 fold_convert (ssizetype,
5269 TYPE_SIZE_UNIT (elttype)));
5271 pos_rtx = expand_normal (position);
5272 xtarget = offset_address (target, pos_rtx,
5273 highest_pow2_factor (position));
5274 xtarget = adjust_address (xtarget, mode, 0);
5275 if (TREE_CODE (value) == CONSTRUCTOR)
5276 store_constructor (value, xtarget, cleared,
5277 bitsize / BITS_PER_UNIT);
5278 else
5279 store_expr (value, xtarget, 0);
5281 /* Generate a conditional jump to exit the loop. */
5282 exit_cond = build2 (LT_EXPR, integer_type_node,
5283 index, hi_index);
5284 jumpif (exit_cond, loop_end);
5286 /* Update the loop counter, and jump to the head of
5287 the loop. */
5288 expand_assignment (index,
5289 build2 (PLUS_EXPR, TREE_TYPE (index),
5290 index, integer_one_node));
5292 emit_jump (loop_start);
5294 /* Build the end of the loop. */
5295 emit_label (loop_end);
5298 else if ((index != 0 && ! host_integerp (index, 0))
5299 || ! host_integerp (TYPE_SIZE (elttype), 1))
5301 tree position;
5303 if (index == 0)
5304 index = ssize_int (1);
5306 if (minelt)
5307 index = fold_convert (ssizetype,
5308 fold_build2 (MINUS_EXPR,
5309 TREE_TYPE (index),
5310 index,
5311 TYPE_MIN_VALUE (domain)));
5313 position =
5314 size_binop (MULT_EXPR, index,
5315 fold_convert (ssizetype,
5316 TYPE_SIZE_UNIT (elttype)));
5317 xtarget = offset_address (target,
5318 expand_normal (position),
5319 highest_pow2_factor (position));
5320 xtarget = adjust_address (xtarget, mode, 0);
5321 store_expr (value, xtarget, 0);
5323 else
5325 if (index != 0)
5326 bitpos = ((tree_low_cst (index, 0) - minelt)
5327 * tree_low_cst (TYPE_SIZE (elttype), 1));
5328 else
5329 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5331 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5332 && TREE_CODE (type) == ARRAY_TYPE
5333 && TYPE_NONALIASED_COMPONENT (type))
5335 target = copy_rtx (target);
5336 MEM_KEEP_ALIAS_SET_P (target) = 1;
5338 store_constructor_field (target, bitsize, bitpos, mode, value,
5339 type, cleared, get_alias_set (elttype));
5342 break;
5345 case VECTOR_TYPE:
5347 unsigned HOST_WIDE_INT idx;
5348 constructor_elt *ce;
5349 int i;
5350 int need_to_clear;
5351 int icode = 0;
5352 tree elttype = TREE_TYPE (type);
5353 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5354 enum machine_mode eltmode = TYPE_MODE (elttype);
5355 HOST_WIDE_INT bitsize;
5356 HOST_WIDE_INT bitpos;
5357 rtvec vector = NULL;
5358 unsigned n_elts;
5360 gcc_assert (eltmode != BLKmode);
5362 n_elts = TYPE_VECTOR_SUBPARTS (type);
5363 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5365 enum machine_mode mode = GET_MODE (target);
5367 icode = (int) vec_init_optab->handlers[mode].insn_code;
5368 if (icode != CODE_FOR_nothing)
5370 unsigned int i;
5372 vector = rtvec_alloc (n_elts);
5373 for (i = 0; i < n_elts; i++)
5374 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5378 /* If the constructor has fewer elements than the vector,
5379 clear the whole vector first. Similarly if this is a static
5380 constructor of a non-BLKmode object. */
5381 if (cleared)
5382 need_to_clear = 0;
5383 else if (REG_P (target) && TREE_STATIC (exp))
5384 need_to_clear = 1;
5385 else
5387 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5388 tree value;
5390 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5392 int n_elts_here = tree_low_cst
5393 (int_const_binop (TRUNC_DIV_EXPR,
5394 TYPE_SIZE (TREE_TYPE (value)),
5395 TYPE_SIZE (elttype), 0), 1);
5397 count += n_elts_here;
5398 if (mostly_zeros_p (value))
5399 zero_count += n_elts_here;
5402 /* Clear the entire vector first if there are any missing elements,
5403 or if the incidence of zero elements is >= 75%. */
5404 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5407 if (need_to_clear && size > 0 && !vector)
5409 if (REG_P (target))
5410 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5411 else
5412 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5413 cleared = 1;
5416 /* Inform later passes that the old value is dead. */
5417 if (!cleared && !vector && REG_P (target))
5418 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5420 /* Store each element of the constructor into the corresponding
5421 element of TARGET, determined by counting the elements. */
5422 for (idx = 0, i = 0;
5423 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5424 idx++, i += bitsize / elt_size)
5426 HOST_WIDE_INT eltpos;
5427 tree value = ce->value;
5429 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5430 if (cleared && initializer_zerop (value))
5431 continue;
5433 if (ce->index)
5434 eltpos = tree_low_cst (ce->index, 1);
5435 else
5436 eltpos = i;
5438 if (vector)
5440 /* Vector CONSTRUCTORs should only be built from smaller
5441 vectors in the case of BLKmode vectors. */
5442 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5443 RTVEC_ELT (vector, eltpos)
5444 = expand_normal (value);
5446 else
5448 enum machine_mode value_mode =
5449 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5450 ? TYPE_MODE (TREE_TYPE (value))
5451 : eltmode;
5452 bitpos = eltpos * elt_size;
5453 store_constructor_field (target, bitsize, bitpos,
5454 value_mode, value, type,
5455 cleared, get_alias_set (elttype));
5459 if (vector)
5460 emit_insn (GEN_FCN (icode)
5461 (target,
5462 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5463 break;
5466 default:
5467 gcc_unreachable ();
5471 /* Store the value of EXP (an expression tree)
5472 into a subfield of TARGET which has mode MODE and occupies
5473 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5474 If MODE is VOIDmode, it means that we are storing into a bit-field.
5476 Always return const0_rtx unless we have something particular to
5477 return.
5479 TYPE is the type of the underlying object,
5481 ALIAS_SET is the alias set for the destination. This value will
5482 (in general) be different from that for TARGET, since TARGET is a
5483 reference to the containing structure. */
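/* E.g., assuming a layout like struct { unsigned a : 6;
   unsigned b : 10; } x; on a typical target, a store to x.b reaches
   this function with BITSIZE 10, BITPOS 6 and MODE VOIDmode, and is
   handled via store_bit_field below.  */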
5485 static rtx
5486 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5487 enum machine_mode mode, tree exp, tree type, int alias_set)
5489 HOST_WIDE_INT width_mask = 0;
5491 if (TREE_CODE (exp) == ERROR_MARK)
5492 return const0_rtx;
5494 /* If we have nothing to store, do nothing unless the expression has
5495 side-effects. */
5496 if (bitsize == 0)
5497 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5498 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5499 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5501 /* If we are storing into an unaligned field of an aligned union that is
5502 in a register, we may have the mode of TARGET being an integer mode but
5503 MODE == BLKmode. In that case, get an aligned object whose size and
5504 alignment are the same as TARGET and store TARGET into it (we can avoid
5505 the store if the field being stored is the entire width of TARGET). Then
5506 call ourselves recursively to store the field into a BLKmode version of
5507 that object. Finally, load from the object into TARGET. This is not
5508 very efficient in general, but should only be slightly more expensive
5509 than the otherwise-required unaligned accesses. Perhaps this can be
5510 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5511 twice, once with emit_move_insn and once via store_field. */
5513 if (mode == BLKmode
5514 && (REG_P (target) || GET_CODE (target) == SUBREG))
5516 rtx object = assign_temp (type, 0, 1, 1);
5517 rtx blk_object = adjust_address (object, BLKmode, 0);
5519 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5520 emit_move_insn (object, target);
5522 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5524 emit_move_insn (target, object);
5526 /* We want to return the BLKmode version of the data. */
5527 return blk_object;
5530 if (GET_CODE (target) == CONCAT)
5532 /* We're storing into a struct containing a single __complex. */
5534 gcc_assert (!bitpos);
5535 return store_expr (exp, target, 0);
5538 /* If the structure is in a register or if the component
5539 is a bit field, we cannot use addressing to access it.
5540 Use bit-field techniques or SUBREG to store in it. */
5542 if (mode == VOIDmode
5543 || (mode != BLKmode && ! direct_store[(int) mode]
5544 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5545 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5546 || REG_P (target)
5547 || GET_CODE (target) == SUBREG
5548 /* If the field isn't aligned enough to store as an ordinary memref,
5549 store it as a bit field. */
5550 || (mode != BLKmode
5551 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5552 || bitpos % GET_MODE_ALIGNMENT (mode))
5553 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5554 || (bitpos % BITS_PER_UNIT != 0)))
5555 /* If the RHS and field are a constant size and the size of the
5556 RHS isn't the same size as the bitfield, we must use bitfield
5557 operations. */
5558 || (bitsize >= 0
5559 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5560 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5562 rtx temp;
5564 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5565 implies a mask operation. If the precision is the same size as
5566 the field we're storing into, that mask is redundant. This is
5567 particularly common with bit field assignments generated by the
5568 C front end. */
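/* E.g., when assigning to a 5-bit field the C front end wraps the
   value in a conversion to a 5-bit type; since exactly 5 bits are
   stored anyway, that conversion's implied mask is redundant and the
   operand can be used directly.  */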
5569 if (TREE_CODE (exp) == NOP_EXPR)
5571 tree type = TREE_TYPE (exp);
5572 if (INTEGRAL_TYPE_P (type)
5573 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5574 && bitsize == TYPE_PRECISION (type))
5576 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5577 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5578 exp = TREE_OPERAND (exp, 0);
5582 temp = expand_normal (exp);
5584 /* If BITSIZE is narrower than the size of the type of EXP
5585 we will be narrowing TEMP. Normally, what's wanted are the
5586 low-order bits. However, if EXP's type is a record and this is
5587 a big-endian machine, we want the upper BITSIZE bits. */
5588 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5589 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5590 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5591 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5592 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5593 - bitsize),
5594 NULL_RTX, 1);
5596 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5597 MODE. */
5598 if (mode != VOIDmode && mode != BLKmode
5599 && mode != TYPE_MODE (TREE_TYPE (exp)))
5600 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5602 /* If the modes of TARGET and TEMP are both BLKmode, both
5603 must be in memory and BITPOS must be aligned on a byte
5604 boundary. If so, we simply do a block copy. */
5605 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5607 gcc_assert (MEM_P (target) && MEM_P (temp)
5608 && !(bitpos % BITS_PER_UNIT));
5610 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5611 emit_block_move (target, temp,
5612 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5613 / BITS_PER_UNIT),
5614 BLOCK_OP_NORMAL);
5616 return const0_rtx;
5619 /* Store the value in the bitfield. */
5620 store_bit_field (target, bitsize, bitpos, mode, temp);
5622 return const0_rtx;
5624 else
5626 /* Now build a reference to just the desired component. */
5627 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5629 if (to_rtx == target)
5630 to_rtx = copy_rtx (to_rtx);
5632 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5633 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5634 set_mem_alias_set (to_rtx, alias_set);
5636 return store_expr (exp, to_rtx, 0);
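/* Illustrative sketch, compiled out: one way a caller such as expand_assignment
   might hand a bit-field store to store_field above. The helper name and the
   particular field geometry are hypothetical; MEMREF is assumed to be a MEM for
   the containing object and VAL a scalar rhs tree. */
#if 0
static void
example_store_bitfield (rtx memref, tree val)
{
  /* Store VAL into the 9 bits starting at bit 7 of MEMREF. Passing VOIDmode
     for MODE selects the bit-field path of store_field; alias set 0 means
     "no particular alias set". */
  store_field (memref, /*bitsize=*/9, /*bitpos=*/7, VOIDmode,
               val, TREE_TYPE (val), /*alias_set=*/0);
}
#endif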
5640 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5641 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5642 codes and find the ultimate containing object, which we return.
5644 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5645 bit position, and *PUNSIGNEDP to the signedness of the field.
5646 If the position of the field is variable, we store a tree
5647 giving the variable offset (in units) in *POFFSET.
5648 This offset is in addition to the bit position.
5649 If the position is not variable, we store 0 in *POFFSET.
5651 If any of the extraction expressions is volatile,
5652 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5654 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5655 is a mode that can be used to access the field. In that case, *PBITSIZE
5656 is redundant.
5658 If the field describes a variable-sized object, *PMODE is set to
5659 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5660 this case, but the address of the object can be found.
5662 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5663 look through nodes that serve as markers of a greater alignment than
5664 the one that can be deduced from the expression. These nodes make it
5665 possible for front-ends to prevent temporaries from being created by
5666 the middle-end on alignment considerations. For that purpose, the
5667 normal operating mode at high-level is to always pass FALSE so that
5668 the ultimate containing object is really returned; moreover, the
5669 associated predicate handled_component_p will always return TRUE
5670 on these nodes, thus indicating that they are essentially handled
5671 by get_inner_reference. TRUE should only be passed when the caller
5672 is scanning the expression in order to build another representation
5673 and specifically knows how to handle these nodes; as such, this is
5674 the normal operating mode in the RTL expanders. */
5676 tree
5677 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5678 HOST_WIDE_INT *pbitpos, tree *poffset,
5679 enum machine_mode *pmode, int *punsignedp,
5680 int *pvolatilep, bool keep_aligning)
5682 tree size_tree = 0;
5683 enum machine_mode mode = VOIDmode;
5684 tree offset = size_zero_node;
5685 tree bit_offset = bitsize_zero_node;
5687 /* First get the mode, signedness, and size. We do this from just the
5688 outermost expression. */
5689 if (TREE_CODE (exp) == COMPONENT_REF)
5691 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5692 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5693 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5695 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5697 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5699 size_tree = TREE_OPERAND (exp, 1);
5700 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5702 else
5704 mode = TYPE_MODE (TREE_TYPE (exp));
5705 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5707 if (mode == BLKmode)
5708 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5709 else
5710 *pbitsize = GET_MODE_BITSIZE (mode);
5713 if (size_tree != 0)
5715 if (! host_integerp (size_tree, 1))
5716 mode = BLKmode, *pbitsize = -1;
5717 else
5718 *pbitsize = tree_low_cst (size_tree, 1);
5721 *pmode = mode;
5723 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5724 and find the ultimate containing object. */
5725 while (1)
5727 switch (TREE_CODE (exp))
5729 case BIT_FIELD_REF:
5730 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5731 TREE_OPERAND (exp, 2));
5732 break;
5734 case COMPONENT_REF:
5736 tree field = TREE_OPERAND (exp, 1);
5737 tree this_offset = component_ref_field_offset (exp);
5739 /* If this field hasn't been filled in yet, don't go past it.
5740 This should only happen when folding expressions made during
5741 type construction. */
5742 if (this_offset == 0)
5743 break;
5745 offset = size_binop (PLUS_EXPR, offset, this_offset);
5746 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5747 DECL_FIELD_BIT_OFFSET (field));
5749 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5751 break;
5753 case ARRAY_REF:
5754 case ARRAY_RANGE_REF:
5756 tree index = TREE_OPERAND (exp, 1);
5757 tree low_bound = array_ref_low_bound (exp);
5758 tree unit_size = array_ref_element_size (exp);
5760 /* We assume all arrays have sizes that are a multiple of a byte.
5761 First subtract the lower bound, if any, in the type of the
5762 index, then convert to sizetype and multiply by the size of
5763 the array element. */
5764 if (! integer_zerop (low_bound))
5765 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5766 index, low_bound);
5768 offset = size_binop (PLUS_EXPR, offset,
5769 size_binop (MULT_EXPR,
5770 fold_convert (sizetype, index),
5771 unit_size));
5773 break;
5775 case REALPART_EXPR:
5776 break;
5778 case IMAGPART_EXPR:
5779 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5780 bitsize_int (*pbitsize));
5781 break;
5783 case VIEW_CONVERT_EXPR:
5784 if (keep_aligning && STRICT_ALIGNMENT
5785 && (TYPE_ALIGN (TREE_TYPE (exp))
5786 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5787 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5788 < BIGGEST_ALIGNMENT)
5789 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5790 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5791 goto done;
5792 break;
5794 default:
5795 goto done;
5798 /* If any reference in the chain is volatile, the effect is volatile. */
5799 if (TREE_THIS_VOLATILE (exp))
5800 *pvolatilep = 1;
5802 exp = TREE_OPERAND (exp, 0);
5804 done:
5806 /* If OFFSET is constant, see if we can return the whole thing as a
5807 constant bit position. Make sure to handle overflow during
5808 this conversion. */
5809 if (host_integerp (offset, 0))
5811 double_int tem = double_int_mul (tree_to_double_int (offset),
5812 uhwi_to_double_int (BITS_PER_UNIT));
5813 tem = double_int_add (tem, tree_to_double_int (bit_offset));
5814 if (double_int_fits_in_shwi_p (tem))
5816 *pbitpos = double_int_to_shwi (tem);
5817 *poffset = NULL_TREE;
5818 return exp;
5822 /* Otherwise, split it up. */
5823 *pbitpos = tree_low_cst (bit_offset, 0);
5824 *poffset = offset;
5826 return exp;
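/* Illustrative sketch, compiled out: typical use of get_inner_reference when
   decoding a reference such as a COMPONENT_REF. The helper name is
   hypothetical; the variable part of the offset is simply discarded here. */
#if 0
static tree
example_decode_reference (tree ref, HOST_WIDE_INT *pbitsize,
                          HOST_WIDE_INT *pbitpos)
{
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep = 0;

  /* Returns the ultimate containing object; *PBITSIZE and *PBITPOS describe
     the accessed bits relative to it. */
  return get_inner_reference (ref, pbitsize, pbitpos, &offset,
                              &mode, &unsignedp, &volatilep, false);
}
#endif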
5829 /* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
5830 look for whether EXP or any nested component-refs within EXP are marked
5831 as PACKED. */
5833 bool
5834 contains_packed_reference (tree exp)
5836 bool packed_p = false;
5838 while (1)
5840 switch (TREE_CODE (exp))
5842 case COMPONENT_REF:
5844 tree field = TREE_OPERAND (exp, 1);
5845 packed_p = DECL_PACKED (field)
5846 || TYPE_PACKED (TREE_TYPE (field))
5847 || TYPE_PACKED (TREE_TYPE (exp));
5848 if (packed_p)
5849 goto done;
5851 break;
5853 case BIT_FIELD_REF:
5854 case ARRAY_REF:
5855 case ARRAY_RANGE_REF:
5856 case REALPART_EXPR:
5857 case IMAGPART_EXPR:
5858 case VIEW_CONVERT_EXPR:
5859 break;
5861 default:
5862 goto done;
5864 exp = TREE_OPERAND (exp, 0);
5866 done:
5867 return packed_p;
5870 /* Return a tree of sizetype representing the size, in bytes, of the element
5871 of EXP, an ARRAY_REF. */
5873 tree
5874 array_ref_element_size (tree exp)
5876 tree aligned_size = TREE_OPERAND (exp, 3);
5877 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5879 /* If a size was specified in the ARRAY_REF, it's the size measured
5880 in alignment units of the element type. So multiply by that value. */
5881 if (aligned_size)
5883 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5884 sizetype from another type of the same width and signedness. */
5885 if (TREE_TYPE (aligned_size) != sizetype)
5886 aligned_size = fold_convert (sizetype, aligned_size);
5887 return size_binop (MULT_EXPR, aligned_size,
5888 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5891 /* Otherwise, take the size from that of the element type. Substitute
5892 any PLACEHOLDER_EXPR that we have. */
5893 else
5894 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
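/* Illustrative sketch, compiled out: the byte offset of an ARRAY_REF element
   relative to the start of the array is (index - low_bound) * element_size,
   which is essentially what the ARRAY_REF case of get_inner_reference computes
   above. The helper name is hypothetical. */
#if 0
static tree
example_array_byte_offset (tree array_ref)
{
  tree index = fold_convert (sizetype, TREE_OPERAND (array_ref, 1));
  tree low = fold_convert (sizetype, array_ref_low_bound (array_ref));

  return size_binop (MULT_EXPR,
                     size_binop (MINUS_EXPR, index, low),
                     array_ref_element_size (array_ref));
}
#endif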
5897 /* Return a tree representing the lower bound of the array mentioned in
5898 EXP, an ARRAY_REF. */
5900 tree
5901 array_ref_low_bound (tree exp)
5903 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5905 /* If a lower bound is specified in EXP, use it. */
5906 if (TREE_OPERAND (exp, 2))
5907 return TREE_OPERAND (exp, 2);
5909 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5910 substituting for a PLACEHOLDER_EXPR as needed. */
5911 if (domain_type && TYPE_MIN_VALUE (domain_type))
5912 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5914 /* Otherwise, return a zero of the appropriate type. */
5915 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5918 /* Return a tree representing the upper bound of the array mentioned in
5919 EXP, an ARRAY_REF. */
5921 tree
5922 array_ref_up_bound (tree exp)
5924 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5926 /* If there is a domain type and it has an upper bound, use it, substituting
5927 for a PLACEHOLDER_EXPR as needed. */
5928 if (domain_type && TYPE_MAX_VALUE (domain_type))
5929 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5931 /* Otherwise fail. */
5932 return NULL_TREE;
5935 /* Return a tree representing the offset, in bytes, of the field referenced
5936 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5938 tree
5939 component_ref_field_offset (tree exp)
5941 tree aligned_offset = TREE_OPERAND (exp, 2);
5942 tree field = TREE_OPERAND (exp, 1);
5944 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5945 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5946 value. */
5947 if (aligned_offset)
5949 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5950 sizetype from another type of the same width and signedness. */
5951 if (TREE_TYPE (aligned_offset) != sizetype)
5952 aligned_offset = fold_convert (sizetype, aligned_offset);
5953 return size_binop (MULT_EXPR, aligned_offset,
5954 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5957 /* Otherwise, take the offset from that of the field. Substitute
5958 any PLACEHOLDER_EXPR that we have. */
5959 else
5960 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
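/* Illustrative sketch, compiled out: the full byte offset of a COMPONENT_REF
   combines the value computed above with the constant DECL_FIELD_BIT_OFFSET,
   assuming the latter is a multiple of BITS_PER_UNIT. The helper name is
   hypothetical. */
#if 0
static tree
example_component_byte_offset (tree ref)
{
  tree field = TREE_OPERAND (ref, 1);
  tree bit_part = fold_convert (sizetype, DECL_FIELD_BIT_OFFSET (field));

  return size_binop (PLUS_EXPR,
                     component_ref_field_offset (ref),
                     size_binop (EXACT_DIV_EXPR, bit_part,
                                 size_int (BITS_PER_UNIT)));
}
#endif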
5963 /* Return 1 if T is an expression that get_inner_reference handles. */
5965 int
5966 handled_component_p (tree t)
5968 switch (TREE_CODE (t))
5970 case BIT_FIELD_REF:
5971 case COMPONENT_REF:
5972 case ARRAY_REF:
5973 case ARRAY_RANGE_REF:
5974 case VIEW_CONVERT_EXPR:
5975 case REALPART_EXPR:
5976 case IMAGPART_EXPR:
5977 return 1;
5979 default:
5980 return 0;
5984 /* Given an rtx VALUE that may contain additions and multiplications, return
5985 an equivalent value that just refers to a register, memory, or constant.
5986 This is done by generating instructions to perform the arithmetic and
5987 returning a pseudo-register containing the value.
5989 The returned value may be a REG, SUBREG, MEM or constant. */
5991 rtx
5992 force_operand (rtx value, rtx target)
5994 rtx op1, op2;
5995 /* Use subtarget as the target for operand 0 of a binary operation. */
5996 rtx subtarget = get_subtarget (target);
5997 enum rtx_code code = GET_CODE (value);
5999 /* Check for subreg applied to an expression produced by loop optimizer. */
6000 if (code == SUBREG
6001 && !REG_P (SUBREG_REG (value))
6002 && !MEM_P (SUBREG_REG (value)))
6004 value = simplify_gen_subreg (GET_MODE (value),
6005 force_reg (GET_MODE (SUBREG_REG (value)),
6006 force_operand (SUBREG_REG (value),
6007 NULL_RTX)),
6008 GET_MODE (SUBREG_REG (value)),
6009 SUBREG_BYTE (value));
6010 code = GET_CODE (value);
6013 /* Check for a PIC address load. */
6014 if ((code == PLUS || code == MINUS)
6015 && XEXP (value, 0) == pic_offset_table_rtx
6016 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6017 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6018 || GET_CODE (XEXP (value, 1)) == CONST))
6020 if (!subtarget)
6021 subtarget = gen_reg_rtx (GET_MODE (value));
6022 emit_move_insn (subtarget, value);
6023 return subtarget;
6026 if (ARITHMETIC_P (value))
6028 op2 = XEXP (value, 1);
6029 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6030 subtarget = 0;
6031 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6033 code = PLUS;
6034 op2 = negate_rtx (GET_MODE (value), op2);
6037 /* Check for an addition with OP2 a constant integer and our first
6038 operand a PLUS of a virtual register and something else. In that
6039 case, we want to emit the sum of the virtual register and the
6040 constant first and then add the other value. This allows virtual
6041 register instantiation to simply modify the constant rather than
6042 creating another one around this addition. */
6043 if (code == PLUS && GET_CODE (op2) == CONST_INT
6044 && GET_CODE (XEXP (value, 0)) == PLUS
6045 && REG_P (XEXP (XEXP (value, 0), 0))
6046 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6047 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6049 rtx temp = expand_simple_binop (GET_MODE (value), code,
6050 XEXP (XEXP (value, 0), 0), op2,
6051 subtarget, 0, OPTAB_LIB_WIDEN);
6052 return expand_simple_binop (GET_MODE (value), code, temp,
6053 force_operand (XEXP (XEXP (value,
6054 0), 1), 0),
6055 target, 0, OPTAB_LIB_WIDEN);
6058 op1 = force_operand (XEXP (value, 0), subtarget);
6059 op2 = force_operand (op2, NULL_RTX);
6060 switch (code)
6062 case MULT:
6063 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6064 case DIV:
6065 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6066 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6067 target, 1, OPTAB_LIB_WIDEN);
6068 else
6069 return expand_divmod (0,
6070 FLOAT_MODE_P (GET_MODE (value))
6071 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6072 GET_MODE (value), op1, op2, target, 0);
6073 break;
6074 case MOD:
6075 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6076 target, 0);
6077 break;
6078 case UDIV:
6079 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6080 target, 1);
6081 break;
6082 case UMOD:
6083 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6084 target, 1);
6085 break;
6086 case ASHIFTRT:
6087 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6088 target, 0, OPTAB_LIB_WIDEN);
6089 break;
6090 default:
6091 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6092 target, 1, OPTAB_LIB_WIDEN);
6095 if (UNARY_P (value))
6097 if (!target)
6098 target = gen_reg_rtx (GET_MODE (value));
6099 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6100 switch (code)
6102 case ZERO_EXTEND:
6103 case SIGN_EXTEND:
6104 case TRUNCATE:
6105 case FLOAT_EXTEND:
6106 case FLOAT_TRUNCATE:
6107 convert_move (target, op1, code == ZERO_EXTEND);
6108 return target;
6110 case FIX:
6111 case UNSIGNED_FIX:
6112 expand_fix (target, op1, code == UNSIGNED_FIX);
6113 return target;
6115 case FLOAT:
6116 case UNSIGNED_FLOAT:
6117 expand_float (target, op1, code == UNSIGNED_FLOAT);
6118 return target;
6120 default:
6121 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6125 #ifdef INSN_SCHEDULING
6126 /* On machines that have insn scheduling, we want all memory references to be
6127 explicit, so we need to deal with such paradoxical SUBREGs. */
6128 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6129 && (GET_MODE_SIZE (GET_MODE (value))
6130 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6131 value
6132 = simplify_gen_subreg (GET_MODE (value),
6133 force_reg (GET_MODE (SUBREG_REG (value)),
6134 force_operand (SUBREG_REG (value),
6135 NULL_RTX)),
6136 GET_MODE (SUBREG_REG (value)),
6137 SUBREG_BYTE (value));
6138 #endif
6140 return value;
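/* Illustrative sketch, compiled out: flattening a hypothetical address
   computation into something that can be used as an instruction operand.
   The helper name is made up; BASE is assumed to be a pseudo register of
   mode Pmode. */
#if 0
static rtx
example_flatten_address (rtx base)
{
  rtx sum = gen_rtx_PLUS (Pmode, base, GEN_INT (12));

  /* Emits whatever arithmetic is needed and returns a REG, SUBREG, MEM
     or constant holding the value. */
  return force_operand (sum, NULL_RTX);
}
#endif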
6143 /* Subroutine of expand_expr: return nonzero iff there is no way that
6144 EXP can reference X, which is being modified. TOP_P is nonzero if this
6145 call is going to be used to determine whether we need a temporary
6146 for EXP, as opposed to a recursive call to this function.
6148 It is always safe for this routine to return zero since it merely
6149 searches for optimization opportunities. */
6151 static int
6152 safe_from_p (rtx x, tree exp, int top_p)
6154 rtx exp_rtl = 0;
6155 int i, nops;
6157 if (x == 0
6158 /* If EXP has varying size, we MUST use a target since we currently
6159 have no way of allocating temporaries of variable size
6160 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6161 So we assume here that something at a higher level has prevented a
6162 clash. This is somewhat bogus, but the best we can do. Only
6163 do this when X is BLKmode and when we are at the top level. */
6164 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6165 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6166 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6167 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6168 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6169 != INTEGER_CST)
6170 && GET_MODE (x) == BLKmode)
6171 /* If X is in the outgoing argument area, it is always safe. */
6172 || (MEM_P (x)
6173 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6174 || (GET_CODE (XEXP (x, 0)) == PLUS
6175 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6176 return 1;
6178 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
6179 find the underlying pseudo. */
6180 if (GET_CODE (x) == SUBREG)
6182 x = SUBREG_REG (x);
6183 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6184 return 0;
6187 /* Now look at our tree code and possibly recurse. */
6188 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6190 case tcc_declaration:
6191 exp_rtl = DECL_RTL_IF_SET (exp);
6192 break;
6194 case tcc_constant:
6195 return 1;
6197 case tcc_exceptional:
6198 if (TREE_CODE (exp) == TREE_LIST)
6200 while (1)
6202 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6203 return 0;
6204 exp = TREE_CHAIN (exp);
6205 if (!exp)
6206 return 1;
6207 if (TREE_CODE (exp) != TREE_LIST)
6208 return safe_from_p (x, exp, 0);
6211 else if (TREE_CODE (exp) == CONSTRUCTOR)
6213 constructor_elt *ce;
6214 unsigned HOST_WIDE_INT idx;
6216 for (idx = 0;
6217 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6218 idx++)
6219 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6220 || !safe_from_p (x, ce->value, 0))
6221 return 0;
6222 return 1;
6224 else if (TREE_CODE (exp) == ERROR_MARK)
6225 return 1; /* An already-visited SAVE_EXPR? */
6226 else
6227 return 0;
6229 case tcc_statement:
6230 /* The only case we look at here is the DECL_INITIAL inside a
6231 DECL_EXPR. */
6232 return (TREE_CODE (exp) != DECL_EXPR
6233 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6234 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6235 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6237 case tcc_binary:
6238 case tcc_comparison:
6239 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6240 return 0;
6241 /* Fall through. */
6243 case tcc_unary:
6244 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6246 case tcc_expression:
6247 case tcc_reference:
6248 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6249 the expression. If it is set, we conflict iff we are that rtx or
6250 both are in memory. Otherwise, we check all operands of the
6251 expression recursively. */
6253 switch (TREE_CODE (exp))
6255 case ADDR_EXPR:
6256 /* If the operand is static or we are static, we can't conflict.
6257 Likewise if we don't conflict with the operand at all. */
6258 if (staticp (TREE_OPERAND (exp, 0))
6259 || TREE_STATIC (exp)
6260 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6261 return 1;
6263 /* Otherwise, the only way this can conflict is if we are taking
6264 the address of a DECL whose address is part of X, which is
6265 very rare. */
6266 exp = TREE_OPERAND (exp, 0);
6267 if (DECL_P (exp))
6269 if (!DECL_RTL_SET_P (exp)
6270 || !MEM_P (DECL_RTL (exp)))
6271 return 0;
6272 else
6273 exp_rtl = XEXP (DECL_RTL (exp), 0);
6275 break;
6277 case MISALIGNED_INDIRECT_REF:
6278 case ALIGN_INDIRECT_REF:
6279 case INDIRECT_REF:
6280 if (MEM_P (x)
6281 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6282 get_alias_set (exp)))
6283 return 0;
6284 break;
6286 case CALL_EXPR:
6287 /* Assume that the call will clobber all hard registers and
6288 all of memory. */
6289 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6290 || MEM_P (x))
6291 return 0;
6292 break;
6294 case WITH_CLEANUP_EXPR:
6295 case CLEANUP_POINT_EXPR:
6296 /* Lowered by gimplify.c. */
6297 gcc_unreachable ();
6299 case SAVE_EXPR:
6300 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6302 default:
6303 break;
6306 /* If we have an rtx, we do not need to scan our operands. */
6307 if (exp_rtl)
6308 break;
6310 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
6311 for (i = 0; i < nops; i++)
6312 if (TREE_OPERAND (exp, i) != 0
6313 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6314 return 0;
6316 /* If this is a language-specific tree code, it may require
6317 special handling. */
6318 if ((unsigned int) TREE_CODE (exp)
6319 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6320 && !lang_hooks.safe_from_p (x, exp))
6321 return 0;
6322 break;
6324 case tcc_type:
6325 /* Should never get a type here. */
6326 gcc_unreachable ();
6329 /* If we have an rtl, find any enclosed object. Then see if we conflict
6330 with it. */
6331 if (exp_rtl)
6333 if (GET_CODE (exp_rtl) == SUBREG)
6335 exp_rtl = SUBREG_REG (exp_rtl);
6336 if (REG_P (exp_rtl)
6337 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6338 return 0;
6341 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6342 are memory and they conflict. */
6343 return ! (rtx_equal_p (x, exp_rtl)
6344 || (MEM_P (x) && MEM_P (exp_rtl)
6345 && true_dependence (exp_rtl, VOIDmode, x,
6346 rtx_addr_varies_p)));
6349 /* If we reach here, it is safe. */
6350 return 1;
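/* Illustrative sketch, compiled out: the usual way safe_from_p is consumed,
   mirroring the test at the top of expand_operands below. The helper name is
   hypothetical. */
#if 0
static rtx
example_maybe_reuse_target (rtx target, tree exp)
{
  /* Reuse TARGET only if expanding EXP cannot clobber it. */
  return safe_from_p (target, exp, 1) ? target : NULL_RTX;
}
#endif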
6354 /* Return the highest power of two that EXP is known to be a multiple of.
6355 This is used in updating alignment of MEMs in array references. */
6357 unsigned HOST_WIDE_INT
6358 highest_pow2_factor (tree exp)
6360 unsigned HOST_WIDE_INT c0, c1;
6362 switch (TREE_CODE (exp))
6364 case INTEGER_CST:
6365 /* We can find the lowest bit that's a one. If the low
6366 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6367 We need to handle this case since we can find it in a COND_EXPR,
6368 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6369 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6370 later ICE. */
6371 if (TREE_CONSTANT_OVERFLOW (exp))
6372 return BIGGEST_ALIGNMENT;
6373 else
6375 /* Note: tree_low_cst is intentionally not used here;
6376 we don't care about the upper bits. */
6377 c0 = TREE_INT_CST_LOW (exp);
6378 c0 &= -c0;
6379 return c0 ? c0 : BIGGEST_ALIGNMENT;
6381 break;
6383 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6384 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6385 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6386 return MIN (c0, c1);
6388 case MULT_EXPR:
6389 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6390 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6391 return c0 * c1;
6393 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6394 case CEIL_DIV_EXPR:
6395 if (integer_pow2p (TREE_OPERAND (exp, 1))
6396 && host_integerp (TREE_OPERAND (exp, 1), 1))
6398 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6399 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6400 return MAX (1, c0 / c1);
6402 break;
6404 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6405 case SAVE_EXPR:
6406 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6408 case COMPOUND_EXPR:
6409 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6411 case COND_EXPR:
6412 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6413 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6414 return MIN (c0, c1);
6416 default:
6417 break;
6420 return 1;
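/* Illustrative sketch, compiled out: for a hypothetical index expression
   i * 4 + 8 the factors of the two operands are 1 * 4 = 4 and 8, so the
   PLUS_EXPR case yields MIN (4, 8) = 4. A caller can turn that factor into
   a bit-alignment bound as below; the helper name is made up. */
#if 0
static unsigned HOST_WIDE_INT
example_offset_alignment (tree offset)
{
  return highest_pow2_factor (offset) * BITS_PER_UNIT;
}
#endif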
6423 /* Similar, except that the alignment requirements of TARGET are
6424 taken into account. Assume it is at least as aligned as its
6425 type, unless it is a COMPONENT_REF in which case the layout of
6426 the structure gives the alignment. */
6428 static unsigned HOST_WIDE_INT
6429 highest_pow2_factor_for_target (tree target, tree exp)
6431 unsigned HOST_WIDE_INT target_align, factor;
6433 factor = highest_pow2_factor (exp);
6434 if (TREE_CODE (target) == COMPONENT_REF)
6435 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6436 else
6437 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6438 return MAX (factor, target_align);
6441 /* Expands variable VAR. */
6443 void
6444 expand_var (tree var)
6446 if (DECL_EXTERNAL (var))
6447 return;
6449 if (TREE_STATIC (var))
6450 /* If this is an inlined copy of a static local variable,
6451 look up the original decl. */
6452 var = DECL_ORIGIN (var);
6454 if (TREE_STATIC (var)
6455 ? !TREE_ASM_WRITTEN (var)
6456 : !DECL_RTL_SET_P (var))
6458 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6459 /* Should be ignored. */;
6460 else if (lang_hooks.expand_decl (var))
6461 /* OK. */;
6462 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6463 expand_decl (var);
6464 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6465 rest_of_decl_compilation (var, 0, 0);
6466 else
6467 /* No expansion needed. */
6468 gcc_assert (TREE_CODE (var) == TYPE_DECL
6469 || TREE_CODE (var) == CONST_DECL
6470 || TREE_CODE (var) == FUNCTION_DECL
6471 || TREE_CODE (var) == LABEL_DECL);
6475 /* Subroutine of expand_expr. Expand the two operands of a binary
6476 expression EXP0 and EXP1, placing the results in OP0 and OP1.
6477 The value may be stored in TARGET if TARGET is nonzero. The
6478 MODIFIER argument is as documented by expand_expr. */
6480 static void
6481 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6482 enum expand_modifier modifier)
6484 if (! safe_from_p (target, exp1, 1))
6485 target = 0;
6486 if (operand_equal_p (exp0, exp1, 0))
6488 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6489 *op1 = copy_rtx (*op0);
6491 else
6493 /* If we need to preserve evaluation order, copy exp0 into its own
6494 temporary variable so that it can't be clobbered by exp1. */
6495 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6496 exp0 = save_expr (exp0);
6497 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6498 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
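/* Illustrative sketch, compiled out: how the binary-operator cases of
   expand_expr_real_1 use expand_operands. The helper name is hypothetical. */
#if 0
static void
example_expand_binary_operands (tree exp, rtx subtarget, rtx *op0, rtx *op1)
{
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                   subtarget, op0, op1, EXPAND_NORMAL);
}
#endif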
6503 /* Return a MEM that contains constant EXP. DEFER is as for
6504 output_constant_def and MODIFIER is as for expand_expr. */
6506 static rtx
6507 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6509 rtx mem;
6511 mem = output_constant_def (exp, defer);
6512 if (modifier != EXPAND_INITIALIZER)
6513 mem = use_anchored_address (mem);
6514 return mem;
6517 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6518 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6520 static rtx
6521 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6522 enum expand_modifier modifier)
6524 rtx result, subtarget;
6525 tree inner, offset;
6526 HOST_WIDE_INT bitsize, bitpos;
6527 int volatilep, unsignedp;
6528 enum machine_mode mode1;
6530 /* If we are taking the address of a constant and are at the top level,
6531 we have to use output_constant_def since we can't call force_const_mem
6532 at top level. */
6533 /* ??? This should be considered a front-end bug. We should not be
6534 generating ADDR_EXPR of something that isn't an LVALUE. The only
6535 exception here is STRING_CST. */
6536 if (TREE_CODE (exp) == CONSTRUCTOR
6537 || CONSTANT_CLASS_P (exp))
6538 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6540 /* Everything must be something allowed by is_gimple_addressable. */
6541 switch (TREE_CODE (exp))
6543 case INDIRECT_REF:
6544 /* This case will happen via recursion for &a->b. */
6545 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6547 case CONST_DECL:
6548 /* Recurse and make the output_constant_def clause above handle this. */
6549 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6550 tmode, modifier);
6552 case REALPART_EXPR:
6553 /* The real part of the complex number is always first, therefore
6554 the address is the same as the address of the parent object. */
6555 offset = 0;
6556 bitpos = 0;
6557 inner = TREE_OPERAND (exp, 0);
6558 break;
6560 case IMAGPART_EXPR:
6561 /* The imaginary part of the complex number is always second.
6562 The expression is therefore always offset by the size of the
6563 scalar type. */
6564 offset = 0;
6565 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6566 inner = TREE_OPERAND (exp, 0);
6567 break;
6569 default:
6570 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6571 expand_expr, as that can have various side effects; LABEL_DECLs, for
6572 example, may not have their DECL_RTL set yet. Assume language
6573 specific tree nodes can be expanded in some interesting way. */
6574 if (DECL_P (exp)
6575 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6577 result = expand_expr (exp, target, tmode,
6578 modifier == EXPAND_INITIALIZER
6579 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6581 /* If the DECL isn't in memory, then the DECL wasn't properly
6582 marked TREE_ADDRESSABLE, which will be either a front-end
6583 or a tree optimizer bug. */
6584 gcc_assert (MEM_P (result));
6585 result = XEXP (result, 0);
6587 /* ??? Is this needed anymore? */
6588 if (DECL_P (exp) && ! TREE_USED (exp))
6590 assemble_external (exp);
6591 TREE_USED (exp) = 1;
6594 if (modifier != EXPAND_INITIALIZER
6595 && modifier != EXPAND_CONST_ADDRESS)
6596 result = force_operand (result, target);
6597 return result;
6600 /* Pass FALSE as the last argument to get_inner_reference although
6601 we are expanding to RTL. The rationale is that we know how to
6602 handle "aligning nodes" here: we can just bypass them because
6603 they won't change the final object whose address will be returned
6604 (they actually exist only for that purpose). */
6605 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6606 &mode1, &unsignedp, &volatilep, false);
6607 break;
6610 /* We must have made progress. */
6611 gcc_assert (inner != exp);
6613 subtarget = offset || bitpos ? NULL_RTX : target;
6614 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6616 if (offset)
6618 rtx tmp;
6620 if (modifier != EXPAND_NORMAL)
6621 result = force_operand (result, NULL);
6622 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6624 result = convert_memory_address (tmode, result);
6625 tmp = convert_memory_address (tmode, tmp);
6627 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6628 result = gen_rtx_PLUS (tmode, result, tmp);
6629 else
6631 subtarget = bitpos ? NULL_RTX : target;
6632 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6633 1, OPTAB_LIB_WIDEN);
6637 if (bitpos)
6639 /* Someone beforehand should have rejected taking the address
6640 of such an object. */
6641 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6643 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6644 if (modifier < EXPAND_SUM)
6645 result = force_operand (result, target);
6648 return result;
6651 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6652 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6654 static rtx
6655 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6656 enum expand_modifier modifier)
6658 enum machine_mode rmode;
6659 rtx result;
6661 /* Target mode of VOIDmode says "whatever's natural". */
6662 if (tmode == VOIDmode)
6663 tmode = TYPE_MODE (TREE_TYPE (exp));
6665 /* We can get called with some Weird Things if the user does silliness
6666 like "(short) &a". In that case, convert_memory_address won't do
6667 the right thing, so ignore the given target mode. */
6668 if (tmode != Pmode && tmode != ptr_mode)
6669 tmode = Pmode;
6671 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6672 tmode, modifier);
6674 /* Despite expand_expr's claims about ignoring TMODE when not
6675 strictly convenient, stuff breaks if we don't honor it. Note
6676 that combined with the above, we only do this for pointer modes. */
6677 rmode = GET_MODE (result);
6678 if (rmode == VOIDmode)
6679 rmode = tmode;
6680 if (rmode != tmode)
6681 result = convert_memory_address (tmode, result);
6683 return result;
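/* Illustrative sketch, compiled out: taking the address of a decl goes
   through expand_expr, whose ADDR_EXPR case dispatches to
   expand_expr_addr_expr above. The helper name is made up and VAR is
   assumed to be TREE_ADDRESSABLE already. */
#if 0
static rtx
example_take_address (tree var)
{
  tree addr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (var)), var);

  return expand_expr (addr, NULL_RTX, Pmode, EXPAND_NORMAL);
}
#endif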
6687 /* expand_expr: generate code for computing expression EXP.
6688 An rtx for the computed value is returned. The value is never null.
6689 In the case of a void EXP, const0_rtx is returned.
6691 The value may be stored in TARGET if TARGET is nonzero.
6692 TARGET is just a suggestion; callers must assume that
6693 the rtx returned may not be the same as TARGET.
6695 If TARGET is CONST0_RTX, it means that the value will be ignored.
6697 If TMODE is not VOIDmode, it suggests generating the
6698 result in mode TMODE. But this is done only when convenient.
6699 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6700 TMODE is just a suggestion; callers must assume that
6701 the rtx returned may not have mode TMODE.
6703 Note that TARGET may have neither TMODE nor MODE. In that case, it
6704 probably will not be used.
6706 If MODIFIER is EXPAND_SUM then when EXP is an addition
6707 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6708 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6709 products as above, or REG or MEM, or constant.
6710 Ordinarily in such cases we would output mul or add instructions
6711 and then return a pseudo reg containing the sum.
6713 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6714 it also marks a label as absolutely required (it can't be dead).
6715 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6716 This is used for outputting expressions used in initializers.
6718 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6719 with a constant address even if that address is not normally legitimate.
6720 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6722 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6723 a call parameter. Such targets require special care as we haven't yet
6724 marked TARGET so that it's safe from being trashed by libcalls. We
6725 don't want to use TARGET for anything but the final result;
6726 Intermediate values must go elsewhere. Additionally, calls to
6727 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6729 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6730 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6731 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6732 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6733 recursively. */
6735 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6736 enum expand_modifier, rtx *);
6738 rtx
6739 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6740 enum expand_modifier modifier, rtx *alt_rtl)
6742 int rn = -1;
6743 rtx ret, last = NULL;
6745 /* Handle ERROR_MARK before anybody tries to access its type. */
6746 if (TREE_CODE (exp) == ERROR_MARK
6747 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6749 ret = CONST0_RTX (tmode);
6750 return ret ? ret : const0_rtx;
6753 if (flag_non_call_exceptions)
6755 rn = lookup_stmt_eh_region (exp);
6756 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6757 if (rn >= 0)
6758 last = get_last_insn ();
6761 /* If this is an expression of some kind and it has an associated line
6762 number, then emit the line number before expanding the expression.
6764 We need to save and restore the file and line information so that
6765 errors discovered during expansion are emitted with the right
6766 information. It would be better if the diagnostic routines
6767 used the file/line information embedded in the tree nodes rather
6768 than globals. */
6769 if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6771 location_t saved_location = input_location;
6772 input_location = EXPR_LOCATION (exp);
6773 emit_line_note (input_location);
6775 /* Record where the insns produced belong. */
6776 record_block_change (TREE_BLOCK (exp));
6778 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6780 input_location = saved_location;
6782 else
6784 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6787 /* If using non-call exceptions, mark all insns that may trap.
6788 expand_call() will mark CALL_INSNs before we get to this code,
6789 but it doesn't handle libcalls, and these may trap. */
6790 if (rn >= 0)
6792 rtx insn;
6793 for (insn = next_real_insn (last); insn;
6794 insn = next_real_insn (insn))
6796 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6797 /* If we want exceptions for non-call insns, any
6798 may_trap_p instruction may throw. */
6799 && GET_CODE (PATTERN (insn)) != CLOBBER
6800 && GET_CODE (PATTERN (insn)) != USE
6801 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6803 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6804 REG_NOTES (insn));
6809 return ret;
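/* Illustrative sketch, compiled out: the canonical calling pattern. TARGET
   is only a suggestion, so callers must always use the returned rtx. The
   helper name is hypothetical and EXP is assumed to have a non-BLKmode
   scalar type. */
#if 0
static rtx
example_expand_into_reg (tree exp)
{
  rtx target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
  rtx val = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);

  if (val != target)
    emit_move_insn (target, val);
  return target;
}
#endif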
6812 static rtx
6813 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6814 enum expand_modifier modifier, rtx *alt_rtl)
6816 rtx op0, op1, temp, decl_rtl;
6817 tree type = TREE_TYPE (exp);
6818 int unsignedp;
6819 enum machine_mode mode;
6820 enum tree_code code = TREE_CODE (exp);
6821 optab this_optab;
6822 rtx subtarget, original_target;
6823 int ignore;
6824 tree context, subexp0, subexp1;
6825 bool reduce_bit_field = false;
6826 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6827 ? reduce_to_bit_field_precision ((expr), \
6828 target, \
6829 type) \
6830 : (expr))
6832 mode = TYPE_MODE (type);
6833 unsignedp = TYPE_UNSIGNED (type);
6834 if (lang_hooks.reduce_bit_field_operations
6835 && TREE_CODE (type) == INTEGER_TYPE
6836 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6838 /* An operation in what may be a bit-field type needs the
6839 result to be reduced to the precision of the bit-field type,
6840 which is narrower than that of the type's mode. */
6841 reduce_bit_field = true;
6842 if (modifier == EXPAND_STACK_PARM)
6843 target = 0;
6846 /* Use subtarget as the target for operand 0 of a binary operation. */
6847 subtarget = get_subtarget (target);
6848 original_target = target;
6849 ignore = (target == const0_rtx
6850 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6851 || code == CONVERT_EXPR || code == COND_EXPR
6852 || code == VIEW_CONVERT_EXPR)
6853 && TREE_CODE (type) == VOID_TYPE));
6855 /* If we are going to ignore this result, we need only do something
6856 if there is a side-effect somewhere in the expression. If there
6857 is, short-circuit the most common cases here. Note that we must
6858 not call expand_expr with anything but const0_rtx in case this
6859 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6861 if (ignore)
6863 if (! TREE_SIDE_EFFECTS (exp))
6864 return const0_rtx;
6866 /* Ensure we reference a volatile object even if value is ignored, but
6867 don't do this if all we are doing is taking its address. */
6868 if (TREE_THIS_VOLATILE (exp)
6869 && TREE_CODE (exp) != FUNCTION_DECL
6870 && mode != VOIDmode && mode != BLKmode
6871 && modifier != EXPAND_CONST_ADDRESS)
6873 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6874 if (MEM_P (temp))
6875 temp = copy_to_reg (temp);
6876 return const0_rtx;
6879 if (TREE_CODE_CLASS (code) == tcc_unary
6880 || code == COMPONENT_REF || code == INDIRECT_REF)
6881 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6882 modifier);
6884 else if (TREE_CODE_CLASS (code) == tcc_binary
6885 || TREE_CODE_CLASS (code) == tcc_comparison
6886 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6888 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6889 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6890 return const0_rtx;
6892 else if (code == BIT_FIELD_REF)
6894 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6895 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6896 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6897 return const0_rtx;
6900 target = 0;
6904 switch (code)
6906 case LABEL_DECL:
6908 tree function = decl_function_context (exp);
6910 temp = label_rtx (exp);
6911 temp = gen_rtx_LABEL_REF (Pmode, temp);
6913 if (function != current_function_decl
6914 && function != 0)
6915 LABEL_REF_NONLOCAL_P (temp) = 1;
6917 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6918 return temp;
6921 case SSA_NAME:
6922 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6923 NULL);
6925 case PARM_DECL:
6926 case VAR_DECL:
6927 /* If a static var's type was incomplete when the decl was written,
6928 but the type is complete now, lay out the decl now. */
6929 if (DECL_SIZE (exp) == 0
6930 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6931 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6932 layout_decl (exp, 0);
6934 /* ... fall through ... */
6936 case FUNCTION_DECL:
6937 case RESULT_DECL:
6938 decl_rtl = DECL_RTL (exp);
6939 gcc_assert (decl_rtl);
6941 /* Ensure the variable is marked as used even if it doesn't go through
6942 a parser. If it hasn't been used yet, write out an external
6943 definition. */
6944 if (! TREE_USED (exp))
6946 assemble_external (exp);
6947 TREE_USED (exp) = 1;
6950 /* Show we haven't gotten RTL for this yet. */
6951 temp = 0;
6953 /* Variables inherited from containing functions should have
6954 been lowered by this point. */
6955 context = decl_function_context (exp);
6956 gcc_assert (!context
6957 || context == current_function_decl
6958 || TREE_STATIC (exp)
6959 /* ??? C++ creates functions that are not TREE_STATIC. */
6960 || TREE_CODE (exp) == FUNCTION_DECL);
6962 /* This is the case of an array whose size is to be determined
6963 from its initializer, while the initializer is still being parsed.
6964 See expand_decl. */
6966 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
6967 temp = validize_mem (decl_rtl);
6969 /* If DECL_RTL is memory, we are in the normal case; if either
6970 the address is not valid or it is not a register and -fforce-addr
6971 is specified, get the address into a register. */
6973 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
6975 if (alt_rtl)
6976 *alt_rtl = decl_rtl;
6977 decl_rtl = use_anchored_address (decl_rtl);
6978 if (modifier != EXPAND_CONST_ADDRESS
6979 && modifier != EXPAND_SUM
6980 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
6981 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
6982 temp = replace_equiv_address (decl_rtl,
6983 copy_rtx (XEXP (decl_rtl, 0)));
6986 /* If we got something, return it. But first, set the alignment
6987 if the address is a register. */
6988 if (temp != 0)
6990 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6991 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6993 return temp;
6996 /* If the mode of DECL_RTL does not match that of the decl, it
6997 must be a promoted value. We return a SUBREG of the wanted mode,
6998 but mark it so that we know that it was already extended. */
7000 if (REG_P (decl_rtl)
7001 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7003 enum machine_mode pmode;
7005 /* Get the signedness used for this variable. Ensure we get the
7006 same mode we got when the variable was declared. */
7007 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7008 (TREE_CODE (exp) == RESULT_DECL
7009 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7010 gcc_assert (GET_MODE (decl_rtl) == pmode);
7012 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7013 SUBREG_PROMOTED_VAR_P (temp) = 1;
7014 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7015 return temp;
7018 return decl_rtl;
7020 case INTEGER_CST:
7021 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7022 TREE_INT_CST_HIGH (exp), mode);
7024 /* ??? If overflow is set, fold will have done an incomplete job,
7025 which can result in (plus xx (const_int 0)), which can get
7026 simplified by validate_replace_rtx during virtual register
7027 instantiation, which can result in unrecognizable insns.
7028 Avoid this by forcing all overflows into registers. */
7029 if (TREE_CONSTANT_OVERFLOW (exp)
7030 && modifier != EXPAND_INITIALIZER)
7031 temp = force_reg (mode, temp);
7033 return temp;
7035 case VECTOR_CST:
7037 tree tmp = NULL_TREE;
7038 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7039 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
7040 return const_vector_from_tree (exp);
7041 if (GET_MODE_CLASS (mode) == MODE_INT)
7043 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7044 if (type_for_mode)
7045 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7047 if (!tmp)
7048 tmp = build_constructor_from_list (type,
7049 TREE_VECTOR_CST_ELTS (exp));
7050 return expand_expr (tmp, ignore ? const0_rtx : target,
7051 tmode, modifier);
7054 case CONST_DECL:
7055 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7057 case REAL_CST:
7058 /* If optimized, generate immediate CONST_DOUBLE
7059 which will be turned into memory by reload if necessary.
7061 We used to force a register so that loop.c could see it. But
7062 this does not allow gen_* patterns to perform optimizations with
7063 the constants. It also produces two insns in cases like "x = 1.0;".
7064 On most machines, floating-point constants are not permitted in
7065 many insns, so we'd end up copying it to a register in any case.
7067 Now, we do the copying in expand_binop, if appropriate. */
7068 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7069 TYPE_MODE (TREE_TYPE (exp)));
7071 case COMPLEX_CST:
7072 /* Handle evaluating a complex constant in a CONCAT target. */
7073 if (original_target && GET_CODE (original_target) == CONCAT)
7075 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7076 rtx rtarg, itarg;
7078 rtarg = XEXP (original_target, 0);
7079 itarg = XEXP (original_target, 1);
7081 /* Move the real and imaginary parts separately. */
7082 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
7083 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
7085 if (op0 != rtarg)
7086 emit_move_insn (rtarg, op0);
7087 if (op1 != itarg)
7088 emit_move_insn (itarg, op1);
7090 return original_target;
7093 /* ... fall through ... */
7095 case STRING_CST:
7096 temp = expand_expr_constant (exp, 1, modifier);
7098 /* temp contains a constant address.
7099 On RISC machines where a constant address isn't valid,
7100 make some insns to get that address into a register. */
7101 if (modifier != EXPAND_CONST_ADDRESS
7102 && modifier != EXPAND_INITIALIZER
7103 && modifier != EXPAND_SUM
7104 && (! memory_address_p (mode, XEXP (temp, 0))
7105 || flag_force_addr))
7106 return replace_equiv_address (temp,
7107 copy_rtx (XEXP (temp, 0)));
7108 return temp;
7110 case SAVE_EXPR:
7112 tree val = TREE_OPERAND (exp, 0);
7113 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7115 if (!SAVE_EXPR_RESOLVED_P (exp))
7117 /* We can indeed still hit this case, typically via builtin
7118 expanders calling save_expr immediately before expanding
7119 something. Assume this means that we only have to deal
7120 with non-BLKmode values. */
7121 gcc_assert (GET_MODE (ret) != BLKmode);
7123 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7124 DECL_ARTIFICIAL (val) = 1;
7125 DECL_IGNORED_P (val) = 1;
7126 TREE_OPERAND (exp, 0) = val;
7127 SAVE_EXPR_RESOLVED_P (exp) = 1;
7129 if (!CONSTANT_P (ret))
7130 ret = copy_to_reg (ret);
7131 SET_DECL_RTL (val, ret);
7134 return ret;
7137 case GOTO_EXPR:
7138 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7139 expand_goto (TREE_OPERAND (exp, 0));
7140 else
7141 expand_computed_goto (TREE_OPERAND (exp, 0));
7142 return const0_rtx;
7144 case CONSTRUCTOR:
7145 /* If we don't need the result, just ensure we evaluate any
7146 subexpressions. */
7147 if (ignore)
7149 unsigned HOST_WIDE_INT idx;
7150 tree value;
7152 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7153 expand_expr (value, const0_rtx, VOIDmode, 0);
7155 return const0_rtx;
7158 /* Try to avoid creating a temporary at all. This is possible
7159 if all of the initializer is zero.
7160 FIXME: try to handle all [0..255] initializers we can handle
7161 with memset. */
7162 else if (TREE_STATIC (exp)
7163 && !TREE_ADDRESSABLE (exp)
7164 && target != 0 && mode == BLKmode
7165 && all_zeros_p (exp))
7167 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7168 return target;
7171 /* All elts simple constants => refer to a constant in memory. But
7172 if this is a non-BLKmode mode, let it store a field at a time
7173 since that should make a CONST_INT or CONST_DOUBLE when we
7174 fold. Likewise, if we have a target we can use, it is best to
7175 store directly into the target unless the type is large enough
7176 that memcpy will be used. If we are making an initializer and
7177 all operands are constant, put it in memory as well.
7179 FIXME: Avoid trying to fill vector constructors piece-meal.
7180 Output them with output_constant_def below unless we're sure
7181 they're zeros. This should go away when vector initializers
7182 are treated like VECTOR_CST instead of arrays.
7184 else if ((TREE_STATIC (exp)
7185 && ((mode == BLKmode
7186 && ! (target != 0 && safe_from_p (target, exp, 1)))
7187 || TREE_ADDRESSABLE (exp)
7188 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7189 && (! MOVE_BY_PIECES_P
7190 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7191 TYPE_ALIGN (type)))
7192 && ! mostly_zeros_p (exp))))
7193 || ((modifier == EXPAND_INITIALIZER
7194 || modifier == EXPAND_CONST_ADDRESS)
7195 && TREE_CONSTANT (exp)))
7197 rtx constructor = expand_expr_constant (exp, 1, modifier);
7199 if (modifier != EXPAND_CONST_ADDRESS
7200 && modifier != EXPAND_INITIALIZER
7201 && modifier != EXPAND_SUM)
7202 constructor = validize_mem (constructor);
7204 return constructor;
7206 else
7208 /* Handle calls that pass values in multiple non-contiguous
7209 locations. The Irix 6 ABI has examples of this. */
7210 if (target == 0 || ! safe_from_p (target, exp, 1)
7211 || GET_CODE (target) == PARALLEL
7212 || modifier == EXPAND_STACK_PARM)
7213 target
7214 = assign_temp (build_qualified_type (type,
7215 (TYPE_QUALS (type)
7216 | (TREE_READONLY (exp)
7217 * TYPE_QUAL_CONST))),
7218 0, TREE_ADDRESSABLE (exp), 1);
7220 store_constructor (exp, target, 0, int_expr_size (exp));
7221 return target;
7224 case MISALIGNED_INDIRECT_REF:
7225 case ALIGN_INDIRECT_REF:
7226 case INDIRECT_REF:
7228 tree exp1 = TREE_OPERAND (exp, 0);
7230 if (modifier != EXPAND_WRITE)
7232 tree t;
7234 t = fold_read_from_constant_string (exp);
7235 if (t)
7236 return expand_expr (t, target, tmode, modifier);
7239 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7240 op0 = memory_address (mode, op0);
7242 if (code == ALIGN_INDIRECT_REF)
7244 int align = TYPE_ALIGN_UNIT (type);
7245 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7246 op0 = memory_address (mode, op0);
7249 temp = gen_rtx_MEM (mode, op0);
7251 set_mem_attributes (temp, exp, 0);
7253 /* Resolve the misalignment now, so that we don't have to remember
7254 to resolve it later. Of course, this only works for reads. */
7255 /* ??? When we get around to supporting writes, we'll have to handle
7256 this in store_expr directly. The vectorizer isn't generating
7257 those yet, however. */
7258 if (code == MISALIGNED_INDIRECT_REF)
7260 int icode;
7261 rtx reg, insn;
7263 gcc_assert (modifier == EXPAND_NORMAL
7264 || modifier == EXPAND_STACK_PARM);
7266 /* The vectorizer should have already checked the mode. */
7267 icode = movmisalign_optab->handlers[mode].insn_code;
7268 gcc_assert (icode != CODE_FOR_nothing);
7270 /* We've already validated the memory, and we're creating a
7271 new pseudo destination. The predicates really can't fail. */
7272 reg = gen_reg_rtx (mode);
7274 /* Nor can the insn generator. */
7275 insn = GEN_FCN (icode) (reg, temp);
7276 emit_insn (insn);
7278 return reg;
7281 return temp;
7284 case TARGET_MEM_REF:
7286 struct mem_address addr;
7288 get_address_description (exp, &addr);
7289 op0 = addr_for_mem_ref (&addr, true);
7290 op0 = memory_address (mode, op0);
7291 temp = gen_rtx_MEM (mode, op0);
7292 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7294 return temp;
7296 case ARRAY_REF:
7299 tree array = TREE_OPERAND (exp, 0);
7300 tree index = TREE_OPERAND (exp, 1);
7302 /* Fold an expression like: "foo"[2].
7303 This is not done in fold so it won't happen inside &.
7304 Don't fold if this is for wide characters since it's too
7305 difficult to do correctly and this is a very rare case. */
7307 if (modifier != EXPAND_CONST_ADDRESS
7308 && modifier != EXPAND_INITIALIZER
7309 && modifier != EXPAND_MEMORY)
7311 tree t = fold_read_from_constant_string (exp);
7313 if (t)
7314 return expand_expr (t, target, tmode, modifier);
7317 /* If this is a constant index into a constant array,
7318 just get the value from the array. Handle both the cases when
7319 we have an explicit constructor and when our operand is a variable
7320 that was declared const. */
7322 if (modifier != EXPAND_CONST_ADDRESS
7323 && modifier != EXPAND_INITIALIZER
7324 && modifier != EXPAND_MEMORY
7325 && TREE_CODE (array) == CONSTRUCTOR
7326 && ! TREE_SIDE_EFFECTS (array)
7327 && TREE_CODE (index) == INTEGER_CST)
7329 unsigned HOST_WIDE_INT ix;
7330 tree field, value;
7332 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7333 field, value)
7334 if (tree_int_cst_equal (field, index))
7336 if (!TREE_SIDE_EFFECTS (value))
7337 return expand_expr (fold (value), target, tmode, modifier);
7338 break;
7342 else if (optimize >= 1
7343 && modifier != EXPAND_CONST_ADDRESS
7344 && modifier != EXPAND_INITIALIZER
7345 && modifier != EXPAND_MEMORY
7346 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7347 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7348 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7349 && targetm.binds_local_p (array))
7351 if (TREE_CODE (index) == INTEGER_CST)
7353 tree init = DECL_INITIAL (array);
7355 if (TREE_CODE (init) == CONSTRUCTOR)
7357 unsigned HOST_WIDE_INT ix;
7358 tree field, value;
7360 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7361 field, value)
7362 if (tree_int_cst_equal (field, index))
7364 if (!TREE_SIDE_EFFECTS (value))
7365 return expand_expr (fold (value), target, tmode,
7366 modifier);
7367 break;
7370 else if (TREE_CODE (init) == STRING_CST)
7372 tree index1 = index;
7373 tree low_bound = array_ref_low_bound (exp);
7374 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7376 /* Optimize the special case of a zero lower bound.
7378 We convert the low_bound to sizetype to avoid some problems
7379 with constant folding. (E.g. suppose the lower bound is 1,
7380 and its mode is QI. Without the conversion, (ARRAY
7381 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7382 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7384 if (! integer_zerop (low_bound))
7385 index1 = size_diffop (index1, fold_convert (sizetype,
7386 low_bound));
7388 if (0 > compare_tree_int (index1,
7389 TREE_STRING_LENGTH (init)))
7391 tree type = TREE_TYPE (TREE_TYPE (init));
7392 enum machine_mode mode = TYPE_MODE (type);
7394 if (GET_MODE_CLASS (mode) == MODE_INT
7395 && GET_MODE_SIZE (mode) == 1)
7396 return gen_int_mode (TREE_STRING_POINTER (init)
7397 [TREE_INT_CST_LOW (index1)],
7398 mode);
7404 goto normal_inner_ref;
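/* A COMPONENT_REF selects a field of a struct or union.  When the object
   is itself a CONSTRUCTOR we may be able to use the field's value
   directly; everything else goes through the generic reference code at
   normal_inner_ref.  */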
7406 case COMPONENT_REF:
7407 /* If the operand is a CONSTRUCTOR, we can just extract the
7408 appropriate field if it is present. */
7409 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7411 unsigned HOST_WIDE_INT idx;
7412 tree field, value;
7414 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7415 idx, field, value)
7416 if (field == TREE_OPERAND (exp, 1)
7417 /* We can normally use the value of the field in the
7418 CONSTRUCTOR. However, if this is a bitfield in
7419 an integral mode that we can fit in a HOST_WIDE_INT,
7420 we must mask only the number of bits in the bitfield,
7421 since this is done implicitly by the constructor. If
7422 the bitfield does not meet either of those conditions,
7423 we can't do this optimization. */
7424 && (! DECL_BIT_FIELD (field)
7425 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7426 && (GET_MODE_BITSIZE (DECL_MODE (field))
7427 <= HOST_BITS_PER_WIDE_INT))))
7429 if (DECL_BIT_FIELD (field)
7430 && modifier == EXPAND_STACK_PARM)
7431 target = 0;
7432 op0 = expand_expr (value, target, tmode, modifier);
7433 if (DECL_BIT_FIELD (field))
7435 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7436 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7438 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7440 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7441 op0 = expand_and (imode, op0, op1, target);
7443 else
7445 tree count
7446 = build_int_cst (NULL_TREE,
7447 GET_MODE_BITSIZE (imode) - bitsize);
7449 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7450 target, 0);
7451 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7452 target, 0);
7456 return op0;
7459 goto normal_inner_ref;
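/* Common code for component-like references.  get_inner_reference
   decomposes EXP into a base object plus a constant bit position and
   size and an optional variable offset; we expand the base first and
   then pull the addressed piece out of it, either by adjusting a MEM
   or with an explicit bit-field extraction.  */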
7461 case BIT_FIELD_REF:
7462 case ARRAY_RANGE_REF:
7463 normal_inner_ref:
7465 enum machine_mode mode1;
7466 HOST_WIDE_INT bitsize, bitpos;
7467 tree offset;
7468 int volatilep = 0;
7469 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7470 &mode1, &unsignedp, &volatilep, true);
7471 rtx orig_op0;
7473 /* If we got back the original object, something is wrong. Perhaps
7474 we are evaluating an expression too early. In any event, don't
7475 infinitely recurse. */
7476 gcc_assert (tem != exp);
7478 /* If TEM's type is a union of variable size, pass TARGET to the inner
7479 computation, since it will need a temporary and TARGET is known
7480 to be suitable for that.  This occurs in unchecked conversion in Ada. */
7482 orig_op0 = op0
7483 = expand_expr (tem,
7484 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7485 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7486 != INTEGER_CST)
7487 && modifier != EXPAND_STACK_PARM
7488 ? target : NULL_RTX),
7489 VOIDmode,
7490 (modifier == EXPAND_INITIALIZER
7491 || modifier == EXPAND_CONST_ADDRESS
7492 || modifier == EXPAND_STACK_PARM)
7493 ? modifier : EXPAND_NORMAL);
7495 /* If this is a constant, put it into a register if it is a legitimate
7496 constant, OFFSET is 0, and we won't try to extract outside the
7497 register (in case we were passed a partially uninitialized object
7498 or a view_conversion to a larger size). Force the constant to
7499 memory otherwise. */
7500 if (CONSTANT_P (op0))
7502 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7503 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7504 && offset == 0
7505 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7506 op0 = force_reg (mode, op0);
7507 else
7508 op0 = validize_mem (force_const_mem (mode, op0));
7511 /* Otherwise, if this object is not in memory and we either have an
7512 offset, a BLKmode result, or a reference outside the object, put it
7513 there. Such cases can occur in Ada if we have unchecked conversion
7514 of an expression from a scalar type to an array or record type or
7515 for an ARRAY_RANGE_REF whose type is BLKmode. */
7516 else if (!MEM_P (op0)
7517 && (offset != 0
7518 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7519 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7521 tree nt = build_qualified_type (TREE_TYPE (tem),
7522 (TYPE_QUALS (TREE_TYPE (tem))
7523 | TYPE_QUAL_CONST));
7524 rtx memloc = assign_temp (nt, 1, 1, 1);
7526 emit_move_insn (memloc, op0);
7527 op0 = memloc;
7530 if (offset != 0)
7532 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7533 EXPAND_SUM);
7535 gcc_assert (MEM_P (op0));
7537 #ifdef POINTERS_EXTEND_UNSIGNED
7538 if (GET_MODE (offset_rtx) != Pmode)
7539 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7540 #else
7541 if (GET_MODE (offset_rtx) != ptr_mode)
7542 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7543 #endif
7545 if (GET_MODE (op0) == BLKmode
7546 /* A constant address in OP0 can have VOIDmode, we must
7547 not try to call force_reg in that case. */
7548 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7549 && bitsize != 0
7550 && (bitpos % bitsize) == 0
7551 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7552 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7554 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7555 bitpos = 0;
7558 op0 = offset_address (op0, offset_rtx,
7559 highest_pow2_factor (offset));
7562 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7563 record its alignment as BIGGEST_ALIGNMENT. */
7564 if (MEM_P (op0) && bitpos == 0 && offset != 0
7565 && is_aligning_offset (offset, tem))
7566 set_mem_align (op0, BIGGEST_ALIGNMENT);
7568 /* Don't forget about volatility even if this is a bitfield. */
7569 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7571 if (op0 == orig_op0)
7572 op0 = copy_rtx (op0);
7574 MEM_VOLATILE_P (op0) = 1;
7577 /* The following code doesn't handle CONCAT.
7578 Assume only bitpos == 0 can be used for CONCAT, due to
7579 one-element arrays having the same mode as their element. */
7580 if (GET_CODE (op0) == CONCAT)
7582 gcc_assert (bitpos == 0
7583 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7584 return op0;
7587 /* In cases where an aligned union has an unaligned object
7588 as a field, we might be extracting a BLKmode value from
7589 an integer-mode (e.g., SImode) object. Handle this case
7590 by doing the extract into an object as wide as the field
7591 (which we know to be the width of a basic mode), then
7592 storing into memory, and changing the mode to BLKmode. */
7593 if (mode1 == VOIDmode
7594 || REG_P (op0) || GET_CODE (op0) == SUBREG
7595 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7596 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7597 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7598 && modifier != EXPAND_CONST_ADDRESS
7599 && modifier != EXPAND_INITIALIZER)
7600 /* If the field isn't aligned enough to fetch as a memref,
7601 fetch it as a bit field. */
7602 || (mode1 != BLKmode
7603 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7604 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7605 || (MEM_P (op0)
7606 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7607 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7608 && ((modifier == EXPAND_CONST_ADDRESS
7609 || modifier == EXPAND_INITIALIZER)
7610 ? STRICT_ALIGNMENT
7611 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7612 || (bitpos % BITS_PER_UNIT != 0)))
7613 /* If the type and the field are a constant size and the
7614 size of the type isn't the same size as the bitfield,
7615 we must use bitfield operations. */
7616 || (bitsize >= 0
7617 && TYPE_SIZE (TREE_TYPE (exp))
7618 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7619 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7620 bitsize)))
7622 enum machine_mode ext_mode = mode;
7624 if (ext_mode == BLKmode
7625 && ! (target != 0 && MEM_P (op0)
7626 && MEM_P (target)
7627 && bitpos % BITS_PER_UNIT == 0))
7628 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7630 if (ext_mode == BLKmode)
7632 if (target == 0)
7633 target = assign_temp (type, 0, 1, 1);
7635 if (bitsize == 0)
7636 return target;
7638 /* In this case, BITPOS must start at a byte boundary and
7639 TARGET, if specified, must be a MEM. */
7640 gcc_assert (MEM_P (op0)
7641 && (!target || MEM_P (target))
7642 && !(bitpos % BITS_PER_UNIT));
7644 emit_block_move (target,
7645 adjust_address (op0, VOIDmode,
7646 bitpos / BITS_PER_UNIT),
7647 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7648 / BITS_PER_UNIT),
7649 (modifier == EXPAND_STACK_PARM
7650 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7652 return target;
7655 op0 = validize_mem (op0);
7657 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7658 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7660 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7661 (modifier == EXPAND_STACK_PARM
7662 ? NULL_RTX : target),
7663 ext_mode, ext_mode);
7665 /* If the result is a record type and BITSIZE is narrower than
7666 the mode of OP0, an integral mode, and this is a big endian
7667 machine, we must put the field into the high-order bits. */
7668 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7669 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7670 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7671 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7672 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7673 - bitsize),
7674 op0, 1);
7676 /* If the result type is BLKmode, store the data into a temporary
7677 of the appropriate type, but with the mode corresponding to the
7678 mode for the data we have (op0's mode). It's tempting to make
7679 this a constant type, since we know it's only being stored once,
7680 but that can cause problems if we are taking the address of this
7681 COMPONENT_REF because the MEM of any reference via that address
7682 will have flags corresponding to the type, which will not
7683 necessarily be constant. */
7684 if (mode == BLKmode)
7686 rtx new
7687 = assign_stack_temp_for_type
7688 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7690 emit_move_insn (new, op0);
7691 op0 = copy_rtx (new);
7692 PUT_MODE (op0, BLKmode);
7693 set_mem_attributes (op0, exp, 1);
7696 return op0;
7699 /* If the result is BLKmode, use that to access the object
7700 now as well. */
7701 if (mode == BLKmode)
7702 mode1 = BLKmode;
7704 /* Get a reference to just this component. */
7705 if (modifier == EXPAND_CONST_ADDRESS
7706 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7707 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7708 else
7709 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7711 if (op0 == orig_op0)
7712 op0 = copy_rtx (op0);
7714 set_mem_attributes (op0, exp, 0);
7715 if (REG_P (XEXP (op0, 0)))
7716 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7718 MEM_VOLATILE_P (op0) |= volatilep;
7719 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7720 || modifier == EXPAND_CONST_ADDRESS
7721 || modifier == EXPAND_INITIALIZER)
7722 return op0;
7723 else if (target == 0)
7724 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7726 convert_move (target, op0, unsignedp);
7727 return target;
7730 case OBJ_TYPE_REF:
7731 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7733 case CALL_EXPR:
7734 /* Check for a built-in function. */
7735 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7736 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7737 == FUNCTION_DECL)
7738 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7740 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7741 == BUILT_IN_FRONTEND)
7742 return lang_hooks.expand_expr (exp, original_target,
7743 tmode, modifier,
7744 alt_rtl);
7745 else
7746 return expand_builtin (exp, target, subtarget, tmode, ignore);
7749 return expand_call (exp, target, ignore);
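/* Type conversions.  A conversion to a union type stores the operand
   into the appropriate field and returns the whole union; otherwise we
   expand the operand and, if its mode differs from the result mode,
   convert it.  */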
7751 case NON_LVALUE_EXPR:
7752 case NOP_EXPR:
7753 case CONVERT_EXPR:
7754 if (TREE_OPERAND (exp, 0) == error_mark_node)
7755 return const0_rtx;
7757 if (TREE_CODE (type) == UNION_TYPE)
7759 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7761 /* If both input and output are BLKmode, this conversion isn't doing
7762 anything except possibly changing memory attribute. */
7763 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7765 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7766 modifier);
7768 result = copy_rtx (result);
7769 set_mem_attributes (result, exp, 0);
7770 return result;
7773 if (target == 0)
7775 if (TYPE_MODE (type) != BLKmode)
7776 target = gen_reg_rtx (TYPE_MODE (type));
7777 else
7778 target = assign_temp (type, 0, 1, 1);
7781 if (MEM_P (target))
7782 /* Store data into beginning of memory target. */
7783 store_expr (TREE_OPERAND (exp, 0),
7784 adjust_address (target, TYPE_MODE (valtype), 0),
7785 modifier == EXPAND_STACK_PARM);
7787 else
7789 gcc_assert (REG_P (target));
7791 /* Store this field into a union of the proper type. */
7792 store_field (target,
7793 MIN ((int_size_in_bytes (TREE_TYPE
7794 (TREE_OPERAND (exp, 0)))
7795 * BITS_PER_UNIT),
7796 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7797 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7798 type, 0);
7801 /* Return the entire union. */
7802 return target;
7805 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7807 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7808 modifier);
7810 /* If the signedness of the conversion differs and OP0 is
7811 a promoted SUBREG, clear that indication since we now
7812 have to do the proper extension. */
7813 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7814 && GET_CODE (op0) == SUBREG)
7815 SUBREG_PROMOTED_VAR_P (op0) = 0;
7817 return REDUCE_BIT_FIELD (op0);
7820 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
7821 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7822 if (GET_MODE (op0) == mode)
7825 /* If OP0 is a constant, just convert it into the proper mode. */
7826 else if (CONSTANT_P (op0))
7828 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7829 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7831 if (modifier == EXPAND_INITIALIZER)
7832 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7833 subreg_lowpart_offset (mode,
7834 inner_mode));
7835 else
7836 op0 = convert_modes (mode, inner_mode, op0,
7837 TYPE_UNSIGNED (inner_type));
7840 else if (modifier == EXPAND_INITIALIZER)
7841 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7843 else if (target == 0)
7844 op0 = convert_to_mode (mode, op0,
7845 TYPE_UNSIGNED (TREE_TYPE
7846 (TREE_OPERAND (exp, 0))));
7847 else
7849 convert_move (target, op0,
7850 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7851 op0 = target;
7854 return REDUCE_BIT_FIELD (op0);
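/* VIEW_CONVERT_EXPR reinterprets the bits of its operand as a value of
   another type without changing them, so no conversion instructions are
   emitted; we only have to present the operand in the result mode,
   spilling it to memory and reloading it as a last resort.  */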
7856 case VIEW_CONVERT_EXPR:
7857 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7859 /* If the input and output modes are both the same, we are done. */
7860 if (TYPE_MODE (type) == GET_MODE (op0))
7862 /* If neither mode is BLKmode, and both modes are the same size
7863 then we can use gen_lowpart. */
7864 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7865 && GET_MODE_SIZE (TYPE_MODE (type))
7866 == GET_MODE_SIZE (GET_MODE (op0)))
7868 if (GET_CODE (op0) == SUBREG)
7869 op0 = force_reg (GET_MODE (op0), op0);
7870 op0 = gen_lowpart (TYPE_MODE (type), op0);
7872 /* If both modes are integral, then we can convert from one to the
7873 other. */
7874 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7875 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7876 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7877 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7878 /* As a last resort, spill op0 to memory, and reload it in a
7879 different mode. */
7880 else if (!MEM_P (op0))
7882 /* If the operand is not a MEM, force it into memory. Since we
7883 are going to be changing the mode of the MEM, don't call
7884 force_const_mem for constants because we don't allow pool
7885 constants to change mode. */
7886 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7888 gcc_assert (!TREE_ADDRESSABLE (exp));
7890 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7891 target
7892 = assign_stack_temp_for_type
7893 (TYPE_MODE (inner_type),
7894 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7896 emit_move_insn (target, op0);
7897 op0 = target;
7900 /* At this point, OP0 is in the correct mode. If the output type is such
7901 that the operand is known to be aligned, indicate that it is.
7902 Otherwise, we need only be concerned about alignment for non-BLKmode
7903 results. */
7904 if (MEM_P (op0))
7906 op0 = copy_rtx (op0);
7908 if (TYPE_ALIGN_OK (type))
7909 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7910 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7911 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7913 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7914 HOST_WIDE_INT temp_size
7915 = MAX (int_size_in_bytes (inner_type),
7916 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7917 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7918 temp_size, 0, type);
7919 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7921 gcc_assert (!TREE_ADDRESSABLE (exp));
7923 if (GET_MODE (op0) == BLKmode)
7924 emit_block_move (new_with_op0_mode, op0,
7925 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7926 (modifier == EXPAND_STACK_PARM
7927 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7928 else
7929 emit_move_insn (new_with_op0_mode, op0);
7931 op0 = new;
7934 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7937 return op0;
7939 case PLUS_EXPR:
7940 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7941 something else, make sure we add the register to the constant and
7942 then to the other thing. This case can occur during strength
7943 reduction and doing it this way will produce better code if the
7944 frame pointer or argument pointer is eliminated.
7946 fold-const.c will ensure that the constant is always in the inner
7947 PLUS_EXPR, so the only case we need to do anything about is if
7948 sp, ap, or fp is our second argument, in which case we must swap
7949 the innermost first argument and our second argument. */
7951 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7952 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7953 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7954 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7955 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7956 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7958 tree t = TREE_OPERAND (exp, 1);
7960 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7961 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7964 /* If the result is to be ptr_mode and we are adding an integer to
7965 something, we might be forming a constant. So try to use
7966 plus_constant. If it produces a sum and we can't accept it,
7967 use force_operand. This allows P = &ARR[const] to generate
7968 efficient code on machines where a SYMBOL_REF is not a valid
7969 address.
7971 If this is an EXPAND_SUM call, always return the sum. */
7972 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7973 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7975 if (modifier == EXPAND_STACK_PARM)
7976 target = 0;
7977 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7978 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7979 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7981 rtx constant_part;
7983 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7984 EXPAND_SUM);
7985 /* Use immed_double_const to ensure that the constant is
7986 truncated according to the mode of OP1, then sign extended
7987 to a HOST_WIDE_INT. Using the constant directly can result
7988 in non-canonical RTL in a 64x32 cross compile. */
7989 constant_part
7990 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7991 (HOST_WIDE_INT) 0,
7992 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7993 op1 = plus_constant (op1, INTVAL (constant_part));
7994 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7995 op1 = force_operand (op1, target);
7996 return REDUCE_BIT_FIELD (op1);
7999 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8000 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8001 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8003 rtx constant_part;
8005 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8006 (modifier == EXPAND_INITIALIZER
8007 ? EXPAND_INITIALIZER : EXPAND_SUM));
8008 if (! CONSTANT_P (op0))
8010 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8011 VOIDmode, modifier);
8012 /* Return a PLUS if modifier says it's OK. */
8013 if (modifier == EXPAND_SUM
8014 || modifier == EXPAND_INITIALIZER)
8015 return simplify_gen_binary (PLUS, mode, op0, op1);
8016 goto binop2;
8018 /* Use immed_double_const to ensure that the constant is
8019 truncated according to the mode of OP1, then sign extended
8020 to a HOST_WIDE_INT. Using the constant directly can result
8021 in non-canonical RTL in a 64x32 cross compile. */
8022 constant_part
8023 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8024 (HOST_WIDE_INT) 0,
8025 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8026 op0 = plus_constant (op0, INTVAL (constant_part));
8027 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8028 op0 = force_operand (op0, target);
8029 return REDUCE_BIT_FIELD (op0);
8033 /* No sense saving up arithmetic to be done
8034 if it's all in the wrong mode to form part of an address.
8035 And force_operand won't know whether to sign-extend or
8036 zero-extend. */
8037 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8038 || mode != ptr_mode)
8040 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8041 subtarget, &op0, &op1, 0);
8042 if (op0 == const0_rtx)
8043 return op1;
8044 if (op1 == const0_rtx)
8045 return op0;
8046 goto binop2;
8049 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8050 subtarget, &op0, &op1, modifier);
8051 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8053 case MINUS_EXPR:
8054 /* For initializers, we are allowed to return a MINUS of two
8055 symbolic constants. Here we handle all cases when both operands
8056 are constant. */
8057 /* Handle difference of two symbolic constants,
8058 for the sake of an initializer. */
8059 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8060 && really_constant_p (TREE_OPERAND (exp, 0))
8061 && really_constant_p (TREE_OPERAND (exp, 1)))
8063 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8064 NULL_RTX, &op0, &op1, modifier);
8066 /* If the last operand is a CONST_INT, use plus_constant of
8067 the negated constant. Else make the MINUS. */
8068 if (GET_CODE (op1) == CONST_INT)
8069 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8070 else
8071 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8074 /* No sense saving up arithmetic to be done
8075 if it's all in the wrong mode to form part of an address.
8076 And force_operand won't know whether to sign-extend or
8077 zero-extend. */
8078 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8079 || mode != ptr_mode)
8080 goto binop;
8082 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8083 subtarget, &op0, &op1, modifier);
8085 /* Convert A - const to A + (-const). */
8086 if (GET_CODE (op1) == CONST_INT)
8088 op1 = negate_rtx (mode, op1);
8089 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8092 goto binop2;
8094 case MULT_EXPR:
8095 /* If the first operand is constant, swap the operands.
8096 Thus the special-case checks that follow need only
8097 examine the second operand. */
8098 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8100 tree t1 = TREE_OPERAND (exp, 0);
8101 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8102 TREE_OPERAND (exp, 1) = t1;
8105 /* Attempt to return something suitable for generating an
8106 indexed address, for machines that support that. */
8108 if (modifier == EXPAND_SUM && mode == ptr_mode
8109 && host_integerp (TREE_OPERAND (exp, 1), 0))
8111 tree exp1 = TREE_OPERAND (exp, 1);
8113 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8114 EXPAND_SUM);
8116 if (!REG_P (op0))
8117 op0 = force_operand (op0, NULL_RTX);
8118 if (!REG_P (op0))
8119 op0 = copy_to_mode_reg (mode, op0);
8121 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8122 gen_int_mode (tree_low_cst (exp1, 0),
8123 TYPE_MODE (TREE_TYPE (exp1)))));
8126 if (modifier == EXPAND_STACK_PARM)
8127 target = 0;
8129 /* Check for multiplying things that have been extended
8130 from a narrower type. If this machine supports multiplying
8131 in that narrower type with a result in the desired type,
8132 do it that way, and avoid the explicit type-conversion. */
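/* For example, (int) a * (int) b with a and b of type short can, on a
   typical target where short is HImode and int is SImode, be done with a
   single widening HImode multiply producing an SImode result instead of
   extending both operands and doing a full SImode multiply.  */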
8134 subexp0 = TREE_OPERAND (exp, 0);
8135 subexp1 = TREE_OPERAND (exp, 1);
8136 /* First, check if we have a multiplication of one signed and one
8137 unsigned operand. */
8138 if (TREE_CODE (subexp0) == NOP_EXPR
8139 && TREE_CODE (subexp1) == NOP_EXPR
8140 && TREE_CODE (type) == INTEGER_TYPE
8141 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8142 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8143 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8144 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8145 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8146 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8148 enum machine_mode innermode
8149 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8150 this_optab = usmul_widen_optab;
8151 if (mode == GET_MODE_WIDER_MODE (innermode))
8153 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8155 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8156 expand_operands (TREE_OPERAND (subexp0, 0),
8157 TREE_OPERAND (subexp1, 0),
8158 NULL_RTX, &op0, &op1, 0);
8159 else
8160 expand_operands (TREE_OPERAND (subexp0, 0),
8161 TREE_OPERAND (subexp1, 0),
8162 NULL_RTX, &op1, &op0, 0);
8164 goto binop3;
8168 /* Check for a multiplication with matching signedness. */
8169 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8170 && TREE_CODE (type) == INTEGER_TYPE
8171 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8172 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8173 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8174 && int_fits_type_p (TREE_OPERAND (exp, 1),
8175 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8176 /* Don't use a widening multiply if a shift will do. */
8177 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8178 > HOST_BITS_PER_WIDE_INT)
8179 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8181 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8182 && (TYPE_PRECISION (TREE_TYPE
8183 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8184 == TYPE_PRECISION (TREE_TYPE
8185 (TREE_OPERAND
8186 (TREE_OPERAND (exp, 0), 0))))
8187 /* If both operands are extended, they must either both
8188 be zero-extended or both be sign-extended. */
8189 && (TYPE_UNSIGNED (TREE_TYPE
8190 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8191 == TYPE_UNSIGNED (TREE_TYPE
8192 (TREE_OPERAND
8193 (TREE_OPERAND (exp, 0), 0)))))))
8195 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8196 enum machine_mode innermode = TYPE_MODE (op0type);
8197 bool zextend_p = TYPE_UNSIGNED (op0type);
8198 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8199 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8201 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8203 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8205 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8206 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8207 TREE_OPERAND (exp, 1),
8208 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8209 else
8210 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8211 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8212 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8213 goto binop3;
8215 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8216 && innermode == word_mode)
8218 rtx htem, hipart;
8219 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8220 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8221 op1 = convert_modes (innermode, mode,
8222 expand_normal (TREE_OPERAND (exp, 1)),
8223 unsignedp);
8224 else
8225 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8226 temp = expand_binop (mode, other_optab, op0, op1, target,
8227 unsignedp, OPTAB_LIB_WIDEN);
8228 hipart = gen_highpart (innermode, temp);
8229 htem = expand_mult_highpart_adjust (innermode, hipart,
8230 op0, op1, hipart,
8231 zextend_p);
8232 if (htem != hipart)
8233 emit_move_insn (hipart, htem);
8234 return REDUCE_BIT_FIELD (temp);
8238 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8239 subtarget, &op0, &op1, 0);
8240 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
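/* Integer division.  expand_divmod does the real work; for constant
   divisors it can use shifts or multiply-by-reciprocal sequences instead
   of an actual division where that is profitable.  */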
8242 case TRUNC_DIV_EXPR:
8243 case FLOOR_DIV_EXPR:
8244 case CEIL_DIV_EXPR:
8245 case ROUND_DIV_EXPR:
8246 case EXACT_DIV_EXPR:
8247 if (modifier == EXPAND_STACK_PARM)
8248 target = 0;
8249 /* Possible optimization: compute the dividend with EXPAND_SUM;
8250 then, if the divisor is constant, we could optimize the case
8251 where some terms of the dividend have coefficients divisible by it. */
8252 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8253 subtarget, &op0, &op1, 0);
8254 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8256 case RDIV_EXPR:
8257 goto binop;
8259 case TRUNC_MOD_EXPR:
8260 case FLOOR_MOD_EXPR:
8261 case CEIL_MOD_EXPR:
8262 case ROUND_MOD_EXPR:
8263 if (modifier == EXPAND_STACK_PARM)
8264 target = 0;
8265 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8266 subtarget, &op0, &op1, 0);
8267 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8269 case FIX_ROUND_EXPR:
8270 case FIX_FLOOR_EXPR:
8271 case FIX_CEIL_EXPR:
8272 gcc_unreachable (); /* Not used for C. */
8274 case FIX_TRUNC_EXPR:
8275 op0 = expand_normal (TREE_OPERAND (exp, 0));
8276 if (target == 0 || modifier == EXPAND_STACK_PARM)
8277 target = gen_reg_rtx (mode);
8278 expand_fix (target, op0, unsignedp);
8279 return target;
8281 case FLOAT_EXPR:
8282 op0 = expand_normal (TREE_OPERAND (exp, 0));
8283 if (target == 0 || modifier == EXPAND_STACK_PARM)
8284 target = gen_reg_rtx (mode);
8285 /* expand_float can't figure out what to do if FROM has VOIDmode.
8286 So give it the correct mode. With -O, cse will optimize this. */
8287 if (GET_MODE (op0) == VOIDmode)
8288 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8289 op0);
8290 expand_float (target, op0,
8291 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8292 return target;
8294 case NEGATE_EXPR:
8295 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8296 if (modifier == EXPAND_STACK_PARM)
8297 target = 0;
8298 temp = expand_unop (mode,
8299 optab_for_tree_code (NEGATE_EXPR, type),
8300 op0, target, 0);
8301 gcc_assert (temp);
8302 return REDUCE_BIT_FIELD (temp);
8304 case ABS_EXPR:
8305 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8306 if (modifier == EXPAND_STACK_PARM)
8307 target = 0;
8309 /* ABS_EXPR is not valid for complex arguments. */
8310 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8311 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8313 /* Unsigned abs is simply the operand. Testing here means we don't
8314 risk generating incorrect code below. */
8315 if (TYPE_UNSIGNED (type))
8316 return op0;
8318 return expand_abs (mode, op0, target, unsignedp,
8319 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8321 case MAX_EXPR:
8322 case MIN_EXPR:
8323 target = original_target;
8324 if (target == 0
8325 || modifier == EXPAND_STACK_PARM
8326 || (MEM_P (target) && MEM_VOLATILE_P (target))
8327 || GET_MODE (target) != mode
8328 || (REG_P (target)
8329 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8330 target = gen_reg_rtx (mode);
8331 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8332 target, &op0, &op1, 0);
8334 /* First try to do it with a special MIN or MAX instruction.
8335 If that does not win, use a conditional jump to select the proper
8336 value. */
8337 this_optab = optab_for_tree_code (code, type);
8338 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8339 OPTAB_WIDEN);
8340 if (temp != 0)
8341 return temp;
8343 /* At this point, a MEM target is no longer useful; we will get better
8344 code without it. */
8346 if (! REG_P (target))
8347 target = gen_reg_rtx (mode);
8349 /* If op1 was placed in target, swap op0 and op1. */
8350 if (target != op0 && target == op1)
8352 temp = op0;
8353 op0 = op1;
8354 op1 = temp;
8357 /* We generate better code and avoid problems with op1 mentioning
8358 target by forcing op1 into a pseudo if it isn't a constant. */
8359 if (! CONSTANT_P (op1))
8360 op1 = force_reg (mode, op1);
8363 enum rtx_code comparison_code;
8364 rtx cmpop1 = op1;
8366 if (code == MAX_EXPR)
8367 comparison_code = unsignedp ? GEU : GE;
8368 else
8369 comparison_code = unsignedp ? LEU : LE;
8371 /* Canonicalize to comparisons against 0. */
8372 if (op1 == const1_rtx)
8374 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8375 or (a != 0 ? a : 1) for unsigned.
8376 For MIN we are safe converting (a <= 1 ? a : 1)
8377 into (a <= 0 ? a : 1) */
8378 cmpop1 = const0_rtx;
8379 if (code == MAX_EXPR)
8380 comparison_code = unsignedp ? NE : GT;
8382 if (op1 == constm1_rtx && !unsignedp)
8384 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8385 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8386 cmpop1 = const0_rtx;
8387 if (code == MIN_EXPR)
8388 comparison_code = LT;
8390 #ifdef HAVE_conditional_move
8391 /* Use a conditional move if possible. */
8392 if (can_conditionally_move_p (mode))
8394 rtx insn;
8396 /* ??? Same problem as in expmed.c: emit_conditional_move
8397 forces a stack adjustment via compare_from_rtx, and we
8398 lose the stack adjustment if the sequence we are about
8399 to create is discarded. */
8400 do_pending_stack_adjust ();
8402 start_sequence ();
8404 /* Try to emit the conditional move. */
8405 insn = emit_conditional_move (target, comparison_code,
8406 op0, cmpop1, mode,
8407 op0, op1, mode,
8408 unsignedp);
8410 /* If we could do the conditional move, emit the sequence,
8411 and return. */
8412 if (insn)
8414 rtx seq = get_insns ();
8415 end_sequence ();
8416 emit_insn (seq);
8417 return target;
8420 /* Otherwise discard the sequence and fall back to code with
8421 branches. */
8422 end_sequence ();
8424 #endif
8425 if (target != op0)
8426 emit_move_insn (target, op0);
8428 temp = gen_label_rtx ();
8429 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8430 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8432 emit_move_insn (target, op1);
8433 emit_label (temp);
8434 return target;
8436 case BIT_NOT_EXPR:
8437 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8438 if (modifier == EXPAND_STACK_PARM)
8439 target = 0;
8440 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8441 gcc_assert (temp);
8442 return temp;
8444 /* ??? Could optimize bitwise operations with one constant operand.
8445 Could also optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8446 and (a bitwise1 b) bitwise2 b (etc.),
8447 but that is probably not worthwhile. */
8449 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8450 boolean values when we want in all cases to compute both of them. In
8451 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8452 as actual zero-or-1 values and then bitwise anding. In cases where
8453 there cannot be any side effects, better code would be made by
8454 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8455 how to recognize those cases. */
8457 case TRUTH_AND_EXPR:
8458 code = BIT_AND_EXPR;
8459 case BIT_AND_EXPR:
8460 goto binop;
8462 case TRUTH_OR_EXPR:
8463 code = BIT_IOR_EXPR;
8464 case BIT_IOR_EXPR:
8465 goto binop;
8467 case TRUTH_XOR_EXPR:
8468 code = BIT_XOR_EXPR;
8469 case BIT_XOR_EXPR:
8470 goto binop;
8472 case LSHIFT_EXPR:
8473 case RSHIFT_EXPR:
8474 case LROTATE_EXPR:
8475 case RROTATE_EXPR:
8476 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8477 subtarget = 0;
8478 if (modifier == EXPAND_STACK_PARM)
8479 target = 0;
8480 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8481 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8482 unsignedp);
8484 /* Could determine the answer when only additive constants differ. Also,
8485 the addition of one can be handled by changing the condition. */
8486 case LT_EXPR:
8487 case LE_EXPR:
8488 case GT_EXPR:
8489 case GE_EXPR:
8490 case EQ_EXPR:
8491 case NE_EXPR:
8492 case UNORDERED_EXPR:
8493 case ORDERED_EXPR:
8494 case UNLT_EXPR:
8495 case UNLE_EXPR:
8496 case UNGT_EXPR:
8497 case UNGE_EXPR:
8498 case UNEQ_EXPR:
8499 case LTGT_EXPR:
8500 temp = do_store_flag (exp,
8501 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8502 tmode != VOIDmode ? tmode : mode, 0);
8503 if (temp != 0)
8504 return temp;
8506 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8507 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8508 && original_target
8509 && REG_P (original_target)
8510 && (GET_MODE (original_target)
8511 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8513 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8514 VOIDmode, 0);
8516 /* If temp is constant, we can just compute the result. */
8517 if (GET_CODE (temp) == CONST_INT)
8519 if (INTVAL (temp) != 0)
8520 emit_move_insn (target, const1_rtx);
8521 else
8522 emit_move_insn (target, const0_rtx);
8524 return target;
8527 if (temp != original_target)
8529 enum machine_mode mode1 = GET_MODE (temp);
8530 if (mode1 == VOIDmode)
8531 mode1 = tmode != VOIDmode ? tmode : mode;
8533 temp = copy_to_mode_reg (mode1, temp);
8536 op1 = gen_label_rtx ();
8537 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8538 GET_MODE (temp), unsignedp, op1);
8539 emit_move_insn (temp, const1_rtx);
8540 emit_label (op1);
8541 return temp;
8544 /* If no set-flag instruction, must generate a conditional store
8545 into a temporary variable. Drop through and handle this
8546 like && and ||. */
8548 if (! ignore
8549 && (target == 0
8550 || modifier == EXPAND_STACK_PARM
8551 || ! safe_from_p (target, exp, 1)
8552 /* Make sure we don't have a hard reg (such as function's return
8553 value) live across basic blocks, if not optimizing. */
8554 || (!optimize && REG_P (target)
8555 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8556 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8558 if (target)
8559 emit_move_insn (target, const0_rtx);
8561 op1 = gen_label_rtx ();
8562 jumpifnot (exp, op1);
8564 if (target)
8565 emit_move_insn (target, const1_rtx);
8567 emit_label (op1);
8568 return ignore ? const0_rtx : target;
8570 case TRUTH_NOT_EXPR:
8571 if (modifier == EXPAND_STACK_PARM)
8572 target = 0;
8573 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8574 /* The parser is careful to generate TRUTH_NOT_EXPR
8575 only with operands that are always zero or one. */
8576 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8577 target, 1, OPTAB_LIB_WIDEN);
8578 gcc_assert (temp);
8579 return temp;
8581 case STATEMENT_LIST:
8583 tree_stmt_iterator iter;
8585 gcc_assert (ignore);
8587 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8588 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8590 return const0_rtx;
8592 case COND_EXPR:
8593 /* A COND_EXPR with its type being VOID_TYPE represents a
8594 conditional jump and is handled in
8595 expand_gimple_cond_expr. */
8596 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8598 /* Note that COND_EXPRs whose type is a structure or union
8599 are required to be constructed to contain assignments of
8600 a temporary variable, so that we can evaluate them here
8601 for side effect only. If type is void, we must do likewise. */
8603 gcc_assert (!TREE_ADDRESSABLE (type)
8604 && !ignore
8605 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8606 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8608 /* If we are not to produce a result, we have no target. Otherwise,
8609 if a target was specified use it; it will not be used as an
8610 intermediate target unless it is safe. If no target, use a
8611 temporary. */
8613 if (modifier != EXPAND_STACK_PARM
8614 && original_target
8615 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8616 && GET_MODE (original_target) == mode
8617 #ifdef HAVE_conditional_move
8618 && (! can_conditionally_move_p (mode)
8619 || REG_P (original_target))
8620 #endif
8621 && !MEM_P (original_target))
8622 temp = original_target;
8623 else
8624 temp = assign_temp (type, 0, 0, 1);
8626 do_pending_stack_adjust ();
8627 NO_DEFER_POP;
8628 op0 = gen_label_rtx ();
8629 op1 = gen_label_rtx ();
8630 jumpifnot (TREE_OPERAND (exp, 0), op0);
8631 store_expr (TREE_OPERAND (exp, 1), temp,
8632 modifier == EXPAND_STACK_PARM);
8634 emit_jump_insn (gen_jump (op1));
8635 emit_barrier ();
8636 emit_label (op0);
8637 store_expr (TREE_OPERAND (exp, 2), temp,
8638 modifier == EXPAND_STACK_PARM);
8640 emit_label (op1);
8641 OK_DEFER_POP;
8642 return temp;
8644 case VEC_COND_EXPR:
8645 target = expand_vec_cond_expr (exp, target);
8646 return target;
8648 case MODIFY_EXPR:
8650 tree lhs = TREE_OPERAND (exp, 0);
8651 tree rhs = TREE_OPERAND (exp, 1);
8653 gcc_assert (ignore);
8655 /* Check for |= or &= of a bitfield of size one into another bitfield
8656 of size 1. In this case, (unless we need the result of the
8657 assignment) we can do this more efficiently with a
8658 test followed by an assignment, if necessary.
8660 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8661 things change so we do, this code should be enhanced to
8662 support it. */
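/* For example, "a.b |= c.d" with single-bit fields becomes
   "if (c.d) a.b = 1;", and "a.b &= c.d" becomes "if (!c.d) a.b = 0;".  */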
8663 if (TREE_CODE (lhs) == COMPONENT_REF
8664 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8665 || TREE_CODE (rhs) == BIT_AND_EXPR)
8666 && TREE_OPERAND (rhs, 0) == lhs
8667 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8668 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8669 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8671 rtx label = gen_label_rtx ();
8672 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
8673 do_jump (TREE_OPERAND (rhs, 1),
8674 value ? label : 0,
8675 value ? 0 : label);
8676 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value));
8677 do_pending_stack_adjust ();
8678 emit_label (label);
8679 return const0_rtx;
8682 expand_assignment (lhs, rhs);
8684 return const0_rtx;
8687 case RETURN_EXPR:
8688 if (!TREE_OPERAND (exp, 0))
8689 expand_null_return ();
8690 else
8691 expand_return (TREE_OPERAND (exp, 0));
8692 return const0_rtx;
8694 case ADDR_EXPR:
8695 return expand_expr_addr_expr (exp, target, tmode, modifier);
8697 case COMPLEX_EXPR:
8698 /* Get the rtx code of the operands. */
8699 op0 = expand_normal (TREE_OPERAND (exp, 0));
8700 op1 = expand_normal (TREE_OPERAND (exp, 1));
8702 if (!target)
8703 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8705 /* Move the real (op0) and imaginary (op1) parts to their location. */
8706 write_complex_part (target, op0, false);
8707 write_complex_part (target, op1, true);
8709 return target;
8711 case REALPART_EXPR:
8712 op0 = expand_normal (TREE_OPERAND (exp, 0));
8713 return read_complex_part (op0, false);
8715 case IMAGPART_EXPR:
8716 op0 = expand_normal (TREE_OPERAND (exp, 0));
8717 return read_complex_part (op0, true);
8719 case RESX_EXPR:
8720 expand_resx_expr (exp);
8721 return const0_rtx;
8723 case TRY_CATCH_EXPR:
8724 case CATCH_EXPR:
8725 case EH_FILTER_EXPR:
8726 case TRY_FINALLY_EXPR:
8727 /* Lowered by tree-eh.c. */
8728 gcc_unreachable ();
8730 case WITH_CLEANUP_EXPR:
8731 case CLEANUP_POINT_EXPR:
8732 case TARGET_EXPR:
8733 case CASE_LABEL_EXPR:
8734 case VA_ARG_EXPR:
8735 case BIND_EXPR:
8736 case INIT_EXPR:
8737 case CONJ_EXPR:
8738 case COMPOUND_EXPR:
8739 case PREINCREMENT_EXPR:
8740 case PREDECREMENT_EXPR:
8741 case POSTINCREMENT_EXPR:
8742 case POSTDECREMENT_EXPR:
8743 case LOOP_EXPR:
8744 case EXIT_EXPR:
8745 case TRUTH_ANDIF_EXPR:
8746 case TRUTH_ORIF_EXPR:
8747 /* Lowered by gimplify.c. */
8748 gcc_unreachable ();
8750 case EXC_PTR_EXPR:
8751 return get_exception_pointer (cfun);
8753 case FILTER_EXPR:
8754 return get_exception_filter (cfun);
8756 case FDESC_EXPR:
8757 /* Function descriptors are not valid except for as
8758 initialization constants, and should not be expanded. */
8759 gcc_unreachable ();
8761 case SWITCH_EXPR:
8762 expand_case (exp);
8763 return const0_rtx;
8765 case LABEL_EXPR:
8766 expand_label (TREE_OPERAND (exp, 0));
8767 return const0_rtx;
8769 case ASM_EXPR:
8770 expand_asm_expr (exp);
8771 return const0_rtx;
8773 case WITH_SIZE_EXPR:
8774 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8775 have pulled out the size to use in whatever context it needed. */
8776 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8777 modifier, alt_rtl);
8779 case REALIGN_LOAD_EXPR:
8781 tree oprnd0 = TREE_OPERAND (exp, 0);
8782 tree oprnd1 = TREE_OPERAND (exp, 1);
8783 tree oprnd2 = TREE_OPERAND (exp, 2);
8784 rtx op2;
8786 this_optab = optab_for_tree_code (code, type);
8787 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8788 op2 = expand_normal (oprnd2);
8789 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8790 target, unsignedp);
8791 gcc_assert (temp);
8792 return temp;
8795 case DOT_PROD_EXPR:
8797 tree oprnd0 = TREE_OPERAND (exp, 0);
8798 tree oprnd1 = TREE_OPERAND (exp, 1);
8799 tree oprnd2 = TREE_OPERAND (exp, 2);
8800 rtx op2;
8802 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8803 op2 = expand_normal (oprnd2);
8804 target = expand_widen_pattern_expr (exp, op0, op1, op2,
8805 target, unsignedp);
8806 return target;
8809 case WIDEN_SUM_EXPR:
8811 tree oprnd0 = TREE_OPERAND (exp, 0);
8812 tree oprnd1 = TREE_OPERAND (exp, 1);
8814 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8815 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
8816 target, unsignedp);
8817 return target;
8820 case REDUC_MAX_EXPR:
8821 case REDUC_MIN_EXPR:
8822 case REDUC_PLUS_EXPR:
8824 op0 = expand_normal (TREE_OPERAND (exp, 0));
8825 this_optab = optab_for_tree_code (code, type);
8826 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8827 gcc_assert (temp);
8828 return temp;
8831 case VEC_LSHIFT_EXPR:
8832 case VEC_RSHIFT_EXPR:
8834 target = expand_vec_shift_expr (exp, target);
8835 return target;
8838 default:
8839 return lang_hooks.expand_expr (exp, original_target, tmode,
8840 modifier, alt_rtl);
8843 /* Here to do an ordinary binary operator. */
8844 binop:
8845 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8846 subtarget, &op0, &op1, 0);
8847 binop2:
8848 this_optab = optab_for_tree_code (code, type);
8849 binop3:
8850 if (modifier == EXPAND_STACK_PARM)
8851 target = 0;
8852 temp = expand_binop (mode, this_optab, op0, op1, target,
8853 unsignedp, OPTAB_LIB_WIDEN);
8854 gcc_assert (temp);
8855 return REDUCE_BIT_FIELD (temp);
8857 #undef REDUCE_BIT_FIELD
8859 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8860 signedness of TYPE), possibly returning the result in TARGET. */
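/* For example, reducing to an unsigned 3-bit precision masks the value
   with 7, while reducing to a signed 3-bit precision shifts it left and
   then arithmetically right by GET_MODE_BITSIZE (mode) - 3 so that bit 2
   is sign-extended.  */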
8861 static rtx
8862 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8864 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8865 if (target && GET_MODE (target) != GET_MODE (exp))
8866 target = 0;
8867 /* For constant values, reduce using build_int_cst_type. */
8868 if (GET_CODE (exp) == CONST_INT)
8870 HOST_WIDE_INT value = INTVAL (exp);
8871 tree t = build_int_cst_type (type, value);
8872 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
8874 else if (TYPE_UNSIGNED (type))
8876 rtx mask;
8877 if (prec < HOST_BITS_PER_WIDE_INT)
8878 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8879 GET_MODE (exp));
8880 else
8881 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8882 ((unsigned HOST_WIDE_INT) 1
8883 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8884 GET_MODE (exp));
8885 return expand_and (GET_MODE (exp), exp, mask, target);
8887 else
8889 tree count = build_int_cst (NULL_TREE,
8890 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8891 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8892 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8896 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8897 when applied to the address of EXP produces an address known to be
8898 aligned more than BIGGEST_ALIGNMENT. */
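/* Such an offset typically has the form (- EXP's address) & (ALIGN - 1),
   i.e. a NEGATE_EXPR of the ADDR_EXPR of EXP masked with one less than a
   power-of-2 alignment; that is the shape the checks below look for.  */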
8900 static int
8901 is_aligning_offset (tree offset, tree exp)
8903 /* Strip off any conversions. */
8904 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8905 || TREE_CODE (offset) == NOP_EXPR
8906 || TREE_CODE (offset) == CONVERT_EXPR)
8907 offset = TREE_OPERAND (offset, 0);
8909 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8910 a power of 2 and which is larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT. */
8911 if (TREE_CODE (offset) != BIT_AND_EXPR
8912 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8913 || compare_tree_int (TREE_OPERAND (offset, 1),
8914 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8915 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8916 return 0;
8918 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8919 It must be NEGATE_EXPR. Then strip any more conversions. */
8920 offset = TREE_OPERAND (offset, 0);
8921 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8922 || TREE_CODE (offset) == NOP_EXPR
8923 || TREE_CODE (offset) == CONVERT_EXPR)
8924 offset = TREE_OPERAND (offset, 0);
8926 if (TREE_CODE (offset) != NEGATE_EXPR)
8927 return 0;
8929 offset = TREE_OPERAND (offset, 0);
8930 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8931 || TREE_CODE (offset) == NOP_EXPR
8932 || TREE_CODE (offset) == CONVERT_EXPR)
8933 offset = TREE_OPERAND (offset, 0);
8935 /* This must now be the address of EXP. */
8936 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8939 /* Return the tree node if ARG corresponds to a string constant, or zero
8940 if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
8941 in bytes within the string that ARG is accessing.  The type of the
8942 offset will be `sizetype'. */
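/* For example, for ARG == &"hello"[2] we return the STRING_CST "hello"
   and set *PTR_OFFSET to 2; the equivalent form "hello" + 2 is handled
   too, as are variables whose initializer is a string literal.  */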
8944 tree
8945 string_constant (tree arg, tree *ptr_offset)
8947 tree array, offset;
8948 STRIP_NOPS (arg);
8950 if (TREE_CODE (arg) == ADDR_EXPR)
8952 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8954 *ptr_offset = size_zero_node;
8955 return TREE_OPERAND (arg, 0);
8957 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8959 array = TREE_OPERAND (arg, 0);
8960 offset = size_zero_node;
8962 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8964 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8965 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8966 if (TREE_CODE (array) != STRING_CST
8967 && TREE_CODE (array) != VAR_DECL)
8968 return 0;
8970 else
8971 return 0;
8973 else if (TREE_CODE (arg) == PLUS_EXPR)
8975 tree arg0 = TREE_OPERAND (arg, 0);
8976 tree arg1 = TREE_OPERAND (arg, 1);
8978 STRIP_NOPS (arg0);
8979 STRIP_NOPS (arg1);
8981 if (TREE_CODE (arg0) == ADDR_EXPR
8982 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8983 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8985 array = TREE_OPERAND (arg0, 0);
8986 offset = arg1;
8988 else if (TREE_CODE (arg1) == ADDR_EXPR
8989 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8990 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8992 array = TREE_OPERAND (arg1, 0);
8993 offset = arg0;
8995 else
8996 return 0;
8998 else
8999 return 0;
9001 if (TREE_CODE (array) == STRING_CST)
9003 *ptr_offset = fold_convert (sizetype, offset);
9004 return array;
9006 else if (TREE_CODE (array) == VAR_DECL)
9008 int length;
9010 /* Variables initialized to string literals can be handled too. */
9011 if (DECL_INITIAL (array) == NULL_TREE
9012 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9013 return 0;
9015 /* Only usable if it is read-only, non-volatile, and binds locally. */
9016 if (! TREE_READONLY (array)
9017 || TREE_SIDE_EFFECTS (array)
9018 || ! targetm.binds_local_p (array))
9019 return 0;
9021 /* Avoid const char foo[4] = "abcde"; */
9022 if (DECL_SIZE_UNIT (array) == NULL_TREE
9023 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9024 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9025 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9026 return 0;
9028 /* If the variable is bigger than the string literal, OFFSET must be constant
9029 and within the bounds of the string literal. */
9030 offset = fold_convert (sizetype, offset);
9031 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9032 && (! host_integerp (offset, 1)
9033 || compare_tree_int (offset, length) >= 0))
9034 return 0;
9036 *ptr_offset = offset;
9037 return DECL_INITIAL (array);
9040 return 0;
9043 /* Generate code to calculate EXP using a store-flag instruction
9044 and return an rtx for the result. EXP is either a comparison
9045 or a TRUTH_NOT_EXPR whose operand is a comparison.
9047 If TARGET is nonzero, store the result there if convenient.
9049 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9050 cheap.
9052 Return zero if there is no suitable set-flag instruction
9053 available on this machine.
9055 Once expand_expr has been called on the arguments of the comparison,
9056 we are committed to doing the store flag, since it is not safe to
9057 re-evaluate the expression. We emit the store-flag insn by calling
9058 emit_store_flag, but only expand the arguments if we have a reason
9059 to believe that emit_store_flag will be successful. If we think that
9060 it will, but it isn't, we have to simulate the store-flag with a
9061 set/jump/set sequence. */
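/* For example, on many targets "x != 0" can be computed with a single
   set-on-condition instruction rather than a compare, a conditional
   jump, and a pair of stores.  */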
9063 static rtx
9064 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9066 enum rtx_code code;
9067 tree arg0, arg1, type;
9068 tree tem;
9069 enum machine_mode operand_mode;
9070 int invert = 0;
9071 int unsignedp;
9072 rtx op0, op1;
9073 enum insn_code icode;
9074 rtx subtarget = target;
9075 rtx result, label;
9077 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9078 result at the end. We can't simply invert the test since it would
9079 have already been inverted if it were valid. This case occurs for
9080 some floating-point comparisons. */
9082 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9083 invert = 1, exp = TREE_OPERAND (exp, 0);
9085 arg0 = TREE_OPERAND (exp, 0);
9086 arg1 = TREE_OPERAND (exp, 1);
9088 /* Don't crash if the comparison was erroneous. */
9089 if (arg0 == error_mark_node || arg1 == error_mark_node)
9090 return const0_rtx;
9092 type = TREE_TYPE (arg0);
9093 operand_mode = TYPE_MODE (type);
9094 unsignedp = TYPE_UNSIGNED (type);
9096 /* We won't bother with BLKmode store-flag operations because it would mean
9097 passing a lot of information to emit_store_flag. */
9098 if (operand_mode == BLKmode)
9099 return 0;
9101 /* We won't bother with store-flag operations involving function pointers
9102 when function pointers must be canonicalized before comparisons. */
9103 #ifdef HAVE_canonicalize_funcptr_for_compare
9104 if (HAVE_canonicalize_funcptr_for_compare
9105 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9106 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9107 == FUNCTION_TYPE))
9108 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9109 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9110 == FUNCTION_TYPE))))
9111 return 0;
9112 #endif
9114 STRIP_NOPS (arg0);
9115 STRIP_NOPS (arg1);
9117 /* Get the rtx comparison code to use. We know that EXP is a comparison
9118 operation of some type. Some comparisons against 1 and -1 can be
9119 converted to comparisons with zero. Do so here so that the tests
9120 below will be aware that we have a comparison with zero. These
9121 tests will not catch constants in the first operand, but constants
9122 are rarely passed as the first operand. */
9124 switch (TREE_CODE (exp))
9126 case EQ_EXPR:
9127 code = EQ;
9128 break;
9129 case NE_EXPR:
9130 code = NE;
9131 break;
9132 case LT_EXPR:
9133 if (integer_onep (arg1))
9134 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9135 else
9136 code = unsignedp ? LTU : LT;
9137 break;
9138 case LE_EXPR:
9139 if (! unsignedp && integer_all_onesp (arg1))
9140 arg1 = integer_zero_node, code = LT;
9141 else
9142 code = unsignedp ? LEU : LE;
9143 break;
9144 case GT_EXPR:
9145 if (! unsignedp && integer_all_onesp (arg1))
9146 arg1 = integer_zero_node, code = GE;
9147 else
9148 code = unsignedp ? GTU : GT;
9149 break;
9150 case GE_EXPR:
9151 if (integer_onep (arg1))
9152 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9153 else
9154 code = unsignedp ? GEU : GE;
9155 break;
9157 case UNORDERED_EXPR:
9158 code = UNORDERED;
9159 break;
9160 case ORDERED_EXPR:
9161 code = ORDERED;
9162 break;
9163 case UNLT_EXPR:
9164 code = UNLT;
9165 break;
9166 case UNLE_EXPR:
9167 code = UNLE;
9168 break;
9169 case UNGT_EXPR:
9170 code = UNGT;
9171 break;
9172 case UNGE_EXPR:
9173 code = UNGE;
9174 break;
9175 case UNEQ_EXPR:
9176 code = UNEQ;
9177 break;
9178 case LTGT_EXPR:
9179 code = LTGT;
9180 break;
9182 default:
9183 gcc_unreachable ();
9186 /* Put a constant second. */
9187 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9189 tem = arg0; arg0 = arg1; arg1 = tem;
9190 code = swap_condition (code);
9193 /* If this is an equality or inequality test of a single bit, we can
9194 do this by shifting the bit being tested to the low-order bit and
9195 masking the result with the constant 1. If the condition was EQ,
9196 we xor it with 1. This does not require an scc insn and is faster
9197 than an scc insn even if we have it.
9199 The code to make this transformation was moved into fold_single_bit_test,
9200 so we just call into the folder and expand its result. */
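/* For example (illustrative), the folder turns "(X & 8) != 0" into
   "(X >> 3) & 1" and "(X & 8) == 0" into "((X >> 3) & 1) ^ 1", so the
   result is produced with a shift and a mask instead of an scc insn.  */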
9202 if ((code == NE || code == EQ)
9203 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9204 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9206 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9207 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9208 arg0, arg1, type),
9209 target, VOIDmode, EXPAND_NORMAL);
9212 /* Now see if we are likely to be able to do this. Return if not. */
9213 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9214 return 0;
9216 icode = setcc_gen_code[(int) code];
9217 if (icode == CODE_FOR_nothing
9218 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9220 /* We can only do this if it is one of the special cases that
9221 can be handled without an scc insn. */
9222 if ((code == LT && integer_zerop (arg1))
9223 || (! only_cheap && code == GE && integer_zerop (arg1)))
9225 else if (! only_cheap && (code == NE || code == EQ)
9226 && TREE_CODE (type) != REAL_TYPE
9227 && ((abs_optab->handlers[(int) operand_mode].insn_code
9228 != CODE_FOR_nothing)
9229 || (ffs_optab->handlers[(int) operand_mode].insn_code
9230 != CODE_FOR_nothing)))
9232 else
9233 return 0;
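/* If the checks above fall through without returning, emit_store_flag is
   expected to cope even without a usable scc pattern; e.g. for signed X,
   "X < 0" can be computed by logically shifting the sign bit down into
   bit 0 (a sketch of the idea only; the details live in emit_store_flag).  */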
9236 if (! get_subtarget (target)
9237 || GET_MODE (subtarget) != operand_mode)
9238 subtarget = 0;
9240 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9242 if (target == 0)
9243 target = gen_reg_rtx (mode);
9245 result = emit_store_flag (target, code, op0, op1,
9246 operand_mode, unsignedp, 1);
9248 if (result)
9250 if (invert)
9251 result = expand_binop (mode, xor_optab, result, const1_rtx,
9252 result, 0, OPTAB_LIB_WIDEN);
9253 return result;
9256 /* If this failed, we have to do this with set/compare/jump/set code. */
9257 if (!REG_P (target)
9258 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9259 target = gen_reg_rtx (GET_MODE (target));
9261 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9262 result = compare_from_rtx (op0, op1, code, unsignedp,
9263 operand_mode, NULL_RTX);
9264 if (GET_CODE (result) == CONST_INT)
9265 return (((result == const0_rtx && ! invert)
9266 || (result != const0_rtx && invert))
9267 ? const0_rtx : const1_rtx);
9269 /* The code of RESULT may not match CODE if compare_from_rtx
9270 decided to swap its operands and reverse the original code.
9272 We know that compare_from_rtx returns either a CONST_INT or
9273 a new comparison code, so it is safe to just extract the
9274 code from RESULT. */
9275 code = GET_CODE (result);
9277 label = gen_label_rtx ();
9278 gcc_assert (bcc_gen_fctn[(int) code]);
9280 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9281 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9282 emit_label (label);
9284 return target;
9288 /* Stubs in case we haven't got a casesi insn. */
9289 #ifndef HAVE_casesi
9290 # define HAVE_casesi 0
9291 # define gen_casesi(a, b, c, d, e) (0)
9292 # define CODE_FOR_casesi CODE_FOR_nothing
9293 #endif
9295 /* If the machine does not have a case insn that compares the bounds,
9296 this means extra overhead for dispatch tables, which raises the
9297 threshold for using them. */
9298 #ifndef CASE_VALUES_THRESHOLD
9299 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9300 #endif /* CASE_VALUES_THRESHOLD */
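/* As a hypothetical example, a target whose indirect jumps are costly
   might define in its target header

	#define CASE_VALUES_THRESHOLD 8

   so that switch statements with fewer case values than that are expanded
   as compare/branch sequences rather than dispatch tables.  */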
9302 unsigned int
9303 case_values_threshold (void)
9305 return CASE_VALUES_THRESHOLD;
9308 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9309 0 otherwise (i.e. if there is no casesi instruction). */
9310 int
9311 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9312 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9314 enum machine_mode index_mode = SImode;
9315 int index_bits = GET_MODE_BITSIZE (index_mode);
9316 rtx op1, op2, index;
9317 enum machine_mode op_mode;
9319 if (! HAVE_casesi)
9320 return 0;
9322 /* Convert the index to SImode. */
9323 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9325 enum machine_mode omode = TYPE_MODE (index_type);
9326 rtx rangertx = expand_normal (range);
9328 /* We must handle the endpoints in the original mode. */
9329 index_expr = build2 (MINUS_EXPR, index_type,
9330 index_expr, minval);
9331 minval = integer_zero_node;
9332 index = expand_normal (index_expr);
9333 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9334 omode, 1, default_label);
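/* A note on the ordering (illustrative): if the wide index were narrowed
   to SImode first, an out-of-range value such as 0x100000003 would wrap
   to 3 and could wrongly select a table entry; comparing RANGE against
   INDEX in the original mode keeps the truncation below safe.  */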
9335 /* Now we can safely truncate. */
9336 index = convert_to_mode (index_mode, index, 0);
9338 else
9340 if (TYPE_MODE (index_type) != index_mode)
9342 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9343 index_expr = fold_convert (index_type, index_expr);
9346 index = expand_normal (index_expr);
9349 do_pending_stack_adjust ();
9351 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9352 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9353 (index, op_mode))
9354 index = copy_to_mode_reg (op_mode, index);
9356 op1 = expand_normal (minval);
9358 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9359 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9360 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9361 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9362 (op1, op_mode))
9363 op1 = copy_to_mode_reg (op_mode, op1);
9365 op2 = expand_normal (range);
9367 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9368 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9369 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9370 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9371 (op2, op_mode))
9372 op2 = copy_to_mode_reg (op_mode, op2);
9374 emit_jump_insn (gen_casesi (index, op1, op2,
9375 table_label, default_label));
9376 return 1;
9379 /* Attempt to generate a tablejump instruction; same concept. */
9380 #ifndef HAVE_tablejump
9381 #define HAVE_tablejump 0
9382 #define gen_tablejump(x, y) (0)
9383 #endif
9385 /* Subroutine of the next function.
9387 INDEX is the value being switched on, with the lowest value
9388 in the table already subtracted.
9389 MODE is its expected mode (needed if INDEX is constant).
9390 RANGE is the length of the jump table.
9391 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9393 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9394 index value is out of range. */
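/* A rough sketch of what the code below does (illustrative only; the real
   sequence is emitted as RTL and may be adjusted for PIC):

	if ((unsigned) INDEX > RANGE)
	  goto DEFAULT_LABEL;
	ent = *(TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE));
	jump via ent;

   where the loaded entry is handed to the target's tablejump pattern;
   for PC-relative vectors the entry is typically an offset rather than
   an absolute address, which that pattern accounts for.  */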
9396 static void
9397 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9398 rtx default_label)
9400 rtx temp, vector;
9402 if (INTVAL (range) > cfun->max_jumptable_ents)
9403 cfun->max_jumptable_ents = INTVAL (range);
9405 /* Do an unsigned comparison (in the proper mode) between the index
9406 expression and the value which represents the length of the range.
9407 Since we just finished subtracting the lower bound of the range
9408 from the index expression, this comparison allows us to simultaneously
9409 check that the original index expression value is both greater than
9410 or equal to the minimum value of the range and less than or equal to
9411 the maximum value of the range. */
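/* Worked example (illustrative): for "switch (i)" with case labels 5..10
   the caller rewrites the index as i - 5 and passes RANGE = 5; a single
   unsigned "i - 5 > 5" then sends both i == 3 (which wraps to a huge
   unsigned value) and i == 12 to DEFAULT_LABEL, while 5 <= i <= 10 falls
   through to the table lookup.  */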
9413 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9414 default_label);
9416 /* If index is in range, it must fit in Pmode.
9417 Convert to Pmode so we can index with it. */
9418 if (mode != Pmode)
9419 index = convert_to_mode (Pmode, index, 1);
9421 /* Don't let a MEM slip through, because then INDEX that comes
9422 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9423 and break_out_memory_refs will go to work on it and mess it up. */
9424 #ifdef PIC_CASE_VECTOR_ADDRESS
9425 if (flag_pic && !REG_P (index))
9426 index = copy_to_mode_reg (Pmode, index);
9427 #endif
9429 /* If flag_force_addr were to affect this address
9430 it could interfere with the tricky assumptions made
9431 about addresses that contain label-refs,
9432 which may be valid only very near the tablejump itself. */
9433 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9434 GET_MODE_SIZE, because this indicates how large insns are. The other
9435 uses should all be Pmode, because they are addresses. This code
9436 could fail if addresses and insns are not the same size. */
9437 index = gen_rtx_PLUS (Pmode,
9438 gen_rtx_MULT (Pmode, index,
9439 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9440 gen_rtx_LABEL_REF (Pmode, table_label));
9441 #ifdef PIC_CASE_VECTOR_ADDRESS
9442 if (flag_pic)
9443 index = PIC_CASE_VECTOR_ADDRESS (index);
9444 else
9445 #endif
9446 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9447 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9448 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9449 convert_move (temp, vector, 0);
9451 emit_jump_insn (gen_tablejump (temp, table_label));
9453 /* If we are generating PIC code or if the table is PC-relative, the
9454 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9455 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9456 emit_barrier ();
9457 }
9459 int
9460 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9461 rtx table_label, rtx default_label)
9463 rtx index;
9465 if (! HAVE_tablejump)
9466 return 0;
9468 index_expr = fold_build2 (MINUS_EXPR, index_type,
9469 fold_convert (index_type, index_expr),
9470 fold_convert (index_type, minval));
9471 index = expand_normal (index_expr);
9472 do_pending_stack_adjust ();
9474 do_tablejump (index, TYPE_MODE (index_type),
9475 convert_modes (TYPE_MODE (index_type),
9476 TYPE_MODE (TREE_TYPE (range)),
9477 expand_normal (range),
9478 TYPE_UNSIGNED (TREE_TYPE (range))),
9479 table_label, default_label);
9480 return 1;
9483 /* Nonzero if the mode is a valid vector mode for this architecture.
9484 This returns nonzero even if there is no hardware support for the
9485 vector mode, but we can emulate with narrower modes. */
9487 int
9488 vector_mode_valid_p (enum machine_mode mode)
9490 enum mode_class class = GET_MODE_CLASS (mode);
9491 enum machine_mode innermode;
9493 /* Doh! What's going on? */
9494 if (class != MODE_VECTOR_INT
9495 && class != MODE_VECTOR_FLOAT)
9496 return 0;
9498 /* Hardware support. Woo hoo! */
9499 if (targetm.vector_mode_supported_p (mode))
9500 return 1;
9502 innermode = GET_MODE_INNER (mode);
9504 /* We should probably return 1 if requesting V4DI and we have no DI
9505 but do have V2DI; however, that case is probably very unlikely. */
9507 /* If we have support for the inner mode, we can safely emulate it.
9508 We may not have V2DI, but we can emulate with a pair of DIs. */
9509 return targetm.scalar_mode_supported_p (innermode);
9512 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
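/* For instance (illustrative), a V4SImode VECTOR_CST listing the elements
   1, 2, 3 yields

	(const_vector:V4SI [(const_int 1) (const_int 2) (const_int 3)
			    (const_int 0)])

   with the element list walked below and any trailing elements not
   supplied by the tree filled in as zero.  */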
9513 static rtx
9514 const_vector_from_tree (tree exp)
9516 rtvec v;
9517 int units, i;
9518 tree link, elt;
9519 enum machine_mode inner, mode;
9521 mode = TYPE_MODE (TREE_TYPE (exp));
9523 if (initializer_zerop (exp))
9524 return CONST0_RTX (mode);
9526 units = GET_MODE_NUNITS (mode);
9527 inner = GET_MODE_INNER (mode);
9529 v = rtvec_alloc (units);
9531 link = TREE_VECTOR_CST_ELTS (exp);
9532 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9534 elt = TREE_VALUE (link);
9536 if (TREE_CODE (elt) == REAL_CST)
9537 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9538 inner);
9539 else
9540 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9541 TREE_INT_CST_HIGH (elt),
9542 inner);
9545 /* Initialize remaining elements to 0. */
9546 for (; i < units; ++i)
9547 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9549 return gen_rtx_CONST_VECTOR (mode, v);
9551 #include "gt-expr.h"