Add files that I missed when importing NaCl changes earlier
[gcc/nacl-gcc.git] / gcc / expr.c
blobab66016aecb0dfabb6c6d357a5b828410112b3f5
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
62 #ifdef PUSH_ROUNDING
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED /* If it's last to first. */
67 #endif
68 #endif
70 #endif
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
75 #else
76 #define STACK_PUSH_CODE PRE_INC
77 #endif
78 #endif
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
89 /* This structure is used by move_by_pieces to describe the move to
90 be performed. */
91 struct move_by_pieces
93 rtx to;
94 rtx to_addr;
95 int autinc_to;
96 int explicit_inc_to;
97 rtx from;
98 rtx from_addr;
99 int autinc_from;
100 int explicit_inc_from;
101 unsigned HOST_WIDE_INT len;
102 HOST_WIDE_INT offset;
103 int reverse;
106 /* This structure is used by store_by_pieces to describe the clear to
107 be performed. */
109 struct store_by_pieces
111 rtx to;
112 rtx to_addr;
113 int autinc_to;
114 int explicit_inc_to;
115 unsigned HOST_WIDE_INT len;
116 HOST_WIDE_INT offset;
117 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
118 void *constfundata;
119 int reverse;
122 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
123 unsigned int,
124 unsigned int);
125 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
126 struct move_by_pieces *);
127 static bool block_move_libcall_safe_for_call_parm (void);
128 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
129 static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static rtx clear_storage_via_libcall (rtx, rtx, bool);
138 static tree clear_storage_libcall_fn (int);
139 static rtx compress_float_constant (rtx, rtx);
140 static rtx get_subtarget (rtx);
141 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
142 HOST_WIDE_INT, enum machine_mode,
143 tree, tree, int, int);
144 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
145 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
146 tree, tree, int);
148 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
150 static int is_aligning_offset (tree, tree);
151 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
152 enum expand_modifier);
153 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
154 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
155 #ifdef PUSH_ROUNDING
156 static void emit_single_push_insn (enum machine_mode, rtx, tree);
157 #endif
158 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
159 static rtx const_vector_from_tree (tree);
160 static void write_complex_part (rtx, rtx, bool);
162 /* Record for each mode whether we can move a register directly to or
163 from an object of that mode in memory. If we can't, we won't try
164 to use that mode directly when accessing a field of that mode. */
166 static char direct_load[NUM_MACHINE_MODES];
167 static char direct_store[NUM_MACHINE_MODES];
169 /* Record for each mode whether we can float-extend from memory. */
171 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
173 /* This macro is used to determine whether move_by_pieces should be called
174 to perform a structure copy. */
175 #ifndef MOVE_BY_PIECES_P
176 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
177 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
178 < (unsigned int) MOVE_RATIO)
179 #endif
181 /* This macro is used to determine whether clear_by_pieces should be
182 called to clear storage. */
183 #ifndef CLEAR_BY_PIECES_P
184 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
185 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
186 < (unsigned int) CLEAR_RATIO)
187 #endif
189 /* This macro is used to determine whether store_by_pieces should be
190 called to "memset" storage with byte values other than zero, or
191 to "memcpy" storage when the source is a constant string. */
192 #ifndef STORE_BY_PIECES_P
193 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
194 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
195 < (unsigned int) MOVE_RATIO)
196 #endif
198 /* This array records the insn_code of insns to perform block moves. */
199 enum insn_code movmem_optab[NUM_MACHINE_MODES];
201 /* This array records the insn_code of insns to perform block sets. */
202 enum insn_code setmem_optab[NUM_MACHINE_MODES];
204 /* These arrays record the insn_code of three different kinds of insns
205 to perform block compares. */
206 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
207 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
208 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
210 /* Synchronization primitives. */
211 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
212 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
213 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
214 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
215 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
216 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
217 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
229 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
230 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
231 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
232 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
234 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
236 #ifndef SLOW_UNALIGNED_ACCESS
237 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
238 #endif
240 /* This is run once per compilation to set up which modes can be used
241 directly in memory and to initialize the block move optab. */
243 void
244 init_expr_once (void)
246 rtx insn, pat;
247 enum machine_mode mode;
248 int num_clobbers;
249 rtx mem, mem1;
250 rtx reg;
252 /* Try indexing by frame ptr and try by stack ptr.
253 It is known that on the Convex the stack ptr isn't a valid index.
254 With luck, one or the other is valid on any machine. */
255 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
256 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
258 /* A scratch register we can modify in-place below to avoid
259 useless RTL allocations. */
260 reg = gen_rtx_REG (VOIDmode, -1);
262 insn = rtx_alloc (INSN);
263 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
264 PATTERN (insn) = pat;
266 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
267 mode = (enum machine_mode) ((int) mode + 1))
269 int regno;
271 direct_load[(int) mode] = direct_store[(int) mode] = 0;
272 PUT_MODE (mem, mode);
273 PUT_MODE (mem1, mode);
274 PUT_MODE (reg, mode);
276 /* See if there is some register that can be used in this mode and
277 directly loaded or stored from memory. */
279 if (mode != VOIDmode && mode != BLKmode)
280 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
281 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
282 regno++)
284 if (! HARD_REGNO_MODE_OK (regno, mode))
285 continue;
287 REGNO (reg) = regno;
289 SET_SRC (pat) = mem;
290 SET_DEST (pat) = reg;
291 if (recog (pat, insn, &num_clobbers) >= 0)
292 direct_load[(int) mode] = 1;
294 SET_SRC (pat) = mem1;
295 SET_DEST (pat) = reg;
296 if (recog (pat, insn, &num_clobbers) >= 0)
297 direct_load[(int) mode] = 1;
299 SET_SRC (pat) = reg;
300 SET_DEST (pat) = mem;
301 if (recog (pat, insn, &num_clobbers) >= 0)
302 direct_store[(int) mode] = 1;
304 SET_SRC (pat) = reg;
305 SET_DEST (pat) = mem1;
306 if (recog (pat, insn, &num_clobbers) >= 0)
307 direct_store[(int) mode] = 1;
311 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
313 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
314 mode = GET_MODE_WIDER_MODE (mode))
316 enum machine_mode srcmode;
317 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
318 srcmode = GET_MODE_WIDER_MODE (srcmode))
320 enum insn_code ic;
322 ic = can_extend_p (mode, srcmode, 0);
323 if (ic == CODE_FOR_nothing)
324 continue;
326 PUT_MODE (mem, srcmode);
328 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
329 float_extend_from_mem[mode][srcmode] = true;
334 /* This is run at the start of compiling a function. */
336 void
337 init_expr (void)
339 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
342 /* Copy data from FROM to TO, where the machine modes are not the same.
343 Both modes may be integer, or both may be floating.
344 UNSIGNEDP should be nonzero if FROM is an unsigned type.
345 This causes zero-extension instead of sign-extension. */
347 void
348 convert_move (rtx to, rtx from, int unsignedp)
350 enum machine_mode to_mode = GET_MODE (to);
351 enum machine_mode from_mode = GET_MODE (from);
352 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
353 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
354 enum insn_code code;
355 rtx libcall;
357 /* rtx code for making an equivalent value. */
358 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
359 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
362 gcc_assert (to_real == from_real);
364 /* If the source and destination are already the same, then there's
365 nothing to do. */
366 if (to == from)
367 return;
369 /* If FROM is a SUBREG that indicates that we have already done at least
370 the required extension, strip it. We don't handle such SUBREGs as
371 TO here. */
373 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
374 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
375 >= GET_MODE_SIZE (to_mode))
376 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
377 from = gen_lowpart (to_mode, from), from_mode = to_mode;
379 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
381 if (to_mode == from_mode
382 || (from_mode == VOIDmode && CONSTANT_P (from)))
384 emit_move_insn (to, from);
385 return;
388 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
390 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
392 if (VECTOR_MODE_P (to_mode))
393 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
394 else
395 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
397 emit_move_insn (to, from);
398 return;
401 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
403 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
404 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
405 return;
408 if (to_real)
410 rtx value, insns;
411 convert_optab tab;
413 gcc_assert ((GET_MODE_PRECISION (from_mode)
414 != GET_MODE_PRECISION (to_mode))
415 || (DECIMAL_FLOAT_MODE_P (from_mode)
416 != DECIMAL_FLOAT_MODE_P (to_mode)));
418 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
419 /* Conversion between decimal float and binary float, same size. */
420 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
421 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
422 tab = sext_optab;
423 else
424 tab = trunc_optab;
426 /* Try converting directly if the insn is supported. */
428 code = tab->handlers[to_mode][from_mode].insn_code;
429 if (code != CODE_FOR_nothing)
431 emit_unop_insn (code, to, from,
432 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
433 return;
436 /* Otherwise use a libcall. */
437 libcall = tab->handlers[to_mode][from_mode].libfunc;
439 /* Is this conversion implemented yet? */
440 gcc_assert (libcall);
442 start_sequence ();
443 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
444 1, from, from_mode);
445 insns = get_insns ();
446 end_sequence ();
447 emit_libcall_block (insns, to, value,
448 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
449 from)
450 : gen_rtx_FLOAT_EXTEND (to_mode, from));
451 return;
454 /* Handle pointer conversion. */ /* SPEE 900220. */
455 /* Targets are expected to provide conversion insns between PxImode and
456 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
457 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
459 enum machine_mode full_mode
460 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
462 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
463 != CODE_FOR_nothing);
465 if (full_mode != from_mode)
466 from = convert_to_mode (full_mode, from, unsignedp);
467 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
468 to, from, UNKNOWN);
469 return;
471 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
473 rtx new_from;
474 enum machine_mode full_mode
475 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
477 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
478 != CODE_FOR_nothing);
480 if (to_mode == full_mode)
482 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
483 to, from, UNKNOWN);
484 return;
487 new_from = gen_reg_rtx (full_mode);
488 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
489 new_from, from, UNKNOWN);
491 /* else proceed to integer conversions below. */
492 from_mode = full_mode;
493 from = new_from;
496 /* Now both modes are integers. */
498 /* Handle expanding beyond a word. */
499 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
500 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
502 rtx insns;
503 rtx lowpart;
504 rtx fill_value;
505 rtx lowfrom;
506 int i;
507 enum machine_mode lowpart_mode;
508 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
510 /* Try converting directly if the insn is supported. */
511 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
512 != CODE_FOR_nothing)
514 /* If FROM is a SUBREG, put it into a register. Do this
515 so that we always generate the same set of insns for
516 better cse'ing; if an intermediate assignment occurred,
517 we won't be doing the operation directly on the SUBREG. */
518 if (optimize > 0 && GET_CODE (from) == SUBREG)
519 from = force_reg (from_mode, from);
520 emit_unop_insn (code, to, from, equiv_code);
521 return;
523 /* Next, try converting via full word. */
524 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
525 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
526 != CODE_FOR_nothing))
528 if (REG_P (to))
530 if (reg_overlap_mentioned_p (to, from))
531 from = force_reg (from_mode, from);
532 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
534 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
535 emit_unop_insn (code, to,
536 gen_lowpart (word_mode, to), equiv_code);
537 return;
540 /* No special multiword conversion insn; do it by hand. */
541 start_sequence ();
543 /* Since we will turn this into a no conflict block, we must ensure
544 that the source does not overlap the target. */
546 if (reg_overlap_mentioned_p (to, from))
547 from = force_reg (from_mode, from);
549 /* Get a copy of FROM widened to a word, if necessary. */
550 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
551 lowpart_mode = word_mode;
552 else
553 lowpart_mode = from_mode;
555 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
557 lowpart = gen_lowpart (lowpart_mode, to);
558 emit_move_insn (lowpart, lowfrom);
560 /* Compute the value to put in each remaining word. */
561 if (unsignedp)
562 fill_value = const0_rtx;
563 else
565 #ifdef HAVE_slt
566 if (HAVE_slt
567 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
568 && STORE_FLAG_VALUE == -1)
570 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
571 lowpart_mode, 0);
572 fill_value = gen_reg_rtx (word_mode);
573 emit_insn (gen_slt (fill_value));
575 else
576 #endif
578 fill_value
579 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
580 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
581 NULL_RTX, 0);
582 fill_value = convert_to_mode (word_mode, fill_value, 1);
586 /* Fill the remaining words. */
587 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
589 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
590 rtx subword = operand_subword (to, index, 1, to_mode);
592 gcc_assert (subword);
594 if (fill_value != subword)
595 emit_move_insn (subword, fill_value);
598 insns = get_insns ();
599 end_sequence ();
601 emit_no_conflict_block (insns, to, from, NULL_RTX,
602 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
603 return;
606 /* Truncating multi-word to a word or less. */
607 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
608 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
610 if (!((MEM_P (from)
611 && ! MEM_VOLATILE_P (from)
612 && direct_load[(int) to_mode]
613 && ! mode_dependent_address_p (XEXP (from, 0)))
614 || REG_P (from)
615 || GET_CODE (from) == SUBREG))
616 from = force_reg (from_mode, from);
617 convert_move (to, gen_lowpart (word_mode, from), 0);
618 return;
621 /* Now follow all the conversions between integers
622 no more than a word long. */
624 /* For truncation, usually we can just refer to FROM in a narrower mode. */
625 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
626 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
627 GET_MODE_BITSIZE (from_mode)))
629 if (!((MEM_P (from)
630 && ! MEM_VOLATILE_P (from)
631 && direct_load[(int) to_mode]
632 && ! mode_dependent_address_p (XEXP (from, 0)))
633 || REG_P (from)
634 || GET_CODE (from) == SUBREG))
635 from = force_reg (from_mode, from);
636 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
637 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
638 from = copy_to_reg (from);
639 emit_move_insn (to, gen_lowpart (to_mode, from));
640 return;
643 /* Handle extension. */
644 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
646 /* Convert directly if that works. */
647 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
648 != CODE_FOR_nothing)
650 emit_unop_insn (code, to, from, equiv_code);
651 return;
653 else
655 enum machine_mode intermediate;
656 rtx tmp;
657 tree shift_amount;
659 /* Search for a mode to convert via. */
660 for (intermediate = from_mode; intermediate != VOIDmode;
661 intermediate = GET_MODE_WIDER_MODE (intermediate))
662 if (((can_extend_p (to_mode, intermediate, unsignedp)
663 != CODE_FOR_nothing)
664 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
665 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
666 GET_MODE_BITSIZE (intermediate))))
667 && (can_extend_p (intermediate, from_mode, unsignedp)
668 != CODE_FOR_nothing))
670 convert_move (to, convert_to_mode (intermediate, from,
671 unsignedp), unsignedp);
672 return;
675 /* No suitable intermediate mode.
676 Generate what we need with shifts. */
677 shift_amount = build_int_cst (NULL_TREE,
678 GET_MODE_BITSIZE (to_mode)
679 - GET_MODE_BITSIZE (from_mode));
680 from = gen_lowpart (to_mode, force_reg (from_mode, from));
681 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
682 to, unsignedp);
683 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
684 to, unsignedp);
685 if (tmp != to)
686 emit_move_insn (to, tmp);
687 return;
691 /* Support special truncate insns for certain modes. */
692 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
694 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
695 to, from, UNKNOWN);
696 return;
699 /* Handle truncation of volatile memrefs, and so on;
700 the things that couldn't be truncated directly,
701 and for which there was no special instruction.
703 ??? Code above formerly short-circuited this, for most integer
704 mode pairs, with a force_reg in from_mode followed by a recursive
705 call to this routine. Appears always to have been wrong. */
706 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
708 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
709 emit_move_insn (to, temp);
710 return;
713 /* Mode combination is not recognized. */
714 gcc_unreachable ();
717 /* Return an rtx for a value that would result
718 from converting X to mode MODE.
719 Both X and MODE may be floating, or both integer.
720 UNSIGNEDP is nonzero if X is an unsigned value.
721 This can be done by referring to a part of X in place
722 or by copying to a new temporary with conversion. */
725 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
727 return convert_modes (mode, VOIDmode, x, unsignedp);
730 /* Return an rtx for a value that would result
731 from converting X from mode OLDMODE to mode MODE.
732 Both modes may be floating, or both integer.
733 UNSIGNEDP is nonzero if X is an unsigned value.
735 This can be done by referring to a part of X in place
736 or by copying to a new temporary with conversion.
738 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
741 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
743 rtx temp;
745 /* If FROM is a SUBREG that indicates that we have already done at least
746 the required extension, strip it. */
748 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
749 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
750 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
751 x = gen_lowpart (mode, x);
753 if (GET_MODE (x) != VOIDmode)
754 oldmode = GET_MODE (x);
756 if (mode == oldmode)
757 return x;
759 /* There is one case that we must handle specially: If we are converting
760 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
761 we are to interpret the constant as unsigned, gen_lowpart will do
762 the wrong if the constant appears negative. What we want to do is
763 make the high-order word of the constant zero, not all ones. */
765 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
766 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
767 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
769 HOST_WIDE_INT val = INTVAL (x);
771 if (oldmode != VOIDmode
772 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
774 int width = GET_MODE_BITSIZE (oldmode);
776 /* We need to zero extend VAL. */
777 val &= ((HOST_WIDE_INT) 1 << width) - 1;
780 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
783 /* We can do this with a gen_lowpart if both desired and current modes
784 are integer, and this is either a constant integer, a register, or a
785 non-volatile MEM. Except for the constant case where MODE is no
786 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
788 if ((GET_CODE (x) == CONST_INT
789 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
790 || (GET_MODE_CLASS (mode) == MODE_INT
791 && GET_MODE_CLASS (oldmode) == MODE_INT
792 && (GET_CODE (x) == CONST_DOUBLE
793 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
794 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
795 && direct_load[(int) mode])
796 || (REG_P (x)
797 && (! HARD_REGISTER_P (x)
798 || HARD_REGNO_MODE_OK (REGNO (x), mode))
799 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
800 GET_MODE_BITSIZE (GET_MODE (x)))))))))
802 /* ?? If we don't know OLDMODE, we have to assume here that
803 X does not need sign- or zero-extension. This may not be
804 the case, but it's the best we can do. */
805 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
806 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
808 HOST_WIDE_INT val = INTVAL (x);
809 int width = GET_MODE_BITSIZE (oldmode);
811 /* We must sign or zero-extend in this case. Start by
812 zero-extending, then sign extend if we need to. */
813 val &= ((HOST_WIDE_INT) 1 << width) - 1;
814 if (! unsignedp
815 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
816 val |= (HOST_WIDE_INT) (-1) << width;
818 return gen_int_mode (val, mode);
821 return gen_lowpart (mode, x);
824 /* Converting from integer constant into mode is always equivalent to an
825 subreg operation. */
826 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
828 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
829 return simplify_gen_subreg (mode, x, oldmode, 0);
832 temp = gen_reg_rtx (mode);
833 convert_move (temp, x, unsignedp);
834 return temp;
837 /* STORE_MAX_PIECES is the number of bytes at a time that we can
838 store efficiently. Due to internal GCC limitations, this is
839 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
840 for an immediate constant. */
842 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
844 /* Determine whether the LEN bytes can be moved by using several move
845 instructions. Return nonzero if a call to move_by_pieces should
846 succeed. */
849 can_move_by_pieces (unsigned HOST_WIDE_INT len,
850 unsigned int align ATTRIBUTE_UNUSED)
852 return MOVE_BY_PIECES_P (len, align);
855 /* Generate several move instructions to copy LEN bytes from block FROM to
856 block TO. (These are MEM rtx's with BLKmode).
858 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
859 used to push FROM to the stack.
861 ALIGN is maximum stack alignment we can assume.
863 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
864 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
865 stpcpy. */
868 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
869 unsigned int align, int endp)
871 struct move_by_pieces data;
872 rtx to_addr, from_addr = XEXP (from, 0);
873 unsigned int max_size = MOVE_MAX_PIECES + 1;
874 enum machine_mode mode = VOIDmode, tmode;
875 enum insn_code icode;
877 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
879 data.offset = 0;
880 data.from_addr = from_addr;
881 if (to)
883 to_addr = XEXP (to, 0);
884 data.to = to;
885 data.autinc_to
886 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
887 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
888 data.reverse
889 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
891 else
893 to_addr = NULL_RTX;
894 data.to = NULL_RTX;
895 data.autinc_to = 1;
896 #ifdef STACK_GROWS_DOWNWARD
897 data.reverse = 1;
898 #else
899 data.reverse = 0;
900 #endif
902 data.to_addr = to_addr;
903 data.from = from;
904 data.autinc_from
905 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
906 || GET_CODE (from_addr) == POST_INC
907 || GET_CODE (from_addr) == POST_DEC);
909 data.explicit_inc_from = 0;
910 data.explicit_inc_to = 0;
911 if (data.reverse) data.offset = len;
912 data.len = len;
914 /* If copying requires more than two move insns,
915 copy addresses to registers (to make displacements shorter)
916 and use post-increment if available. */
917 if (!(data.autinc_from && data.autinc_to)
918 && move_by_pieces_ninsns (len, align, max_size) > 2)
920 /* Find the mode of the largest move... */
921 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
922 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
923 if (GET_MODE_SIZE (tmode) < max_size)
924 mode = tmode;
926 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
928 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
929 data.autinc_from = 1;
930 data.explicit_inc_from = -1;
932 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
934 data.from_addr = copy_addr_to_reg (from_addr);
935 data.autinc_from = 1;
936 data.explicit_inc_from = 1;
938 if (!data.autinc_from && CONSTANT_P (from_addr))
939 data.from_addr = copy_addr_to_reg (from_addr);
940 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
942 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
943 data.autinc_to = 1;
944 data.explicit_inc_to = -1;
946 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
948 data.to_addr = copy_addr_to_reg (to_addr);
949 data.autinc_to = 1;
950 data.explicit_inc_to = 1;
952 if (!data.autinc_to && CONSTANT_P (to_addr))
953 data.to_addr = copy_addr_to_reg (to_addr);
956 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
957 if (align >= GET_MODE_ALIGNMENT (tmode))
958 align = GET_MODE_ALIGNMENT (tmode);
959 else
961 enum machine_mode xmode;
963 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
964 tmode != VOIDmode;
965 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
966 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
967 || SLOW_UNALIGNED_ACCESS (tmode, align))
968 break;
970 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
973 /* First move what we can in the largest integer mode, then go to
974 successively smaller modes. */
976 while (max_size > 1)
978 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
979 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
980 if (GET_MODE_SIZE (tmode) < max_size)
981 mode = tmode;
983 if (mode == VOIDmode)
984 break;
986 icode = mov_optab->handlers[(int) mode].insn_code;
987 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
988 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
990 max_size = GET_MODE_SIZE (mode);
993 /* The code above should have handled everything. */
994 gcc_assert (!data.len);
996 if (endp)
998 rtx to1;
1000 gcc_assert (!data.reverse);
1001 if (data.autinc_to)
1003 if (endp == 2)
1005 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1006 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1007 else
1008 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1009 -1));
1011 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1012 data.offset);
1014 else
1016 if (endp == 2)
1017 --data.offset;
1018 to1 = adjust_address (data.to, QImode, data.offset);
1020 return to1;
1022 else
1023 return data.to;
1026 /* Return number of insns required to move L bytes by pieces.
1027 ALIGN (in bits) is maximum alignment we can assume. */
1029 static unsigned HOST_WIDE_INT
1030 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1031 unsigned int max_size)
1033 unsigned HOST_WIDE_INT n_insns = 0;
1034 enum machine_mode tmode;
1036 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1037 if (align >= GET_MODE_ALIGNMENT (tmode))
1038 align = GET_MODE_ALIGNMENT (tmode);
1039 else
1041 enum machine_mode tmode, xmode;
1043 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1044 tmode != VOIDmode;
1045 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1046 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1047 || SLOW_UNALIGNED_ACCESS (tmode, align))
1048 break;
1050 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1053 while (max_size > 1)
1055 enum machine_mode mode = VOIDmode;
1056 enum insn_code icode;
1058 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1059 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1060 if (GET_MODE_SIZE (tmode) < max_size)
1061 mode = tmode;
1063 if (mode == VOIDmode)
1064 break;
1066 icode = mov_optab->handlers[(int) mode].insn_code;
1067 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1068 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1070 max_size = GET_MODE_SIZE (mode);
1073 gcc_assert (!l);
1074 return n_insns;
1077 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1078 with move instructions for mode MODE. GENFUN is the gen_... function
1079 to make a move insn for that mode. DATA has all the other info. */
1081 static void
1082 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1083 struct move_by_pieces *data)
1085 unsigned int size = GET_MODE_SIZE (mode);
1086 rtx to1 = NULL_RTX, from1;
1088 while (data->len >= size)
1090 if (data->reverse)
1091 data->offset -= size;
1093 if (data->to)
1095 if (data->autinc_to)
1096 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1097 data->offset);
1098 else
1099 to1 = adjust_address (data->to, mode, data->offset);
1102 if (data->autinc_from)
1103 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1104 data->offset);
1105 else
1106 from1 = adjust_address (data->from, mode, data->offset);
1108 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1109 emit_insn (gen_add2_insn (data->to_addr,
1110 GEN_INT (-(HOST_WIDE_INT)size)));
1111 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1112 emit_insn (gen_add2_insn (data->from_addr,
1113 GEN_INT (-(HOST_WIDE_INT)size)));
1115 if (data->to)
1116 emit_insn ((*genfun) (to1, from1));
1117 else
1119 #ifdef PUSH_ROUNDING
1120 emit_single_push_insn (mode, from1, NULL);
1121 #else
1122 gcc_unreachable ();
1123 #endif
1126 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1127 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1128 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1129 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1131 if (! data->reverse)
1132 data->offset += size;
1134 data->len -= size;
1138 /* Emit code to move a block Y to a block X. This may be done with
1139 string-move instructions, with multiple scalar move instructions,
1140 or with a library call.
1142 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1143 SIZE is an rtx that says how long they are.
1144 ALIGN is the maximum alignment we can assume they have.
1145 METHOD describes what kind of copy this is, and what mechanisms may be used.
1147 Return the address of the new block, if memcpy is called and returns it,
1148 0 otherwise. */
1151 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1153 bool may_use_call;
1154 rtx retval = 0;
1155 unsigned int align;
1157 switch (method)
1159 case BLOCK_OP_NORMAL:
1160 case BLOCK_OP_TAILCALL:
1161 may_use_call = true;
1162 break;
1164 case BLOCK_OP_CALL_PARM:
1165 may_use_call = block_move_libcall_safe_for_call_parm ();
1167 /* Make inhibit_defer_pop nonzero around the library call
1168 to force it to pop the arguments right away. */
1169 NO_DEFER_POP;
1170 break;
1172 case BLOCK_OP_NO_LIBCALL:
1173 may_use_call = false;
1174 break;
1176 default:
1177 gcc_unreachable ();
1180 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1182 gcc_assert (MEM_P (x));
1183 gcc_assert (MEM_P (y));
1184 gcc_assert (size);
1186 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1187 block copy is more efficient for other large modes, e.g. DCmode. */
1188 x = adjust_address (x, BLKmode, 0);
1189 y = adjust_address (y, BLKmode, 0);
1191 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1192 can be incorrect is coming from __builtin_memcpy. */
1193 if (GET_CODE (size) == CONST_INT)
1195 if (INTVAL (size) == 0)
1196 return 0;
1198 x = shallow_copy_rtx (x);
1199 y = shallow_copy_rtx (y);
1200 set_mem_size (x, size);
1201 set_mem_size (y, size);
1204 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1205 move_by_pieces (x, y, INTVAL (size), align, 0);
1206 else if (emit_block_move_via_movmem (x, y, size, align))
1208 else if (may_use_call)
1209 retval = emit_block_move_via_libcall (x, y, size,
1210 method == BLOCK_OP_TAILCALL);
1211 else
1212 emit_block_move_via_loop (x, y, size, align);
1214 if (method == BLOCK_OP_CALL_PARM)
1215 OK_DEFER_POP;
1217 return retval;
1220 /* A subroutine of emit_block_move. Returns true if calling the
1221 block move libcall will not clobber any parameters which may have
1222 already been placed on the stack. */
1224 static bool
1225 block_move_libcall_safe_for_call_parm (void)
1227 /* If arguments are pushed on the stack, then they're safe. */
1228 if (PUSH_ARGS)
1229 return true;
1231 /* If registers go on the stack anyway, any argument is sure to clobber
1232 an outgoing argument. */
1233 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1235 tree fn = emit_block_move_libcall_fn (false);
1236 (void) fn;
1237 if (REG_PARM_STACK_SPACE (fn) != 0)
1238 return false;
1240 #endif
1242 /* If any argument goes in memory, then it might clobber an outgoing
1243 argument. */
1245 CUMULATIVE_ARGS args_so_far;
1246 tree fn, arg;
1248 fn = emit_block_move_libcall_fn (false);
1249 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1251 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1252 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1254 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1255 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1256 if (!tmp || !REG_P (tmp))
1257 return false;
1258 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1259 return false;
1260 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1263 return true;
1266 /* A subroutine of emit_block_move. Expand a movmem pattern;
1267 return true if successful. */
1269 static bool
1270 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1272 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1273 int save_volatile_ok = volatile_ok;
1274 enum machine_mode mode;
1276 /* Since this is a move insn, we don't care about volatility. */
1277 volatile_ok = 1;
1279 /* Try the most limited insn first, because there's no point
1280 including more than one in the machine description unless
1281 the more limited one has some advantage. */
1283 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1284 mode = GET_MODE_WIDER_MODE (mode))
1286 enum insn_code code = movmem_optab[(int) mode];
1287 insn_operand_predicate_fn pred;
1289 if (code != CODE_FOR_nothing
1290 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1291 here because if SIZE is less than the mode mask, as it is
1292 returned by the macro, it will definitely be less than the
1293 actual mode mask. */
1294 && ((GET_CODE (size) == CONST_INT
1295 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1296 <= (GET_MODE_MASK (mode) >> 1)))
1297 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1298 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1299 || (*pred) (x, BLKmode))
1300 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1301 || (*pred) (y, BLKmode))
1302 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1303 || (*pred) (opalign, VOIDmode)))
1305 rtx op2;
1306 rtx last = get_last_insn ();
1307 rtx pat;
1309 op2 = convert_to_mode (mode, size, 1);
1310 pred = insn_data[(int) code].operand[2].predicate;
1311 if (pred != 0 && ! (*pred) (op2, mode))
1312 op2 = copy_to_mode_reg (mode, op2);
1314 /* ??? When called via emit_block_move_for_call, it'd be
1315 nice if there were some way to inform the backend, so
1316 that it doesn't fail the expansion because it thinks
1317 emitting the libcall would be more efficient. */
1319 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1320 if (pat)
1322 emit_insn (pat);
1323 volatile_ok = save_volatile_ok;
1324 return true;
1326 else
1327 delete_insns_since (last);
1331 volatile_ok = save_volatile_ok;
1332 return false;
1335 /* A subroutine of emit_block_move. Expand a call to memcpy.
1336 Return the return value from memcpy, 0 otherwise. */
1338 static rtx
1339 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1341 rtx dst_addr, src_addr;
1342 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1343 enum machine_mode size_mode;
1344 rtx retval;
1346 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1347 pseudos. We can then place those new pseudos into a VAR_DECL and
1348 use them later. */
1350 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1351 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1353 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1354 src_addr = convert_memory_address (ptr_mode, src_addr);
1356 dst_tree = make_tree (ptr_type_node, dst_addr);
1357 src_tree = make_tree (ptr_type_node, src_addr);
1359 size_mode = TYPE_MODE (sizetype);
1361 size = convert_to_mode (size_mode, size, 1);
1362 size = copy_to_mode_reg (size_mode, size);
1364 /* It is incorrect to use the libcall calling conventions to call
1365 memcpy in this context. This could be a user call to memcpy and
1366 the user may wish to examine the return value from memcpy. For
1367 targets where libcalls and normal calls have different conventions
1368 for returning pointers, we could end up generating incorrect code. */
1370 size_tree = make_tree (sizetype, size);
1372 fn = emit_block_move_libcall_fn (true);
1373 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1374 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1375 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1377 /* Now we have to build up the CALL_EXPR itself. */
1378 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1379 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1380 call_expr, arg_list, NULL_TREE);
1381 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1383 retval = expand_normal (call_expr);
1385 return retval;
1388 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1389 for the function we use for block copies. The first time FOR_CALL
1390 is true, we call assemble_external. */
1392 static GTY(()) tree block_move_fn;
1394 void
1395 init_block_move_fn (const char *asmspec)
1397 if (!block_move_fn)
1399 tree args, fn;
1401 fn = get_identifier ("memcpy");
1402 args = build_function_type_list (ptr_type_node, ptr_type_node,
1403 const_ptr_type_node, sizetype,
1404 NULL_TREE);
1406 fn = build_decl (FUNCTION_DECL, fn, args);
1407 DECL_EXTERNAL (fn) = 1;
1408 TREE_PUBLIC (fn) = 1;
1409 DECL_ARTIFICIAL (fn) = 1;
1410 TREE_NOTHROW (fn) = 1;
1411 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1412 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1414 block_move_fn = fn;
1417 if (asmspec)
1418 set_user_assembler_name (block_move_fn, asmspec);
1421 static tree
1422 emit_block_move_libcall_fn (int for_call)
1424 static bool emitted_extern;
1426 if (!block_move_fn)
1427 init_block_move_fn (NULL);
1429 if (for_call && !emitted_extern)
1431 emitted_extern = true;
1432 make_decl_rtl (block_move_fn);
1433 assemble_external (block_move_fn);
1436 return block_move_fn;
1439 /* A subroutine of emit_block_move. Copy the data via an explicit
1440 loop. This is used only when libcalls are forbidden. */
1441 /* ??? It'd be nice to copy in hunks larger than QImode. */
1443 static void
1444 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1445 unsigned int align ATTRIBUTE_UNUSED)
1447 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1448 enum machine_mode iter_mode;
1450 iter_mode = GET_MODE (size);
1451 if (iter_mode == VOIDmode)
1452 iter_mode = word_mode;
1454 top_label = gen_label_rtx ();
1455 cmp_label = gen_label_rtx ();
1456 iter = gen_reg_rtx (iter_mode);
1458 emit_move_insn (iter, const0_rtx);
1460 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1461 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1462 do_pending_stack_adjust ();
1464 emit_jump (cmp_label);
1465 emit_label (top_label);
1467 tmp = convert_modes (Pmode, iter_mode, iter, true);
1468 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1469 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1470 x = change_address (x, QImode, x_addr);
1471 y = change_address (y, QImode, y_addr);
1473 emit_move_insn (x, y);
1475 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1476 true, OPTAB_LIB_WIDEN);
1477 if (tmp != iter)
1478 emit_move_insn (iter, tmp);
1480 emit_label (cmp_label);
1482 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1483 true, top_label);
1486 /* Copy all or part of a value X into registers starting at REGNO.
1487 The number of registers to be filled is NREGS. */
1489 void
1490 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1492 int i;
1493 #ifdef HAVE_load_multiple
1494 rtx pat;
1495 rtx last;
1496 #endif
1498 if (nregs == 0)
1499 return;
1501 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1502 x = validize_mem (force_const_mem (mode, x));
1504 /* See if the machine can do this with a load multiple insn. */
1505 #ifdef HAVE_load_multiple
1506 if (HAVE_load_multiple)
1508 last = get_last_insn ();
1509 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1510 GEN_INT (nregs));
1511 if (pat)
1513 emit_insn (pat);
1514 return;
1516 else
1517 delete_insns_since (last);
1519 #endif
1521 for (i = 0; i < nregs; i++)
1522 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1523 operand_subword_force (x, i, mode));
1526 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1527 The number of registers to be filled is NREGS. */
1529 void
1530 move_block_from_reg (int regno, rtx x, int nregs)
1532 int i;
1534 if (nregs == 0)
1535 return;
1537 /* See if the machine can do this with a store multiple insn. */
1538 #ifdef HAVE_store_multiple
1539 if (HAVE_store_multiple)
1541 rtx last = get_last_insn ();
1542 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1543 GEN_INT (nregs));
1544 if (pat)
1546 emit_insn (pat);
1547 return;
1549 else
1550 delete_insns_since (last);
1552 #endif
1554 for (i = 0; i < nregs; i++)
1556 rtx tem = operand_subword (x, i, 1, BLKmode);
1558 gcc_assert (tem);
1560 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1564 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1565 ORIG, where ORIG is a non-consecutive group of registers represented by
1566 a PARALLEL. The clone is identical to the original except in that the
1567 original set of registers is replaced by a new set of pseudo registers.
1568 The new set has the same modes as the original set. */
1571 gen_group_rtx (rtx orig)
1573 int i, length;
1574 rtx *tmps;
1576 gcc_assert (GET_CODE (orig) == PARALLEL);
1578 length = XVECLEN (orig, 0);
1579 tmps = alloca (sizeof (rtx) * length);
1581 /* Skip a NULL entry in first slot. */
1582 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1584 if (i)
1585 tmps[0] = 0;
1587 for (; i < length; i++)
1589 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1590 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1592 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1595 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1598 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1599 except that values are placed in TMPS[i], and must later be moved
1600 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1602 static void
1603 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1605 rtx src;
1606 int start, i;
1607 enum machine_mode m = GET_MODE (orig_src);
1609 gcc_assert (GET_CODE (dst) == PARALLEL);
1611 if (m != VOIDmode
1612 && !SCALAR_INT_MODE_P (m)
1613 && !MEM_P (orig_src)
1614 && GET_CODE (orig_src) != CONCAT)
1616 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1617 if (imode == BLKmode)
1618 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1619 else
1620 src = gen_reg_rtx (imode);
1621 if (imode != BLKmode)
1622 src = gen_lowpart (GET_MODE (orig_src), src);
1623 emit_move_insn (src, orig_src);
1624 /* ...and back again. */
1625 if (imode != BLKmode)
1626 src = gen_lowpart (imode, src);
1627 emit_group_load_1 (tmps, dst, src, type, ssize);
1628 return;
1631 /* Check for a NULL entry, used to indicate that the parameter goes
1632 both on the stack and in registers. */
1633 if (XEXP (XVECEXP (dst, 0, 0), 0))
1634 start = 0;
1635 else
1636 start = 1;
1638 /* Process the pieces. */
1639 for (i = start; i < XVECLEN (dst, 0); i++)
1641 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1642 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1643 unsigned int bytelen = GET_MODE_SIZE (mode);
1644 int shift = 0;
1646 /* Handle trailing fragments that run over the size of the struct. */
1647 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1649 /* Arrange to shift the fragment to where it belongs.
1650 extract_bit_field loads to the lsb of the reg. */
1651 if (
1652 #ifdef BLOCK_REG_PADDING
1653 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1654 == (BYTES_BIG_ENDIAN ? upward : downward)
1655 #else
1656 BYTES_BIG_ENDIAN
1657 #endif
1659 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1660 bytelen = ssize - bytepos;
1661 gcc_assert (bytelen > 0);
1664 /* If we won't be loading directly from memory, protect the real source
1665 from strange tricks we might play; but make sure that the source can
1666 be loaded directly into the destination. */
1667 src = orig_src;
1668 if (!MEM_P (orig_src)
1669 && (!CONSTANT_P (orig_src)
1670 || (GET_MODE (orig_src) != mode
1671 && GET_MODE (orig_src) != VOIDmode)))
1673 if (GET_MODE (orig_src) == VOIDmode)
1674 src = gen_reg_rtx (mode);
1675 else
1676 src = gen_reg_rtx (GET_MODE (orig_src));
1678 emit_move_insn (src, orig_src);
1681 /* Optimize the access just a bit. */
1682 if (MEM_P (src)
1683 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1684 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1685 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1686 && bytelen == GET_MODE_SIZE (mode))
1688 tmps[i] = gen_reg_rtx (mode);
1689 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1691 else if (COMPLEX_MODE_P (mode)
1692 && GET_MODE (src) == mode
1693 && bytelen == GET_MODE_SIZE (mode))
1694 /* Let emit_move_complex do the bulk of the work. */
1695 tmps[i] = src;
1696 else if (GET_CODE (src) == CONCAT)
1698 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1699 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1701 if ((bytepos == 0 && bytelen == slen0)
1702 || (bytepos != 0 && bytepos + bytelen <= slen))
1704 /* The following assumes that the concatenated objects all
1705 have the same size. In this case, a simple calculation
1706 can be used to determine the object and the bit field
1707 to be extracted. */
1708 tmps[i] = XEXP (src, bytepos / slen0);
1709 if (! CONSTANT_P (tmps[i])
1710 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1711 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1712 (bytepos % slen0) * BITS_PER_UNIT,
1713 1, NULL_RTX, mode, mode);
1715 else
1717 rtx mem;
1719 gcc_assert (!bytepos);
1720 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1721 emit_move_insn (mem, src);
1722 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1723 0, 1, NULL_RTX, mode, mode);
1726 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1727 SIMD register, which is currently broken. While we get GCC
1728 to emit proper RTL for these cases, let's dump to memory. */
1729 else if (VECTOR_MODE_P (GET_MODE (dst))
1730 && REG_P (src))
1732 int slen = GET_MODE_SIZE (GET_MODE (src));
1733 rtx mem;
1735 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1736 emit_move_insn (mem, src);
1737 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1739 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1740 && XVECLEN (dst, 0) > 1)
1741 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1742 else if (CONSTANT_P (src)
1743 || (REG_P (src) && GET_MODE (src) == mode))
1744 tmps[i] = src;
1745 else
1746 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1747 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1748 mode, mode);
1750 if (shift)
1751 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1752 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1756 /* Emit code to move a block SRC of type TYPE to a block DST,
1757 where DST is non-consecutive registers represented by a PARALLEL.
1758 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1759 if not known. */
1761 void
1762 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1764 rtx *tmps;
1765 int i;
1767 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1768 emit_group_load_1 (tmps, dst, src, type, ssize);
1770 /* Copy the extracted pieces into the proper (probable) hard regs. */
1771 for (i = 0; i < XVECLEN (dst, 0); i++)
1773 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1774 if (d == NULL)
1775 continue;
1776 emit_move_insn (d, tmps[i]);
1780 /* Similar, but load SRC into new pseudos in a format that looks like
1781 PARALLEL. This can later be fed to emit_group_move to get things
1782 in the right place. */
1785 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1787 rtvec vec;
1788 int i;
1790 vec = rtvec_alloc (XVECLEN (parallel, 0));
1791 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1793 /* Convert the vector to look just like the original PARALLEL, except
1794 with the computed values. */
1795 for (i = 0; i < XVECLEN (parallel, 0); i++)
1797 rtx e = XVECEXP (parallel, 0, i);
1798 rtx d = XEXP (e, 0);
1800 if (d)
1802 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1803 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1805 RTVEC_ELT (vec, i) = e;
1808 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1811 /* Emit code to move a block SRC to block DST, where SRC and DST are
1812 non-consecutive groups of registers, each represented by a PARALLEL. */
1814 void
1815 emit_group_move (rtx dst, rtx src)
1817 int i;
1819 gcc_assert (GET_CODE (src) == PARALLEL
1820 && GET_CODE (dst) == PARALLEL
1821 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1823 /* Skip first entry if NULL. */
1824 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1825 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1826 XEXP (XVECEXP (src, 0, i), 0));
1829 /* Move a group of registers represented by a PARALLEL into pseudos. */
1832 emit_group_move_into_temps (rtx src)
1834 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1835 int i;
1837 for (i = 0; i < XVECLEN (src, 0); i++)
1839 rtx e = XVECEXP (src, 0, i);
1840 rtx d = XEXP (e, 0);
1842 if (d)
1843 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1844 RTVEC_ELT (vec, i) = e;
1847 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1850 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1851 where SRC is non-consecutive registers represented by a PARALLEL.
1852 SSIZE represents the total size of block ORIG_DST, or -1 if not
1853 known. */
1855 void
1856 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1858 rtx *tmps, dst;
1859 int start, finish, i;
1860 enum machine_mode m = GET_MODE (orig_dst);
1862 gcc_assert (GET_CODE (src) == PARALLEL);
1864 if (!SCALAR_INT_MODE_P (m)
1865 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1867 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1868 if (imode == BLKmode)
1869 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1870 else
1871 dst = gen_reg_rtx (imode);
1872 emit_group_store (dst, src, type, ssize);
1873 if (imode != BLKmode)
1874 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1875 emit_move_insn (orig_dst, dst);
1876 return;
1879 /* Check for a NULL entry, used to indicate that the parameter goes
1880 both on the stack and in registers. */
1881 if (XEXP (XVECEXP (src, 0, 0), 0))
1882 start = 0;
1883 else
1884 start = 1;
1885 finish = XVECLEN (src, 0);
1887 tmps = alloca (sizeof (rtx) * finish);
1889 /* Copy the (probable) hard regs into pseudos. */
1890 for (i = start; i < finish; i++)
1892 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1893 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1895 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1896 emit_move_insn (tmps[i], reg);
1898 else
1899 tmps[i] = reg;
1902 /* If we won't be storing directly into memory, protect the real destination
1903 from strange tricks we might play. */
1904 dst = orig_dst;
1905 if (GET_CODE (dst) == PARALLEL)
1907 rtx temp;
1909 /* We can get a PARALLEL dst if there is a conditional expression in
1910 a return statement. In that case, the dst and src are the same,
1911 so no action is necessary. */
1912 if (rtx_equal_p (dst, src))
1913 return;
1915 /* It is unclear if we can ever reach here, but we may as well handle
1916 it. Allocate a temporary, and split this into a store/load to/from
1917 the temporary. */
1919 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1920 emit_group_store (temp, src, type, ssize);
1921 emit_group_load (dst, temp, type, ssize);
1922 return;
1924 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1926 enum machine_mode outer = GET_MODE (dst);
1927 enum machine_mode inner;
1928 HOST_WIDE_INT bytepos;
1929 bool done = false;
1930 rtx temp;
1932 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1933 dst = gen_reg_rtx (outer);
1935 /* Make life a bit easier for combine. */
1936 /* If the first element of the vector is the low part
1937 of the destination mode, use a paradoxical subreg to
1938 initialize the destination. */
1939 if (start < finish)
1941 inner = GET_MODE (tmps[start]);
1942 bytepos = subreg_lowpart_offset (inner, outer);
1943 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1945 temp = simplify_gen_subreg (outer, tmps[start],
1946 inner, 0);
1947 if (temp)
1949 emit_move_insn (dst, temp);
1950 done = true;
1951 start++;
1956 /* If the first element wasn't the low part, try the last. */
1957 if (!done
1958 && start < finish - 1)
1960 inner = GET_MODE (tmps[finish - 1]);
1961 bytepos = subreg_lowpart_offset (inner, outer);
1962 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1964 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1965 inner, 0);
1966 if (temp)
1968 emit_move_insn (dst, temp);
1969 done = true;
1970 finish--;
1975 /* Otherwise, simply initialize the result to zero. */
1976 if (!done)
1977 emit_move_insn (dst, CONST0_RTX (outer));
1980 /* Process the pieces. */
1981 for (i = start; i < finish; i++)
1983 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1984 enum machine_mode mode = GET_MODE (tmps[i]);
1985 unsigned int bytelen = GET_MODE_SIZE (mode);
1986 rtx dest = dst;
1988 /* Handle trailing fragments that run over the size of the struct. */
1989 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1991 /* store_bit_field always takes its value from the lsb.
1992 Move the fragment to the lsb if it's not already there. */
1993 if (
1994 #ifdef BLOCK_REG_PADDING
1995 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1996 == (BYTES_BIG_ENDIAN ? upward : downward)
1997 #else
1998 BYTES_BIG_ENDIAN
1999 #endif
2002 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2003 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2004 build_int_cst (NULL_TREE, shift),
2005 tmps[i], 0);
2007 bytelen = ssize - bytepos;
2010 if (GET_CODE (dst) == CONCAT)
2012 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2013 dest = XEXP (dst, 0);
2014 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2016 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2017 dest = XEXP (dst, 1);
2019 else
2021 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2022 dest = assign_stack_temp (GET_MODE (dest),
2023 GET_MODE_SIZE (GET_MODE (dest)), 0);
2024 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2025 tmps[i]);
2026 dst = dest;
2027 break;
2031 /* Optimize the access just a bit. */
2032 if (MEM_P (dest)
2033 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2034 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2035 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2036 && bytelen == GET_MODE_SIZE (mode))
2037 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2038 else
2039 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2040 mode, tmps[i]);
2043 /* Copy from the pseudo into the (probable) hard reg. */
2044 if (orig_dst != dst)
2045 emit_move_insn (orig_dst, dst);
2048 /* Generate code to copy a BLKmode object of TYPE out of a
2049 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2050 is null, a stack temporary is created. TGTBLK is returned.
2052 The purpose of this routine is to handle functions that return
2053 BLKmode structures in registers. Some machines (the PA for example)
2054 want to return all small structures in registers regardless of the
2055 structure's alignment. */
2058 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2060 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2061 rtx src = NULL, dst = NULL;
2062 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2063 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2065 if (tgtblk == 0)
2067 tgtblk = assign_temp (build_qualified_type (type,
2068 (TYPE_QUALS (type)
2069 | TYPE_QUAL_CONST)),
2070 0, 1, 1);
2071 preserve_temp_slots (tgtblk);
2074 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2075 into a new pseudo which is a full word. */
2077 if (GET_MODE (srcreg) != BLKmode
2078 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2079 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2081 /* If the structure doesn't take up a whole number of words, see whether
2082 SRCREG is padded on the left or on the right. If it's on the left,
2083 set PADDING_CORRECTION to the number of bits to skip.
2085 In most ABIs, the structure will be returned at the least end of
2086 the register, which translates to right padding on little-endian
2087 targets and left padding on big-endian targets. The opposite
2088 holds if the structure is returned at the most significant
2089 end of the register. */
2090 if (bytes % UNITS_PER_WORD != 0
2091 && (targetm.calls.return_in_msb (type)
2092 ? !BYTES_BIG_ENDIAN
2093 : BYTES_BIG_ENDIAN))
2094 padding_correction
2095 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2097 /* Copy the structure BITSIZE bites at a time.
2099 We could probably emit more efficient code for machines which do not use
2100 strict alignment, but it doesn't seem worth the effort at the current
2101 time. */
2102 for (bitpos = 0, xbitpos = padding_correction;
2103 bitpos < bytes * BITS_PER_UNIT;
2104 bitpos += bitsize, xbitpos += bitsize)
2106 /* We need a new source operand each time xbitpos is on a
2107 word boundary and when xbitpos == padding_correction
2108 (the first time through). */
2109 if (xbitpos % BITS_PER_WORD == 0
2110 || xbitpos == padding_correction)
2111 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2112 GET_MODE (srcreg));
2114 /* We need a new destination operand each time bitpos is on
2115 a word boundary. */
2116 if (bitpos % BITS_PER_WORD == 0)
2117 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2119 /* Use xbitpos for the source extraction (right justified) and
2120 xbitpos for the destination store (left justified). */
2121 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2122 extract_bit_field (src, bitsize,
2123 xbitpos % BITS_PER_WORD, 1,
2124 NULL_RTX, word_mode, word_mode));
2127 return tgtblk;
2130 /* Add a USE expression for REG to the (possibly empty) list pointed
2131 to by CALL_FUSAGE. REG must denote a hard register. */
2133 void
2134 use_reg (rtx *call_fusage, rtx reg)
2136 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2138 *call_fusage
2139 = gen_rtx_EXPR_LIST (VOIDmode,
2140 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2143 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2144 starting at REGNO. All of these registers must be hard registers. */
2146 void
2147 use_regs (rtx *call_fusage, int regno, int nregs)
2149 int i;
2151 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2153 for (i = 0; i < nregs; i++)
2154 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2157 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2158 PARALLEL REGS. This is for calls that pass values in multiple
2159 non-contiguous locations. The Irix 6 ABI has examples of this. */
2161 void
2162 use_group_regs (rtx *call_fusage, rtx regs)
2164 int i;
2166 for (i = 0; i < XVECLEN (regs, 0); i++)
2168 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2170 /* A NULL entry means the parameter goes both on the stack and in
2171 registers. This can also be a MEM for targets that pass values
2172 partially on the stack and partially in registers. */
2173 if (reg != 0 && REG_P (reg))
2174 use_reg (call_fusage, reg);
2179 /* Determine whether the LEN bytes generated by CONSTFUN can be
2180 stored to memory using several move instructions. CONSTFUNDATA is
2181 a pointer which will be passed as argument in every CONSTFUN call.
2182 ALIGN is maximum alignment we can assume. Return nonzero if a
2183 call to store_by_pieces should succeed. */
2186 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2187 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2188 void *constfundata, unsigned int align)
2190 unsigned HOST_WIDE_INT l;
2191 unsigned int max_size;
2192 HOST_WIDE_INT offset = 0;
2193 enum machine_mode mode, tmode;
2194 enum insn_code icode;
2195 int reverse;
2196 rtx cst;
2198 if (len == 0)
2199 return 1;
2201 if (! STORE_BY_PIECES_P (len, align))
2202 return 0;
2204 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2205 if (align >= GET_MODE_ALIGNMENT (tmode))
2206 align = GET_MODE_ALIGNMENT (tmode);
2207 else
2209 enum machine_mode xmode;
2211 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2212 tmode != VOIDmode;
2213 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2214 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2215 || SLOW_UNALIGNED_ACCESS (tmode, align))
2216 break;
2218 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2221 /* We would first store what we can in the largest integer mode, then go to
2222 successively smaller modes. */
2224 for (reverse = 0;
2225 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2226 reverse++)
2228 l = len;
2229 mode = VOIDmode;
2230 max_size = STORE_MAX_PIECES + 1;
2231 while (max_size > 1)
2233 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2234 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2235 if (GET_MODE_SIZE (tmode) < max_size)
2236 mode = tmode;
2238 if (mode == VOIDmode)
2239 break;
2241 icode = mov_optab->handlers[(int) mode].insn_code;
2242 if (icode != CODE_FOR_nothing
2243 && align >= GET_MODE_ALIGNMENT (mode))
2245 unsigned int size = GET_MODE_SIZE (mode);
2247 while (l >= size)
2249 if (reverse)
2250 offset -= size;
2252 cst = (*constfun) (constfundata, offset, mode);
2253 if (!LEGITIMATE_CONSTANT_P (cst))
2254 return 0;
2256 if (!reverse)
2257 offset += size;
2259 l -= size;
2263 max_size = GET_MODE_SIZE (mode);
2266 /* The code above should have handled everything. */
2267 gcc_assert (!l);
2270 return 1;
2273 /* Generate several move instructions to store LEN bytes generated by
2274 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2275 pointer which will be passed as argument in every CONSTFUN call.
2276 ALIGN is maximum alignment we can assume.
2277 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2278 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2279 stpcpy. */
2282 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2283 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2284 void *constfundata, unsigned int align, int endp)
2286 struct store_by_pieces data;
2288 if (len == 0)
2290 gcc_assert (endp != 2);
2291 return to;
2294 gcc_assert (STORE_BY_PIECES_P (len, align));
2295 data.constfun = constfun;
2296 data.constfundata = constfundata;
2297 data.len = len;
2298 data.to = to;
2299 store_by_pieces_1 (&data, align);
2300 if (endp)
2302 rtx to1;
2304 gcc_assert (!data.reverse);
2305 if (data.autinc_to)
2307 if (endp == 2)
2309 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2310 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2311 else
2312 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2313 -1));
2315 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2316 data.offset);
2318 else
2320 if (endp == 2)
2321 --data.offset;
2322 to1 = adjust_address (data.to, QImode, data.offset);
2324 return to1;
2326 else
2327 return data.to;
2330 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2331 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2333 static void
2334 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2336 struct store_by_pieces data;
2338 if (len == 0)
2339 return;
2341 data.constfun = clear_by_pieces_1;
2342 data.constfundata = NULL;
2343 data.len = len;
2344 data.to = to;
2345 store_by_pieces_1 (&data, align);
2348 /* Callback routine for clear_by_pieces.
2349 Return const0_rtx unconditionally. */
2351 static rtx
2352 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2353 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2354 enum machine_mode mode ATTRIBUTE_UNUSED)
2356 return const0_rtx;
2359 /* Subroutine of clear_by_pieces and store_by_pieces.
2360 Generate several move instructions to store LEN bytes of block TO. (A MEM
2361 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2363 static void
2364 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2365 unsigned int align ATTRIBUTE_UNUSED)
2367 rtx to_addr = XEXP (data->to, 0);
2368 unsigned int max_size = STORE_MAX_PIECES + 1;
2369 enum machine_mode mode = VOIDmode, tmode;
2370 enum insn_code icode;
2372 data->offset = 0;
2373 data->to_addr = to_addr;
2374 data->autinc_to
2375 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2376 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2378 data->explicit_inc_to = 0;
2379 data->reverse
2380 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2381 if (data->reverse)
2382 data->offset = data->len;
2384 /* If storing requires more than two move insns,
2385 copy addresses to registers (to make displacements shorter)
2386 and use post-increment if available. */
2387 if (!data->autinc_to
2388 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2390 /* Determine the main mode we'll be using. */
2391 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2392 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2393 if (GET_MODE_SIZE (tmode) < max_size)
2394 mode = tmode;
2396 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2398 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2399 data->autinc_to = 1;
2400 data->explicit_inc_to = -1;
2403 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2404 && ! data->autinc_to)
2406 data->to_addr = copy_addr_to_reg (to_addr);
2407 data->autinc_to = 1;
2408 data->explicit_inc_to = 1;
2411 if ( !data->autinc_to && CONSTANT_P (to_addr))
2412 data->to_addr = copy_addr_to_reg (to_addr);
2415 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2416 if (align >= GET_MODE_ALIGNMENT (tmode))
2417 align = GET_MODE_ALIGNMENT (tmode);
2418 else
2420 enum machine_mode xmode;
2422 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2423 tmode != VOIDmode;
2424 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2425 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2426 || SLOW_UNALIGNED_ACCESS (tmode, align))
2427 break;
2429 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2432 /* First store what we can in the largest integer mode, then go to
2433 successively smaller modes. */
2435 while (max_size > 1)
2437 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2438 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2439 if (GET_MODE_SIZE (tmode) < max_size)
2440 mode = tmode;
2442 if (mode == VOIDmode)
2443 break;
2445 icode = mov_optab->handlers[(int) mode].insn_code;
2446 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2447 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2449 max_size = GET_MODE_SIZE (mode);
2452 /* The code above should have handled everything. */
2453 gcc_assert (!data->len);
2456 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2457 with move instructions for mode MODE. GENFUN is the gen_... function
2458 to make a move insn for that mode. DATA has all the other info. */
2460 static void
2461 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2462 struct store_by_pieces *data)
2464 unsigned int size = GET_MODE_SIZE (mode);
2465 rtx to1, cst;
2467 while (data->len >= size)
2469 if (data->reverse)
2470 data->offset -= size;
2472 if (data->autinc_to)
2473 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2474 data->offset);
2475 else
2476 to1 = adjust_address (data->to, mode, data->offset);
2478 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2479 emit_insn (gen_add2_insn (data->to_addr,
2480 GEN_INT (-(HOST_WIDE_INT) size)));
2482 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2483 emit_insn ((*genfun) (to1, cst));
2485 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2486 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2488 if (! data->reverse)
2489 data->offset += size;
2491 data->len -= size;
2495 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2496 its length in bytes. */
2499 clear_storage (rtx object, rtx size, enum block_op_methods method)
2501 enum machine_mode mode = GET_MODE (object);
2502 unsigned int align;
2504 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2506 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2507 just move a zero. Otherwise, do this a piece at a time. */
2508 if (mode != BLKmode
2509 && GET_CODE (size) == CONST_INT
2510 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2512 rtx zero = CONST0_RTX (mode);
2513 if (zero != NULL)
2515 emit_move_insn (object, zero);
2516 return NULL;
2519 if (COMPLEX_MODE_P (mode))
2521 zero = CONST0_RTX (GET_MODE_INNER (mode));
2522 if (zero != NULL)
2524 write_complex_part (object, zero, 0);
2525 write_complex_part (object, zero, 1);
2526 return NULL;
2531 if (size == const0_rtx)
2532 return NULL;
2534 align = MEM_ALIGN (object);
2536 if (GET_CODE (size) == CONST_INT
2537 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2538 clear_by_pieces (object, INTVAL (size), align);
2539 else if (set_storage_via_setmem (object, size, const0_rtx, align))
2541 else
2542 return clear_storage_via_libcall (object, size,
2543 method == BLOCK_OP_TAILCALL);
2545 return NULL;
2548 /* A subroutine of clear_storage. Expand a call to memset.
2549 Return the return value of memset, 0 otherwise. */
2551 static rtx
2552 clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
2554 tree call_expr, arg_list, fn, object_tree, size_tree;
2555 enum machine_mode size_mode;
2556 rtx retval;
2558 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2559 place those into new pseudos into a VAR_DECL and use them later. */
2561 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2563 size_mode = TYPE_MODE (sizetype);
2564 size = convert_to_mode (size_mode, size, 1);
2565 size = copy_to_mode_reg (size_mode, size);
2567 /* It is incorrect to use the libcall calling conventions to call
2568 memset in this context. This could be a user call to memset and
2569 the user may wish to examine the return value from memset. For
2570 targets where libcalls and normal calls have different conventions
2571 for returning pointers, we could end up generating incorrect code. */
2573 object_tree = make_tree (ptr_type_node, object);
2574 size_tree = make_tree (sizetype, size);
2576 fn = clear_storage_libcall_fn (true);
2577 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2578 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2579 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2581 /* Now we have to build up the CALL_EXPR itself. */
2582 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2583 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2584 call_expr, arg_list, NULL_TREE);
2585 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2587 retval = expand_normal (call_expr);
2589 return retval;
2592 /* A subroutine of clear_storage_via_libcall. Create the tree node
2593 for the function we use for block clears. The first time FOR_CALL
2594 is true, we call assemble_external. */
2596 static GTY(()) tree block_clear_fn;
2598 void
2599 init_block_clear_fn (const char *asmspec)
2601 if (!block_clear_fn)
2603 tree fn, args;
2605 fn = get_identifier ("memset");
2606 args = build_function_type_list (ptr_type_node, ptr_type_node,
2607 integer_type_node, sizetype,
2608 NULL_TREE);
2610 fn = build_decl (FUNCTION_DECL, fn, args);
2611 DECL_EXTERNAL (fn) = 1;
2612 TREE_PUBLIC (fn) = 1;
2613 DECL_ARTIFICIAL (fn) = 1;
2614 TREE_NOTHROW (fn) = 1;
2615 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2616 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2618 block_clear_fn = fn;
2621 if (asmspec)
2622 set_user_assembler_name (block_clear_fn, asmspec);
2625 static tree
2626 clear_storage_libcall_fn (int for_call)
2628 static bool emitted_extern;
2630 if (!block_clear_fn)
2631 init_block_clear_fn (NULL);
2633 if (for_call && !emitted_extern)
2635 emitted_extern = true;
2636 make_decl_rtl (block_clear_fn);
2637 assemble_external (block_clear_fn);
2640 return block_clear_fn;
2643 /* Expand a setmem pattern; return true if successful. */
2645 bool
2646 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2648 /* Try the most limited insn first, because there's no point
2649 including more than one in the machine description unless
2650 the more limited one has some advantage. */
2652 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2653 enum machine_mode mode;
2655 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2656 mode = GET_MODE_WIDER_MODE (mode))
2658 enum insn_code code = setmem_optab[(int) mode];
2659 insn_operand_predicate_fn pred;
2661 if (code != CODE_FOR_nothing
2662 /* We don't need MODE to be narrower than
2663 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2664 the mode mask, as it is returned by the macro, it will
2665 definitely be less than the actual mode mask. */
2666 && ((GET_CODE (size) == CONST_INT
2667 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2668 <= (GET_MODE_MASK (mode) >> 1)))
2669 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2670 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2671 || (*pred) (object, BLKmode))
2672 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2673 || (*pred) (opalign, VOIDmode)))
2675 rtx opsize, opchar;
2676 enum machine_mode char_mode;
2677 rtx last = get_last_insn ();
2678 rtx pat;
2680 opsize = convert_to_mode (mode, size, 1);
2681 pred = insn_data[(int) code].operand[1].predicate;
2682 if (pred != 0 && ! (*pred) (opsize, mode))
2683 opsize = copy_to_mode_reg (mode, opsize);
2685 opchar = val;
2686 char_mode = insn_data[(int) code].operand[2].mode;
2687 if (char_mode != VOIDmode)
2689 opchar = convert_to_mode (char_mode, opchar, 1);
2690 pred = insn_data[(int) code].operand[2].predicate;
2691 if (pred != 0 && ! (*pred) (opchar, char_mode))
2692 opchar = copy_to_mode_reg (char_mode, opchar);
2695 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2696 if (pat)
2698 emit_insn (pat);
2699 return true;
2701 else
2702 delete_insns_since (last);
2706 return false;
2710 /* Write to one of the components of the complex value CPLX. Write VAL to
2711 the real part if IMAG_P is false, and the imaginary part if its true. */
2713 static void
2714 write_complex_part (rtx cplx, rtx val, bool imag_p)
2716 enum machine_mode cmode;
2717 enum machine_mode imode;
2718 unsigned ibitsize;
2720 if (GET_CODE (cplx) == CONCAT)
2722 emit_move_insn (XEXP (cplx, imag_p), val);
2723 return;
2726 cmode = GET_MODE (cplx);
2727 imode = GET_MODE_INNER (cmode);
2728 ibitsize = GET_MODE_BITSIZE (imode);
2730 /* For MEMs simplify_gen_subreg may generate an invalid new address
2731 because, e.g., the original address is considered mode-dependent
2732 by the target, which restricts simplify_subreg from invoking
2733 adjust_address_nv. Instead of preparing fallback support for an
2734 invalid address, we call adjust_address_nv directly. */
2735 if (MEM_P (cplx))
2737 emit_move_insn (adjust_address_nv (cplx, imode,
2738 imag_p ? GET_MODE_SIZE (imode) : 0),
2739 val);
2740 return;
2743 /* If the sub-object is at least word sized, then we know that subregging
2744 will work. This special case is important, since store_bit_field
2745 wants to operate on integer modes, and there's rarely an OImode to
2746 correspond to TCmode. */
2747 if (ibitsize >= BITS_PER_WORD
2748 /* For hard regs we have exact predicates. Assume we can split
2749 the original object if it spans an even number of hard regs.
2750 This special case is important for SCmode on 64-bit platforms
2751 where the natural size of floating-point regs is 32-bit. */
2752 || (REG_P (cplx)
2753 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2754 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2756 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2757 imag_p ? GET_MODE_SIZE (imode) : 0);
2758 if (part)
2760 emit_move_insn (part, val);
2761 return;
2763 else
2764 /* simplify_gen_subreg may fail for sub-word MEMs. */
2765 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2768 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2771 /* Extract one of the components of the complex value CPLX. Extract the
2772 real part if IMAG_P is false, and the imaginary part if it's true. */
2774 static rtx
2775 read_complex_part (rtx cplx, bool imag_p)
2777 enum machine_mode cmode, imode;
2778 unsigned ibitsize;
2780 if (GET_CODE (cplx) == CONCAT)
2781 return XEXP (cplx, imag_p);
2783 cmode = GET_MODE (cplx);
2784 imode = GET_MODE_INNER (cmode);
2785 ibitsize = GET_MODE_BITSIZE (imode);
2787 /* Special case reads from complex constants that got spilled to memory. */
2788 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2790 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2791 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2793 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2794 if (CONSTANT_CLASS_P (part))
2795 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2799 /* For MEMs simplify_gen_subreg may generate an invalid new address
2800 because, e.g., the original address is considered mode-dependent
2801 by the target, which restricts simplify_subreg from invoking
2802 adjust_address_nv. Instead of preparing fallback support for an
2803 invalid address, we call adjust_address_nv directly. */
2804 if (MEM_P (cplx))
2805 return adjust_address_nv (cplx, imode,
2806 imag_p ? GET_MODE_SIZE (imode) : 0);
2808 /* If the sub-object is at least word sized, then we know that subregging
2809 will work. This special case is important, since extract_bit_field
2810 wants to operate on integer modes, and there's rarely an OImode to
2811 correspond to TCmode. */
2812 if (ibitsize >= BITS_PER_WORD
2813 /* For hard regs we have exact predicates. Assume we can split
2814 the original object if it spans an even number of hard regs.
2815 This special case is important for SCmode on 64-bit platforms
2816 where the natural size of floating-point regs is 32-bit. */
2817 || (REG_P (cplx)
2818 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2819 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2821 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2822 imag_p ? GET_MODE_SIZE (imode) : 0);
2823 if (ret)
2824 return ret;
2825 else
2826 /* simplify_gen_subreg may fail for sub-word MEMs. */
2827 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2830 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2831 true, NULL_RTX, imode, imode);
2834 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2835 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2836 represented in NEW_MODE. If FORCE is true, this will never happen, as
2837 we'll force-create a SUBREG if needed. */
2839 static rtx
2840 emit_move_change_mode (enum machine_mode new_mode,
2841 enum machine_mode old_mode, rtx x, bool force)
2843 rtx ret;
2845 if (MEM_P (x))
2847 /* We don't have to worry about changing the address since the
2848 size in bytes is supposed to be the same. */
2849 if (reload_in_progress)
2851 /* Copy the MEM to change the mode and move any
2852 substitutions from the old MEM to the new one. */
2853 ret = adjust_address_nv (x, new_mode, 0);
2854 copy_replacements (x, ret);
2856 else
2857 ret = adjust_address (x, new_mode, 0);
2859 else
2861 /* Note that we do want simplify_subreg's behavior of validating
2862 that the new mode is ok for a hard register. If we were to use
2863 simplify_gen_subreg, we would create the subreg, but would
2864 probably run into the target not being able to implement it. */
2865 /* Except, of course, when FORCE is true, when this is exactly what
2866 we want. Which is needed for CCmodes on some targets. */
2867 if (force)
2868 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2869 else
2870 ret = simplify_subreg (new_mode, x, old_mode, 0);
2873 return ret;
2876 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2877 an integer mode of the same size as MODE. Returns the instruction
2878 emitted, or NULL if such a move could not be generated. */
2880 static rtx
2881 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2883 enum machine_mode imode;
2884 enum insn_code code;
2886 /* There must exist a mode of the exact size we require. */
2887 imode = int_mode_for_mode (mode);
2888 if (imode == BLKmode)
2889 return NULL_RTX;
2891 /* The target must support moves in this mode. */
2892 code = mov_optab->handlers[imode].insn_code;
2893 if (code == CODE_FOR_nothing)
2894 return NULL_RTX;
2896 x = emit_move_change_mode (imode, mode, x, force);
2897 if (x == NULL_RTX)
2898 return NULL_RTX;
2899 y = emit_move_change_mode (imode, mode, y, force);
2900 if (y == NULL_RTX)
2901 return NULL_RTX;
2902 return emit_insn (GEN_FCN (code) (x, y));
2905 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2906 Return an equivalent MEM that does not use an auto-increment. */
2908 static rtx
2909 emit_move_resolve_push (enum machine_mode mode, rtx x)
2911 enum rtx_code code = GET_CODE (XEXP (x, 0));
2912 HOST_WIDE_INT adjust;
2913 rtx temp;
2915 adjust = GET_MODE_SIZE (mode);
2916 #ifdef PUSH_ROUNDING
2917 adjust = PUSH_ROUNDING (adjust);
2918 #endif
2919 if (code == PRE_DEC || code == POST_DEC)
2920 adjust = -adjust;
2921 else if (code == PRE_MODIFY || code == POST_MODIFY)
2923 rtx expr = XEXP (XEXP (x, 0), 1);
2924 HOST_WIDE_INT val;
2926 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2927 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2928 val = INTVAL (XEXP (expr, 1));
2929 if (GET_CODE (expr) == MINUS)
2930 val = -val;
2931 gcc_assert (adjust == val || adjust == -val);
2932 adjust = val;
2935 /* Do not use anti_adjust_stack, since we don't want to update
2936 stack_pointer_delta. */
2937 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2938 GEN_INT (adjust), stack_pointer_rtx,
2939 0, OPTAB_LIB_WIDEN);
2940 if (temp != stack_pointer_rtx)
2941 emit_move_insn (stack_pointer_rtx, temp);
2943 switch (code)
2945 case PRE_INC:
2946 case PRE_DEC:
2947 case PRE_MODIFY:
2948 temp = stack_pointer_rtx;
2949 break;
2950 case POST_INC:
2951 case POST_DEC:
2952 case POST_MODIFY:
2953 temp = plus_constant (stack_pointer_rtx, -adjust);
2954 break;
2955 default:
2956 gcc_unreachable ();
2959 return replace_equiv_address (x, temp);
2962 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2963 X is known to satisfy push_operand, and MODE is known to be complex.
2964 Returns the last instruction emitted. */
2966 static rtx
2967 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2969 enum machine_mode submode = GET_MODE_INNER (mode);
2970 bool imag_first;
2972 #ifdef PUSH_ROUNDING
2973 unsigned int submodesize = GET_MODE_SIZE (submode);
2975 /* In case we output to the stack, but the size is smaller than the
2976 machine can push exactly, we need to use move instructions. */
2977 if (PUSH_ROUNDING (submodesize) != submodesize)
2979 x = emit_move_resolve_push (mode, x);
2980 return emit_move_insn (x, y);
2982 #endif
2984 /* Note that the real part always precedes the imag part in memory
2985 regardless of machine's endianness. */
2986 switch (GET_CODE (XEXP (x, 0)))
2988 case PRE_DEC:
2989 case POST_DEC:
2990 imag_first = true;
2991 break;
2992 case PRE_INC:
2993 case POST_INC:
2994 imag_first = false;
2995 break;
2996 default:
2997 gcc_unreachable ();
3000 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3001 read_complex_part (y, imag_first));
3002 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3003 read_complex_part (y, !imag_first));
3006 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3007 MODE is known to be complex. Returns the last instruction emitted. */
3009 static rtx
3010 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3012 bool try_int;
3014 /* Need to take special care for pushes, to maintain proper ordering
3015 of the data, and possibly extra padding. */
3016 if (push_operand (x, mode))
3017 return emit_move_complex_push (mode, x, y);
3019 /* See if we can coerce the target into moving both values at once. */
3021 /* Move floating point as parts. */
3022 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3023 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
3024 try_int = false;
3025 /* Not possible if the values are inherently not adjacent. */
3026 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3027 try_int = false;
3028 /* Is possible if both are registers (or subregs of registers). */
3029 else if (register_operand (x, mode) && register_operand (y, mode))
3030 try_int = true;
3031 /* If one of the operands is a memory, and alignment constraints
3032 are friendly enough, we may be able to do combined memory operations.
3033 We do not attempt this if Y is a constant because that combination is
3034 usually better with the by-parts thing below. */
3035 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3036 && (!STRICT_ALIGNMENT
3037 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3038 try_int = true;
3039 else
3040 try_int = false;
3042 if (try_int)
3044 rtx ret;
3046 /* For memory to memory moves, optimal behavior can be had with the
3047 existing block move logic. */
3048 if (MEM_P (x) && MEM_P (y))
3050 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3051 BLOCK_OP_NO_LIBCALL);
3052 return get_last_insn ();
3055 ret = emit_move_via_integer (mode, x, y, true);
3056 if (ret)
3057 return ret;
3060 /* Show the output dies here. This is necessary for SUBREGs
3061 of pseudos since we cannot track their lifetimes correctly;
3062 hard regs shouldn't appear here except as return values. */
3063 if (!reload_completed && !reload_in_progress
3064 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3065 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3067 write_complex_part (x, read_complex_part (y, false), false);
3068 write_complex_part (x, read_complex_part (y, true), true);
3069 return get_last_insn ();
3072 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3073 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3075 static rtx
3076 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3078 rtx ret;
3080 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3081 if (mode != CCmode)
3083 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3084 if (code != CODE_FOR_nothing)
3086 x = emit_move_change_mode (CCmode, mode, x, true);
3087 y = emit_move_change_mode (CCmode, mode, y, true);
3088 return emit_insn (GEN_FCN (code) (x, y));
3092 /* Otherwise, find the MODE_INT mode of the same width. */
3093 ret = emit_move_via_integer (mode, x, y, false);
3094 gcc_assert (ret != NULL);
3095 return ret;
3098 /* Return true if word I of OP lies entirely in the
3099 undefined bits of a paradoxical subreg. */
3101 static bool
3102 undefined_operand_subword_p (rtx op, int i)
3104 enum machine_mode innermode, innermostmode;
3105 int offset;
3106 if (GET_CODE (op) != SUBREG)
3107 return false;
3108 innermode = GET_MODE (op);
3109 innermostmode = GET_MODE (SUBREG_REG (op));
3110 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3111 /* The SUBREG_BYTE represents offset, as if the value were stored in
3112 memory, except for a paradoxical subreg where we define
3113 SUBREG_BYTE to be 0; undo this exception as in
3114 simplify_subreg. */
3115 if (SUBREG_BYTE (op) == 0
3116 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3118 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3119 if (WORDS_BIG_ENDIAN)
3120 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3121 if (BYTES_BIG_ENDIAN)
3122 offset += difference % UNITS_PER_WORD;
3124 if (offset >= GET_MODE_SIZE (innermostmode)
3125 || offset <= -GET_MODE_SIZE (word_mode))
3126 return true;
3127 return false;
3130 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3131 MODE is any multi-word or full-word mode that lacks a move_insn
3132 pattern. Note that you will get better code if you define such
3133 patterns, even if they must turn into multiple assembler instructions. */
3135 static rtx
3136 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3138 rtx last_insn = 0;
3139 rtx seq, inner;
3140 bool need_clobber;
3141 int i;
3143 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3145 /* If X is a push on the stack, do the push now and replace
3146 X with a reference to the stack pointer. */
3147 if (push_operand (x, mode))
3148 x = emit_move_resolve_push (mode, x);
3150 /* If we are in reload, see if either operand is a MEM whose address
3151 is scheduled for replacement. */
3152 if (reload_in_progress && MEM_P (x)
3153 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3154 x = replace_equiv_address_nv (x, inner);
3155 if (reload_in_progress && MEM_P (y)
3156 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3157 y = replace_equiv_address_nv (y, inner);
3159 start_sequence ();
3161 need_clobber = false;
3162 for (i = 0;
3163 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3164 i++)
3166 rtx xpart = operand_subword (x, i, 1, mode);
3167 rtx ypart;
3169 /* Do not generate code for a move if it would come entirely
3170 from the undefined bits of a paradoxical subreg. */
3171 if (undefined_operand_subword_p (y, i))
3172 continue;
3174 ypart = operand_subword (y, i, 1, mode);
3176 /* If we can't get a part of Y, put Y into memory if it is a
3177 constant. Otherwise, force it into a register. Then we must
3178 be able to get a part of Y. */
3179 if (ypart == 0 && CONSTANT_P (y))
3181 y = use_anchored_address (force_const_mem (mode, y));
3182 ypart = operand_subword (y, i, 1, mode);
3184 else if (ypart == 0)
3185 ypart = operand_subword_force (y, i, mode);
3187 gcc_assert (xpart && ypart);
3189 need_clobber |= (GET_CODE (xpart) == SUBREG);
3191 last_insn = emit_move_insn (xpart, ypart);
3194 seq = get_insns ();
3195 end_sequence ();
3197 /* Show the output dies here. This is necessary for SUBREGs
3198 of pseudos since we cannot track their lifetimes correctly;
3199 hard regs shouldn't appear here except as return values.
3200 We never want to emit such a clobber after reload. */
3201 if (x != y
3202 && ! (reload_in_progress || reload_completed)
3203 && need_clobber != 0)
3204 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3206 emit_insn (seq);
3208 return last_insn;
3211 /* Low level part of emit_move_insn.
3212 Called just like emit_move_insn, but assumes X and Y
3213 are basically valid. */
3216 emit_move_insn_1 (rtx x, rtx y)
3218 enum machine_mode mode = GET_MODE (x);
3219 enum insn_code code;
3221 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3223 code = mov_optab->handlers[mode].insn_code;
3224 if (code != CODE_FOR_nothing)
3225 return emit_insn (GEN_FCN (code) (x, y));
3227 /* Expand complex moves by moving real part and imag part. */
3228 if (COMPLEX_MODE_P (mode))
3229 return emit_move_complex (mode, x, y);
3231 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3233 rtx result = emit_move_via_integer (mode, x, y, true);
3235 /* If we can't find an integer mode, use multi words. */
3236 if (result)
3237 return result;
3238 else
3239 return emit_move_multi_word (mode, x, y);
3242 if (GET_MODE_CLASS (mode) == MODE_CC)
3243 return emit_move_ccmode (mode, x, y);
3245 /* Try using a move pattern for the corresponding integer mode. This is
3246 only safe when simplify_subreg can convert MODE constants into integer
3247 constants. At present, it can only do this reliably if the value
3248 fits within a HOST_WIDE_INT. */
3249 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3251 rtx ret = emit_move_via_integer (mode, x, y, false);
3252 if (ret)
3253 return ret;
3256 return emit_move_multi_word (mode, x, y);
3259 /* Generate code to copy Y into X.
3260 Both Y and X must have the same mode, except that
3261 Y can be a constant with VOIDmode.
3262 This mode cannot be BLKmode; use emit_block_move for that.
3264 Return the last instruction emitted. */
3267 emit_move_insn (rtx x, rtx y)
3269 enum machine_mode mode = GET_MODE (x);
3270 rtx y_cst = NULL_RTX;
3271 rtx last_insn, set;
3273 gcc_assert (mode != BLKmode
3274 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3276 if (CONSTANT_P (y))
3278 if (optimize
3279 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3280 && (last_insn = compress_float_constant (x, y)))
3281 return last_insn;
3283 y_cst = y;
3285 if (!LEGITIMATE_CONSTANT_P (y))
3287 y = force_const_mem (mode, y);
3289 /* If the target's cannot_force_const_mem prevented the spill,
3290 assume that the target's move expanders will also take care
3291 of the non-legitimate constant. */
3292 if (!y)
3293 y = y_cst;
3294 else
3295 y = use_anchored_address (y);
3299 /* If X or Y are memory references, verify that their addresses are valid
3300 for the machine. */
3301 if (MEM_P (x)
3302 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3303 && ! push_operand (x, GET_MODE (x)))
3304 || (flag_force_addr
3305 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3306 x = validize_mem (x);
3308 if (MEM_P (y)
3309 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3310 || (flag_force_addr
3311 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3312 y = validize_mem (y);
3314 gcc_assert (mode != BLKmode);
3316 last_insn = emit_move_insn_1 (x, y);
3318 if (y_cst && REG_P (x)
3319 && (set = single_set (last_insn)) != NULL_RTX
3320 && SET_DEST (set) == x
3321 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3322 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3324 return last_insn;
3327 /* If Y is representable exactly in a narrower mode, and the target can
3328 perform the extension directly from constant or memory, then emit the
3329 move as an extension. */
3331 static rtx
3332 compress_float_constant (rtx x, rtx y)
3334 enum machine_mode dstmode = GET_MODE (x);
3335 enum machine_mode orig_srcmode = GET_MODE (y);
3336 enum machine_mode srcmode;
3337 REAL_VALUE_TYPE r;
3338 int oldcost, newcost;
3340 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3342 if (LEGITIMATE_CONSTANT_P (y))
3343 oldcost = rtx_cost (y, SET);
3344 else
3345 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3347 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3348 srcmode != orig_srcmode;
3349 srcmode = GET_MODE_WIDER_MODE (srcmode))
3351 enum insn_code ic;
3352 rtx trunc_y, last_insn;
3354 /* Skip if the target can't extend this way. */
3355 ic = can_extend_p (dstmode, srcmode, 0);
3356 if (ic == CODE_FOR_nothing)
3357 continue;
3359 /* Skip if the narrowed value isn't exact. */
3360 if (! exact_real_truncate (srcmode, &r))
3361 continue;
3363 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3365 if (LEGITIMATE_CONSTANT_P (trunc_y))
3367 /* Skip if the target needs extra instructions to perform
3368 the extension. */
3369 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3370 continue;
3371 /* This is valid, but may not be cheaper than the original. */
3372 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3373 if (oldcost < newcost)
3374 continue;
3376 else if (float_extend_from_mem[dstmode][srcmode])
3378 trunc_y = force_const_mem (srcmode, trunc_y);
3379 /* This is valid, but may not be cheaper than the original. */
3380 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3381 if (oldcost < newcost)
3382 continue;
3383 trunc_y = validize_mem (trunc_y);
3385 else
3386 continue;
3388 /* For CSE's benefit, force the compressed constant pool entry
3389 into a new pseudo. This constant may be used in different modes,
3390 and if not, combine will put things back together for us. */
3391 trunc_y = force_reg (srcmode, trunc_y);
3392 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3393 last_insn = get_last_insn ();
3395 if (REG_P (x))
3396 set_unique_reg_note (last_insn, REG_EQUAL, y);
3398 return last_insn;
3401 return NULL_RTX;
3404 /* Pushing data onto the stack. */
3406 /* Push a block of length SIZE (perhaps variable)
3407 and return an rtx to address the beginning of the block.
3408 The value may be virtual_outgoing_args_rtx.
3410 EXTRA is the number of bytes of padding to push in addition to SIZE.
3411 BELOW nonzero means this padding comes at low addresses;
3412 otherwise, the padding comes at high addresses. */
3415 push_block (rtx size, int extra, int below)
3417 rtx temp;
3419 size = convert_modes (Pmode, ptr_mode, size, 1);
3420 if (CONSTANT_P (size))
3421 anti_adjust_stack (plus_constant (size, extra));
3422 else if (REG_P (size) && extra == 0)
3423 anti_adjust_stack (size);
3424 else
3426 temp = copy_to_mode_reg (Pmode, size);
3427 if (extra != 0)
3428 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3429 temp, 0, OPTAB_LIB_WIDEN);
3430 anti_adjust_stack (temp);
3433 #ifndef STACK_GROWS_DOWNWARD
3434 if (0)
3435 #else
3436 if (1)
3437 #endif
3439 temp = virtual_outgoing_args_rtx;
3440 if (extra != 0 && below)
3441 temp = plus_constant (temp, extra);
3443 else
3445 if (GET_CODE (size) == CONST_INT)
3446 temp = plus_constant (virtual_outgoing_args_rtx,
3447 -INTVAL (size) - (below ? 0 : extra));
3448 else if (extra != 0 && !below)
3449 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3450 negate_rtx (Pmode, plus_constant (size, extra)));
3451 else
3452 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3453 negate_rtx (Pmode, size));
3456 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3459 #ifdef PUSH_ROUNDING
3461 /* Emit single push insn. */
3463 static void
3464 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3466 rtx dest_addr;
3467 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3468 rtx dest;
3469 enum insn_code icode;
3470 insn_operand_predicate_fn pred;
3472 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3473 /* If there is push pattern, use it. Otherwise try old way of throwing
3474 MEM representing push operation to move expander. */
3475 icode = push_optab->handlers[(int) mode].insn_code;
3476 if (icode != CODE_FOR_nothing)
3478 if (((pred = insn_data[(int) icode].operand[0].predicate)
3479 && !((*pred) (x, mode))))
3480 x = force_reg (mode, x);
3481 emit_insn (GEN_FCN (icode) (x));
3482 return;
3484 if (GET_MODE_SIZE (mode) == rounded_size)
3485 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3486 /* If we are to pad downward, adjust the stack pointer first and
3487 then store X into the stack location using an offset. This is
3488 because emit_move_insn does not know how to pad; it does not have
3489 access to type. */
3490 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3492 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3493 HOST_WIDE_INT offset;
3495 emit_move_insn (stack_pointer_rtx,
3496 expand_binop (Pmode,
3497 #ifdef STACK_GROWS_DOWNWARD
3498 sub_optab,
3499 #else
3500 add_optab,
3501 #endif
3502 stack_pointer_rtx,
3503 GEN_INT (rounded_size),
3504 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3506 offset = (HOST_WIDE_INT) padding_size;
3507 #ifdef STACK_GROWS_DOWNWARD
3508 if (STACK_PUSH_CODE == POST_DEC)
3509 /* We have already decremented the stack pointer, so get the
3510 previous value. */
3511 offset += (HOST_WIDE_INT) rounded_size;
3512 #else
3513 if (STACK_PUSH_CODE == POST_INC)
3514 /* We have already incremented the stack pointer, so get the
3515 previous value. */
3516 offset -= (HOST_WIDE_INT) rounded_size;
3517 #endif
3518 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3520 else
3522 #ifdef STACK_GROWS_DOWNWARD
3523 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3524 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3525 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3526 #else
3527 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3528 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3529 GEN_INT (rounded_size));
3530 #endif
3531 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3534 dest = gen_rtx_MEM (mode, dest_addr);
3536 if (type != 0)
3538 set_mem_attributes (dest, type, 1);
3540 if (flag_optimize_sibling_calls)
3541 /* Function incoming arguments may overlap with sibling call
3542 outgoing arguments and we cannot allow reordering of reads
3543 from function arguments with stores to outgoing arguments
3544 of sibling calls. */
3545 set_mem_alias_set (dest, 0);
3547 emit_move_insn (dest, x);
3549 #endif
3551 /* Generate code to push X onto the stack, assuming it has mode MODE and
3552 type TYPE.
3553 MODE is redundant except when X is a CONST_INT (since they don't
3554 carry mode info).
3555 SIZE is an rtx for the size of data to be copied (in bytes),
3556 needed only if X is BLKmode.
3558 ALIGN (in bits) is maximum alignment we can assume.
3560 If PARTIAL and REG are both nonzero, then copy that many of the first
3561 bytes of X into registers starting with REG, and push the rest of X.
3562 The amount of space pushed is decreased by PARTIAL bytes.
3563 REG must be a hard register in this case.
3564 If REG is zero but PARTIAL is not, take any all others actions for an
3565 argument partially in registers, but do not actually load any
3566 registers.
3568 EXTRA is the amount in bytes of extra space to leave next to this arg.
3569 This is ignored if an argument block has already been allocated.
3571 On a machine that lacks real push insns, ARGS_ADDR is the address of
3572 the bottom of the argument block for this call. We use indexing off there
3573 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3574 argument block has not been preallocated.
3576 ARGS_SO_FAR is the size of args previously pushed for this call.
3578 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3579 for arguments passed in registers. If nonzero, it will be the number
3580 of bytes required. */
3582 void
3583 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3584 unsigned int align, int partial, rtx reg, int extra,
3585 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3586 rtx alignment_pad)
3588 rtx xinner;
3589 enum direction stack_direction
3590 #ifdef STACK_GROWS_DOWNWARD
3591 = downward;
3592 #else
3593 = upward;
3594 #endif
3596 /* Decide where to pad the argument: `downward' for below,
3597 `upward' for above, or `none' for don't pad it.
3598 Default is below for small data on big-endian machines; else above. */
3599 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3601 /* Invert direction if stack is post-decrement.
3602 FIXME: why? */
3603 if (STACK_PUSH_CODE == POST_DEC)
3604 if (where_pad != none)
3605 where_pad = (where_pad == downward ? upward : downward);
3607 xinner = x;
3609 if (mode == BLKmode)
3611 /* Copy a block into the stack, entirely or partially. */
3613 rtx temp;
3614 int used;
3615 int offset;
3616 int skip;
3618 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3619 used = partial - offset;
3621 gcc_assert (size);
3623 /* USED is now the # of bytes we need not copy to the stack
3624 because registers will take care of them. */
3626 if (partial != 0)
3627 xinner = adjust_address (xinner, BLKmode, used);
3629 /* If the partial register-part of the arg counts in its stack size,
3630 skip the part of stack space corresponding to the registers.
3631 Otherwise, start copying to the beginning of the stack space,
3632 by setting SKIP to 0. */
3633 skip = (reg_parm_stack_space == 0) ? 0 : used;
3635 #ifdef PUSH_ROUNDING
3636 /* Do it with several push insns if that doesn't take lots of insns
3637 and if there is no difficulty with push insns that skip bytes
3638 on the stack for alignment purposes. */
3639 if (args_addr == 0
3640 && PUSH_ARGS
3641 && GET_CODE (size) == CONST_INT
3642 && skip == 0
3643 && MEM_ALIGN (xinner) >= align
3644 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3645 /* Here we avoid the case of a structure whose weak alignment
3646 forces many pushes of a small amount of data,
3647 and such small pushes do rounding that causes trouble. */
3648 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3649 || align >= BIGGEST_ALIGNMENT
3650 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3651 == (align / BITS_PER_UNIT)))
3652 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3654 /* Push padding now if padding above and stack grows down,
3655 or if padding below and stack grows up.
3656 But if space already allocated, this has already been done. */
3657 if (extra && args_addr == 0
3658 && where_pad != none && where_pad != stack_direction)
3659 anti_adjust_stack (GEN_INT (extra));
3661 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3663 else
3664 #endif /* PUSH_ROUNDING */
3666 rtx target;
3668 /* Otherwise make space on the stack and copy the data
3669 to the address of that space. */
3671 /* Deduct words put into registers from the size we must copy. */
3672 if (partial != 0)
3674 if (GET_CODE (size) == CONST_INT)
3675 size = GEN_INT (INTVAL (size) - used);
3676 else
3677 size = expand_binop (GET_MODE (size), sub_optab, size,
3678 GEN_INT (used), NULL_RTX, 0,
3679 OPTAB_LIB_WIDEN);
3682 /* Get the address of the stack space.
3683 In this case, we do not deal with EXTRA separately.
3684 A single stack adjust will do. */
3685 if (! args_addr)
3687 temp = push_block (size, extra, where_pad == downward);
3688 extra = 0;
3690 else if (GET_CODE (args_so_far) == CONST_INT)
3691 temp = memory_address (BLKmode,
3692 plus_constant (args_addr,
3693 skip + INTVAL (args_so_far)));
3694 else
3695 temp = memory_address (BLKmode,
3696 plus_constant (gen_rtx_PLUS (Pmode,
3697 args_addr,
3698 args_so_far),
3699 skip));
3701 if (!ACCUMULATE_OUTGOING_ARGS)
3703 /* If the source is referenced relative to the stack pointer,
3704 copy it to another register to stabilize it. We do not need
3705 to do this if we know that we won't be changing sp. */
3707 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3708 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3709 temp = copy_to_reg (temp);
3712 target = gen_rtx_MEM (BLKmode, temp);
3714 /* We do *not* set_mem_attributes here, because incoming arguments
3715 may overlap with sibling call outgoing arguments and we cannot
3716 allow reordering of reads from function arguments with stores
3717 to outgoing arguments of sibling calls. We do, however, want
3718 to record the alignment of the stack slot. */
3719 /* ALIGN may well be better aligned than TYPE, e.g. due to
3720 PARM_BOUNDARY. Assume the caller isn't lying. */
3721 set_mem_align (target, align);
3723 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3726 else if (partial > 0)
3728 /* Scalar partly in registers. */
3730 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3731 int i;
3732 int not_stack;
3733 /* # bytes of start of argument
3734 that we must make space for but need not store. */
3735 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3736 int args_offset = INTVAL (args_so_far);
3737 int skip;
3739 /* Push padding now if padding above and stack grows down,
3740 or if padding below and stack grows up.
3741 But if space already allocated, this has already been done. */
3742 if (extra && args_addr == 0
3743 && where_pad != none && where_pad != stack_direction)
3744 anti_adjust_stack (GEN_INT (extra));
3746 /* If we make space by pushing it, we might as well push
3747 the real data. Otherwise, we can leave OFFSET nonzero
3748 and leave the space uninitialized. */
3749 if (args_addr == 0)
3750 offset = 0;
3752 /* Now NOT_STACK gets the number of words that we don't need to
3753 allocate on the stack. Convert OFFSET to words too. */
3754 not_stack = (partial - offset) / UNITS_PER_WORD;
3755 offset /= UNITS_PER_WORD;
3757 /* If the partial register-part of the arg counts in its stack size,
3758 skip the part of stack space corresponding to the registers.
3759 Otherwise, start copying to the beginning of the stack space,
3760 by setting SKIP to 0. */
3761 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3763 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3764 x = validize_mem (force_const_mem (mode, x));
3766 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3767 SUBREGs of such registers are not allowed. */
3768 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3769 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3770 x = copy_to_reg (x);
3772 /* Loop over all the words allocated on the stack for this arg. */
3773 /* We can do it by words, because any scalar bigger than a word
3774 has a size a multiple of a word. */
3775 #ifndef PUSH_ARGS_REVERSED
3776 for (i = not_stack; i < size; i++)
3777 #else
3778 for (i = size - 1; i >= not_stack; i--)
3779 #endif
3780 if (i >= not_stack + offset)
3781 emit_push_insn (operand_subword_force (x, i, mode),
3782 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3783 0, args_addr,
3784 GEN_INT (args_offset + ((i - not_stack + skip)
3785 * UNITS_PER_WORD)),
3786 reg_parm_stack_space, alignment_pad);
3788 else
3790 rtx addr;
3791 rtx dest;
3793 /* Push padding now if padding above and stack grows down,
3794 or if padding below and stack grows up.
3795 But if space already allocated, this has already been done. */
3796 if (extra && args_addr == 0
3797 && where_pad != none && where_pad != stack_direction)
3798 anti_adjust_stack (GEN_INT (extra));
3800 #ifdef PUSH_ROUNDING
3801 if (args_addr == 0 && PUSH_ARGS)
3802 emit_single_push_insn (mode, x, type);
3803 else
3804 #endif
3806 if (GET_CODE (args_so_far) == CONST_INT)
3807 addr
3808 = memory_address (mode,
3809 plus_constant (args_addr,
3810 INTVAL (args_so_far)));
3811 else
3812 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3813 args_so_far));
3814 dest = gen_rtx_MEM (mode, addr);
3816 /* We do *not* set_mem_attributes here, because incoming arguments
3817 may overlap with sibling call outgoing arguments and we cannot
3818 allow reordering of reads from function arguments with stores
3819 to outgoing arguments of sibling calls. We do, however, want
3820 to record the alignment of the stack slot. */
3821 /* ALIGN may well be better aligned than TYPE, e.g. due to
3822 PARM_BOUNDARY. Assume the caller isn't lying. */
3823 set_mem_align (dest, align);
3825 emit_move_insn (dest, x);
3829 /* If part should go in registers, copy that part
3830 into the appropriate registers. Do this now, at the end,
3831 since mem-to-mem copies above may do function calls. */
3832 if (partial > 0 && reg != 0)
3834 /* Handle calls that pass values in multiple non-contiguous locations.
3835 The Irix 6 ABI has examples of this. */
3836 if (GET_CODE (reg) == PARALLEL)
3837 emit_group_load (reg, x, type, -1);
3838 else
3840 gcc_assert (partial % UNITS_PER_WORD == 0);
3841 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3845 if (extra && args_addr == 0 && where_pad == stack_direction)
3846 anti_adjust_stack (GEN_INT (extra));
3848 if (alignment_pad && args_addr == 0)
3849 anti_adjust_stack (alignment_pad);
3852 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3853 operations. */
3855 static rtx
3856 get_subtarget (rtx x)
3858 return (optimize
3859 || x == 0
3860 /* Only registers can be subtargets. */
3861 || !REG_P (x)
3862 /* Don't use hard regs to avoid extending their life. */
3863 || REGNO (x) < FIRST_PSEUDO_REGISTER
3864 ? 0 : x);
3867 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3868 FIELD is a bitfield. Returns true if the optimization was successful,
3869 and there's nothing else to do. */
3871 static bool
3872 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3873 unsigned HOST_WIDE_INT bitpos,
3874 enum machine_mode mode1, rtx str_rtx,
3875 tree to, tree src)
3877 enum machine_mode str_mode = GET_MODE (str_rtx);
3878 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3879 tree op0, op1;
3880 rtx value, result;
3881 optab binop;
3883 if (mode1 != VOIDmode
3884 || bitsize >= BITS_PER_WORD
3885 || str_bitsize > BITS_PER_WORD
3886 || TREE_SIDE_EFFECTS (to)
3887 || TREE_THIS_VOLATILE (to))
3888 return false;
3890 STRIP_NOPS (src);
3891 if (!BINARY_CLASS_P (src)
3892 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3893 return false;
3895 op0 = TREE_OPERAND (src, 0);
3896 op1 = TREE_OPERAND (src, 1);
3897 STRIP_NOPS (op0);
3899 if (!operand_equal_p (to, op0, 0))
3900 return false;
3902 if (MEM_P (str_rtx))
3904 unsigned HOST_WIDE_INT offset1;
3906 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3907 str_mode = word_mode;
3908 str_mode = get_best_mode (bitsize, bitpos,
3909 MEM_ALIGN (str_rtx), str_mode, 0);
3910 if (str_mode == VOIDmode)
3911 return false;
3912 str_bitsize = GET_MODE_BITSIZE (str_mode);
3914 offset1 = bitpos;
3915 bitpos %= str_bitsize;
3916 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3917 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3919 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3920 return false;
3922 /* If the bit field covers the whole REG/MEM, store_field
3923 will likely generate better code. */
3924 if (bitsize >= str_bitsize)
3925 return false;
3927 /* We can't handle fields split across multiple entities. */
3928 if (bitpos + bitsize > str_bitsize)
3929 return false;
3931 if (BYTES_BIG_ENDIAN)
3932 bitpos = str_bitsize - bitpos - bitsize;
3934 switch (TREE_CODE (src))
3936 case PLUS_EXPR:
3937 case MINUS_EXPR:
3938 /* For now, just optimize the case of the topmost bitfield
3939 where we don't need to do any masking and also
3940 1 bit bitfields where xor can be used.
3941 We might win by one instruction for the other bitfields
3942 too if insv/extv instructions aren't used, so that
3943 can be added later. */
3944 if (bitpos + bitsize != str_bitsize
3945 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3946 break;
3948 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3949 value = convert_modes (str_mode,
3950 TYPE_MODE (TREE_TYPE (op1)), value,
3951 TYPE_UNSIGNED (TREE_TYPE (op1)));
3953 /* We may be accessing data outside the field, which means
3954 we can alias adjacent data. */
3955 if (MEM_P (str_rtx))
3957 str_rtx = shallow_copy_rtx (str_rtx);
3958 set_mem_alias_set (str_rtx, 0);
3959 set_mem_expr (str_rtx, 0);
3962 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3963 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3965 value = expand_and (str_mode, value, const1_rtx, NULL);
3966 binop = xor_optab;
3968 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3969 build_int_cst (NULL_TREE, bitpos),
3970 NULL_RTX, 1);
3971 result = expand_binop (str_mode, binop, str_rtx,
3972 value, str_rtx, 1, OPTAB_WIDEN);
3973 if (result != str_rtx)
3974 emit_move_insn (str_rtx, result);
3975 return true;
3977 case BIT_IOR_EXPR:
3978 case BIT_XOR_EXPR:
3979 if (TREE_CODE (op1) != INTEGER_CST)
3980 break;
3981 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3982 value = convert_modes (GET_MODE (str_rtx),
3983 TYPE_MODE (TREE_TYPE (op1)), value,
3984 TYPE_UNSIGNED (TREE_TYPE (op1)));
3986 /* We may be accessing data outside the field, which means
3987 we can alias adjacent data. */
3988 if (MEM_P (str_rtx))
3990 str_rtx = shallow_copy_rtx (str_rtx);
3991 set_mem_alias_set (str_rtx, 0);
3992 set_mem_expr (str_rtx, 0);
3995 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3996 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3998 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
3999 - 1);
4000 value = expand_and (GET_MODE (str_rtx), value, mask,
4001 NULL_RTX);
4003 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4004 build_int_cst (NULL_TREE, bitpos),
4005 NULL_RTX, 1);
4006 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4007 value, str_rtx, 1, OPTAB_WIDEN);
4008 if (result != str_rtx)
4009 emit_move_insn (str_rtx, result);
4010 return true;
4012 default:
4013 break;
4016 return false;
4020 /* Expand an assignment that stores the value of FROM into TO. */
4022 void
4023 expand_assignment (tree to, tree from)
4025 rtx to_rtx = 0;
4026 rtx result;
4028 /* Don't crash if the lhs of the assignment was erroneous. */
4029 if (TREE_CODE (to) == ERROR_MARK)
4031 result = expand_normal (from);
4032 return;
4035 /* Optimize away no-op moves without side-effects. */
4036 if (operand_equal_p (to, from, 0))
4037 return;
4039 /* Assignment of a structure component needs special treatment
4040 if the structure component's rtx is not simply a MEM.
4041 Assignment of an array element at a constant index, and assignment of
4042 an array element in an unaligned packed structure field, has the same
4043 problem. */
4044 if (handled_component_p (to)
4045 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4047 enum machine_mode mode1;
4048 HOST_WIDE_INT bitsize, bitpos;
4049 tree offset;
4050 int unsignedp;
4051 int volatilep = 0;
4052 tree tem;
4054 push_temp_slots ();
4055 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4056 &unsignedp, &volatilep, true);
4058 /* If we are going to use store_bit_field and extract_bit_field,
4059 make sure to_rtx will be safe for multiple use. */
4061 to_rtx = expand_normal (tem);
4063 if (offset != 0)
4065 rtx offset_rtx;
4067 if (!MEM_P (to_rtx))
4069 /* We can get constant negative offsets into arrays with broken
4070 user code. Translate this to a trap instead of ICEing. */
4071 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4072 expand_builtin_trap ();
4073 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4076 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4077 #ifdef POINTERS_EXTEND_UNSIGNED
4078 if (GET_MODE (offset_rtx) != Pmode)
4079 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4080 #else
4081 if (GET_MODE (offset_rtx) != ptr_mode)
4082 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4083 #endif
4085 /* A constant address in TO_RTX can have VOIDmode, we must not try
4086 to call force_reg for that case. Avoid that case. */
4087 if (MEM_P (to_rtx)
4088 && GET_MODE (to_rtx) == BLKmode
4089 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4090 && bitsize > 0
4091 && (bitpos % bitsize) == 0
4092 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4093 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4095 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4096 bitpos = 0;
4099 to_rtx = offset_address (to_rtx, offset_rtx,
4100 highest_pow2_factor_for_target (to,
4101 offset));
4104 /* Handle expand_expr of a complex value returning a CONCAT. */
4105 if (GET_CODE (to_rtx) == CONCAT)
4107 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4109 gcc_assert (bitpos == 0);
4110 result = store_expr (from, to_rtx, false);
4112 else
4114 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4115 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
4118 else
4120 if (MEM_P (to_rtx))
4122 /* If the field is at offset zero, we could have been given the
4123 DECL_RTX of the parent struct. Don't munge it. */
4124 to_rtx = shallow_copy_rtx (to_rtx);
4126 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4128 /* Deal with volatile and readonly fields. The former is only
4129 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4130 if (volatilep)
4131 MEM_VOLATILE_P (to_rtx) = 1;
4132 if (component_uses_parent_alias_set (to))
4133 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4136 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4137 to_rtx, to, from))
4138 result = NULL;
4139 else
4140 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4141 TREE_TYPE (tem), get_alias_set (to));
4144 if (result)
4145 preserve_temp_slots (result);
4146 free_temp_slots ();
4147 pop_temp_slots ();
4148 return;
4151 /* If the rhs is a function call and its value is not an aggregate,
4152 call the function before we start to compute the lhs.
4153 This is needed for correct code for cases such as
4154 val = setjmp (buf) on machines where reference to val
4155 requires loading up part of an address in a separate insn.
4157 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4158 since it might be a promoted variable where the zero- or sign- extension
4159 needs to be done. Handling this in the normal way is safe because no
4160 computation is done before the call. */
4161 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4162 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4163 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4164 && REG_P (DECL_RTL (to))))
4166 rtx value;
4168 push_temp_slots ();
4169 value = expand_normal (from);
4170 if (to_rtx == 0)
4171 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4173 /* Handle calls that return values in multiple non-contiguous locations.
4174 The Irix 6 ABI has examples of this. */
4175 if (GET_CODE (to_rtx) == PARALLEL)
4176 emit_group_load (to_rtx, value, TREE_TYPE (from),
4177 int_size_in_bytes (TREE_TYPE (from)));
4178 else if (GET_MODE (to_rtx) == BLKmode)
4179 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4180 else
4182 if (POINTER_TYPE_P (TREE_TYPE (to)))
4183 value = convert_memory_address (GET_MODE (to_rtx), value);
4184 emit_move_insn (to_rtx, value);
4186 preserve_temp_slots (to_rtx);
4187 free_temp_slots ();
4188 pop_temp_slots ();
4189 return;
4192 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4193 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4195 if (to_rtx == 0)
4196 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4198 /* Don't move directly into a return register. */
4199 if (TREE_CODE (to) == RESULT_DECL
4200 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4202 rtx temp;
4204 push_temp_slots ();
4205 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4207 if (GET_CODE (to_rtx) == PARALLEL)
4208 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4209 int_size_in_bytes (TREE_TYPE (from)));
4210 else
4211 emit_move_insn (to_rtx, temp);
4213 preserve_temp_slots (to_rtx);
4214 free_temp_slots ();
4215 pop_temp_slots ();
4216 return;
4219 /* In case we are returning the contents of an object which overlaps
4220 the place the value is being stored, use a safe function when copying
4221 a value through a pointer into a structure value return block. */
4222 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4223 && current_function_returns_struct
4224 && !current_function_returns_pcc_struct)
4226 rtx from_rtx, size;
4228 push_temp_slots ();
4229 size = expr_size (from);
4230 from_rtx = expand_normal (from);
4232 emit_library_call (memmove_libfunc, LCT_NORMAL,
4233 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4234 XEXP (from_rtx, 0), Pmode,
4235 convert_to_mode (TYPE_MODE (sizetype),
4236 size, TYPE_UNSIGNED (sizetype)),
4237 TYPE_MODE (sizetype));
4239 preserve_temp_slots (to_rtx);
4240 free_temp_slots ();
4241 pop_temp_slots ();
4242 return;
4245 /* Compute FROM and store the value in the rtx we got. */
4247 push_temp_slots ();
4248 result = store_expr (from, to_rtx, 0);
4249 preserve_temp_slots (result);
4250 free_temp_slots ();
4251 pop_temp_slots ();
4252 return;
4255 /* Generate code for computing expression EXP,
4256 and storing the value into TARGET.
4258 If the mode is BLKmode then we may return TARGET itself.
4259 It turns out that in BLKmode it doesn't cause a problem.
4260 because C has no operators that could combine two different
4261 assignments into the same BLKmode object with different values
4262 with no sequence point. Will other languages need this to
4263 be more thorough?
4265 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4266 stack, and block moves may need to be treated specially. */
4269 store_expr (tree exp, rtx target, int call_param_p)
4271 rtx temp;
4272 rtx alt_rtl = NULL_RTX;
4273 int dont_return_target = 0;
4275 if (VOID_TYPE_P (TREE_TYPE (exp)))
4277 /* C++ can generate ?: expressions with a throw expression in one
4278 branch and an rvalue in the other. Here, we resolve attempts to
4279 store the throw expression's nonexistent result. */
4280 gcc_assert (!call_param_p);
4281 expand_expr (exp, const0_rtx, VOIDmode, 0);
4282 return NULL_RTX;
4284 if (TREE_CODE (exp) == COMPOUND_EXPR)
4286 /* Perform first part of compound expression, then assign from second
4287 part. */
4288 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4289 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4290 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4292 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4294 /* For conditional expression, get safe form of the target. Then
4295 test the condition, doing the appropriate assignment on either
4296 side. This avoids the creation of unnecessary temporaries.
4297 For non-BLKmode, it is more efficient not to do this. */
4299 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4301 do_pending_stack_adjust ();
4302 NO_DEFER_POP;
4303 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4304 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4305 emit_jump_insn (gen_jump (lab2));
4306 emit_barrier ();
4307 emit_label (lab1);
4308 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4309 emit_label (lab2);
4310 OK_DEFER_POP;
4312 return NULL_RTX;
4314 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4315 /* If this is a scalar in a register that is stored in a wider mode
4316 than the declared mode, compute the result into its declared mode
4317 and then convert to the wider mode. Our value is the computed
4318 expression. */
4320 rtx inner_target = 0;
4322 /* We can do the conversion inside EXP, which will often result
4323 in some optimizations. Do the conversion in two steps: first
4324 change the signedness, if needed, then the extend. But don't
4325 do this if the type of EXP is a subtype of something else
4326 since then the conversion might involve more than just
4327 converting modes. */
4328 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4329 && TREE_TYPE (TREE_TYPE (exp)) == 0
4330 && (!lang_hooks.reduce_bit_field_operations
4331 || (GET_MODE_PRECISION (GET_MODE (target))
4332 == TYPE_PRECISION (TREE_TYPE (exp)))))
4334 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4335 != SUBREG_PROMOTED_UNSIGNED_P (target))
4336 exp = fold_convert
4337 (lang_hooks.types.signed_or_unsigned_type
4338 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4340 exp = fold_convert (lang_hooks.types.type_for_mode
4341 (GET_MODE (SUBREG_REG (target)),
4342 SUBREG_PROMOTED_UNSIGNED_P (target)),
4343 exp);
4345 inner_target = SUBREG_REG (target);
4348 temp = expand_expr (exp, inner_target, VOIDmode,
4349 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4351 /* If TEMP is a VOIDmode constant, use convert_modes to make
4352 sure that we properly convert it. */
4353 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4355 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4356 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4357 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4358 GET_MODE (target), temp,
4359 SUBREG_PROMOTED_UNSIGNED_P (target));
4362 convert_move (SUBREG_REG (target), temp,
4363 SUBREG_PROMOTED_UNSIGNED_P (target));
4365 return NULL_RTX;
4367 else
4369 temp = expand_expr_real (exp, target, GET_MODE (target),
4370 (call_param_p
4371 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4372 &alt_rtl);
4373 /* Return TARGET if it's a specified hardware register.
4374 If TARGET is a volatile mem ref, either return TARGET
4375 or return a reg copied *from* TARGET; ANSI requires this.
4377 Otherwise, if TEMP is not TARGET, return TEMP
4378 if it is constant (for efficiency),
4379 or if we really want the correct value. */
4380 if (!(target && REG_P (target)
4381 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4382 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4383 && ! rtx_equal_p (temp, target)
4384 && CONSTANT_P (temp))
4385 dont_return_target = 1;
4388 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4389 the same as that of TARGET, adjust the constant. This is needed, for
4390 example, in case it is a CONST_DOUBLE and we want only a word-sized
4391 value. */
4392 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4393 && TREE_CODE (exp) != ERROR_MARK
4394 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4395 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4396 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4398 /* If value was not generated in the target, store it there.
4399 Convert the value to TARGET's type first if necessary and emit the
4400 pending incrementations that have been queued when expanding EXP.
4401 Note that we cannot emit the whole queue blindly because this will
4402 effectively disable the POST_INC optimization later.
4404 If TEMP and TARGET compare equal according to rtx_equal_p, but
4405 one or both of them are volatile memory refs, we have to distinguish
4406 two cases:
4407 - expand_expr has used TARGET. In this case, we must not generate
4408 another copy. This can be detected by TARGET being equal according
4409 to == .
4410 - expand_expr has not used TARGET - that means that the source just
4411 happens to have the same RTX form. Since temp will have been created
4412 by expand_expr, it will compare unequal according to == .
4413 We must generate a copy in this case, to reach the correct number
4414 of volatile memory references. */
4416 if ((! rtx_equal_p (temp, target)
4417 || (temp != target && (side_effects_p (temp)
4418 || side_effects_p (target))))
4419 && TREE_CODE (exp) != ERROR_MARK
4420 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4421 but TARGET is not valid memory reference, TEMP will differ
4422 from TARGET although it is really the same location. */
4423 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4424 /* If there's nothing to copy, don't bother. Don't call
4425 expr_size unless necessary, because some front-ends (C++)
4426 expr_size-hook must not be given objects that are not
4427 supposed to be bit-copied or bit-initialized. */
4428 && expr_size (exp) != const0_rtx)
4430 if (GET_MODE (temp) != GET_MODE (target)
4431 && GET_MODE (temp) != VOIDmode)
4433 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4434 if (dont_return_target)
4436 /* In this case, we will return TEMP,
4437 so make sure it has the proper mode.
4438 But don't forget to store the value into TARGET. */
4439 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4440 emit_move_insn (target, temp);
4442 else
4443 convert_move (target, temp, unsignedp);
4446 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4448 /* Handle copying a string constant into an array. The string
4449 constant may be shorter than the array. So copy just the string's
4450 actual length, and clear the rest. First get the size of the data
4451 type of the string, which is actually the size of the target. */
4452 rtx size = expr_size (exp);
4454 if (GET_CODE (size) == CONST_INT
4455 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4456 emit_block_move (target, temp, size,
4457 (call_param_p
4458 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4459 else
4461 /* Compute the size of the data to copy from the string. */
4462 tree copy_size
4463 = size_binop (MIN_EXPR,
4464 make_tree (sizetype, size),
4465 size_int (TREE_STRING_LENGTH (exp)));
4466 rtx copy_size_rtx
4467 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4468 (call_param_p
4469 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4470 rtx label = 0;
4472 /* Copy that much. */
4473 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4474 TYPE_UNSIGNED (sizetype));
4475 emit_block_move (target, temp, copy_size_rtx,
4476 (call_param_p
4477 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4479 /* Figure out how much is left in TARGET that we have to clear.
4480 Do all calculations in ptr_mode. */
4481 if (GET_CODE (copy_size_rtx) == CONST_INT)
4483 size = plus_constant (size, -INTVAL (copy_size_rtx));
4484 target = adjust_address (target, BLKmode,
4485 INTVAL (copy_size_rtx));
4487 else
4489 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4490 copy_size_rtx, NULL_RTX, 0,
4491 OPTAB_LIB_WIDEN);
4493 #ifdef POINTERS_EXTEND_UNSIGNED
4494 if (GET_MODE (copy_size_rtx) != Pmode)
4495 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4496 TYPE_UNSIGNED (sizetype));
4497 #endif
4499 target = offset_address (target, copy_size_rtx,
4500 highest_pow2_factor (copy_size));
4501 label = gen_label_rtx ();
4502 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4503 GET_MODE (size), 0, label);
4506 if (size != const0_rtx)
4507 clear_storage (target, size, BLOCK_OP_NORMAL);
4509 if (label)
4510 emit_label (label);
4513 /* Handle calls that return values in multiple non-contiguous locations.
4514 The Irix 6 ABI has examples of this. */
4515 else if (GET_CODE (target) == PARALLEL)
4516 emit_group_load (target, temp, TREE_TYPE (exp),
4517 int_size_in_bytes (TREE_TYPE (exp)));
4518 else if (GET_MODE (temp) == BLKmode)
4519 emit_block_move (target, temp, expr_size (exp),
4520 (call_param_p
4521 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4522 else
4524 temp = force_operand (temp, target);
4525 if (temp != target)
4526 emit_move_insn (target, temp);
4530 return NULL_RTX;
4533 /* Helper for categorize_ctor_elements. Identical interface. */
4535 static bool
4536 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4537 HOST_WIDE_INT *p_elt_count,
4538 bool *p_must_clear)
4540 unsigned HOST_WIDE_INT idx;
4541 HOST_WIDE_INT nz_elts, elt_count;
4542 tree value, purpose;
4544 /* Whether CTOR is a valid constant initializer, in accordance with what
4545 initializer_constant_valid_p does. If inferred from the constructor
4546 elements, true until proven otherwise. */
4547 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4548 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4550 nz_elts = 0;
4551 elt_count = 0;
4553 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4555 HOST_WIDE_INT mult;
4557 mult = 1;
4558 if (TREE_CODE (purpose) == RANGE_EXPR)
4560 tree lo_index = TREE_OPERAND (purpose, 0);
4561 tree hi_index = TREE_OPERAND (purpose, 1);
4563 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4564 mult = (tree_low_cst (hi_index, 1)
4565 - tree_low_cst (lo_index, 1) + 1);
4568 switch (TREE_CODE (value))
4570 case CONSTRUCTOR:
4572 HOST_WIDE_INT nz = 0, ic = 0;
4574 bool const_elt_p
4575 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4577 nz_elts += mult * nz;
4578 elt_count += mult * ic;
4580 if (const_from_elts_p && const_p)
4581 const_p = const_elt_p;
4583 break;
4585 case INTEGER_CST:
4586 case REAL_CST:
4587 if (!initializer_zerop (value))
4588 nz_elts += mult;
4589 elt_count += mult;
4590 break;
4592 case STRING_CST:
4593 nz_elts += mult * TREE_STRING_LENGTH (value);
4594 elt_count += mult * TREE_STRING_LENGTH (value);
4595 break;
4597 case COMPLEX_CST:
4598 if (!initializer_zerop (TREE_REALPART (value)))
4599 nz_elts += mult;
4600 if (!initializer_zerop (TREE_IMAGPART (value)))
4601 nz_elts += mult;
4602 elt_count += mult;
4603 break;
4605 case VECTOR_CST:
4607 tree v;
4608 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4610 if (!initializer_zerop (TREE_VALUE (v)))
4611 nz_elts += mult;
4612 elt_count += mult;
4615 break;
4617 default:
4618 nz_elts += mult;
4619 elt_count += mult;
4621 if (const_from_elts_p && const_p)
4622 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4623 != NULL_TREE;
4624 break;
4628 if (!*p_must_clear
4629 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4630 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4632 tree init_sub_type;
4633 bool clear_this = true;
4635 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4637 /* We don't expect more than one element of the union to be
4638 initialized. Not sure what we should do otherwise... */
4639 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4640 == 1);
4642 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4643 CONSTRUCTOR_ELTS (ctor),
4644 0)->value);
4646 /* ??? We could look at each element of the union, and find the
4647 largest element. Which would avoid comparing the size of the
4648 initialized element against any tail padding in the union.
4649 Doesn't seem worth the effort... */
4650 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4651 TYPE_SIZE (init_sub_type)) == 1)
4653 /* And now we have to find out if the element itself is fully
4654 constructed. E.g. for union { struct { int a, b; } s; } u
4655 = { .s = { .a = 1 } }. */
4656 if (elt_count == count_type_elements (init_sub_type, false))
4657 clear_this = false;
4661 *p_must_clear = clear_this;
4664 *p_nz_elts += nz_elts;
4665 *p_elt_count += elt_count;
4667 return const_p;
4670 /* Examine CTOR to discover:
4671 * how many scalar fields are set to nonzero values,
4672 and place it in *P_NZ_ELTS;
4673 * how many scalar fields in total are in CTOR,
4674 and place it in *P_ELT_COUNT.
4675 * if a type is a union, and the initializer from the constructor
4676 is not the largest element in the union, then set *p_must_clear.
4678 Return whether or not CTOR is a valid static constant initializer, the same
4679 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4681 bool
4682 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4683 HOST_WIDE_INT *p_elt_count,
4684 bool *p_must_clear)
4686 *p_nz_elts = 0;
4687 *p_elt_count = 0;
4688 *p_must_clear = false;
4690 return
4691 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4694 /* Count the number of scalars in TYPE. Return -1 on overflow or
4695 variable-sized. If ALLOW_FLEXARR is true, don't count flexible
4696 array member at the end of the structure. */
4698 HOST_WIDE_INT
4699 count_type_elements (tree type, bool allow_flexarr)
4701 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4702 switch (TREE_CODE (type))
4704 case ARRAY_TYPE:
4706 tree telts = array_type_nelts (type);
4707 if (telts && host_integerp (telts, 1))
4709 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4710 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4711 if (n == 0)
4712 return 0;
4713 else if (max / n > m)
4714 return n * m;
4716 return -1;
4719 case RECORD_TYPE:
4721 HOST_WIDE_INT n = 0, t;
4722 tree f;
4724 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4725 if (TREE_CODE (f) == FIELD_DECL)
4727 t = count_type_elements (TREE_TYPE (f), false);
4728 if (t < 0)
4730 /* Check for structures with flexible array member. */
4731 tree tf = TREE_TYPE (f);
4732 if (allow_flexarr
4733 && TREE_CHAIN (f) == NULL
4734 && TREE_CODE (tf) == ARRAY_TYPE
4735 && TYPE_DOMAIN (tf)
4736 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4737 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4738 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4739 && int_size_in_bytes (type) >= 0)
4740 break;
4742 return -1;
4744 n += t;
4747 return n;
4750 case UNION_TYPE:
4751 case QUAL_UNION_TYPE:
4753 /* Ho hum. How in the world do we guess here? Clearly it isn't
4754 right to count the fields. Guess based on the number of words. */
4755 HOST_WIDE_INT n = int_size_in_bytes (type);
4756 if (n < 0)
4757 return -1;
4758 return n / UNITS_PER_WORD;
4761 case COMPLEX_TYPE:
4762 return 2;
4764 case VECTOR_TYPE:
4765 return TYPE_VECTOR_SUBPARTS (type);
4767 case INTEGER_TYPE:
4768 case REAL_TYPE:
4769 case ENUMERAL_TYPE:
4770 case BOOLEAN_TYPE:
4771 case POINTER_TYPE:
4772 case OFFSET_TYPE:
4773 case REFERENCE_TYPE:
4774 return 1;
4776 case VOID_TYPE:
4777 case METHOD_TYPE:
4778 case FUNCTION_TYPE:
4779 case LANG_TYPE:
4780 default:
4781 gcc_unreachable ();
4785 /* Return 1 if EXP contains mostly (3/4) zeros. */
4787 static int
4788 mostly_zeros_p (tree exp)
4790 if (TREE_CODE (exp) == CONSTRUCTOR)
4793 HOST_WIDE_INT nz_elts, count, elts;
4794 bool must_clear;
4796 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4797 if (must_clear)
4798 return 1;
4800 elts = count_type_elements (TREE_TYPE (exp), false);
4802 return nz_elts < elts / 4;
4805 return initializer_zerop (exp);
4808 /* Return 1 if EXP contains all zeros. */
4810 static int
4811 all_zeros_p (tree exp)
4813 if (TREE_CODE (exp) == CONSTRUCTOR)
4816 HOST_WIDE_INT nz_elts, count;
4817 bool must_clear;
4819 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4820 return nz_elts == 0;
4823 return initializer_zerop (exp);
4826 /* Helper function for store_constructor.
4827 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4828 TYPE is the type of the CONSTRUCTOR, not the element type.
4829 CLEARED is as for store_constructor.
4830 ALIAS_SET is the alias set to use for any stores.
4832 This provides a recursive shortcut back to store_constructor when it isn't
4833 necessary to go through store_field. This is so that we can pass through
4834 the cleared field to let store_constructor know that we may not have to
4835 clear a substructure if the outer structure has already been cleared. */
4837 static void
4838 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4839 HOST_WIDE_INT bitpos, enum machine_mode mode,
4840 tree exp, tree type, int cleared, int alias_set)
4842 if (TREE_CODE (exp) == CONSTRUCTOR
4843 /* We can only call store_constructor recursively if the size and
4844 bit position are on a byte boundary. */
4845 && bitpos % BITS_PER_UNIT == 0
4846 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4847 /* If we have a nonzero bitpos for a register target, then we just
4848 let store_field do the bitfield handling. This is unlikely to
4849 generate unnecessary clear instructions anyways. */
4850 && (bitpos == 0 || MEM_P (target)))
4852 if (MEM_P (target))
4853 target
4854 = adjust_address (target,
4855 GET_MODE (target) == BLKmode
4856 || 0 != (bitpos
4857 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4858 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4861 /* Update the alias set, if required. */
4862 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4863 && MEM_ALIAS_SET (target) != 0)
4865 target = copy_rtx (target);
4866 set_mem_alias_set (target, alias_set);
4869 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4871 else
4872 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4875 /* Store the value of constructor EXP into the rtx TARGET.
4876 TARGET is either a REG or a MEM; we know it cannot conflict, since
4877 safe_from_p has been called.
4878 CLEARED is true if TARGET is known to have been zero'd.
4879 SIZE is the number of bytes of TARGET we are allowed to modify: this
4880 may not be the same as the size of EXP if we are assigning to a field
4881 which has been packed to exclude padding bits. */
4883 static void
4884 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4886 tree type = TREE_TYPE (exp);
4887 #ifdef WORD_REGISTER_OPERATIONS
4888 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4889 #endif
4891 switch (TREE_CODE (type))
4893 case RECORD_TYPE:
4894 case UNION_TYPE:
4895 case QUAL_UNION_TYPE:
4897 unsigned HOST_WIDE_INT idx;
4898 tree field, value;
4900 /* If size is zero or the target is already cleared, do nothing. */
4901 if (size == 0 || cleared)
4902 cleared = 1;
4903 /* We either clear the aggregate or indicate the value is dead. */
4904 else if ((TREE_CODE (type) == UNION_TYPE
4905 || TREE_CODE (type) == QUAL_UNION_TYPE)
4906 && ! CONSTRUCTOR_ELTS (exp))
4907 /* If the constructor is empty, clear the union. */
4909 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4910 cleared = 1;
4913 /* If we are building a static constructor into a register,
4914 set the initial value as zero so we can fold the value into
4915 a constant. But if more than one register is involved,
4916 this probably loses. */
4917 else if (REG_P (target) && TREE_STATIC (exp)
4918 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4920 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4921 cleared = 1;
4924 /* If the constructor has fewer fields than the structure or
4925 if we are initializing the structure to mostly zeros, clear
4926 the whole structure first. Don't do this if TARGET is a
4927 register whose mode size isn't equal to SIZE since
4928 clear_storage can't handle this case. */
4929 else if (size > 0
4930 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4931 != fields_length (type))
4932 || mostly_zeros_p (exp))
4933 && (!REG_P (target)
4934 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4935 == size)))
4937 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4938 cleared = 1;
4941 if (! cleared)
4942 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4944 /* Store each element of the constructor into the
4945 corresponding field of TARGET. */
4946 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4948 enum machine_mode mode;
4949 HOST_WIDE_INT bitsize;
4950 HOST_WIDE_INT bitpos = 0;
4951 tree offset;
4952 rtx to_rtx = target;
4954 /* Just ignore missing fields. We cleared the whole
4955 structure, above, if any fields are missing. */
4956 if (field == 0)
4957 continue;
4959 if (cleared && initializer_zerop (value))
4960 continue;
4962 if (host_integerp (DECL_SIZE (field), 1))
4963 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4964 else
4965 bitsize = -1;
4967 mode = DECL_MODE (field);
4968 if (DECL_BIT_FIELD (field))
4969 mode = VOIDmode;
4971 offset = DECL_FIELD_OFFSET (field);
4972 if (host_integerp (offset, 0)
4973 && host_integerp (bit_position (field), 0))
4975 bitpos = int_bit_position (field);
4976 offset = 0;
4978 else
4979 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4981 if (offset)
4983 rtx offset_rtx;
4985 offset
4986 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4987 make_tree (TREE_TYPE (exp),
4988 target));
4990 offset_rtx = expand_normal (offset);
4991 gcc_assert (MEM_P (to_rtx));
4993 #ifdef POINTERS_EXTEND_UNSIGNED
4994 if (GET_MODE (offset_rtx) != Pmode)
4995 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4996 #else
4997 if (GET_MODE (offset_rtx) != ptr_mode)
4998 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4999 #endif
5001 to_rtx = offset_address (to_rtx, offset_rtx,
5002 highest_pow2_factor (offset));
5005 #ifdef WORD_REGISTER_OPERATIONS
5006 /* If this initializes a field that is smaller than a
5007 word, at the start of a word, try to widen it to a full
5008 word. This special case allows us to output C++ member
5009 function initializations in a form that the optimizers
5010 can understand. */
5011 if (REG_P (target)
5012 && bitsize < BITS_PER_WORD
5013 && bitpos % BITS_PER_WORD == 0
5014 && GET_MODE_CLASS (mode) == MODE_INT
5015 && TREE_CODE (value) == INTEGER_CST
5016 && exp_size >= 0
5017 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5019 tree type = TREE_TYPE (value);
5021 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5023 type = lang_hooks.types.type_for_size
5024 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5025 value = fold_convert (type, value);
5028 if (BYTES_BIG_ENDIAN)
5029 value
5030 = fold_build2 (LSHIFT_EXPR, type, value,
5031 build_int_cst (type,
5032 BITS_PER_WORD - bitsize));
5033 bitsize = BITS_PER_WORD;
5034 mode = word_mode;
5036 #endif
5038 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5039 && DECL_NONADDRESSABLE_P (field))
5041 to_rtx = copy_rtx (to_rtx);
5042 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5045 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5046 value, type, cleared,
5047 get_alias_set (TREE_TYPE (field)));
5049 break;
5051 case ARRAY_TYPE:
5053 tree value, index;
5054 unsigned HOST_WIDE_INT i;
5055 int need_to_clear;
5056 tree domain;
5057 tree elttype = TREE_TYPE (type);
5058 int const_bounds_p;
5059 HOST_WIDE_INT minelt = 0;
5060 HOST_WIDE_INT maxelt = 0;
5062 domain = TYPE_DOMAIN (type);
5063 const_bounds_p = (TYPE_MIN_VALUE (domain)
5064 && TYPE_MAX_VALUE (domain)
5065 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5066 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5068 /* If we have constant bounds for the range of the type, get them. */
5069 if (const_bounds_p)
5071 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5072 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5075 /* If the constructor has fewer elements than the array, clear
5076 the whole array first. Similarly if this is static
5077 constructor of a non-BLKmode object. */
5078 if (cleared)
5079 need_to_clear = 0;
5080 else if (REG_P (target) && TREE_STATIC (exp))
5081 need_to_clear = 1;
5082 else
5084 unsigned HOST_WIDE_INT idx;
5085 tree index, value;
5086 HOST_WIDE_INT count = 0, zero_count = 0;
5087 need_to_clear = ! const_bounds_p;
5089 /* This loop is a more accurate version of the loop in
5090 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5091 is also needed to check for missing elements. */
5092 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5094 HOST_WIDE_INT this_node_count;
5096 if (need_to_clear)
5097 break;
5099 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5101 tree lo_index = TREE_OPERAND (index, 0);
5102 tree hi_index = TREE_OPERAND (index, 1);
5104 if (! host_integerp (lo_index, 1)
5105 || ! host_integerp (hi_index, 1))
5107 need_to_clear = 1;
5108 break;
5111 this_node_count = (tree_low_cst (hi_index, 1)
5112 - tree_low_cst (lo_index, 1) + 1);
5114 else
5115 this_node_count = 1;
5117 count += this_node_count;
5118 if (mostly_zeros_p (value))
5119 zero_count += this_node_count;
5122 /* Clear the entire array first if there are any missing
5123 elements, or if the incidence of zero elements is >=
5124 75%. */
5125 if (! need_to_clear
5126 && (count < maxelt - minelt + 1
5127 || 4 * zero_count >= 3 * count))
5128 need_to_clear = 1;
5131 if (need_to_clear && size > 0)
5133 if (REG_P (target))
5134 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5135 else
5136 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5137 cleared = 1;
5140 if (!cleared && REG_P (target))
5141 /* Inform later passes that the old value is dead. */
5142 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5144 /* Store each element of the constructor into the
5145 corresponding element of TARGET, determined by counting the
5146 elements. */
5147 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5149 enum machine_mode mode;
5150 HOST_WIDE_INT bitsize;
5151 HOST_WIDE_INT bitpos;
5152 int unsignedp;
5153 rtx xtarget = target;
5155 if (cleared && initializer_zerop (value))
5156 continue;
5158 unsignedp = TYPE_UNSIGNED (elttype);
5159 mode = TYPE_MODE (elttype);
5160 if (mode == BLKmode)
5161 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5162 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5163 : -1);
5164 else
5165 bitsize = GET_MODE_BITSIZE (mode);
5167 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5169 tree lo_index = TREE_OPERAND (index, 0);
5170 tree hi_index = TREE_OPERAND (index, 1);
5171 rtx index_r, pos_rtx;
5172 HOST_WIDE_INT lo, hi, count;
5173 tree position;
5175 /* If the range is constant and "small", unroll the loop. */
5176 if (const_bounds_p
5177 && host_integerp (lo_index, 0)
5178 && host_integerp (hi_index, 0)
5179 && (lo = tree_low_cst (lo_index, 0),
5180 hi = tree_low_cst (hi_index, 0),
5181 count = hi - lo + 1,
5182 (!MEM_P (target)
5183 || count <= 2
5184 || (host_integerp (TYPE_SIZE (elttype), 1)
5185 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5186 <= 40 * 8)))))
5188 lo -= minelt; hi -= minelt;
5189 for (; lo <= hi; lo++)
5191 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5193 if (MEM_P (target)
5194 && !MEM_KEEP_ALIAS_SET_P (target)
5195 && TREE_CODE (type) == ARRAY_TYPE
5196 && TYPE_NONALIASED_COMPONENT (type))
5198 target = copy_rtx (target);
5199 MEM_KEEP_ALIAS_SET_P (target) = 1;
5202 store_constructor_field
5203 (target, bitsize, bitpos, mode, value, type, cleared,
5204 get_alias_set (elttype));
5207 else
5209 rtx loop_start = gen_label_rtx ();
5210 rtx loop_end = gen_label_rtx ();
5211 tree exit_cond;
5213 expand_normal (hi_index);
5214 unsignedp = TYPE_UNSIGNED (domain);
5216 index = build_decl (VAR_DECL, NULL_TREE, domain);
5218 index_r
5219 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5220 &unsignedp, 0));
5221 SET_DECL_RTL (index, index_r);
5222 store_expr (lo_index, index_r, 0);
5224 /* Build the head of the loop. */
5225 do_pending_stack_adjust ();
5226 emit_label (loop_start);
5228 /* Assign value to element index. */
5229 position =
5230 fold_convert (ssizetype,
5231 fold_build2 (MINUS_EXPR,
5232 TREE_TYPE (index),
5233 index,
5234 TYPE_MIN_VALUE (domain)));
5236 position =
5237 size_binop (MULT_EXPR, position,
5238 fold_convert (ssizetype,
5239 TYPE_SIZE_UNIT (elttype)));
5241 pos_rtx = expand_normal (position);
5242 xtarget = offset_address (target, pos_rtx,
5243 highest_pow2_factor (position));
5244 xtarget = adjust_address (xtarget, mode, 0);
5245 if (TREE_CODE (value) == CONSTRUCTOR)
5246 store_constructor (value, xtarget, cleared,
5247 bitsize / BITS_PER_UNIT);
5248 else
5249 store_expr (value, xtarget, 0);
5251 /* Generate a conditional jump to exit the loop. */
5252 exit_cond = build2 (LT_EXPR, integer_type_node,
5253 index, hi_index);
5254 jumpif (exit_cond, loop_end);
5256 /* Update the loop counter, and jump to the head of
5257 the loop. */
5258 expand_assignment (index,
5259 build2 (PLUS_EXPR, TREE_TYPE (index),
5260 index, integer_one_node));
5262 emit_jump (loop_start);
5264 /* Build the end of the loop. */
5265 emit_label (loop_end);
5268 else if ((index != 0 && ! host_integerp (index, 0))
5269 || ! host_integerp (TYPE_SIZE (elttype), 1))
5271 tree position;
5273 if (index == 0)
5274 index = ssize_int (1);
5276 if (minelt)
5277 index = fold_convert (ssizetype,
5278 fold_build2 (MINUS_EXPR,
5279 TREE_TYPE (index),
5280 index,
5281 TYPE_MIN_VALUE (domain)));
5283 position =
5284 size_binop (MULT_EXPR, index,
5285 fold_convert (ssizetype,
5286 TYPE_SIZE_UNIT (elttype)));
5287 xtarget = offset_address (target,
5288 expand_normal (position),
5289 highest_pow2_factor (position));
5290 xtarget = adjust_address (xtarget, mode, 0);
5291 store_expr (value, xtarget, 0);
5293 else
5295 if (index != 0)
5296 bitpos = ((tree_low_cst (index, 0) - minelt)
5297 * tree_low_cst (TYPE_SIZE (elttype), 1));
5298 else
5299 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5301 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5302 && TREE_CODE (type) == ARRAY_TYPE
5303 && TYPE_NONALIASED_COMPONENT (type))
5305 target = copy_rtx (target);
5306 MEM_KEEP_ALIAS_SET_P (target) = 1;
5308 store_constructor_field (target, bitsize, bitpos, mode, value,
5309 type, cleared, get_alias_set (elttype));
5312 break;
5315 case VECTOR_TYPE:
5317 unsigned HOST_WIDE_INT idx;
5318 constructor_elt *ce;
5319 int i;
5320 int need_to_clear;
5321 int icode = 0;
5322 tree elttype = TREE_TYPE (type);
5323 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5324 enum machine_mode eltmode = TYPE_MODE (elttype);
5325 HOST_WIDE_INT bitsize;
5326 HOST_WIDE_INT bitpos;
5327 rtvec vector = NULL;
5328 unsigned n_elts;
5330 gcc_assert (eltmode != BLKmode);
5332 n_elts = TYPE_VECTOR_SUBPARTS (type);
5333 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5335 enum machine_mode mode = GET_MODE (target);
5337 icode = (int) vec_init_optab->handlers[mode].insn_code;
5338 if (icode != CODE_FOR_nothing)
5340 unsigned int i;
5342 vector = rtvec_alloc (n_elts);
5343 for (i = 0; i < n_elts; i++)
5344 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5348 /* If the constructor has fewer elements than the vector,
5349 clear the whole array first. Similarly if this is static
5350 constructor of a non-BLKmode object. */
5351 if (cleared)
5352 need_to_clear = 0;
5353 else if (REG_P (target) && TREE_STATIC (exp))
5354 need_to_clear = 1;
5355 else
5357 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5358 tree value;
5360 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5362 int n_elts_here = tree_low_cst
5363 (int_const_binop (TRUNC_DIV_EXPR,
5364 TYPE_SIZE (TREE_TYPE (value)),
5365 TYPE_SIZE (elttype), 0), 1);
5367 count += n_elts_here;
5368 if (mostly_zeros_p (value))
5369 zero_count += n_elts_here;
5372 /* Clear the entire vector first if there are any missing elements,
5373 or if the incidence of zero elements is >= 75%. */
5374 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5377 if (need_to_clear && size > 0 && !vector)
5379 if (REG_P (target))
5380 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5381 else
5382 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5383 cleared = 1;
5386 /* Inform later passes that the old value is dead. */
5387 if (!cleared && !vector && REG_P (target))
5388 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5390 /* Store each element of the constructor into the corresponding
5391 element of TARGET, determined by counting the elements. */
5392 for (idx = 0, i = 0;
5393 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5394 idx++, i += bitsize / elt_size)
5396 HOST_WIDE_INT eltpos;
5397 tree value = ce->value;
5399 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5400 if (cleared && initializer_zerop (value))
5401 continue;
5403 if (ce->index)
5404 eltpos = tree_low_cst (ce->index, 1);
5405 else
5406 eltpos = i;
5408 if (vector)
5410 /* Vector CONSTRUCTORs should only be built from smaller
5411 vectors in the case of BLKmode vectors. */
5412 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5413 RTVEC_ELT (vector, eltpos)
5414 = expand_normal (value);
5416 else
5418 enum machine_mode value_mode =
5419 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5420 ? TYPE_MODE (TREE_TYPE (value))
5421 : eltmode;
5422 bitpos = eltpos * elt_size;
5423 store_constructor_field (target, bitsize, bitpos,
5424 value_mode, value, type,
5425 cleared, get_alias_set (elttype));
5429 if (vector)
5430 emit_insn (GEN_FCN (icode)
5431 (target,
5432 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5433 break;
5436 default:
5437 gcc_unreachable ();
5441 /* Store the value of EXP (an expression tree)
5442 into a subfield of TARGET which has mode MODE and occupies
5443 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5444 If MODE is VOIDmode, it means that we are storing into a bit-field.
5446 Always return const0_rtx unless we have something particular to
5447 return.
5449 TYPE is the type of the underlying object,
5451 ALIAS_SET is the alias set for the destination. This value will
5452 (in general) be different from that for TARGET, since TARGET is a
5453 reference to the containing structure. */
5455 static rtx
5456 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5457 enum machine_mode mode, tree exp, tree type, int alias_set)
5459 HOST_WIDE_INT width_mask = 0;
5461 if (TREE_CODE (exp) == ERROR_MARK)
5462 return const0_rtx;
5464 /* If we have nothing to store, do nothing unless the expression has
5465 side-effects. */
5466 if (bitsize == 0)
5467 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5468 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5469 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5471 /* If we are storing into an unaligned field of an aligned union that is
5472 in a register, we may have the mode of TARGET being an integer mode but
5473 MODE == BLKmode. In that case, get an aligned object whose size and
5474 alignment are the same as TARGET and store TARGET into it (we can avoid
5475 the store if the field being stored is the entire width of TARGET). Then
5476 call ourselves recursively to store the field into a BLKmode version of
5477 that object. Finally, load from the object into TARGET. This is not
5478 very efficient in general, but should only be slightly more expensive
5479 than the otherwise-required unaligned accesses. Perhaps this can be
5480 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5481 twice, once with emit_move_insn and once via store_field. */
5483 if (mode == BLKmode
5484 && (REG_P (target) || GET_CODE (target) == SUBREG))
5486 rtx object = assign_temp (type, 0, 1, 1);
5487 rtx blk_object = adjust_address (object, BLKmode, 0);
5489 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5490 emit_move_insn (object, target);
5492 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5494 emit_move_insn (target, object);
5496 /* We want to return the BLKmode version of the data. */
5497 return blk_object;
5500 if (GET_CODE (target) == CONCAT)
5502 /* We're storing into a struct containing a single __complex. */
5504 gcc_assert (!bitpos);
5505 return store_expr (exp, target, 0);
5508 /* If the structure is in a register or if the component
5509 is a bit field, we cannot use addressing to access it.
5510 Use bit-field techniques or SUBREG to store in it. */
5512 if (mode == VOIDmode
5513 || (mode != BLKmode && ! direct_store[(int) mode]
5514 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5515 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5516 || REG_P (target)
5517 || GET_CODE (target) == SUBREG
5518 /* If the field isn't aligned enough to store as an ordinary memref,
5519 store it as a bit field. */
5520 || (mode != BLKmode
5521 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5522 || bitpos % GET_MODE_ALIGNMENT (mode))
5523 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5524 || (bitpos % BITS_PER_UNIT != 0)))
5525 /* If the RHS and field are a constant size and the size of the
5526 RHS isn't the same size as the bitfield, we must use bitfield
5527 operations. */
5528 || (bitsize >= 0
5529 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5530 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5532 rtx temp;
5534 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5535 implies a mask operation. If the precision is the same size as
5536 the field we're storing into, that mask is redundant. This is
5537 particularly common with bit field assignments generated by the
5538 C front end. */
5539 if (TREE_CODE (exp) == NOP_EXPR)
5541 tree type = TREE_TYPE (exp);
5542 if (INTEGRAL_TYPE_P (type)
5543 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5544 && bitsize == TYPE_PRECISION (type))
5546 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5547 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5548 exp = TREE_OPERAND (exp, 0);
5552 temp = expand_normal (exp);
5554 /* If BITSIZE is narrower than the size of the type of EXP
5555 we will be narrowing TEMP. Normally, what's wanted are the
5556 low-order bits. However, if EXP's type is a record and this is
5557 big-endian machine, we want the upper BITSIZE bits. */
5558 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5559 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5560 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5561 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5562 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5563 - bitsize),
5564 NULL_RTX, 1);
5566 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5567 MODE. */
5568 if (mode != VOIDmode && mode != BLKmode
5569 && mode != TYPE_MODE (TREE_TYPE (exp)))
5570 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5572 /* If the modes of TARGET and TEMP are both BLKmode, both
5573 must be in memory and BITPOS must be aligned on a byte
5574 boundary. If so, we simply do a block copy. */
5575 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5577 gcc_assert (MEM_P (target) && MEM_P (temp)
5578 && !(bitpos % BITS_PER_UNIT));
5580 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5581 emit_block_move (target, temp,
5582 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5583 / BITS_PER_UNIT),
5584 BLOCK_OP_NORMAL);
5586 return const0_rtx;
5589 /* Store the value in the bitfield. */
5590 store_bit_field (target, bitsize, bitpos, mode, temp);
5592 return const0_rtx;
5594 else
5596 /* Now build a reference to just the desired component. */
5597 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5599 if (to_rtx == target)
5600 to_rtx = copy_rtx (to_rtx);
5602 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5603 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5604 set_mem_alias_set (to_rtx, alias_set);
5606 return store_expr (exp, to_rtx, 0);
5610 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5611 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5612 codes and find the ultimate containing object, which we return.
5614 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5615 bit position, and *PUNSIGNEDP to the signedness of the field.
5616 If the position of the field is variable, we store a tree
5617 giving the variable offset (in units) in *POFFSET.
5618 This offset is in addition to the bit position.
5619 If the position is not variable, we store 0 in *POFFSET.
5621 If any of the extraction expressions is volatile,
5622 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5624 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5625 is a mode that can be used to access the field. In that case, *PBITSIZE
5626 is redundant.
5628 If the field describes a variable-sized object, *PMODE is set to
5629 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5630 this case, but the address of the object can be found.
5632 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5633 look through nodes that serve as markers of a greater alignment than
5634 the one that can be deduced from the expression. These nodes make it
5635 possible for front-ends to prevent temporaries from being created by
5636 the middle-end on alignment considerations. For that purpose, the
5637 normal operating mode at high-level is to always pass FALSE so that
5638 the ultimate containing object is really returned; moreover, the
5639 associated predicate handled_component_p will always return TRUE
5640 on these nodes, thus indicating that they are essentially handled
5641 by get_inner_reference. TRUE should only be passed when the caller
5642 is scanning the expression in order to build another representation
5643 and specifically knows how to handle these nodes; as such, this is
5644 the normal operating mode in the RTL expanders. */
5646 tree
5647 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5648 HOST_WIDE_INT *pbitpos, tree *poffset,
5649 enum machine_mode *pmode, int *punsignedp,
5650 int *pvolatilep, bool keep_aligning)
5652 tree size_tree = 0;
5653 enum machine_mode mode = VOIDmode;
5654 tree offset = size_zero_node;
5655 tree bit_offset = bitsize_zero_node;
5657 /* First get the mode, signedness, and size. We do this from just the
5658 outermost expression. */
5659 if (TREE_CODE (exp) == COMPONENT_REF)
5661 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5662 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5663 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5665 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5667 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5669 size_tree = TREE_OPERAND (exp, 1);
5670 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5672 else
5674 mode = TYPE_MODE (TREE_TYPE (exp));
5675 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5677 if (mode == BLKmode)
5678 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5679 else
5680 *pbitsize = GET_MODE_BITSIZE (mode);
5683 if (size_tree != 0)
5685 if (! host_integerp (size_tree, 1))
5686 mode = BLKmode, *pbitsize = -1;
5687 else
5688 *pbitsize = tree_low_cst (size_tree, 1);
5691 *pmode = mode;
5693 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5694 and find the ultimate containing object. */
5695 while (1)
5697 switch (TREE_CODE (exp))
5699 case BIT_FIELD_REF:
5700 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5701 TREE_OPERAND (exp, 2));
5702 break;
5704 case COMPONENT_REF:
5706 tree field = TREE_OPERAND (exp, 1);
5707 tree this_offset = component_ref_field_offset (exp);
5709 /* If this field hasn't been filled in yet, don't go past it.
5710 This should only happen when folding expressions made during
5711 type construction. */
5712 if (this_offset == 0)
5713 break;
5715 offset = size_binop (PLUS_EXPR, offset, this_offset);
5716 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5717 DECL_FIELD_BIT_OFFSET (field));
5719 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5721 break;
5723 case ARRAY_REF:
5724 case ARRAY_RANGE_REF:
5726 tree index = TREE_OPERAND (exp, 1);
5727 tree low_bound = array_ref_low_bound (exp);
5728 tree unit_size = array_ref_element_size (exp);
5730 /* We assume all arrays have sizes that are a multiple of a byte.
5731 First subtract the lower bound, if any, in the type of the
5732 index, then convert to sizetype and multiply by the size of
5733 the array element. */
5734 if (! integer_zerop (low_bound))
5735 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5736 index, low_bound);
5738 offset = size_binop (PLUS_EXPR, offset,
5739 size_binop (MULT_EXPR,
5740 fold_convert (sizetype, index),
5741 unit_size));
5743 break;
5745 case REALPART_EXPR:
5746 break;
5748 case IMAGPART_EXPR:
5749 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5750 bitsize_int (*pbitsize));
5751 break;
5753 case VIEW_CONVERT_EXPR:
5754 if (keep_aligning && STRICT_ALIGNMENT
5755 && (TYPE_ALIGN (TREE_TYPE (exp))
5756 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5757 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5758 < BIGGEST_ALIGNMENT)
5759 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5760 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5761 goto done;
5762 break;
5764 default:
5765 goto done;
5768 /* If any reference in the chain is volatile, the effect is volatile. */
5769 if (TREE_THIS_VOLATILE (exp))
5770 *pvolatilep = 1;
5772 exp = TREE_OPERAND (exp, 0);
5774 done:
5776 /* If OFFSET is constant, see if we can return the whole thing as a
5777 constant bit position. Make sure to handle overflow during
5778 this conversion. */
5779 if (host_integerp (offset, 0))
5781 double_int tem = double_int_mul (tree_to_double_int (offset),
5782 uhwi_to_double_int (BITS_PER_UNIT));
5783 tem = double_int_add (tem, tree_to_double_int (bit_offset));
5784 if (double_int_fits_in_shwi_p (tem))
5786 *pbitpos = double_int_to_shwi (tem);
5787 *poffset = NULL_TREE;
5788 return exp;
5792 /* Otherwise, split it up. */
5793 *pbitpos = tree_low_cst (bit_offset, 0);
5794 *poffset = offset;
5796 return exp;
5799 /* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
5800 look for whether EXP or any nested component-refs within EXP is marked
5801 as PACKED. */
5803 bool
5804 contains_packed_reference (tree exp)
5806 bool packed_p = false;
5808 while (1)
5810 switch (TREE_CODE (exp))
5812 case COMPONENT_REF:
5814 tree field = TREE_OPERAND (exp, 1);
5815 packed_p = DECL_PACKED (field)
5816 || TYPE_PACKED (TREE_TYPE (field))
5817 || TYPE_PACKED (TREE_TYPE (exp));
5818 if (packed_p)
5819 goto done;
5821 break;
5823 case BIT_FIELD_REF:
5824 case ARRAY_REF:
5825 case ARRAY_RANGE_REF:
5826 case REALPART_EXPR:
5827 case IMAGPART_EXPR:
5828 case VIEW_CONVERT_EXPR:
5829 break;
5831 default:
5832 goto done;
5834 exp = TREE_OPERAND (exp, 0);
5836 done:
5837 return packed_p;
5840 /* Return a tree of sizetype representing the size, in bytes, of the element
5841 of EXP, an ARRAY_REF. */
5843 tree
5844 array_ref_element_size (tree exp)
5846 tree aligned_size = TREE_OPERAND (exp, 3);
5847 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5849 /* If a size was specified in the ARRAY_REF, it's the size measured
5850 in alignment units of the element type. So multiply by that value. */
5851 if (aligned_size)
5853 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5854 sizetype from another type of the same width and signedness. */
5855 if (TREE_TYPE (aligned_size) != sizetype)
5856 aligned_size = fold_convert (sizetype, aligned_size);
5857 return size_binop (MULT_EXPR, aligned_size,
5858 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5861 /* Otherwise, take the size from that of the element type. Substitute
5862 any PLACEHOLDER_EXPR that we have. */
5863 else
5864 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5867 /* Return a tree representing the lower bound of the array mentioned in
5868 EXP, an ARRAY_REF. */
5870 tree
5871 array_ref_low_bound (tree exp)
5873 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5875 /* If a lower bound is specified in EXP, use it. */
5876 if (TREE_OPERAND (exp, 2))
5877 return TREE_OPERAND (exp, 2);
5879 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5880 substituting for a PLACEHOLDER_EXPR as needed. */
5881 if (domain_type && TYPE_MIN_VALUE (domain_type))
5882 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5884 /* Otherwise, return a zero of the appropriate type. */
5885 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5888 /* Return a tree representing the upper bound of the array mentioned in
5889 EXP, an ARRAY_REF. */
5891 tree
5892 array_ref_up_bound (tree exp)
5894 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5896 /* If there is a domain type and it has an upper bound, use it, substituting
5897 for a PLACEHOLDER_EXPR as needed. */
5898 if (domain_type && TYPE_MAX_VALUE (domain_type))
5899 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5901 /* Otherwise fail. */
5902 return NULL_TREE;
5905 /* Return a tree representing the offset, in bytes, of the field referenced
5906 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5908 tree
5909 component_ref_field_offset (tree exp)
5911 tree aligned_offset = TREE_OPERAND (exp, 2);
5912 tree field = TREE_OPERAND (exp, 1);
5914 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5915 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5916 value. */
5917 if (aligned_offset)
5919 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5920 sizetype from another type of the same width and signedness. */
5921 if (TREE_TYPE (aligned_offset) != sizetype)
5922 aligned_offset = fold_convert (sizetype, aligned_offset);
5923 return size_binop (MULT_EXPR, aligned_offset,
5924 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5927 /* Otherwise, take the offset from that of the field. Substitute
5928 any PLACEHOLDER_EXPR that we have. */
5929 else
5930 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5933 /* Return 1 if T is an expression that get_inner_reference handles. */
5936 handled_component_p (tree t)
5938 switch (TREE_CODE (t))
5940 case BIT_FIELD_REF:
5941 case COMPONENT_REF:
5942 case ARRAY_REF:
5943 case ARRAY_RANGE_REF:
5944 case VIEW_CONVERT_EXPR:
5945 case REALPART_EXPR:
5946 case IMAGPART_EXPR:
5947 return 1;
5949 default:
5950 return 0;
5954 /* Given an rtx VALUE that may contain additions and multiplications, return
5955 an equivalent value that just refers to a register, memory, or constant.
5956 This is done by generating instructions to perform the arithmetic and
5957 returning a pseudo-register containing the value.
5959 The returned value may be a REG, SUBREG, MEM or constant. */
5962 force_operand (rtx value, rtx target)
5964 rtx op1, op2;
5965 /* Use subtarget as the target for operand 0 of a binary operation. */
5966 rtx subtarget = get_subtarget (target);
5967 enum rtx_code code = GET_CODE (value);
5969 /* Check for subreg applied to an expression produced by loop optimizer. */
5970 if (code == SUBREG
5971 && !REG_P (SUBREG_REG (value))
5972 && !MEM_P (SUBREG_REG (value)))
5974 value = simplify_gen_subreg (GET_MODE (value),
5975 force_reg (GET_MODE (SUBREG_REG (value)),
5976 force_operand (SUBREG_REG (value),
5977 NULL_RTX)),
5978 GET_MODE (SUBREG_REG (value)),
5979 SUBREG_BYTE (value));
5980 code = GET_CODE (value);
5983 /* Check for a PIC address load. */
5984 if ((code == PLUS || code == MINUS)
5985 && XEXP (value, 0) == pic_offset_table_rtx
5986 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5987 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5988 || GET_CODE (XEXP (value, 1)) == CONST))
5990 if (!subtarget)
5991 subtarget = gen_reg_rtx (GET_MODE (value));
5992 emit_move_insn (subtarget, value);
5993 return subtarget;
5996 if (ARITHMETIC_P (value))
5998 op2 = XEXP (value, 1);
5999 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6000 subtarget = 0;
6001 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6003 code = PLUS;
6004 op2 = negate_rtx (GET_MODE (value), op2);
6007 /* Check for an addition with OP2 a constant integer and our first
6008 operand a PLUS of a virtual register and something else. In that
6009 case, we want to emit the sum of the virtual register and the
6010 constant first and then add the other value. This allows virtual
6011 register instantiation to simply modify the constant rather than
6012 creating another one around this addition. */
6013 if (code == PLUS && GET_CODE (op2) == CONST_INT
6014 && GET_CODE (XEXP (value, 0)) == PLUS
6015 && REG_P (XEXP (XEXP (value, 0), 0))
6016 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6017 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6019 rtx temp = expand_simple_binop (GET_MODE (value), code,
6020 XEXP (XEXP (value, 0), 0), op2,
6021 subtarget, 0, OPTAB_LIB_WIDEN);
6022 return expand_simple_binop (GET_MODE (value), code, temp,
6023 force_operand (XEXP (XEXP (value,
6024 0), 1), 0),
6025 target, 0, OPTAB_LIB_WIDEN);
6028 op1 = force_operand (XEXP (value, 0), subtarget);
6029 op2 = force_operand (op2, NULL_RTX);
6030 switch (code)
6032 case MULT:
6033 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6034 case DIV:
6035 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6036 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6037 target, 1, OPTAB_LIB_WIDEN);
6038 else
6039 return expand_divmod (0,
6040 FLOAT_MODE_P (GET_MODE (value))
6041 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6042 GET_MODE (value), op1, op2, target, 0);
6043 break;
6044 case MOD:
6045 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6046 target, 0);
6047 break;
6048 case UDIV:
6049 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6050 target, 1);
6051 break;
6052 case UMOD:
6053 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6054 target, 1);
6055 break;
6056 case ASHIFTRT:
6057 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6058 target, 0, OPTAB_LIB_WIDEN);
6059 break;
6060 default:
6061 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6062 target, 1, OPTAB_LIB_WIDEN);
6065 if (UNARY_P (value))
6067 if (!target)
6068 target = gen_reg_rtx (GET_MODE (value));
6069 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6070 switch (code)
6072 case ZERO_EXTEND:
6073 case SIGN_EXTEND:
6074 case TRUNCATE:
6075 case FLOAT_EXTEND:
6076 case FLOAT_TRUNCATE:
6077 convert_move (target, op1, code == ZERO_EXTEND);
6078 return target;
6080 case FIX:
6081 case UNSIGNED_FIX:
6082 expand_fix (target, op1, code == UNSIGNED_FIX);
6083 return target;
6085 case FLOAT:
6086 case UNSIGNED_FLOAT:
6087 expand_float (target, op1, code == UNSIGNED_FLOAT);
6088 return target;
6090 default:
6091 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6095 #ifdef INSN_SCHEDULING
6096 /* On machines that have insn scheduling, we want all memory reference to be
6097 explicit, so we need to deal with such paradoxical SUBREGs. */
6098 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6099 && (GET_MODE_SIZE (GET_MODE (value))
6100 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6101 value
6102 = simplify_gen_subreg (GET_MODE (value),
6103 force_reg (GET_MODE (SUBREG_REG (value)),
6104 force_operand (SUBREG_REG (value),
6105 NULL_RTX)),
6106 GET_MODE (SUBREG_REG (value)),
6107 SUBREG_BYTE (value));
6108 #endif
6110 return value;
6113 /* Subroutine of expand_expr: return nonzero iff there is no way that
6114 EXP can reference X, which is being modified. TOP_P is nonzero if this
6115 call is going to be used to determine whether we need a temporary
6116 for EXP, as opposed to a recursive call to this function.
6118 It is always safe for this routine to return zero since it merely
6119 searches for optimization opportunities. */
6122 safe_from_p (rtx x, tree exp, int top_p)
6124 rtx exp_rtl = 0;
6125 int i, nops;
6127 if (x == 0
6128 /* If EXP has varying size, we MUST use a target since we currently
6129 have no way of allocating temporaries of variable size
6130 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6131 So we assume here that something at a higher level has prevented a
6132 clash. This is somewhat bogus, but the best we can do. Only
6133 do this when X is BLKmode and when we are at the top level. */
6134 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6135 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6136 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6137 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6138 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6139 != INTEGER_CST)
6140 && GET_MODE (x) == BLKmode)
6141 /* If X is in the outgoing argument area, it is always safe. */
6142 || (MEM_P (x)
6143 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6144 || (GET_CODE (XEXP (x, 0)) == PLUS
6145 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6146 return 1;
6148 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6149 find the underlying pseudo. */
6150 if (GET_CODE (x) == SUBREG)
6152 x = SUBREG_REG (x);
6153 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6154 return 0;
6157 /* Now look at our tree code and possibly recurse. */
6158 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6160 case tcc_declaration:
6161 exp_rtl = DECL_RTL_IF_SET (exp);
6162 break;
6164 case tcc_constant:
6165 return 1;
6167 case tcc_exceptional:
6168 if (TREE_CODE (exp) == TREE_LIST)
6170 while (1)
6172 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6173 return 0;
6174 exp = TREE_CHAIN (exp);
6175 if (!exp)
6176 return 1;
6177 if (TREE_CODE (exp) != TREE_LIST)
6178 return safe_from_p (x, exp, 0);
6181 else if (TREE_CODE (exp) == CONSTRUCTOR)
6183 constructor_elt *ce;
6184 unsigned HOST_WIDE_INT idx;
6186 for (idx = 0;
6187 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6188 idx++)
6189 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6190 || !safe_from_p (x, ce->value, 0))
6191 return 0;
6192 return 1;
6194 else if (TREE_CODE (exp) == ERROR_MARK)
6195 return 1; /* An already-visited SAVE_EXPR? */
6196 else
6197 return 0;
6199 case tcc_statement:
6200 /* The only case we look at here is the DECL_INITIAL inside a
6201 DECL_EXPR. */
6202 return (TREE_CODE (exp) != DECL_EXPR
6203 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6204 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6205 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6207 case tcc_binary:
6208 case tcc_comparison:
6209 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6210 return 0;
6211 /* Fall through. */
6213 case tcc_unary:
6214 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6216 case tcc_expression:
6217 case tcc_reference:
6218 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6219 the expression. If it is set, we conflict iff we are that rtx or
6220 both are in memory. Otherwise, we check all operands of the
6221 expression recursively. */
6223 switch (TREE_CODE (exp))
6225 case ADDR_EXPR:
6226 /* If the operand is static or we are static, we can't conflict.
6227 Likewise if we don't conflict with the operand at all. */
6228 if (staticp (TREE_OPERAND (exp, 0))
6229 || TREE_STATIC (exp)
6230 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6231 return 1;
6233 /* Otherwise, the only way this can conflict is if we are taking
6234 the address of a DECL a that address if part of X, which is
6235 very rare. */
6236 exp = TREE_OPERAND (exp, 0);
6237 if (DECL_P (exp))
6239 if (!DECL_RTL_SET_P (exp)
6240 || !MEM_P (DECL_RTL (exp)))
6241 return 0;
6242 else
6243 exp_rtl = XEXP (DECL_RTL (exp), 0);
6245 break;
6247 case MISALIGNED_INDIRECT_REF:
6248 case ALIGN_INDIRECT_REF:
6249 case INDIRECT_REF:
6250 if (MEM_P (x)
6251 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6252 get_alias_set (exp)))
6253 return 0;
6254 break;
6256 case CALL_EXPR:
6257 /* Assume that the call will clobber all hard registers and
6258 all of memory. */
6259 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6260 || MEM_P (x))
6261 return 0;
6262 break;
6264 case WITH_CLEANUP_EXPR:
6265 case CLEANUP_POINT_EXPR:
6266 /* Lowered by gimplify.c. */
6267 gcc_unreachable ();
6269 case SAVE_EXPR:
6270 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6272 default:
6273 break;
6276 /* If we have an rtx, we do not need to scan our operands. */
6277 if (exp_rtl)
6278 break;
6280 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
6281 for (i = 0; i < nops; i++)
6282 if (TREE_OPERAND (exp, i) != 0
6283 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6284 return 0;
6286 /* If this is a language-specific tree code, it may require
6287 special handling. */
6288 if ((unsigned int) TREE_CODE (exp)
6289 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6290 && !lang_hooks.safe_from_p (x, exp))
6291 return 0;
6292 break;
6294 case tcc_type:
6295 /* Should never get a type here. */
6296 gcc_unreachable ();
6299 /* If we have an rtl, find any enclosed object. Then see if we conflict
6300 with it. */
6301 if (exp_rtl)
6303 if (GET_CODE (exp_rtl) == SUBREG)
6305 exp_rtl = SUBREG_REG (exp_rtl);
6306 if (REG_P (exp_rtl)
6307 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6308 return 0;
6311 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6312 are memory and they conflict. */
6313 return ! (rtx_equal_p (x, exp_rtl)
6314 || (MEM_P (x) && MEM_P (exp_rtl)
6315 && true_dependence (exp_rtl, VOIDmode, x,
6316 rtx_addr_varies_p)));
6319 /* If we reach here, it is safe. */
6320 return 1;
6324 /* Return the highest power of two that EXP is known to be a multiple of.
6325 This is used in updating alignment of MEMs in array references. */
6327 unsigned HOST_WIDE_INT
6328 highest_pow2_factor (tree exp)
6330 unsigned HOST_WIDE_INT c0, c1;
6332 switch (TREE_CODE (exp))
6334 case INTEGER_CST:
6335 /* We can find the lowest bit that's a one. If the low
6336 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6337 We need to handle this case since we can find it in a COND_EXPR,
6338 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6339 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6340 later ICE. */
6341 if (TREE_CONSTANT_OVERFLOW (exp))
6342 return BIGGEST_ALIGNMENT;
6343 else
6345 /* Note: tree_low_cst is intentionally not used here,
6346 we don't care about the upper bits. */
6347 c0 = TREE_INT_CST_LOW (exp);
6348 c0 &= -c0;
6349 return c0 ? c0 : BIGGEST_ALIGNMENT;
6351 break;
6353 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6354 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6355 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6356 return MIN (c0, c1);
6358 case MULT_EXPR:
6359 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6360 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6361 return c0 * c1;
6363 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6364 case CEIL_DIV_EXPR:
6365 if (integer_pow2p (TREE_OPERAND (exp, 1))
6366 && host_integerp (TREE_OPERAND (exp, 1), 1))
6368 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6369 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6370 return MAX (1, c0 / c1);
6372 break;
6374 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6375 case SAVE_EXPR:
6376 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6378 case COMPOUND_EXPR:
6379 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6381 case COND_EXPR:
6382 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6383 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6384 return MIN (c0, c1);
6386 default:
6387 break;
6390 return 1;
6393 /* Similar, except that the alignment requirements of TARGET are
6394 taken into account. Assume it is at least as aligned as its
6395 type, unless it is a COMPONENT_REF in which case the layout of
6396 the structure gives the alignment. */
6398 static unsigned HOST_WIDE_INT
6399 highest_pow2_factor_for_target (tree target, tree exp)
6401 unsigned HOST_WIDE_INT target_align, factor;
6403 factor = highest_pow2_factor (exp);
6404 if (TREE_CODE (target) == COMPONENT_REF)
6405 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6406 else
6407 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6408 return MAX (factor, target_align);
6411 /* Expands variable VAR. */
6413 void
6414 expand_var (tree var)
6416 if (DECL_EXTERNAL (var))
6417 return;
6419 if (TREE_STATIC (var))
6420 /* If this is an inlined copy of a static local variable,
6421 look up the original decl. */
6422 var = DECL_ORIGIN (var);
6424 if (TREE_STATIC (var)
6425 ? !TREE_ASM_WRITTEN (var)
6426 : !DECL_RTL_SET_P (var))
6428 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6429 /* Should be ignored. */;
6430 else if (lang_hooks.expand_decl (var))
6431 /* OK. */;
6432 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6433 expand_decl (var);
6434 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6435 rest_of_decl_compilation (var, 0, 0);
6436 else
6437 /* No expansion needed. */
6438 gcc_assert (TREE_CODE (var) == TYPE_DECL
6439 || TREE_CODE (var) == CONST_DECL
6440 || TREE_CODE (var) == FUNCTION_DECL
6441 || TREE_CODE (var) == LABEL_DECL);
6445 /* Subroutine of expand_expr. Expand the two operands of a binary
6446 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6447 The value may be stored in TARGET if TARGET is nonzero. The
6448 MODIFIER argument is as documented by expand_expr. */
6450 static void
6451 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6452 enum expand_modifier modifier)
6454 if (! safe_from_p (target, exp1, 1))
6455 target = 0;
6456 if (operand_equal_p (exp0, exp1, 0))
6458 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6459 *op1 = copy_rtx (*op0);
6461 else
6463 /* If we need to preserve evaluation order, copy exp0 into its own
6464 temporary variable so that it can't be clobbered by exp1. */
6465 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6466 exp0 = save_expr (exp0);
6467 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6468 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6473 /* Return a MEM that contains constant EXP. DEFER is as for
6474 output_constant_def and MODIFIER is as for expand_expr. */
6476 static rtx
6477 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6479 rtx mem;
6481 mem = output_constant_def (exp, defer);
6482 if (modifier != EXPAND_INITIALIZER)
6483 mem = use_anchored_address (mem);
6484 return mem;
6487 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6488 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6490 static rtx
6491 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6492 enum expand_modifier modifier)
6494 rtx result, subtarget;
6495 tree inner, offset;
6496 HOST_WIDE_INT bitsize, bitpos;
6497 int volatilep, unsignedp;
6498 enum machine_mode mode1;
6500 /* If we are taking the address of a constant and are at the top level,
6501 we have to use output_constant_def since we can't call force_const_mem
6502 at top level. */
6503 /* ??? This should be considered a front-end bug. We should not be
6504 generating ADDR_EXPR of something that isn't an LVALUE. The only
6505 exception here is STRING_CST. */
6506 if (TREE_CODE (exp) == CONSTRUCTOR
6507 || CONSTANT_CLASS_P (exp))
6508 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6510 /* Everything must be something allowed by is_gimple_addressable. */
6511 switch (TREE_CODE (exp))
6513 case INDIRECT_REF:
6514 /* This case will happen via recursion for &a->b. */
6515 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6517 case CONST_DECL:
6518 /* Recurse and make the output_constant_def clause above handle this. */
6519 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6520 tmode, modifier);
6522 case REALPART_EXPR:
6523 /* The real part of the complex number is always first, therefore
6524 the address is the same as the address of the parent object. */
6525 offset = 0;
6526 bitpos = 0;
6527 inner = TREE_OPERAND (exp, 0);
6528 break;
6530 case IMAGPART_EXPR:
6531 /* The imaginary part of the complex number is always second.
6532 The expression is therefore always offset by the size of the
6533 scalar type. */
6534 offset = 0;
6535 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6536 inner = TREE_OPERAND (exp, 0);
6537 break;
6539 default:
6540 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6541 expand_expr, as that can have various side effects; LABEL_DECLs for
6542 example, may not have their DECL_RTL set yet. Assume language
6543 specific tree nodes can be expanded in some interesting way. */
6544 if (DECL_P (exp)
6545 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6547 result = expand_expr (exp, target, tmode,
6548 modifier == EXPAND_INITIALIZER
6549 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6551 /* If the DECL isn't in memory, then the DECL wasn't properly
6552 marked TREE_ADDRESSABLE, which will be either a front-end
6553 or a tree optimizer bug. */
6554 gcc_assert (MEM_P (result));
6555 result = XEXP (result, 0);
6557 /* ??? Is this needed anymore? */
6558 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6560 assemble_external (exp);
6561 TREE_USED (exp) = 1;
6564 if (modifier != EXPAND_INITIALIZER
6565 && modifier != EXPAND_CONST_ADDRESS)
6566 result = force_operand (result, target);
6567 return result;
6570 /* Pass FALSE as the last argument to get_inner_reference although
6571 we are expanding to RTL. The rationale is that we know how to
6572 handle "aligning nodes" here: we can just bypass them because
6573 they won't change the final object whose address will be returned
6574 (they actually exist only for that purpose). */
6575 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6576 &mode1, &unsignedp, &volatilep, false);
6577 break;
6580 /* We must have made progress. */
6581 gcc_assert (inner != exp);
6583 subtarget = offset || bitpos ? NULL_RTX : target;
6584 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6586 if (offset)
6588 rtx tmp;
6590 if (modifier != EXPAND_NORMAL)
6591 result = force_operand (result, NULL);
6592 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6594 result = convert_memory_address (tmode, result);
6595 tmp = convert_memory_address (tmode, tmp);
6597 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6598 result = gen_rtx_PLUS (tmode, result, tmp);
6599 else
6601 subtarget = bitpos ? NULL_RTX : target;
6602 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6603 1, OPTAB_LIB_WIDEN);
6607 if (bitpos)
6609 /* Someone beforehand should have rejected taking the address
6610 of such an object. */
6611 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6613 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6614 if (modifier < EXPAND_SUM)
6615 result = force_operand (result, target);
6618 return result;
6621 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6622 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6624 static rtx
6625 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6626 enum expand_modifier modifier)
6628 enum machine_mode rmode;
6629 rtx result;
6631 /* Target mode of VOIDmode says "whatever's natural". */
6632 if (tmode == VOIDmode)
6633 tmode = TYPE_MODE (TREE_TYPE (exp));
6635 /* We can get called with some Weird Things if the user does silliness
6636 like "(short) &a". In that case, convert_memory_address won't do
6637 the right thing, so ignore the given target mode. */
6638 if (tmode != Pmode && tmode != ptr_mode)
6639 tmode = Pmode;
6641 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6642 tmode, modifier);
6644 /* Despite expand_expr claims concerning ignoring TMODE when not
6645 strictly convenient, stuff breaks if we don't honor it. Note
6646 that combined with the above, we only do this for pointer modes. */
6647 rmode = GET_MODE (result);
6648 if (rmode == VOIDmode)
6649 rmode = tmode;
6650 if (rmode != tmode)
6651 result = convert_memory_address (tmode, result);
6653 return result;
6657 /* expand_expr: generate code for computing expression EXP.
6658 An rtx for the computed value is returned. The value is never null.
6659 In the case of a void EXP, const0_rtx is returned.
6661 The value may be stored in TARGET if TARGET is nonzero.
6662 TARGET is just a suggestion; callers must assume that
6663 the rtx returned may not be the same as TARGET.
6665 If TARGET is CONST0_RTX, it means that the value will be ignored.
6667 If TMODE is not VOIDmode, it suggests generating the
6668 result in mode TMODE. But this is done only when convenient.
6669 Otherwise, TMODE is ignored and the value generated in its natural mode.
6670 TMODE is just a suggestion; callers must assume that
6671 the rtx returned may not have mode TMODE.
6673 Note that TARGET may have neither TMODE nor MODE. In that case, it
6674 probably will not be used.
6676 If MODIFIER is EXPAND_SUM then when EXP is an addition
6677 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6678 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6679 products as above, or REG or MEM, or constant.
6680 Ordinarily in such cases we would output mul or add instructions
6681 and then return a pseudo reg containing the sum.
6683 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6684 it also marks a label as absolutely required (it can't be dead).
6685 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6686 This is used for outputting expressions used in initializers.
6688 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6689 with a constant address even if that address is not normally legitimate.
6690 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6692 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6693 a call parameter. Such targets require special care as we haven't yet
6694 marked TARGET so that it's safe from being trashed by libcalls. We
6695 don't want to use TARGET for anything but the final result;
6696 Intermediate values must go elsewhere. Additionally, calls to
6697 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6699 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6700 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6701 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6702 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6703 recursively. */
6705 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6706 enum expand_modifier, rtx *);
6709 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6710 enum expand_modifier modifier, rtx *alt_rtl)
6712 int rn = -1;
6713 rtx ret, last = NULL;
6715 /* Handle ERROR_MARK before anybody tries to access its type. */
6716 if (TREE_CODE (exp) == ERROR_MARK
6717 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6719 ret = CONST0_RTX (tmode);
6720 return ret ? ret : const0_rtx;
6723 if (flag_non_call_exceptions)
6725 rn = lookup_stmt_eh_region (exp);
6726 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6727 if (rn >= 0)
6728 last = get_last_insn ();
6731 /* If this is an expression of some kind and it has an associated line
6732 number, then emit the line number before expanding the expression.
6734 We need to save and restore the file and line information so that
6735 errors discovered during expansion are emitted with the right
6736 information. It would be better of the diagnostic routines
6737 used the file/line information embedded in the tree nodes rather
6738 than globals. */
6739 if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6741 location_t saved_location = input_location;
6742 input_location = EXPR_LOCATION (exp);
6743 emit_line_note (input_location);
6745 /* Record where the insns produced belong. */
6746 record_block_change (TREE_BLOCK (exp));
6748 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6750 input_location = saved_location;
6752 else
6754 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6757 /* If using non-call exceptions, mark all insns that may trap.
6758 expand_call() will mark CALL_INSNs before we get to this code,
6759 but it doesn't handle libcalls, and these may trap. */
6760 if (rn >= 0)
6762 rtx insn;
6763 for (insn = next_real_insn (last); insn;
6764 insn = next_real_insn (insn))
6766 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6767 /* If we want exceptions for non-call insns, any
6768 may_trap_p instruction may throw. */
6769 && GET_CODE (PATTERN (insn)) != CLOBBER
6770 && GET_CODE (PATTERN (insn)) != USE
6771 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6773 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6774 REG_NOTES (insn));
6779 return ret;
6782 static rtx
6783 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6784 enum expand_modifier modifier, rtx *alt_rtl)
6786 rtx op0, op1, temp, decl_rtl;
6787 tree type = TREE_TYPE (exp);
6788 int unsignedp;
6789 enum machine_mode mode;
6790 enum tree_code code = TREE_CODE (exp);
6791 optab this_optab;
6792 rtx subtarget, original_target;
6793 int ignore;
6794 tree context, subexp0, subexp1;
6795 bool reduce_bit_field = false;
6796 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6797 ? reduce_to_bit_field_precision ((expr), \
6798 target, \
6799 type) \
6800 : (expr))
6802 mode = TYPE_MODE (type);
6803 unsignedp = TYPE_UNSIGNED (type);
6804 if (lang_hooks.reduce_bit_field_operations
6805 && TREE_CODE (type) == INTEGER_TYPE
6806 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6808 /* An operation in what may be a bit-field type needs the
6809 result to be reduced to the precision of the bit-field type,
6810 which is narrower than that of the type's mode. */
6811 reduce_bit_field = true;
6812 if (modifier == EXPAND_STACK_PARM)
6813 target = 0;
6816 /* Use subtarget as the target for operand 0 of a binary operation. */
6817 subtarget = get_subtarget (target);
6818 original_target = target;
6819 ignore = (target == const0_rtx
6820 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6821 || code == CONVERT_EXPR || code == COND_EXPR
6822 || code == VIEW_CONVERT_EXPR)
6823 && TREE_CODE (type) == VOID_TYPE));
6825 /* If we are going to ignore this result, we need only do something
6826 if there is a side-effect somewhere in the expression. If there
6827 is, short-circuit the most common cases here. Note that we must
6828 not call expand_expr with anything but const0_rtx in case this
6829 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6831 if (ignore)
6833 if (! TREE_SIDE_EFFECTS (exp))
6834 return const0_rtx;
6836 /* Ensure we reference a volatile object even if value is ignored, but
6837 don't do this if all we are doing is taking its address. */
6838 if (TREE_THIS_VOLATILE (exp)
6839 && TREE_CODE (exp) != FUNCTION_DECL
6840 && mode != VOIDmode && mode != BLKmode
6841 && modifier != EXPAND_CONST_ADDRESS)
6843 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6844 if (MEM_P (temp))
6845 temp = copy_to_reg (temp);
6846 return const0_rtx;
6849 if (TREE_CODE_CLASS (code) == tcc_unary
6850 || code == COMPONENT_REF || code == INDIRECT_REF)
6851 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6852 modifier);
6854 else if (TREE_CODE_CLASS (code) == tcc_binary
6855 || TREE_CODE_CLASS (code) == tcc_comparison
6856 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6858 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6859 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6860 return const0_rtx;
6862 else if (code == BIT_FIELD_REF)
6864 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6865 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6866 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6867 return const0_rtx;
6870 target = 0;
6874 switch (code)
6876 case LABEL_DECL:
6878 tree function = decl_function_context (exp);
6880 temp = label_rtx (exp);
6881 temp = gen_rtx_LABEL_REF (Pmode, temp);
6883 if (function != current_function_decl
6884 && function != 0)
6885 LABEL_REF_NONLOCAL_P (temp) = 1;
6887 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6888 return temp;
6891 case SSA_NAME:
6892 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6893 NULL);
6895 case PARM_DECL:
6896 case VAR_DECL:
6897 /* If a static var's type was incomplete when the decl was written,
6898 but the type is complete now, lay out the decl now. */
6899 if (DECL_SIZE (exp) == 0
6900 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6901 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6902 layout_decl (exp, 0);
6904 /* ... fall through ... */
6906 case FUNCTION_DECL:
6907 case RESULT_DECL:
6908 decl_rtl = DECL_RTL (exp);
6909 gcc_assert (decl_rtl);
6911 /* Ensure variable marked as used even if it doesn't go through
6912 a parser. If it hasn't be used yet, write out an external
6913 definition. */
6914 if (! TREE_USED (exp))
6916 assemble_external (exp);
6917 TREE_USED (exp) = 1;
6920 /* Show we haven't gotten RTL for this yet. */
6921 temp = 0;
6923 /* Variables inherited from containing functions should have
6924 been lowered by this point. */
6925 context = decl_function_context (exp);
6926 gcc_assert (!context
6927 || context == current_function_decl
6928 || TREE_STATIC (exp)
6929 /* ??? C++ creates functions that are not TREE_STATIC. */
6930 || TREE_CODE (exp) == FUNCTION_DECL);
6932 /* This is the case of an array whose size is to be determined
6933 from its initializer, while the initializer is still being parsed.
6934 See expand_decl. */
6936 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
6937 temp = validize_mem (decl_rtl);
6939 /* If DECL_RTL is memory, we are in the normal case and either
6940 the address is not valid or it is not a register and -fforce-addr
6941 is specified, get the address into a register. */
6943 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
6945 if (alt_rtl)
6946 *alt_rtl = decl_rtl;
6947 decl_rtl = use_anchored_address (decl_rtl);
6948 if (modifier != EXPAND_CONST_ADDRESS
6949 && modifier != EXPAND_SUM
6950 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
6951 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
6952 temp = replace_equiv_address (decl_rtl,
6953 copy_rtx (XEXP (decl_rtl, 0)));
6956 /* If we got something, return it. But first, set the alignment
6957 if the address is a register. */
6958 if (temp != 0)
6960 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6961 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6963 return temp;
6966 /* If the mode of DECL_RTL does not match that of the decl, it
6967 must be a promoted value. We return a SUBREG of the wanted mode,
6968 but mark it so that we know that it was already extended. */
6970 if (REG_P (decl_rtl)
6971 && GET_MODE (decl_rtl) != DECL_MODE (exp))
6973 enum machine_mode pmode;
6975 /* Get the signedness used for this variable. Ensure we get the
6976 same mode we got when the variable was declared. */
6977 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6978 (TREE_CODE (exp) == RESULT_DECL
6979 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
6980 gcc_assert (GET_MODE (decl_rtl) == pmode);
6982 temp = gen_lowpart_SUBREG (mode, decl_rtl);
6983 SUBREG_PROMOTED_VAR_P (temp) = 1;
6984 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6985 return temp;
6988 return decl_rtl;
6990 case INTEGER_CST:
6991 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6992 TREE_INT_CST_HIGH (exp), mode);
6994 /* ??? If overflow is set, fold will have done an incomplete job,
6995 which can result in (plus xx (const_int 0)), which can get
6996 simplified by validate_replace_rtx during virtual register
6997 instantiation, which can result in unrecognizable insns.
6998 Avoid this by forcing all overflows into registers. */
6999 if (TREE_CONSTANT_OVERFLOW (exp)
7000 && modifier != EXPAND_INITIALIZER)
7001 temp = force_reg (mode, temp);
7003 return temp;
7005 case VECTOR_CST:
7007 tree tmp = NULL_TREE;
7008 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7009 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
7010 return const_vector_from_tree (exp);
7011 if (GET_MODE_CLASS (mode) == MODE_INT)
7013 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7014 if (type_for_mode)
7015 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7017 if (!tmp)
7018 tmp = build_constructor_from_list (type,
7019 TREE_VECTOR_CST_ELTS (exp));
7020 return expand_expr (tmp, ignore ? const0_rtx : target,
7021 tmode, modifier);
7024 case CONST_DECL:
7025 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7027 case REAL_CST:
7028 /* If optimized, generate immediate CONST_DOUBLE
7029 which will be turned into memory by reload if necessary.
7031 We used to force a register so that loop.c could see it. But
7032 this does not allow gen_* patterns to perform optimizations with
7033 the constants. It also produces two insns in cases like "x = 1.0;".
7034 On most machines, floating-point constants are not permitted in
7035 many insns, so we'd end up copying it to a register in any case.
7037 Now, we do the copying in expand_binop, if appropriate. */
7038 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7039 TYPE_MODE (TREE_TYPE (exp)));
7041 case COMPLEX_CST:
7042 /* Handle evaluating a complex constant in a CONCAT target. */
7043 if (original_target && GET_CODE (original_target) == CONCAT)
7045 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7046 rtx rtarg, itarg;
7048 rtarg = XEXP (original_target, 0);
7049 itarg = XEXP (original_target, 1);
7051 /* Move the real and imaginary parts separately. */
7052 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
7053 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
7055 if (op0 != rtarg)
7056 emit_move_insn (rtarg, op0);
7057 if (op1 != itarg)
7058 emit_move_insn (itarg, op1);
7060 return original_target;
7063 /* ... fall through ... */
7065 case STRING_CST:
7066 temp = expand_expr_constant (exp, 1, modifier);
7068 /* temp contains a constant address.
7069 On RISC machines where a constant address isn't valid,
7070 make some insns to get that address into a register. */
7071 if (modifier != EXPAND_CONST_ADDRESS
7072 && modifier != EXPAND_INITIALIZER
7073 && modifier != EXPAND_SUM
7074 && (! memory_address_p (mode, XEXP (temp, 0))
7075 || flag_force_addr))
7076 return replace_equiv_address (temp,
7077 copy_rtx (XEXP (temp, 0)));
7078 return temp;
7080 case SAVE_EXPR:
7082 tree val = TREE_OPERAND (exp, 0);
7083 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7085 if (!SAVE_EXPR_RESOLVED_P (exp))
7087 /* We can indeed still hit this case, typically via builtin
7088 expanders calling save_expr immediately before expanding
7089 something. Assume this means that we only have to deal
7090 with non-BLKmode values. */
7091 gcc_assert (GET_MODE (ret) != BLKmode);
7093 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7094 DECL_ARTIFICIAL (val) = 1;
7095 DECL_IGNORED_P (val) = 1;
7096 TREE_OPERAND (exp, 0) = val;
7097 SAVE_EXPR_RESOLVED_P (exp) = 1;
7099 if (!CONSTANT_P (ret))
7100 ret = copy_to_reg (ret);
7101 SET_DECL_RTL (val, ret);
7104 return ret;
7107 case GOTO_EXPR:
7108 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7109 expand_goto (TREE_OPERAND (exp, 0));
7110 else
7111 expand_computed_goto (TREE_OPERAND (exp, 0));
7112 return const0_rtx;
7114 case CONSTRUCTOR:
7115 /* If we don't need the result, just ensure we evaluate any
7116 subexpressions. */
7117 if (ignore)
7119 unsigned HOST_WIDE_INT idx;
7120 tree value;
7122 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7123 expand_expr (value, const0_rtx, VOIDmode, 0);
7125 return const0_rtx;
7128 /* Try to avoid creating a temporary at all. This is possible
7129 if all of the initializer is zero.
7130 FIXME: try to handle all [0..255] initializers we can handle
7131 with memset. */
7132 else if (TREE_STATIC (exp)
7133 && !TREE_ADDRESSABLE (exp)
7134 && target != 0 && mode == BLKmode
7135 && all_zeros_p (exp))
7137 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7138 return target;
7141 /* All elts simple constants => refer to a constant in memory. But
7142 if this is a non-BLKmode mode, let it store a field at a time
7143 since that should make a CONST_INT or CONST_DOUBLE when we
7144 fold. Likewise, if we have a target we can use, it is best to
7145 store directly into the target unless the type is large enough
7146 that memcpy will be used. If we are making an initializer and
7147 all operands are constant, put it in memory as well.
7149 FIXME: Avoid trying to fill vector constructors piece-meal.
7150 Output them with output_constant_def below unless we're sure
7151 they're zeros. This should go away when vector initializers
7152 are treated like VECTOR_CST instead of arrays.
7154 else if ((TREE_STATIC (exp)
7155 && ((mode == BLKmode
7156 && ! (target != 0 && safe_from_p (target, exp, 1)))
7157 || TREE_ADDRESSABLE (exp)
7158 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7159 && (! MOVE_BY_PIECES_P
7160 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7161 TYPE_ALIGN (type)))
7162 && ! mostly_zeros_p (exp))))
7163 || ((modifier == EXPAND_INITIALIZER
7164 || modifier == EXPAND_CONST_ADDRESS)
7165 && TREE_CONSTANT (exp)))
7167 rtx constructor = expand_expr_constant (exp, 1, modifier);
7169 if (modifier != EXPAND_CONST_ADDRESS
7170 && modifier != EXPAND_INITIALIZER
7171 && modifier != EXPAND_SUM)
7172 constructor = validize_mem (constructor);
7174 return constructor;
7176 else
7178 /* Handle calls that pass values in multiple non-contiguous
7179 locations. The Irix 6 ABI has examples of this. */
7180 if (target == 0 || ! safe_from_p (target, exp, 1)
7181 || GET_CODE (target) == PARALLEL
7182 || modifier == EXPAND_STACK_PARM)
7183 target
7184 = assign_temp (build_qualified_type (type,
7185 (TYPE_QUALS (type)
7186 | (TREE_READONLY (exp)
7187 * TYPE_QUAL_CONST))),
7188 0, TREE_ADDRESSABLE (exp), 1);
7190 store_constructor (exp, target, 0, int_expr_size (exp));
7191 return target;
7194 case MISALIGNED_INDIRECT_REF:
7195 case ALIGN_INDIRECT_REF:
7196 case INDIRECT_REF:
7198 tree exp1 = TREE_OPERAND (exp, 0);
7200 if (modifier != EXPAND_WRITE)
7202 tree t;
7204 t = fold_read_from_constant_string (exp);
7205 if (t)
7206 return expand_expr (t, target, tmode, modifier);
7209 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7210 op0 = memory_address (mode, op0);
7212 if (code == ALIGN_INDIRECT_REF)
7214 int align = TYPE_ALIGN_UNIT (type);
7215 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7216 op0 = memory_address (mode, op0);
7219 temp = gen_rtx_MEM (mode, op0);
7221 set_mem_attributes (temp, exp, 0);
7223 /* Resolve the misalignment now, so that we don't have to remember
7224 to resolve it later. Of course, this only works for reads. */
7225 /* ??? When we get around to supporting writes, we'll have to handle
7226 this in store_expr directly. The vectorizer isn't generating
7227 those yet, however. */
7228 if (code == MISALIGNED_INDIRECT_REF)
7230 int icode;
7231 rtx reg, insn;
7233 gcc_assert (modifier == EXPAND_NORMAL
7234 || modifier == EXPAND_STACK_PARM);
7236 /* The vectorizer should have already checked the mode. */
7237 icode = movmisalign_optab->handlers[mode].insn_code;
7238 gcc_assert (icode != CODE_FOR_nothing);
7240 /* We've already validated the memory, and we're creating a
7241 new pseudo destination. The predicates really can't fail. */
7242 reg = gen_reg_rtx (mode);
7244 /* Nor can the insn generator. */
7245 insn = GEN_FCN (icode) (reg, temp);
7246 emit_insn (insn);
7248 return reg;
7251 return temp;
7254 case TARGET_MEM_REF:
7256 struct mem_address addr;
7258 get_address_description (exp, &addr);
7259 op0 = addr_for_mem_ref (&addr, true);
7260 op0 = memory_address (mode, op0);
7261 temp = gen_rtx_MEM (mode, op0);
7262 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7264 return temp;
7266 case ARRAY_REF:
7269 tree array = TREE_OPERAND (exp, 0);
7270 tree index = TREE_OPERAND (exp, 1);
7272 /* Fold an expression like: "foo"[2].
7273 This is not done in fold so it won't happen inside &.
7274 Don't fold if this is for wide characters since it's too
7275 difficult to do correctly and this is a very rare case. */
7277 if (modifier != EXPAND_CONST_ADDRESS
7278 && modifier != EXPAND_INITIALIZER
7279 && modifier != EXPAND_MEMORY)
7281 tree t = fold_read_from_constant_string (exp);
7283 if (t)
7284 return expand_expr (t, target, tmode, modifier);
7287 /* If this is a constant index into a constant array,
7288 just get the value from the array. Handle both the cases when
7289 we have an explicit constructor and when our operand is a variable
7290 that was declared const. */
7292 if (modifier != EXPAND_CONST_ADDRESS
7293 && modifier != EXPAND_INITIALIZER
7294 && modifier != EXPAND_MEMORY
7295 && TREE_CODE (array) == CONSTRUCTOR
7296 && ! TREE_SIDE_EFFECTS (array)
7297 && TREE_CODE (index) == INTEGER_CST)
7299 unsigned HOST_WIDE_INT ix;
7300 tree field, value;
7302 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7303 field, value)
7304 if (tree_int_cst_equal (field, index))
7306 if (!TREE_SIDE_EFFECTS (value))
7307 return expand_expr (fold (value), target, tmode, modifier);
7308 break;
7312 else if (optimize >= 1
7313 && modifier != EXPAND_CONST_ADDRESS
7314 && modifier != EXPAND_INITIALIZER
7315 && modifier != EXPAND_MEMORY
7316 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7317 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7318 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7319 && targetm.binds_local_p (array))
7321 if (TREE_CODE (index) == INTEGER_CST)
7323 tree init = DECL_INITIAL (array);
7325 if (TREE_CODE (init) == CONSTRUCTOR)
7327 unsigned HOST_WIDE_INT ix;
7328 tree field, value;
7330 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7331 field, value)
7332 if (tree_int_cst_equal (field, index))
7334 if (!TREE_SIDE_EFFECTS (value))
7335 return expand_expr (fold (value), target, tmode,
7336 modifier);
7337 break;
7340 else if(TREE_CODE (init) == STRING_CST)
7342 tree index1 = index;
7343 tree low_bound = array_ref_low_bound (exp);
7344 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7346 /* Optimize the special-case of a zero lower bound.
7348 We convert the low_bound to sizetype to avoid some problems
7349 with constant folding. (E.g. suppose the lower bound is 1,
7350 and its mode is QI. Without the conversion,l (ARRAY
7351 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7352 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
7354 if (! integer_zerop (low_bound))
7355 index1 = size_diffop (index1, fold_convert (sizetype,
7356 low_bound));
7358 if (0 > compare_tree_int (index1,
7359 TREE_STRING_LENGTH (init)))
7361 tree type = TREE_TYPE (TREE_TYPE (init));
7362 enum machine_mode mode = TYPE_MODE (type);
7364 if (GET_MODE_CLASS (mode) == MODE_INT
7365 && GET_MODE_SIZE (mode) == 1)
7366 return gen_int_mode (TREE_STRING_POINTER (init)
7367 [TREE_INT_CST_LOW (index1)],
7368 mode);
7374 goto normal_inner_ref;
7376 case COMPONENT_REF:
7377 /* If the operand is a CONSTRUCTOR, we can just extract the
7378 appropriate field if it is present. */
7379 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7381 unsigned HOST_WIDE_INT idx;
7382 tree field, value;
7384 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7385 idx, field, value)
7386 if (field == TREE_OPERAND (exp, 1)
7387 /* We can normally use the value of the field in the
7388 CONSTRUCTOR. However, if this is a bitfield in
7389 an integral mode that we can fit in a HOST_WIDE_INT,
7390 we must mask only the number of bits in the bitfield,
7391 since this is done implicitly by the constructor. If
7392 the bitfield does not meet either of those conditions,
7393 we can't do this optimization. */
7394 && (! DECL_BIT_FIELD (field)
7395 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7396 && (GET_MODE_BITSIZE (DECL_MODE (field))
7397 <= HOST_BITS_PER_WIDE_INT))))
7399 if (DECL_BIT_FIELD (field)
7400 && modifier == EXPAND_STACK_PARM)
7401 target = 0;
7402 op0 = expand_expr (value, target, tmode, modifier);
7403 if (DECL_BIT_FIELD (field))
7405 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7406 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7408 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7410 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7411 op0 = expand_and (imode, op0, op1, target);
7413 else
7415 tree count
7416 = build_int_cst (NULL_TREE,
7417 GET_MODE_BITSIZE (imode) - bitsize);
7419 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7420 target, 0);
7421 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7422 target, 0);
7426 return op0;
7429 goto normal_inner_ref;
7431 case BIT_FIELD_REF:
7432 case ARRAY_RANGE_REF:
7433 normal_inner_ref:
7435 enum machine_mode mode1;
7436 HOST_WIDE_INT bitsize, bitpos;
7437 tree offset;
7438 int volatilep = 0;
7439 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7440 &mode1, &unsignedp, &volatilep, true);
7441 rtx orig_op0;
7443 /* If we got back the original object, something is wrong. Perhaps
7444 we are evaluating an expression too early. In any event, don't
7445 infinitely recurse. */
7446 gcc_assert (tem != exp);
7448 /* If TEM's type is a union of variable size, pass TARGET to the inner
7449 computation, since it will need a temporary and TARGET is known
7450 to have to do. This occurs in unchecked conversion in Ada. */
7452 orig_op0 = op0
7453 = expand_expr (tem,
7454 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7455 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7456 != INTEGER_CST)
7457 && modifier != EXPAND_STACK_PARM
7458 ? target : NULL_RTX),
7459 VOIDmode,
7460 (modifier == EXPAND_INITIALIZER
7461 || modifier == EXPAND_CONST_ADDRESS
7462 || modifier == EXPAND_STACK_PARM)
7463 ? modifier : EXPAND_NORMAL);
7465 /* If this is a constant, put it into a register if it is a legitimate
7466 constant, OFFSET is 0, and we won't try to extract outside the
7467 register (in case we were passed a partially uninitialized object
7468 or a view_conversion to a larger size). Force the constant to
7469 memory otherwise. */
7470 if (CONSTANT_P (op0))
7472 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7473 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7474 && offset == 0
7475 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7476 op0 = force_reg (mode, op0);
7477 else
7478 op0 = validize_mem (force_const_mem (mode, op0));
7481 /* Otherwise, if this object not in memory and we either have an
7482 offset, a BLKmode result, or a reference outside the object, put it
7483 there. Such cases can occur in Ada if we have unchecked conversion
7484 of an expression from a scalar type to an array or record type or
7485 for an ARRAY_RANGE_REF whose type is BLKmode. */
7486 else if (!MEM_P (op0)
7487 && (offset != 0
7488 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7489 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7491 tree nt = build_qualified_type (TREE_TYPE (tem),
7492 (TYPE_QUALS (TREE_TYPE (tem))
7493 | TYPE_QUAL_CONST));
7494 rtx memloc = assign_temp (nt, 1, 1, 1);
7496 emit_move_insn (memloc, op0);
7497 op0 = memloc;
7500 if (offset != 0)
7502 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7503 EXPAND_SUM);
7505 gcc_assert (MEM_P (op0));
7507 #ifdef POINTERS_EXTEND_UNSIGNED
7508 if (GET_MODE (offset_rtx) != Pmode)
7509 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7510 #else
7511 if (GET_MODE (offset_rtx) != ptr_mode)
7512 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7513 #endif
7515 if (GET_MODE (op0) == BLKmode
7516 /* A constant address in OP0 can have VOIDmode, we must
7517 not try to call force_reg in that case. */
7518 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7519 && bitsize != 0
7520 && (bitpos % bitsize) == 0
7521 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7522 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7524 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7525 bitpos = 0;
7528 op0 = offset_address (op0, offset_rtx,
7529 highest_pow2_factor (offset));
7532 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7533 record its alignment as BIGGEST_ALIGNMENT. */
7534 if (MEM_P (op0) && bitpos == 0 && offset != 0
7535 && is_aligning_offset (offset, tem))
7536 set_mem_align (op0, BIGGEST_ALIGNMENT);
7538 /* Don't forget about volatility even if this is a bitfield. */
7539 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7541 if (op0 == orig_op0)
7542 op0 = copy_rtx (op0);
7544 MEM_VOLATILE_P (op0) = 1;
7547 /* The following code doesn't handle CONCAT.
7548 Assume only bitpos == 0 can be used for CONCAT, due to
7549 one element arrays having the same mode as its element. */
7550 if (GET_CODE (op0) == CONCAT)
7552 gcc_assert (bitpos == 0
7553 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7554 return op0;
7557 /* In cases where an aligned union has an unaligned object
7558 as a field, we might be extracting a BLKmode value from
7559 an integer-mode (e.g., SImode) object. Handle this case
7560 by doing the extract into an object as wide as the field
7561 (which we know to be the width of a basic mode), then
7562 storing into memory, and changing the mode to BLKmode. */
7563 if (mode1 == VOIDmode
7564 || REG_P (op0) || GET_CODE (op0) == SUBREG
7565 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7566 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7567 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7568 && modifier != EXPAND_CONST_ADDRESS
7569 && modifier != EXPAND_INITIALIZER)
7570 /* If the field isn't aligned enough to fetch as a memref,
7571 fetch it as a bit field. */
7572 || (mode1 != BLKmode
7573 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7574 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7575 || (MEM_P (op0)
7576 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7577 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7578 && ((modifier == EXPAND_CONST_ADDRESS
7579 || modifier == EXPAND_INITIALIZER)
7580 ? STRICT_ALIGNMENT
7581 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7582 || (bitpos % BITS_PER_UNIT != 0)))
7583 /* If the type and the field are a constant size and the
7584 size of the type isn't the same size as the bitfield,
7585 we must use bitfield operations. */
7586 || (bitsize >= 0
7587 && TYPE_SIZE (TREE_TYPE (exp))
7588 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7589 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7590 bitsize)))
7592 enum machine_mode ext_mode = mode;
7594 if (ext_mode == BLKmode
7595 && ! (target != 0 && MEM_P (op0)
7596 && MEM_P (target)
7597 && bitpos % BITS_PER_UNIT == 0))
7598 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7600 if (ext_mode == BLKmode)
7602 if (target == 0)
7603 target = assign_temp (type, 0, 1, 1);
7605 if (bitsize == 0)
7606 return target;
7608 /* In this case, BITPOS must start at a byte boundary and
7609 TARGET, if specified, must be a MEM. */
7610 gcc_assert (MEM_P (op0)
7611 && (!target || MEM_P (target))
7612 && !(bitpos % BITS_PER_UNIT));
7614 emit_block_move (target,
7615 adjust_address (op0, VOIDmode,
7616 bitpos / BITS_PER_UNIT),
7617 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7618 / BITS_PER_UNIT),
7619 (modifier == EXPAND_STACK_PARM
7620 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7622 return target;
7625 op0 = validize_mem (op0);
7627 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7628 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7630 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7631 (modifier == EXPAND_STACK_PARM
7632 ? NULL_RTX : target),
7633 ext_mode, ext_mode);
7635 /* If the result is a record type and BITSIZE is narrower than
7636 the mode of OP0, an integral mode, and this is a big endian
7637 machine, we must put the field into the high-order bits. */
7638 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7639 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7640 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7641 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7642 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7643 - bitsize),
7644 op0, 1);
7646 /* If the result type is BLKmode, store the data into a temporary
7647 of the appropriate type, but with the mode corresponding to the
7648 mode for the data we have (op0's mode). It's tempting to make
7649 this a constant type, since we know it's only being stored once,
7650 but that can cause problems if we are taking the address of this
7651 COMPONENT_REF because the MEM of any reference via that address
7652 will have flags corresponding to the type, which will not
7653 necessarily be constant. */
7654 if (mode == BLKmode)
7656 rtx new
7657 = assign_stack_temp_for_type
7658 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7660 emit_move_insn (new, op0);
7661 op0 = copy_rtx (new);
7662 PUT_MODE (op0, BLKmode);
7663 set_mem_attributes (op0, exp, 1);
7666 return op0;
7669 /* If the result is BLKmode, use that to access the object
7670 now as well. */
7671 if (mode == BLKmode)
7672 mode1 = BLKmode;
7674 /* Get a reference to just this component. */
7675 if (modifier == EXPAND_CONST_ADDRESS
7676 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7677 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7678 else
7679 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7681 if (op0 == orig_op0)
7682 op0 = copy_rtx (op0);
7684 set_mem_attributes (op0, exp, 0);
7685 if (REG_P (XEXP (op0, 0)))
7686 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7688 MEM_VOLATILE_P (op0) |= volatilep;
7689 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7690 || modifier == EXPAND_CONST_ADDRESS
7691 || modifier == EXPAND_INITIALIZER)
7692 return op0;
7693 else if (target == 0)
7694 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7696 convert_move (target, op0, unsignedp);
7697 return target;
7700 case OBJ_TYPE_REF:
7701 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7703 case CALL_EXPR:
7704 /* Check for a built-in function. */
7705 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7706 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7707 == FUNCTION_DECL)
7708 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7710 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7711 == BUILT_IN_FRONTEND)
7712 return lang_hooks.expand_expr (exp, original_target,
7713 tmode, modifier,
7714 alt_rtl);
7715 else
7716 return expand_builtin (exp, target, subtarget, tmode, ignore);
7719 return expand_call (exp, target, ignore);
7721 case NON_LVALUE_EXPR:
7722 case NOP_EXPR:
7723 case CONVERT_EXPR:
7724 if (TREE_OPERAND (exp, 0) == error_mark_node)
7725 return const0_rtx;
7727 if (TREE_CODE (type) == UNION_TYPE)
7729 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7731 /* If both input and output are BLKmode, this conversion isn't doing
7732 anything except possibly changing memory attribute. */
7733 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7735 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7736 modifier);
7738 result = copy_rtx (result);
7739 set_mem_attributes (result, exp, 0);
7740 return result;
7743 if (target == 0)
7745 if (TYPE_MODE (type) != BLKmode)
7746 target = gen_reg_rtx (TYPE_MODE (type));
7747 else
7748 target = assign_temp (type, 0, 1, 1);
7751 if (MEM_P (target))
7752 /* Store data into beginning of memory target. */
7753 store_expr (TREE_OPERAND (exp, 0),
7754 adjust_address (target, TYPE_MODE (valtype), 0),
7755 modifier == EXPAND_STACK_PARM);
7757 else
7759 gcc_assert (REG_P (target));
7761 /* Store this field into a union of the proper type. */
7762 store_field (target,
7763 MIN ((int_size_in_bytes (TREE_TYPE
7764 (TREE_OPERAND (exp, 0)))
7765 * BITS_PER_UNIT),
7766 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7767 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7768 type, 0);
7771 /* Return the entire union. */
7772 return target;
7775 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7777 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7778 modifier);
7780 /* If the signedness of the conversion differs and OP0 is
7781 a promoted SUBREG, clear that indication since we now
7782 have to do the proper extension. */
7783 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7784 && GET_CODE (op0) == SUBREG)
7785 SUBREG_PROMOTED_VAR_P (op0) = 0;
7787 return REDUCE_BIT_FIELD (op0);
7790 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
7791 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7792 if (GET_MODE (op0) == mode)
7795 /* If OP0 is a constant, just convert it into the proper mode. */
7796 else if (CONSTANT_P (op0))
7798 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7799 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7801 if (modifier == EXPAND_INITIALIZER)
7802 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7803 subreg_lowpart_offset (mode,
7804 inner_mode));
7805 else
7806 op0= convert_modes (mode, inner_mode, op0,
7807 TYPE_UNSIGNED (inner_type));
7810 else if (modifier == EXPAND_INITIALIZER)
7811 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7813 else if (target == 0)
7814 op0 = convert_to_mode (mode, op0,
7815 TYPE_UNSIGNED (TREE_TYPE
7816 (TREE_OPERAND (exp, 0))));
7817 else
7819 convert_move (target, op0,
7820 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7821 op0 = target;
7824 return REDUCE_BIT_FIELD (op0);
7826 case VIEW_CONVERT_EXPR:
7827 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7829 /* If the input and output modes are both the same, we are done. */
7830 if (TYPE_MODE (type) == GET_MODE (op0))
7832 /* If neither mode is BLKmode, and both modes are the same size
7833 then we can use gen_lowpart. */
7834 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7835 && GET_MODE_SIZE (TYPE_MODE (type))
7836 == GET_MODE_SIZE (GET_MODE (op0)))
7838 if (GET_CODE (op0) == SUBREG)
7839 op0 = force_reg (GET_MODE (op0), op0);
7840 op0 = gen_lowpart (TYPE_MODE (type), op0);
7842 /* If both modes are integral, then we can convert from one to the
7843 other. */
7844 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7845 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7846 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7847 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7848 /* As a last resort, spill op0 to memory, and reload it in a
7849 different mode. */
7850 else if (!MEM_P (op0))
7852 /* If the operand is not a MEM, force it into memory. Since we
7853 are going to be changing the mode of the MEM, don't call
7854 force_const_mem for constants because we don't allow pool
7855 constants to change mode. */
7856 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7858 gcc_assert (!TREE_ADDRESSABLE (exp));
7860 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7861 target
7862 = assign_stack_temp_for_type
7863 (TYPE_MODE (inner_type),
7864 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7866 emit_move_insn (target, op0);
7867 op0 = target;
7870 /* At this point, OP0 is in the correct mode. If the output type is such
7871 that the operand is known to be aligned, indicate that it is.
7872 Otherwise, we need only be concerned about alignment for non-BLKmode
7873 results. */
7874 if (MEM_P (op0))
7876 op0 = copy_rtx (op0);
7878 if (TYPE_ALIGN_OK (type))
7879 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7880 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7881 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7883 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7884 HOST_WIDE_INT temp_size
7885 = MAX (int_size_in_bytes (inner_type),
7886 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7887 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7888 temp_size, 0, type);
7889 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7891 gcc_assert (!TREE_ADDRESSABLE (exp));
7893 if (GET_MODE (op0) == BLKmode)
7894 emit_block_move (new_with_op0_mode, op0,
7895 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7896 (modifier == EXPAND_STACK_PARM
7897 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7898 else
7899 emit_move_insn (new_with_op0_mode, op0);
7901 op0 = new;
7904 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7907 return op0;
7909 case PLUS_EXPR:
7910 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7911 something else, make sure we add the register to the constant and
7912 then to the other thing. This case can occur during strength
7913 reduction and doing it this way will produce better code if the
7914 frame pointer or argument pointer is eliminated.
7916 fold-const.c will ensure that the constant is always in the inner
7917 PLUS_EXPR, so the only case we need to do anything about is if
7918 sp, ap, or fp is our second argument, in which case we must swap
7919 the innermost first argument and our second argument. */
7921 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7922 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7923 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7924 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7925 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7926 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7928 tree t = TREE_OPERAND (exp, 1);
7930 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7931 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7934 /* If the result is to be ptr_mode and we are adding an integer to
7935 something, we might be forming a constant. So try to use
7936 plus_constant. If it produces a sum and we can't accept it,
7937 use force_operand. This allows P = &ARR[const] to generate
7938 efficient code on machines where a SYMBOL_REF is not a valid
7939 address.
7941 If this is an EXPAND_SUM call, always return the sum. */
7942 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7943 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7945 if (modifier == EXPAND_STACK_PARM)
7946 target = 0;
7947 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7948 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7949 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7951 rtx constant_part;
7953 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7954 EXPAND_SUM);
7955 /* Use immed_double_const to ensure that the constant is
7956 truncated according to the mode of OP1, then sign extended
7957 to a HOST_WIDE_INT. Using the constant directly can result
7958 in non-canonical RTL in a 64x32 cross compile. */
7959 constant_part
7960 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7961 (HOST_WIDE_INT) 0,
7962 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7963 op1 = plus_constant (op1, INTVAL (constant_part));
7964 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7965 op1 = force_operand (op1, target);
7966 return REDUCE_BIT_FIELD (op1);
7969 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7970 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7971 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7973 rtx constant_part;
7975 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7976 (modifier == EXPAND_INITIALIZER
7977 ? EXPAND_INITIALIZER : EXPAND_SUM));
7978 if (! CONSTANT_P (op0))
7980 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7981 VOIDmode, modifier);
7982 /* Return a PLUS if modifier says it's OK. */
7983 if (modifier == EXPAND_SUM
7984 || modifier == EXPAND_INITIALIZER)
7985 return simplify_gen_binary (PLUS, mode, op0, op1);
7986 goto binop2;
7988 /* Use immed_double_const to ensure that the constant is
7989 truncated according to the mode of OP1, then sign extended
7990 to a HOST_WIDE_INT. Using the constant directly can result
7991 in non-canonical RTL in a 64x32 cross compile. */
7992 constant_part
7993 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7994 (HOST_WIDE_INT) 0,
7995 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7996 op0 = plus_constant (op0, INTVAL (constant_part));
7997 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7998 op0 = force_operand (op0, target);
7999 return REDUCE_BIT_FIELD (op0);
8003 /* No sense saving up arithmetic to be done
8004 if it's all in the wrong mode to form part of an address.
8005 And force_operand won't know whether to sign-extend or
8006 zero-extend. */
8007 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8008 || mode != ptr_mode)
8010 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8011 subtarget, &op0, &op1, 0);
8012 if (op0 == const0_rtx)
8013 return op1;
8014 if (op1 == const0_rtx)
8015 return op0;
8016 goto binop2;
8019 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8020 subtarget, &op0, &op1, modifier);
8021 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8023 case MINUS_EXPR:
8024 /* For initializers, we are allowed to return a MINUS of two
8025 symbolic constants. Here we handle all cases when both operands
8026 are constant. */
8027 /* Handle difference of two symbolic constants,
8028 for the sake of an initializer. */
8029 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8030 && really_constant_p (TREE_OPERAND (exp, 0))
8031 && really_constant_p (TREE_OPERAND (exp, 1)))
8033 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8034 NULL_RTX, &op0, &op1, modifier);
8036 /* If the last operand is a CONST_INT, use plus_constant of
8037 the negated constant. Else make the MINUS. */
8038 if (GET_CODE (op1) == CONST_INT)
8039 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8040 else
8041 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8044 /* No sense saving up arithmetic to be done
8045 if it's all in the wrong mode to form part of an address.
8046 And force_operand won't know whether to sign-extend or
8047 zero-extend. */
8048 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8049 || mode != ptr_mode)
8050 goto binop;
8052 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8053 subtarget, &op0, &op1, modifier);
8055 /* Convert A - const to A + (-const). */
8056 if (GET_CODE (op1) == CONST_INT)
8058 op1 = negate_rtx (mode, op1);
8059 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8062 goto binop2;
8064 case MULT_EXPR:
8065 /* If first operand is constant, swap them.
8066 Thus the following special case checks need only
8067 check the second operand. */
8068 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8070 tree t1 = TREE_OPERAND (exp, 0);
8071 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8072 TREE_OPERAND (exp, 1) = t1;
8075 /* Attempt to return something suitable for generating an
8076 indexed address, for machines that support that. */
8078 if (modifier == EXPAND_SUM && mode == ptr_mode
8079 && host_integerp (TREE_OPERAND (exp, 1), 0))
8081 tree exp1 = TREE_OPERAND (exp, 1);
8083 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8084 EXPAND_SUM);
8086 if (!REG_P (op0))
8087 op0 = force_operand (op0, NULL_RTX);
8088 if (!REG_P (op0))
8089 op0 = copy_to_mode_reg (mode, op0);
8091 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8092 gen_int_mode (tree_low_cst (exp1, 0),
8093 TYPE_MODE (TREE_TYPE (exp1)))));
8096 if (modifier == EXPAND_STACK_PARM)
8097 target = 0;
8099 /* Check for multiplying things that have been extended
8100 from a narrower type. If this machine supports multiplying
8101 in that narrower type with a result in the desired type,
8102 do it that way, and avoid the explicit type-conversion. */
8104 subexp0 = TREE_OPERAND (exp, 0);
8105 subexp1 = TREE_OPERAND (exp, 1);
8106 /* First, check if we have a multiplication of one signed and one
8107 unsigned operand. */
8108 if (TREE_CODE (subexp0) == NOP_EXPR
8109 && TREE_CODE (subexp1) == NOP_EXPR
8110 && TREE_CODE (type) == INTEGER_TYPE
8111 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8112 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8113 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8114 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8115 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8116 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8118 enum machine_mode innermode
8119 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8120 this_optab = usmul_widen_optab;
8121 if (mode == GET_MODE_WIDER_MODE (innermode))
8123 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8125 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8126 expand_operands (TREE_OPERAND (subexp0, 0),
8127 TREE_OPERAND (subexp1, 0),
8128 NULL_RTX, &op0, &op1, 0);
8129 else
8130 expand_operands (TREE_OPERAND (subexp0, 0),
8131 TREE_OPERAND (subexp1, 0),
8132 NULL_RTX, &op1, &op0, 0);
8134 goto binop3;
8138 /* Check for a multiplication with matching signedness. */
8139 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8140 && TREE_CODE (type) == INTEGER_TYPE
8141 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8142 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8143 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8144 && int_fits_type_p (TREE_OPERAND (exp, 1),
8145 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8146 /* Don't use a widening multiply if a shift will do. */
8147 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8148 > HOST_BITS_PER_WIDE_INT)
8149 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8151 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8152 && (TYPE_PRECISION (TREE_TYPE
8153 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8154 == TYPE_PRECISION (TREE_TYPE
8155 (TREE_OPERAND
8156 (TREE_OPERAND (exp, 0), 0))))
8157 /* If both operands are extended, they must either both
8158 be zero-extended or both be sign-extended. */
8159 && (TYPE_UNSIGNED (TREE_TYPE
8160 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8161 == TYPE_UNSIGNED (TREE_TYPE
8162 (TREE_OPERAND
8163 (TREE_OPERAND (exp, 0), 0)))))))
8165 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8166 enum machine_mode innermode = TYPE_MODE (op0type);
8167 bool zextend_p = TYPE_UNSIGNED (op0type);
8168 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8169 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8171 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8173 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8175 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8176 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8177 TREE_OPERAND (exp, 1),
8178 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8179 else
8180 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8181 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8182 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8183 goto binop3;
8185 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8186 && innermode == word_mode)
8188 rtx htem, hipart;
8189 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8190 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8191 op1 = convert_modes (innermode, mode,
8192 expand_normal (TREE_OPERAND (exp, 1)),
8193 unsignedp);
8194 else
8195 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8196 temp = expand_binop (mode, other_optab, op0, op1, target,
8197 unsignedp, OPTAB_LIB_WIDEN);
8198 hipart = gen_highpart (innermode, temp);
8199 htem = expand_mult_highpart_adjust (innermode, hipart,
8200 op0, op1, hipart,
8201 zextend_p);
8202 if (htem != hipart)
8203 emit_move_insn (hipart, htem);
8204 return REDUCE_BIT_FIELD (temp);
8208 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8209 subtarget, &op0, &op1, 0);
8210 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8212 case TRUNC_DIV_EXPR:
8213 case FLOOR_DIV_EXPR:
8214 case CEIL_DIV_EXPR:
8215 case ROUND_DIV_EXPR:
8216 case EXACT_DIV_EXPR:
8217 if (modifier == EXPAND_STACK_PARM)
8218 target = 0;
8219 /* Possible optimization: compute the dividend with EXPAND_SUM
8220 then if the divisor is constant can optimize the case
8221 where some terms of the dividend have coeffs divisible by it. */
8222 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8223 subtarget, &op0, &op1, 0);
8224 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8226 case RDIV_EXPR:
8227 goto binop;
8229 case TRUNC_MOD_EXPR:
8230 case FLOOR_MOD_EXPR:
8231 case CEIL_MOD_EXPR:
8232 case ROUND_MOD_EXPR:
8233 if (modifier == EXPAND_STACK_PARM)
8234 target = 0;
8235 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8236 subtarget, &op0, &op1, 0);
8237 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8239 case FIX_ROUND_EXPR:
8240 case FIX_FLOOR_EXPR:
8241 case FIX_CEIL_EXPR:
8242 gcc_unreachable (); /* Not used for C. */
8244 case FIX_TRUNC_EXPR:
8245 op0 = expand_normal (TREE_OPERAND (exp, 0));
8246 if (target == 0 || modifier == EXPAND_STACK_PARM)
8247 target = gen_reg_rtx (mode);
8248 expand_fix (target, op0, unsignedp);
8249 return target;
8251 case FLOAT_EXPR:
8252 op0 = expand_normal (TREE_OPERAND (exp, 0));
8253 if (target == 0 || modifier == EXPAND_STACK_PARM)
8254 target = gen_reg_rtx (mode);
8255 /* expand_float can't figure out what to do if FROM has VOIDmode.
8256 So give it the correct mode. With -O, cse will optimize this. */
8257 if (GET_MODE (op0) == VOIDmode)
8258 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8259 op0);
8260 expand_float (target, op0,
8261 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8262 return target;
8264 case NEGATE_EXPR:
8265 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8266 if (modifier == EXPAND_STACK_PARM)
8267 target = 0;
8268 temp = expand_unop (mode,
8269 optab_for_tree_code (NEGATE_EXPR, type),
8270 op0, target, 0);
8271 gcc_assert (temp);
8272 return REDUCE_BIT_FIELD (temp);
8274 case ABS_EXPR:
8275 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8276 if (modifier == EXPAND_STACK_PARM)
8277 target = 0;
8279 /* ABS_EXPR is not valid for complex arguments. */
8280 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8281 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8283 /* Unsigned abs is simply the operand. Testing here means we don't
8284 risk generating incorrect code below. */
8285 if (TYPE_UNSIGNED (type))
8286 return op0;
8288 return expand_abs (mode, op0, target, unsignedp,
8289 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8291 case MAX_EXPR:
8292 case MIN_EXPR:
8293 target = original_target;
8294 if (target == 0
8295 || modifier == EXPAND_STACK_PARM
8296 || (MEM_P (target) && MEM_VOLATILE_P (target))
8297 || GET_MODE (target) != mode
8298 || (REG_P (target)
8299 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8300 target = gen_reg_rtx (mode);
8301 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8302 target, &op0, &op1, 0);
8304 /* First try to do it with a special MIN or MAX instruction.
8305 If that does not win, use a conditional jump to select the proper
8306 value. */
8307 this_optab = optab_for_tree_code (code, type);
8308 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8309 OPTAB_WIDEN);
8310 if (temp != 0)
8311 return temp;
8313 /* At this point, a MEM target is no longer useful; we will get better
8314 code without it. */
8316 if (! REG_P (target))
8317 target = gen_reg_rtx (mode);
8319 /* If op1 was placed in target, swap op0 and op1. */
8320 if (target != op0 && target == op1)
8322 temp = op0;
8323 op0 = op1;
8324 op1 = temp;
8327 /* We generate better code and avoid problems with op1 mentioning
8328 target by forcing op1 into a pseudo if it isn't a constant. */
8329 if (! CONSTANT_P (op1))
8330 op1 = force_reg (mode, op1);
8333 enum rtx_code comparison_code;
8334 rtx cmpop1 = op1;
8336 if (code == MAX_EXPR)
8337 comparison_code = unsignedp ? GEU : GE;
8338 else
8339 comparison_code = unsignedp ? LEU : LE;
8341 /* Canonicalize to comparisons against 0. */
8342 if (op1 == const1_rtx)
8344 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8345 or (a != 0 ? a : 1) for unsigned.
8346 For MIN we are safe converting (a <= 1 ? a : 1)
8347 into (a <= 0 ? a : 1) */
8348 cmpop1 = const0_rtx;
8349 if (code == MAX_EXPR)
8350 comparison_code = unsignedp ? NE : GT;
8352 if (op1 == constm1_rtx && !unsignedp)
8354 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8355 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8356 cmpop1 = const0_rtx;
8357 if (code == MIN_EXPR)
8358 comparison_code = LT;
8360 #ifdef HAVE_conditional_move
8361 /* Use a conditional move if possible. */
8362 if (can_conditionally_move_p (mode))
8364 rtx insn;
8366 /* ??? Same problem as in expmed.c: emit_conditional_move
8367 forces a stack adjustment via compare_from_rtx, and we
8368 lose the stack adjustment if the sequence we are about
8369 to create is discarded. */
8370 do_pending_stack_adjust ();
8372 start_sequence ();
8374 /* Try to emit the conditional move. */
8375 insn = emit_conditional_move (target, comparison_code,
8376 op0, cmpop1, mode,
8377 op0, op1, mode,
8378 unsignedp);
8380 /* If we could do the conditional move, emit the sequence,
8381 and return. */
8382 if (insn)
8384 rtx seq = get_insns ();
8385 end_sequence ();
8386 emit_insn (seq);
8387 return target;
8390 /* Otherwise discard the sequence and fall back to code with
8391 branches. */
8392 end_sequence ();
8394 #endif
8395 if (target != op0)
8396 emit_move_insn (target, op0);
8398 temp = gen_label_rtx ();
8399 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8400 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8402 emit_move_insn (target, op1);
8403 emit_label (temp);
8404 return target;
8406 case BIT_NOT_EXPR:
8407 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8408 if (modifier == EXPAND_STACK_PARM)
8409 target = 0;
8410 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8411 gcc_assert (temp);
8412 return temp;
8414 /* ??? Can optimize bitwise operations with one arg constant.
8415 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8416 and (a bitwise1 b) bitwise2 b (etc)
8417 but that is probably not worth while. */
8419 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8420 boolean values when we want in all cases to compute both of them. In
8421 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8422 as actual zero-or-1 values and then bitwise anding. In cases where
8423 there cannot be any side effects, better code would be made by
8424 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8425 how to recognize those cases. */
8427 case TRUTH_AND_EXPR:
8428 code = BIT_AND_EXPR;
8429 case BIT_AND_EXPR:
8430 goto binop;
8432 case TRUTH_OR_EXPR:
8433 code = BIT_IOR_EXPR;
8434 case BIT_IOR_EXPR:
8435 goto binop;
8437 case TRUTH_XOR_EXPR:
8438 code = BIT_XOR_EXPR;
8439 case BIT_XOR_EXPR:
8440 goto binop;
8442 case LSHIFT_EXPR:
8443 case RSHIFT_EXPR:
8444 case LROTATE_EXPR:
8445 case RROTATE_EXPR:
8446 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8447 subtarget = 0;
8448 if (modifier == EXPAND_STACK_PARM)
8449 target = 0;
8450 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8451 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8452 unsignedp);
8454 /* Could determine the answer when only additive constants differ. Also,
8455 the addition of one can be handled by changing the condition. */
8456 case LT_EXPR:
8457 case LE_EXPR:
8458 case GT_EXPR:
8459 case GE_EXPR:
8460 case EQ_EXPR:
8461 case NE_EXPR:
8462 case UNORDERED_EXPR:
8463 case ORDERED_EXPR:
8464 case UNLT_EXPR:
8465 case UNLE_EXPR:
8466 case UNGT_EXPR:
8467 case UNGE_EXPR:
8468 case UNEQ_EXPR:
8469 case LTGT_EXPR:
8470 temp = do_store_flag (exp,
8471 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8472 tmode != VOIDmode ? tmode : mode, 0);
8473 if (temp != 0)
8474 return temp;
8476 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8477 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8478 && original_target
8479 && REG_P (original_target)
8480 && (GET_MODE (original_target)
8481 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8483 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8484 VOIDmode, 0);
8486 /* If temp is constant, we can just compute the result. */
8487 if (GET_CODE (temp) == CONST_INT)
8489 if (INTVAL (temp) != 0)
8490 emit_move_insn (target, const1_rtx);
8491 else
8492 emit_move_insn (target, const0_rtx);
8494 return target;
8497 if (temp != original_target)
8499 enum machine_mode mode1 = GET_MODE (temp);
8500 if (mode1 == VOIDmode)
8501 mode1 = tmode != VOIDmode ? tmode : mode;
8503 temp = copy_to_mode_reg (mode1, temp);
8506 op1 = gen_label_rtx ();
8507 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8508 GET_MODE (temp), unsignedp, op1);
8509 emit_move_insn (temp, const1_rtx);
8510 emit_label (op1);
8511 return temp;
8514 /* If no set-flag instruction, must generate a conditional store
8515 into a temporary variable. Drop through and handle this
8516 like && and ||. */
8518 if (! ignore
8519 && (target == 0
8520 || modifier == EXPAND_STACK_PARM
8521 || ! safe_from_p (target, exp, 1)
8522 /* Make sure we don't have a hard reg (such as function's return
8523 value) live across basic blocks, if not optimizing. */
8524 || (!optimize && REG_P (target)
8525 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8526 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8528 if (target)
8529 emit_move_insn (target, const0_rtx);
8531 op1 = gen_label_rtx ();
8532 jumpifnot (exp, op1);
8534 if (target)
8535 emit_move_insn (target, const1_rtx);
8537 emit_label (op1);
8538 return ignore ? const0_rtx : target;
8540 case TRUTH_NOT_EXPR:
8541 if (modifier == EXPAND_STACK_PARM)
8542 target = 0;
8543 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8544 /* The parser is careful to generate TRUTH_NOT_EXPR
8545 only with operands that are always zero or one. */
8546 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8547 target, 1, OPTAB_LIB_WIDEN);
8548 gcc_assert (temp);
8549 return temp;
8551 case STATEMENT_LIST:
8553 tree_stmt_iterator iter;
8555 gcc_assert (ignore);
8557 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8558 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8560 return const0_rtx;
8562 case COND_EXPR:
8563 /* A COND_EXPR with its type being VOID_TYPE represents a
8564 conditional jump and is handled in
8565 expand_gimple_cond_expr. */
8566 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8568 /* Note that COND_EXPRs whose type is a structure or union
8569 are required to be constructed to contain assignments of
8570 a temporary variable, so that we can evaluate them here
8571 for side effect only. If type is void, we must do likewise. */
8573 gcc_assert (!TREE_ADDRESSABLE (type)
8574 && !ignore
8575 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8576 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8578 /* If we are not to produce a result, we have no target. Otherwise,
8579 if a target was specified use it; it will not be used as an
8580 intermediate target unless it is safe. If no target, use a
8581 temporary. */
8583 if (modifier != EXPAND_STACK_PARM
8584 && original_target
8585 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8586 && GET_MODE (original_target) == mode
8587 #ifdef HAVE_conditional_move
8588 && (! can_conditionally_move_p (mode)
8589 || REG_P (original_target))
8590 #endif
8591 && !MEM_P (original_target))
8592 temp = original_target;
8593 else
8594 temp = assign_temp (type, 0, 0, 1);
8596 do_pending_stack_adjust ();
8597 NO_DEFER_POP;
8598 op0 = gen_label_rtx ();
8599 op1 = gen_label_rtx ();
8600 jumpifnot (TREE_OPERAND (exp, 0), op0);
8601 store_expr (TREE_OPERAND (exp, 1), temp,
8602 modifier == EXPAND_STACK_PARM);
8604 emit_jump_insn (gen_jump (op1));
8605 emit_barrier ();
8606 emit_label (op0);
8607 store_expr (TREE_OPERAND (exp, 2), temp,
8608 modifier == EXPAND_STACK_PARM);
8610 emit_label (op1);
8611 OK_DEFER_POP;
8612 return temp;
8614 case VEC_COND_EXPR:
8615 target = expand_vec_cond_expr (exp, target);
8616 return target;
8618 case MODIFY_EXPR:
8620 tree lhs = TREE_OPERAND (exp, 0);
8621 tree rhs = TREE_OPERAND (exp, 1);
8623 gcc_assert (ignore);
8625 /* Check for |= or &= of a bitfield of size one into another bitfield
8626 of size 1. In this case, (unless we need the result of the
8627 assignment) we can do this more efficiently with a
8628 test followed by an assignment, if necessary.
8630 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8631 things change so we do, this code should be enhanced to
8632 support it. */
8633 if (TREE_CODE (lhs) == COMPONENT_REF
8634 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8635 || TREE_CODE (rhs) == BIT_AND_EXPR)
8636 && TREE_OPERAND (rhs, 0) == lhs
8637 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8638 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8639 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8641 rtx label = gen_label_rtx ();
8642 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
8643 do_jump (TREE_OPERAND (rhs, 1),
8644 value ? label : 0,
8645 value ? 0 : label);
8646 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value));
8647 do_pending_stack_adjust ();
8648 emit_label (label);
8649 return const0_rtx;
8652 expand_assignment (lhs, rhs);
8654 return const0_rtx;
8657 case RETURN_EXPR:
8658 if (!TREE_OPERAND (exp, 0))
8659 expand_null_return ();
8660 else
8661 expand_return (TREE_OPERAND (exp, 0));
8662 return const0_rtx;
8664 case ADDR_EXPR:
8665 return expand_expr_addr_expr (exp, target, tmode, modifier);
8667 case COMPLEX_EXPR:
8668 /* Get the rtx code of the operands. */
8669 op0 = expand_normal (TREE_OPERAND (exp, 0));
8670 op1 = expand_normal (TREE_OPERAND (exp, 1));
8672 if (!target)
8673 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8675 /* Move the real (op0) and imaginary (op1) parts to their location. */
8676 write_complex_part (target, op0, false);
8677 write_complex_part (target, op1, true);
8679 return target;
8681 case REALPART_EXPR:
8682 op0 = expand_normal (TREE_OPERAND (exp, 0));
8683 return read_complex_part (op0, false);
8685 case IMAGPART_EXPR:
8686 op0 = expand_normal (TREE_OPERAND (exp, 0));
8687 return read_complex_part (op0, true);
8689 case RESX_EXPR:
8690 expand_resx_expr (exp);
8691 return const0_rtx;
8693 case TRY_CATCH_EXPR:
8694 case CATCH_EXPR:
8695 case EH_FILTER_EXPR:
8696 case TRY_FINALLY_EXPR:
8697 /* Lowered by tree-eh.c. */
8698 gcc_unreachable ();
8700 case WITH_CLEANUP_EXPR:
8701 case CLEANUP_POINT_EXPR:
8702 case TARGET_EXPR:
8703 case CASE_LABEL_EXPR:
8704 case VA_ARG_EXPR:
8705 case BIND_EXPR:
8706 case INIT_EXPR:
8707 case CONJ_EXPR:
8708 case COMPOUND_EXPR:
8709 case PREINCREMENT_EXPR:
8710 case PREDECREMENT_EXPR:
8711 case POSTINCREMENT_EXPR:
8712 case POSTDECREMENT_EXPR:
8713 case LOOP_EXPR:
8714 case EXIT_EXPR:
8715 case TRUTH_ANDIF_EXPR:
8716 case TRUTH_ORIF_EXPR:
8717 /* Lowered by gimplify.c. */
8718 gcc_unreachable ();
8720 case EXC_PTR_EXPR:
8721 return get_exception_pointer (cfun);
8723 case FILTER_EXPR:
8724 return get_exception_filter (cfun);
8726 case FDESC_EXPR:
8727 /* Function descriptors are not valid except for as
8728 initialization constants, and should not be expanded. */
8729 gcc_unreachable ();
8731 case SWITCH_EXPR:
8732 expand_case (exp);
8733 return const0_rtx;
8735 case LABEL_EXPR:
8736 expand_label (TREE_OPERAND (exp, 0));
8737 return const0_rtx;
8739 case ASM_EXPR:
8740 expand_asm_expr (exp);
8741 return const0_rtx;
8743 case WITH_SIZE_EXPR:
8744 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8745 have pulled out the size to use in whatever context it needed. */
8746 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8747 modifier, alt_rtl);
8749 case REALIGN_LOAD_EXPR:
8751 tree oprnd0 = TREE_OPERAND (exp, 0);
8752 tree oprnd1 = TREE_OPERAND (exp, 1);
8753 tree oprnd2 = TREE_OPERAND (exp, 2);
8754 rtx op2;
8756 this_optab = optab_for_tree_code (code, type);
8757 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8758 op2 = expand_normal (oprnd2);
8759 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8760 target, unsignedp);
8761 gcc_assert (temp);
8762 return temp;
8765 case DOT_PROD_EXPR:
8767 tree oprnd0 = TREE_OPERAND (exp, 0);
8768 tree oprnd1 = TREE_OPERAND (exp, 1);
8769 tree oprnd2 = TREE_OPERAND (exp, 2);
8770 rtx op2;
8772 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8773 op2 = expand_normal (oprnd2);
8774 target = expand_widen_pattern_expr (exp, op0, op1, op2,
8775 target, unsignedp);
8776 return target;
8779 case WIDEN_SUM_EXPR:
8781 tree oprnd0 = TREE_OPERAND (exp, 0);
8782 tree oprnd1 = TREE_OPERAND (exp, 1);
8784 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8785 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
8786 target, unsignedp);
8787 return target;
8790 case REDUC_MAX_EXPR:
8791 case REDUC_MIN_EXPR:
8792 case REDUC_PLUS_EXPR:
8794 op0 = expand_normal (TREE_OPERAND (exp, 0));
8795 this_optab = optab_for_tree_code (code, type);
8796 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8797 gcc_assert (temp);
8798 return temp;
8801 case VEC_LSHIFT_EXPR:
8802 case VEC_RSHIFT_EXPR:
8804 target = expand_vec_shift_expr (exp, target);
8805 return target;
8808 default:
8809 return lang_hooks.expand_expr (exp, original_target, tmode,
8810 modifier, alt_rtl);
8813 /* Here to do an ordinary binary operator. */
8814 binop:
8815 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8816 subtarget, &op0, &op1, 0);
8817 binop2:
8818 this_optab = optab_for_tree_code (code, type);
8819 binop3:
8820 if (modifier == EXPAND_STACK_PARM)
8821 target = 0;
8822 temp = expand_binop (mode, this_optab, op0, op1, target,
8823 unsignedp, OPTAB_LIB_WIDEN);
8824 gcc_assert (temp);
8825 return REDUCE_BIT_FIELD (temp);
8827 #undef REDUCE_BIT_FIELD
8829 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8830 signedness of TYPE), possibly returning the result in TARGET. */
8831 static rtx
8832 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8834 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8835 if (target && GET_MODE (target) != GET_MODE (exp))
8836 target = 0;
8837 /* For constant values, reduce using build_int_cst_type. */
8838 if (GET_CODE (exp) == CONST_INT)
8840 HOST_WIDE_INT value = INTVAL (exp);
8841 tree t = build_int_cst_type (type, value);
8842 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
8844 else if (TYPE_UNSIGNED (type))
8846 rtx mask;
8847 if (prec < HOST_BITS_PER_WIDE_INT)
8848 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8849 GET_MODE (exp));
8850 else
8851 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8852 ((unsigned HOST_WIDE_INT) 1
8853 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8854 GET_MODE (exp));
8855 return expand_and (GET_MODE (exp), exp, mask, target);
8857 else
8859 tree count = build_int_cst (NULL_TREE,
8860 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8861 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8862 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8866 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8867 when applied to the address of EXP produces an address known to be
8868 aligned more than BIGGEST_ALIGNMENT. */
8870 static int
8871 is_aligning_offset (tree offset, tree exp)
8873 /* Strip off any conversions. */
8874 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8875 || TREE_CODE (offset) == NOP_EXPR
8876 || TREE_CODE (offset) == CONVERT_EXPR)
8877 offset = TREE_OPERAND (offset, 0);
8879 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8880 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8881 if (TREE_CODE (offset) != BIT_AND_EXPR
8882 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8883 || compare_tree_int (TREE_OPERAND (offset, 1),
8884 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8885 || !exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8886 return 0;
8888 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8889 It must be NEGATE_EXPR. Then strip any more conversions. */
8890 offset = TREE_OPERAND (offset, 0);
8891 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8892 || TREE_CODE (offset) == NOP_EXPR
8893 || TREE_CODE (offset) == CONVERT_EXPR)
8894 offset = TREE_OPERAND (offset, 0);
8896 if (TREE_CODE (offset) != NEGATE_EXPR)
8897 return 0;
8899 offset = TREE_OPERAND (offset, 0);
8900 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8901 || TREE_CODE (offset) == NOP_EXPR
8902 || TREE_CODE (offset) == CONVERT_EXPR)
8903 offset = TREE_OPERAND (offset, 0);
8905 /* This must now be the address of EXP. */
8906 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8909 /* Return the tree node if an ARG corresponds to a string constant or zero
8910 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8911 in bytes within the string that ARG is accessing. The type of the
8912 offset will be `sizetype'. */
8914 tree
8915 string_constant (tree arg, tree *ptr_offset)
8917 tree array, offset;
8918 STRIP_NOPS (arg);
8920 if (TREE_CODE (arg) == ADDR_EXPR)
8922 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8924 *ptr_offset = size_zero_node;
8925 return TREE_OPERAND (arg, 0);
8927 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8929 array = TREE_OPERAND (arg, 0);
8930 offset = size_zero_node;
8932 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8934 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8935 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8936 if (TREE_CODE (array) != STRING_CST
8937 && TREE_CODE (array) != VAR_DECL)
8938 return 0;
8940 else
8941 return 0;
8943 else if (TREE_CODE (arg) == PLUS_EXPR)
8945 tree arg0 = TREE_OPERAND (arg, 0);
8946 tree arg1 = TREE_OPERAND (arg, 1);
8948 STRIP_NOPS (arg0);
8949 STRIP_NOPS (arg1);
8951 if (TREE_CODE (arg0) == ADDR_EXPR
8952 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8953 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8955 array = TREE_OPERAND (arg0, 0);
8956 offset = arg1;
8958 else if (TREE_CODE (arg1) == ADDR_EXPR
8959 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8960 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8962 array = TREE_OPERAND (arg1, 0);
8963 offset = arg0;
8965 else
8966 return 0;
8968 else
8969 return 0;
8971 if (TREE_CODE (array) == STRING_CST)
8973 *ptr_offset = fold_convert (sizetype, offset);
8974 return array;
8976 else if (TREE_CODE (array) == VAR_DECL)
8978 int length;
8980 /* Variables initialized to string literals can be handled too. */
8981 if (DECL_INITIAL (array) == NULL_TREE
8982 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8983 return 0;
8985 /* If they are read-only, non-volatile and bind locally. */
8986 if (! TREE_READONLY (array)
8987 || TREE_SIDE_EFFECTS (array)
8988 || ! targetm.binds_local_p (array))
8989 return 0;
8991 /* Avoid const char foo[4] = "abcde"; */
8992 if (DECL_SIZE_UNIT (array) == NULL_TREE
8993 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8994 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8995 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8996 return 0;
8998 /* If variable is bigger than the string literal, OFFSET must be constant
8999 and inside of the bounds of the string literal. */
9000 offset = fold_convert (sizetype, offset);
9001 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9002 && (! host_integerp (offset, 1)
9003 || compare_tree_int (offset, length) >= 0))
9004 return 0;
9006 *ptr_offset = offset;
9007 return DECL_INITIAL (array);
9010 return 0;
9013 /* Generate code to calculate EXP using a store-flag instruction
9014 and return an rtx for the result. EXP is either a comparison
9015 or a TRUTH_NOT_EXPR whose operand is a comparison.
9017 If TARGET is nonzero, store the result there if convenient.
9019 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9020 cheap.
9022 Return zero if there is no suitable set-flag instruction
9023 available on this machine.
9025 Once expand_expr has been called on the arguments of the comparison,
9026 we are committed to doing the store flag, since it is not safe to
9027 re-evaluate the expression. We emit the store-flag insn by calling
9028 emit_store_flag, but only expand the arguments if we have a reason
9029 to believe that emit_store_flag will be successful. If we think that
9030 it will, but it isn't, we have to simulate the store-flag with a
9031 set/jump/set sequence. */
9033 static rtx
9034 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9036 enum rtx_code code;
9037 tree arg0, arg1, type;
9038 tree tem;
9039 enum machine_mode operand_mode;
9040 int invert = 0;
9041 int unsignedp;
9042 rtx op0, op1;
9043 enum insn_code icode;
9044 rtx subtarget = target;
9045 rtx result, label;
9047 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9048 result at the end. We can't simply invert the test since it would
9049 have already been inverted if it were valid. This case occurs for
9050 some floating-point comparisons. */
9052 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9053 invert = 1, exp = TREE_OPERAND (exp, 0);
9055 arg0 = TREE_OPERAND (exp, 0);
9056 arg1 = TREE_OPERAND (exp, 1);
9058 /* Don't crash if the comparison was erroneous. */
9059 if (arg0 == error_mark_node || arg1 == error_mark_node)
9060 return const0_rtx;
9062 type = TREE_TYPE (arg0);
9063 operand_mode = TYPE_MODE (type);
9064 unsignedp = TYPE_UNSIGNED (type);
9066 /* We won't bother with BLKmode store-flag operations because it would mean
9067 passing a lot of information to emit_store_flag. */
9068 if (operand_mode == BLKmode)
9069 return 0;
9071 /* We won't bother with store-flag operations involving function pointers
9072 when function pointers must be canonicalized before comparisons. */
9073 #ifdef HAVE_canonicalize_funcptr_for_compare
9074 if (HAVE_canonicalize_funcptr_for_compare
9075 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9076 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9077 == FUNCTION_TYPE))
9078 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9079 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9080 == FUNCTION_TYPE))))
9081 return 0;
9082 #endif
9084 STRIP_NOPS (arg0);
9085 STRIP_NOPS (arg1);
9087 /* Get the rtx comparison code to use. We know that EXP is a comparison
9088 operation of some type. Some comparisons against 1 and -1 can be
9089 converted to comparisons with zero. Do so here so that the tests
9090 below will be aware that we have a comparison with zero. These
9091 tests will not catch constants in the first operand, but constants
9092 are rarely passed as the first operand. */
9094 switch (TREE_CODE (exp))
9096 case EQ_EXPR:
9097 code = EQ;
9098 break;
9099 case NE_EXPR:
9100 code = NE;
9101 break;
9102 case LT_EXPR:
9103 if (integer_onep (arg1))
9104 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9105 else
9106 code = unsignedp ? LTU : LT;
9107 break;
9108 case LE_EXPR:
9109 if (! unsignedp && integer_all_onesp (arg1))
9110 arg1 = integer_zero_node, code = LT;
9111 else
9112 code = unsignedp ? LEU : LE;
9113 break;
9114 case GT_EXPR:
9115 if (! unsignedp && integer_all_onesp (arg1))
9116 arg1 = integer_zero_node, code = GE;
9117 else
9118 code = unsignedp ? GTU : GT;
9119 break;
9120 case GE_EXPR:
9121 if (integer_onep (arg1))
9122 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9123 else
9124 code = unsignedp ? GEU : GE;
9125 break;
9127 case UNORDERED_EXPR:
9128 code = UNORDERED;
9129 break;
9130 case ORDERED_EXPR:
9131 code = ORDERED;
9132 break;
9133 case UNLT_EXPR:
9134 code = UNLT;
9135 break;
9136 case UNLE_EXPR:
9137 code = UNLE;
9138 break;
9139 case UNGT_EXPR:
9140 code = UNGT;
9141 break;
9142 case UNGE_EXPR:
9143 code = UNGE;
9144 break;
9145 case UNEQ_EXPR:
9146 code = UNEQ;
9147 break;
9148 case LTGT_EXPR:
9149 code = LTGT;
9150 break;
9152 default:
9153 gcc_unreachable ();
9156 /* Put a constant second. */
9157 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9159 tem = arg0; arg0 = arg1; arg1 = tem;
9160 code = swap_condition (code);
9163 /* If this is an equality or inequality test of a single bit, we can
9164 do this by shifting the bit being tested to the low-order bit and
9165 masking the result with the constant 1. If the condition was EQ,
9166 we xor it with 1. This does not require an scc insn and is faster
9167 than an scc insn even if we have it.
9169 The code to make this transformation was moved into fold_single_bit_test,
9170 so we just call into the folder and expand its result. */
9172 if ((code == NE || code == EQ)
9173 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9174 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9176 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9177 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9178 arg0, arg1, type),
9179 target, VOIDmode, EXPAND_NORMAL);
9182 /* Now see if we are likely to be able to do this. Return if not. */
9183 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9184 return 0;
9186 icode = setcc_gen_code[(int) code];
9187 if (icode == CODE_FOR_nothing
9188 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9190 /* We can only do this if it is one of the special cases that
9191 can be handled without an scc insn. */
9192 if ((code == LT && integer_zerop (arg1))
9193 || (! only_cheap && code == GE && integer_zerop (arg1)))
9195 else if (! only_cheap && (code == NE || code == EQ)
9196 && TREE_CODE (type) != REAL_TYPE
9197 && ((abs_optab->handlers[(int) operand_mode].insn_code
9198 != CODE_FOR_nothing)
9199 || (ffs_optab->handlers[(int) operand_mode].insn_code
9200 != CODE_FOR_nothing)))
9202 else
9203 return 0;
9206 if (! get_subtarget (target)
9207 || GET_MODE (subtarget) != operand_mode)
9208 subtarget = 0;
9210 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9212 if (target == 0)
9213 target = gen_reg_rtx (mode);
9215 result = emit_store_flag (target, code, op0, op1,
9216 operand_mode, unsignedp, 1);
9218 if (result)
9220 if (invert)
9221 result = expand_binop (mode, xor_optab, result, const1_rtx,
9222 result, 0, OPTAB_LIB_WIDEN);
9223 return result;
9226 /* If this failed, we have to do this with set/compare/jump/set code. */
9227 if (!REG_P (target)
9228 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9229 target = gen_reg_rtx (GET_MODE (target));
9231 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9232 result = compare_from_rtx (op0, op1, code, unsignedp,
9233 operand_mode, NULL_RTX);
9234 if (GET_CODE (result) == CONST_INT)
9235 return (((result == const0_rtx && ! invert)
9236 || (result != const0_rtx && invert))
9237 ? const0_rtx : const1_rtx);
9239 /* The code of RESULT may not match CODE if compare_from_rtx
9240 decided to swap its operands and reverse the original code.
9242 We know that compare_from_rtx returns either a CONST_INT or
9243 a new comparison code, so it is safe to just extract the
9244 code from RESULT. */
9245 code = GET_CODE (result);
9247 label = gen_label_rtx ();
9248 gcc_assert (bcc_gen_fctn[(int) code]);
9250 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9251 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9252 emit_label (label);
9254 return target;
9258 /* Stubs in case we haven't got a casesi insn. */
9259 #ifndef HAVE_casesi
9260 # define HAVE_casesi 0
9261 # define gen_casesi(a, b, c, d, e) (0)
9262 # define CODE_FOR_casesi CODE_FOR_nothing
9263 #endif
9265 /* If the machine does not have a case insn that compares the bounds,
9266 this means extra overhead for dispatch tables, which raises the
9267 threshold for using them. */
9268 #ifndef CASE_VALUES_THRESHOLD
9269 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9270 #endif /* CASE_VALUES_THRESHOLD */
9272 unsigned int
9273 case_values_threshold (void)
9275 return CASE_VALUES_THRESHOLD;
9278 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9279 0 otherwise (i.e. if there is no casesi instruction). */
9281 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9282 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9284 enum machine_mode index_mode = SImode;
9285 int index_bits = GET_MODE_BITSIZE (index_mode);
9286 rtx op1, op2, index;
9287 enum machine_mode op_mode;
9289 if (! HAVE_casesi)
9290 return 0;
9292 /* Convert the index to SImode. */
9293 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9295 enum machine_mode omode = TYPE_MODE (index_type);
9296 rtx rangertx = expand_normal (range);
9298 /* We must handle the endpoints in the original mode. */
9299 index_expr = build2 (MINUS_EXPR, index_type,
9300 index_expr, minval);
9301 minval = integer_zero_node;
9302 index = expand_normal (index_expr);
9303 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9304 omode, 1, default_label);
9305 /* Now we can safely truncate. */
9306 index = convert_to_mode (index_mode, index, 0);
9308 else
9310 if (TYPE_MODE (index_type) != index_mode)
9312 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9313 index_expr = fold_convert (index_type, index_expr);
9316 index = expand_normal (index_expr);
9319 do_pending_stack_adjust ();
9321 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9322 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9323 (index, op_mode))
9324 index = copy_to_mode_reg (op_mode, index);
9326 op1 = expand_normal (minval);
9328 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9329 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9330 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9331 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9332 (op1, op_mode))
9333 op1 = copy_to_mode_reg (op_mode, op1);
9335 op2 = expand_normal (range);
9337 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9338 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9339 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9340 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9341 (op2, op_mode))
9342 op2 = copy_to_mode_reg (op_mode, op2);
9344 emit_jump_insn (gen_casesi (index, op1, op2,
9345 table_label, default_label));
9346 return 1;
9349 /* Attempt to generate a tablejump instruction; same concept. */
9350 #ifndef HAVE_tablejump
9351 #define HAVE_tablejump 0
9352 #define gen_tablejump(x, y) (0)
9353 #endif
9355 /* Subroutine of the next function.
9357 INDEX is the value being switched on, with the lowest value
9358 in the table already subtracted.
9359 MODE is its expected mode (needed if INDEX is constant).
9360 RANGE is the length of the jump table.
9361 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9363 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9364 index value is out of range. */
9366 static void
9367 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9368 rtx default_label)
9370 rtx temp, vector;
9372 if (INTVAL (range) > cfun->max_jumptable_ents)
9373 cfun->max_jumptable_ents = INTVAL (range);
9375 /* Do an unsigned comparison (in the proper mode) between the index
9376 expression and the value which represents the length of the range.
9377 Since we just finished subtracting the lower bound of the range
9378 from the index expression, this comparison allows us to simultaneously
9379 check that the original index expression value is both greater than
9380 or equal to the minimum value of the range and less than or equal to
9381 the maximum value of the range. */
9383 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9384 default_label);
9386 /* If index is in range, it must fit in Pmode.
9387 Convert to Pmode so we can index with it. */
9388 if (mode != Pmode)
9389 index = convert_to_mode (Pmode, index, 1);
9391 /* Don't let a MEM slip through, because then INDEX that comes
9392 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9393 and break_out_memory_refs will go to work on it and mess it up. */
9394 #ifdef PIC_CASE_VECTOR_ADDRESS
9395 if (flag_pic && !REG_P (index))
9396 index = copy_to_mode_reg (Pmode, index);
9397 #endif
9399 /* If flag_force_addr were to affect this address
9400 it could interfere with the tricky assumptions made
9401 about addresses that contain label-refs,
9402 which may be valid only very near the tablejump itself. */
9403 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9404 GET_MODE_SIZE, because this indicates how large insns are. The other
9405 uses should all be Pmode, because they are addresses. This code
9406 could fail if addresses and insns are not the same size. */
9407 index = gen_rtx_PLUS (Pmode,
9408 gen_rtx_MULT (Pmode, index,
9409 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9410 gen_rtx_LABEL_REF (Pmode, table_label));
9411 #ifdef PIC_CASE_VECTOR_ADDRESS
9412 if (flag_pic)
9413 index = PIC_CASE_VECTOR_ADDRESS (index);
9414 else
9415 #endif
9416 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9417 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9418 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9419 convert_move (temp, vector, 0);
9421 emit_jump_insn (gen_tablejump (temp, table_label));
9423 /* If we are generating PIC code or if the table is PC-relative, the
9424 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9425 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9426 emit_barrier ();
9430 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9431 rtx table_label, rtx default_label)
9433 rtx index;
9435 if (! HAVE_tablejump)
9436 return 0;
9438 index_expr = fold_build2 (MINUS_EXPR, index_type,
9439 fold_convert (index_type, index_expr),
9440 fold_convert (index_type, minval));
9441 index = expand_normal (index_expr);
9442 do_pending_stack_adjust ();
9444 do_tablejump (index, TYPE_MODE (index_type),
9445 convert_modes (TYPE_MODE (index_type),
9446 TYPE_MODE (TREE_TYPE (range)),
9447 expand_normal (range),
9448 TYPE_UNSIGNED (TREE_TYPE (range))),
9449 table_label, default_label);
9450 return 1;
9453 /* Nonzero if the mode is a valid vector mode for this architecture.
9454 This returns nonzero even if there is no hardware support for the
9455 vector mode, but we can emulate with narrower modes. */
9458 vector_mode_valid_p (enum machine_mode mode)
9460 enum mode_class class = GET_MODE_CLASS (mode);
9461 enum machine_mode innermode;
9463 /* Doh! What's going on? */
9464 if (class != MODE_VECTOR_INT
9465 && class != MODE_VECTOR_FLOAT)
9466 return 0;
9468 /* Hardware support. Woo hoo! */
9469 if (targetm.vector_mode_supported_p (mode))
9470 return 1;
9472 innermode = GET_MODE_INNER (mode);
9474 /* We should probably return 1 if requesting V4DI and we have no DI,
9475 but we have V2DI, but this is probably very unlikely. */
9477 /* If we have support for the inner mode, we can safely emulate it.
9478 We may not have V2DI, but me can emulate with a pair of DIs. */
9479 return targetm.scalar_mode_supported_p (innermode);
9482 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9483 static rtx
9484 const_vector_from_tree (tree exp)
9486 rtvec v;
9487 int units, i;
9488 tree link, elt;
9489 enum machine_mode inner, mode;
9491 mode = TYPE_MODE (TREE_TYPE (exp));
9493 if (initializer_zerop (exp))
9494 return CONST0_RTX (mode);
9496 units = GET_MODE_NUNITS (mode);
9497 inner = GET_MODE_INNER (mode);
9499 v = rtvec_alloc (units);
9501 link = TREE_VECTOR_CST_ELTS (exp);
9502 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9504 elt = TREE_VALUE (link);
9506 if (TREE_CODE (elt) == REAL_CST)
9507 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9508 inner);
9509 else
9510 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9511 TREE_INT_CST_HIGH (elt),
9512 inner);
9515 /* Initialize remaining elements to 0. */
9516 for (; i < units; ++i)
9517 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9519 return gen_rtx_CONST_VECTOR (mode, v);
9521 #include "gt-expr.h"