/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
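
/* An illustrative sketch (not part of the upstream sources): with
   STACK_PUSH_CODE == PRE_DEC, a push of register R in SImode is emitted
   roughly as

     (set (mem:SI (pre_dec:P (reg:P sp))) (reg:SI R))

   i.e. the stack pointer is decremented before the store, matching a
   downward-growing stack.  PRE_INC is the mirror image for targets
   whose stack grows upward.  */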
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};

static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, int);

static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
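
/* A worked example of this heuristic (illustrative numbers only): on a
   hypothetical target with MOVE_MAX_PIECES == 4 and MOVE_RATIO == 3,
   copying 8 bytes at 4-byte alignment takes two SImode moves, so
   move_by_pieces_ninsns returns 2 < 3 and MOVE_BY_PIECES_P is true;
   copying 16 bytes would take four moves, so the block-move machinery
   is used instead.  */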
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert (GET_MODE_PRECISION (from_mode)
		  != GET_MODE_PRECISION (to_mode));

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
		  != CODE_FOR_nothing);

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      if (to_mode == full_mode)
	return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
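
/* An illustrative (hypothetical) use: to widen a QImode pseudo REG
   to SImode with zero extension, a caller could write

     rtx wide = convert_to_mode (SImode, reg, 1);

   which either refers to a part of REG in place or emits the
   conversion into a fresh SImode temporary.  */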
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from an integer constant into MODE is always equivalent
     to a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
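
/* For instance (illustrative numbers only): on a host with a 64-bit
   HOST_WIDE_INT, 2 * sizeof (HOST_WIDE_INT) is 16 bytes, so a target
   whose MOVE_MAX_PIECES is 8 gets STORE_MAX_PIECES == 8, while a
   hypothetical target with MOVE_MAX_PIECES == 32 would still be
   capped at 16, the widest immediate GCC can represent internally.  */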
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
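
/* A worked example (illustrative only, assuming MOVE_MAX_PIECES == 4
   and all mov optabs available): with L == 7 at 4-byte alignment on a
   typical 32-bit target, the loop counts one SImode move (7 / 4,
   leaving 3 bytes), one HImode move (leaving 1 byte) and one QImode
   move, so the function returns 3.  */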
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
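
/* A hypothetical caller's view of the above: copying a 32-byte BLKmode
   temporary DST from SRC might look like

     emit_block_move (dst, src, GEN_INT (32), BLOCK_OP_NORMAL);

   which is free to expand into piecewise moves, a target movmem insn,
   or a call to memcpy, whichever the heuristics above prefer.  */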
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    (void) fn;
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		      call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
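
/* The loop emitted above corresponds roughly to this C sketch
   (illustrative only):

     for (iter = 0; iter < size; iter++)
       ((char *) x)[iter] = ((char *) y)[iter];

   with the comparison placed at the bottom so the body is skipped
   entirely when SIZE is zero.  */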
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
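
/* Schematically (illustrative register numbers only), a two-register
   group such as

     (parallel [(expr_list (reg:DI 3) (const_int 0))
		(expr_list (reg:DI 4) (const_int 8))])

   describes a value whose bytes 0-7 live in one register and bytes
   8-15 in another; gen_group_rtx clones this shape with fresh
   pseudos in the same modes.  */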
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
      else if (CONSTANT_P (src)
	       || (REG_P (src) && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}

/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
	dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
	dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
	dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      build_int_cst (NULL_TREE, shift),
				      tmps[i], 0);
	    }
	  bytelen = ssize - bytepos;
	}

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
	      dest = assign_stack_temp (GET_MODE (dest),
					GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
			      tmps[i]);
	      dst = dest;
	      break;
	    }
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
1975 if (orig_dst != dst)
1976 emit_move_insn (orig_dst, dst);
1979 /* Generate code to copy a BLKmode object of TYPE out of a
1980 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1981 is null, a stack temporary is created. TGTBLK is returned.
1983 The purpose of this routine is to handle functions that return
1984 BLKmode structures in registers. Some machines (the PA for example)
1985 want to return all small structures in registers regardless of the
1986 structure's alignment. */
1989 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
1991 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
1992 rtx src = NULL, dst = NULL;
1993 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
1994 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
1996 if (tgtblk == 0)
1998 tgtblk = assign_temp (build_qualified_type (type,
1999 (TYPE_QUALS (type)
2000 | TYPE_QUAL_CONST)),
2001 0, 1, 1);
2002 preserve_temp_slots (tgtblk);
2005 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2006 into a new pseudo which is a full word. */
2008 if (GET_MODE (srcreg) != BLKmode
2009 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2010 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2012 /* If the structure doesn't take up a whole number of words, see whether
2013 SRCREG is padded on the left or on the right. If it's on the left,
2014 set PADDING_CORRECTION to the number of bits to skip.
2016 In most ABIs, the structure will be returned at the least significant
2017 end of the register, which translates to right padding on little-endian
2018 targets and left padding on big-endian targets. The opposite
2019 holds if the structure is returned at the most significant
2020 end of the register. */
2021 if (bytes % UNITS_PER_WORD != 0
2022 && (targetm.calls.return_in_msb (type)
2023 ? !BYTES_BIG_ENDIAN
2024 : BYTES_BIG_ENDIAN))
2025 padding_correction
2026 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2028 /* Copy the structure BITSIZE bits at a time.
2030 We could probably emit more efficient code for machines which do not use
2031 strict alignment, but it doesn't seem worth the effort at the current
2032 time. */
2033 for (bitpos = 0, xbitpos = padding_correction;
2034 bitpos < bytes * BITS_PER_UNIT;
2035 bitpos += bitsize, xbitpos += bitsize)
2037 /* We need a new source operand each time xbitpos is on a
2038 word boundary and when xbitpos == padding_correction
2039 (the first time through). */
2040 if (xbitpos % BITS_PER_WORD == 0
2041 || xbitpos == padding_correction)
2042 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2043 GET_MODE (srcreg));
2045 /* We need a new destination operand each time bitpos is on
2046 a word boundary. */
2047 if (bitpos % BITS_PER_WORD == 0)
2048 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2050 /* Use xbitpos for the source extraction (right justified) and
2051 bitpos for the destination store (left justified). */
2052 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2053 extract_bit_field (src, bitsize,
2054 xbitpos % BITS_PER_WORD, 1,
2055 NULL_RTX, word_mode, word_mode));
2058 return tgtblk;
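/* A worked example of the padding correction above, assuming 32-bit
   words: a 6-byte structure occupies one full word plus 16 bits of a
   second word, so bytes % UNITS_PER_WORD is 2 and PADDING_CORRECTION
   is 32 - 2 * 8 = 16 bits.  When that padding is on the left, the copy
   loop starts extracting at bit 16 of the source registers while
   storing from bit 0 of the target block.  */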
2061 /* Add a USE expression for REG to the (possibly empty) list pointed
2062 to by CALL_FUSAGE. REG must denote a hard register. */
2064 void
2065 use_reg (rtx *call_fusage, rtx reg)
2067 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2069 *call_fusage
2070 = gen_rtx_EXPR_LIST (VOIDmode,
2071 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2074 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2075 starting at REGNO. All of these registers must be hard registers. */
2077 void
2078 use_regs (rtx *call_fusage, int regno, int nregs)
2080 int i;
2082 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2084 for (i = 0; i < nregs; i++)
2085 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2088 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2089 PARALLEL REGS. This is for calls that pass values in multiple
2090 non-contiguous locations. The Irix 6 ABI has examples of this. */
2092 void
2093 use_group_regs (rtx *call_fusage, rtx regs)
2095 int i;
2097 for (i = 0; i < XVECLEN (regs, 0); i++)
2099 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2101 /* A NULL entry means the parameter goes both on the stack and in
2102 registers. This can also be a MEM for targets that pass values
2103 partially on the stack and partially in registers. */
2104 if (reg != 0 && REG_P (reg))
2105 use_reg (call_fusage, reg);
2110 /* Determine whether the LEN bytes generated by CONSTFUN can be
2111 stored to memory using several move instructions. CONSTFUNDATA is
2112 a pointer which will be passed as an argument in every CONSTFUN call.
2113 ALIGN is the maximum alignment we can assume. Return nonzero if a
2114 call to store_by_pieces should succeed. */
2117 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2118 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2119 void *constfundata, unsigned int align)
2121 unsigned HOST_WIDE_INT l;
2122 unsigned int max_size;
2123 HOST_WIDE_INT offset = 0;
2124 enum machine_mode mode, tmode;
2125 enum insn_code icode;
2126 int reverse;
2127 rtx cst;
2129 if (len == 0)
2130 return 1;
2132 if (! STORE_BY_PIECES_P (len, align))
2133 return 0;
2135 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2136 if (align >= GET_MODE_ALIGNMENT (tmode))
2137 align = GET_MODE_ALIGNMENT (tmode);
2138 else
2140 enum machine_mode xmode;
2142 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2143 tmode != VOIDmode;
2144 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2145 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2146 || SLOW_UNALIGNED_ACCESS (tmode, align))
2147 break;
2149 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2152 /* We would first store what we can in the largest integer mode, then go to
2153 successively smaller modes. */
2155 for (reverse = 0;
2156 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2157 reverse++)
2159 l = len;
2160 mode = VOIDmode;
2161 max_size = STORE_MAX_PIECES + 1;
2162 while (max_size > 1)
2164 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2165 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2166 if (GET_MODE_SIZE (tmode) < max_size)
2167 mode = tmode;
2169 if (mode == VOIDmode)
2170 break;
2172 icode = mov_optab->handlers[(int) mode].insn_code;
2173 if (icode != CODE_FOR_nothing
2174 && align >= GET_MODE_ALIGNMENT (mode))
2176 unsigned int size = GET_MODE_SIZE (mode);
2178 while (l >= size)
2180 if (reverse)
2181 offset -= size;
2183 cst = (*constfun) (constfundata, offset, mode);
2184 if (!LEGITIMATE_CONSTANT_P (cst))
2185 return 0;
2187 if (!reverse)
2188 offset += size;
2190 l -= size;
2194 max_size = GET_MODE_SIZE (mode);
2197 /* The code above should have handled everything. */
2198 gcc_assert (!l);
2201 return 1;
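/* For illustration, a minimal CONSTFUN in the style used by the string
   builtins.  The name and the repeated-byte behavior are hypothetical,
   not taken from this file.  It builds a mode-sized constant consisting
   of one byte repeated:

     static rtx
     repeated_byte_constfun (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                             enum machine_mode mode)
     {
       unsigned char byte = *(unsigned char *) data;
       unsigned HOST_WIDE_INT val = 0;
       unsigned int i;

       for (i = 0; i < GET_MODE_SIZE (mode); i++)
         val = (val << 8) | byte;
       return GEN_INT (trunc_int_for_mode (val, mode));
     }

   can_store_by_pieces (len, repeated_byte_constfun, &byte, align) then
   reports whether store_by_pieces with the same arguments would
   succeed.  */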
2204 /* Generate several move instructions to store LEN bytes generated by
2205 CONSTFUN to block TO (a MEM rtx with BLKmode). CONSTFUNDATA is a
2206 pointer which will be passed as an argument in every CONSTFUN call.
2207 ALIGN is the maximum alignment we can assume.
2208 If ENDP is 0 return TO; if ENDP is 1 return memory at the end, a la
2209 mempcpy; and if ENDP is 2 return memory at the end minus one byte, a la
2210 stpcpy. */
2213 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2214 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2215 void *constfundata, unsigned int align, int endp)
2217 struct store_by_pieces data;
2219 if (len == 0)
2221 gcc_assert (endp != 2);
2222 return to;
2225 gcc_assert (STORE_BY_PIECES_P (len, align));
2226 data.constfun = constfun;
2227 data.constfundata = constfundata;
2228 data.len = len;
2229 data.to = to;
2230 store_by_pieces_1 (&data, align);
2231 if (endp)
2233 rtx to1;
2235 gcc_assert (!data.reverse);
2236 if (data.autinc_to)
2238 if (endp == 2)
2240 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2241 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2242 else
2243 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2244 -1));
2246 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2247 data.offset);
2249 else
2251 if (endp == 2)
2252 --data.offset;
2253 to1 = adjust_address (data.to, QImode, data.offset);
2255 return to1;
2257 else
2258 return data.to;
2261 /* Generate several move instructions to clear LEN bytes of block TO
2262 (a MEM rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2264 static void
2265 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2267 struct store_by_pieces data;
2269 if (len == 0)
2270 return;
2272 data.constfun = clear_by_pieces_1;
2273 data.constfundata = NULL;
2274 data.len = len;
2275 data.to = to;
2276 store_by_pieces_1 (&data, align);
2279 /* Callback routine for clear_by_pieces.
2280 Return const0_rtx unconditionally. */
2282 static rtx
2283 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2284 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2285 enum machine_mode mode ATTRIBUTE_UNUSED)
2287 return const0_rtx;
2290 /* Subroutine of clear_by_pieces and store_by_pieces.
2291 Generate several move instructions to store LEN bytes of block TO
2292 (a MEM rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2294 static void
2295 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2296 unsigned int align ATTRIBUTE_UNUSED)
2298 rtx to_addr = XEXP (data->to, 0);
2299 unsigned int max_size = STORE_MAX_PIECES + 1;
2300 enum machine_mode mode = VOIDmode, tmode;
2301 enum insn_code icode;
2303 data->offset = 0;
2304 data->to_addr = to_addr;
2305 data->autinc_to
2306 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2307 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2309 data->explicit_inc_to = 0;
2310 data->reverse
2311 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2312 if (data->reverse)
2313 data->offset = data->len;
2315 /* If storing requires more than two move insns,
2316 copy addresses to registers (to make displacements shorter)
2317 and use post-increment if available. */
2318 if (!data->autinc_to
2319 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2321 /* Determine the main mode we'll be using. */
2322 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2323 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2324 if (GET_MODE_SIZE (tmode) < max_size)
2325 mode = tmode;
2327 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2329 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2330 data->autinc_to = 1;
2331 data->explicit_inc_to = -1;
2334 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2335 && ! data->autinc_to)
2337 data->to_addr = copy_addr_to_reg (to_addr);
2338 data->autinc_to = 1;
2339 data->explicit_inc_to = 1;
2342 if ( !data->autinc_to && CONSTANT_P (to_addr))
2343 data->to_addr = copy_addr_to_reg (to_addr);
2346 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2347 if (align >= GET_MODE_ALIGNMENT (tmode))
2348 align = GET_MODE_ALIGNMENT (tmode);
2349 else
2351 enum machine_mode xmode;
2353 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2354 tmode != VOIDmode;
2355 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2356 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2357 || SLOW_UNALIGNED_ACCESS (tmode, align))
2358 break;
2360 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2363 /* First store what we can in the largest integer mode, then go to
2364 successively smaller modes. */
2366 while (max_size > 1)
2368 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2369 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2370 if (GET_MODE_SIZE (tmode) < max_size)
2371 mode = tmode;
2373 if (mode == VOIDmode)
2374 break;
2376 icode = mov_optab->handlers[(int) mode].insn_code;
2377 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2378 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2380 max_size = GET_MODE_SIZE (mode);
2383 /* The code above should have handled everything. */
2384 gcc_assert (!data->len);
2387 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2388 with move instructions for mode MODE. GENFUN is the gen_... function
2389 to make a move insn for that mode. DATA has all the other info. */
2391 static void
2392 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2393 struct store_by_pieces *data)
2395 unsigned int size = GET_MODE_SIZE (mode);
2396 rtx to1, cst;
2398 while (data->len >= size)
2400 if (data->reverse)
2401 data->offset -= size;
2403 if (data->autinc_to)
2404 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2405 data->offset);
2406 else
2407 to1 = adjust_address (data->to, mode, data->offset);
2409 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2410 emit_insn (gen_add2_insn (data->to_addr,
2411 GEN_INT (-(HOST_WIDE_INT) size)));
2413 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2414 emit_insn ((*genfun) (to1, cst));
2416 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2417 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2419 if (! data->reverse)
2420 data->offset += size;
2422 data->len -= size;
2426 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2427 its length in bytes. */
2430 clear_storage (rtx object, rtx size)
2432 enum machine_mode mode = GET_MODE (object);
2433 unsigned int align;
2435 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2436 just move a zero. Otherwise, do this a piece at a time. */
2437 if (mode != BLKmode
2438 && GET_CODE (size) == CONST_INT
2439 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2441 rtx zero = CONST0_RTX (mode);
2442 if (zero != NULL)
2444 emit_move_insn (object, zero);
2445 return NULL;
2448 if (COMPLEX_MODE_P (mode))
2450 zero = CONST0_RTX (GET_MODE_INNER (mode));
2451 if (zero != NULL)
2453 write_complex_part (object, zero, 0);
2454 write_complex_part (object, zero, 1);
2455 return NULL;
2460 if (size == const0_rtx)
2461 return NULL;
2463 align = MEM_ALIGN (object);
2465 if (GET_CODE (size) == CONST_INT
2466 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2467 clear_by_pieces (object, INTVAL (size), align);
2468 else if (clear_storage_via_clrmem (object, size, align))
2470 else
2471 return clear_storage_via_libcall (object, size);
2473 return NULL;
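/* For illustration, a sketch of a typical use: a caller holding a
   32-byte BLKmode temporary could clear it with

     rtx mem = assign_stack_temp (BLKmode, 32, 0);
     clear_storage (mem, GEN_INT (32));

   which ends up in clear_by_pieces, a clrmem pattern, or the memset
   libcall, in that order of preference.  */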
2476 /* A subroutine of clear_storage. Expand a clrmem pattern;
2477 return true if successful. */
2479 static bool
2480 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2482 /* Try the most limited insn first, because there's no point
2483 including more than one in the machine description unless
2484 the more limited one has some advantage. */
2486 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2487 enum machine_mode mode;
2489 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2490 mode = GET_MODE_WIDER_MODE (mode))
2492 enum insn_code code = clrmem_optab[(int) mode];
2493 insn_operand_predicate_fn pred;
2495 if (code != CODE_FOR_nothing
2496 /* We don't need MODE to be narrower than
2497 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2498 the mode mask, as it is returned by the macro, it will
2499 definitely be less than the actual mode mask. */
2500 && ((GET_CODE (size) == CONST_INT
2501 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2502 <= (GET_MODE_MASK (mode) >> 1)))
2503 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2504 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2505 || (*pred) (object, BLKmode))
2506 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2507 || (*pred) (opalign, VOIDmode)))
2509 rtx op1;
2510 rtx last = get_last_insn ();
2511 rtx pat;
2513 op1 = convert_to_mode (mode, size, 1);
2514 pred = insn_data[(int) code].operand[1].predicate;
2515 if (pred != 0 && ! (*pred) (op1, mode))
2516 op1 = copy_to_mode_reg (mode, op1);
2518 pat = GEN_FCN ((int) code) (object, op1, opalign);
2519 if (pat)
2521 emit_insn (pat);
2522 return true;
2524 else
2525 delete_insns_since (last);
2529 return false;
2532 /* A subroutine of clear_storage. Expand a call to memset.
2533 Return the return value of memset, 0 otherwise. */
2535 static rtx
2536 clear_storage_via_libcall (rtx object, rtx size)
2538 tree call_expr, arg_list, fn, object_tree, size_tree;
2539 enum machine_mode size_mode;
2540 rtx retval;
2542 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2543 place those pseudos into a VAR_DECL and use them later. */
2545 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2547 size_mode = TYPE_MODE (sizetype);
2548 size = convert_to_mode (size_mode, size, 1);
2549 size = copy_to_mode_reg (size_mode, size);
2551 /* It is incorrect to use the libcall calling conventions to call
2552 memset in this context. This could be a user call to memset and
2553 the user may wish to examine the return value from memset. For
2554 targets where libcalls and normal calls have different conventions
2555 for returning pointers, we could end up generating incorrect code. */
2557 object_tree = make_tree (ptr_type_node, object);
2558 size_tree = make_tree (sizetype, size);
2560 fn = clear_storage_libcall_fn (true);
2561 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2562 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2563 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2565 /* Now we have to build up the CALL_EXPR itself. */
2566 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2567 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2568 call_expr, arg_list, NULL_TREE);
2570 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2572 return retval;
2575 /* A subroutine of clear_storage_via_libcall. Create the tree node
2576 for the function we use for block clears. The first time FOR_CALL
2577 is true, we call assemble_external. */
2579 static GTY(()) tree block_clear_fn;
2581 void
2582 init_block_clear_fn (const char *asmspec)
2584 if (!block_clear_fn)
2586 tree fn, args;
2588 fn = get_identifier ("memset");
2589 args = build_function_type_list (ptr_type_node, ptr_type_node,
2590 integer_type_node, sizetype,
2591 NULL_TREE);
2593 fn = build_decl (FUNCTION_DECL, fn, args);
2594 DECL_EXTERNAL (fn) = 1;
2595 TREE_PUBLIC (fn) = 1;
2596 DECL_ARTIFICIAL (fn) = 1;
2597 TREE_NOTHROW (fn) = 1;
2599 block_clear_fn = fn;
2602 if (asmspec)
2603 set_user_assembler_name (block_clear_fn, asmspec);
2606 static tree
2607 clear_storage_libcall_fn (int for_call)
2609 static bool emitted_extern;
2611 if (!block_clear_fn)
2612 init_block_clear_fn (NULL);
2614 if (for_call && !emitted_extern)
2616 emitted_extern = true;
2617 make_decl_rtl (block_clear_fn);
2618 assemble_external (block_clear_fn);
2621 return block_clear_fn;
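/* For illustration: a target that provides memset under a different
   assembler name could redirect the block-clear libcall with

     init_block_clear_fn ("__custom_memset");

   "__custom_memset" is a made-up name; real targets arrange this from
   their own initialization code.  */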
2624 /* Write to one of the components of the complex value CPLX. Write VAL to
2625 the real part if IMAG_P is false, and the imaginary part if it's true. */
2627 static void
2628 write_complex_part (rtx cplx, rtx val, bool imag_p)
2630 enum machine_mode cmode;
2631 enum machine_mode imode;
2632 unsigned ibitsize;
2634 if (GET_CODE (cplx) == CONCAT)
2636 emit_move_insn (XEXP (cplx, imag_p), val);
2637 return;
2640 cmode = GET_MODE (cplx);
2641 imode = GET_MODE_INNER (cmode);
2642 ibitsize = GET_MODE_BITSIZE (imode);
2644 /* If the sub-object is at least word sized, then we know that subregging
2645 will work. This special case is important, since store_bit_field
2646 wants to operate on integer modes, and there's rarely an OImode to
2647 correspond to TCmode. */
2648 if (ibitsize >= BITS_PER_WORD
2649 /* For hard regs we have exact predicates. Assume we can split
2650 the original object if it spans an even number of hard regs.
2651 This special case is important for SCmode on 64-bit platforms
2652 where the natural size of floating-point regs is 32-bit. */
2653 || (GET_CODE (cplx) == REG
2654 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2655 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)
2656 /* For MEMs we always try to make a "subreg", that is to adjust
2657 the MEM, because store_bit_field may generate overly
2658 convoluted RTL for sub-word fields. */
2659 || MEM_P (cplx))
2661 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2662 imag_p ? GET_MODE_SIZE (imode) : 0);
2663 if (part)
2665 emit_move_insn (part, val);
2666 return;
2668 else
2669 /* simplify_gen_subreg may fail for sub-word MEMs. */
2670 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2673 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
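/* For illustration: an SCmode complex value may be represented either
   as a single register, (reg:SC n), or as a CONCAT of its parts, e.g.

     (concat:SC (reg:SF 100) (reg:SF 101))

   In the CONCAT case writing the imaginary part is just a move into
   XEXP (cplx, 1), which is the fast path taken at the top of
   write_complex_part.  The register numbers are made up.  */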
2676 /* Extract one of the components of the complex value CPLX. Extract the
2677 real part if IMAG_P is false, and the imaginary part if it's true. */
2679 static rtx
2680 read_complex_part (rtx cplx, bool imag_p)
2682 enum machine_mode cmode, imode;
2683 unsigned ibitsize;
2685 if (GET_CODE (cplx) == CONCAT)
2686 return XEXP (cplx, imag_p);
2688 cmode = GET_MODE (cplx);
2689 imode = GET_MODE_INNER (cmode);
2690 ibitsize = GET_MODE_BITSIZE (imode);
2692 /* Special case reads from complex constants that got spilled to memory. */
2693 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2695 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2696 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2698 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2699 if (CONSTANT_CLASS_P (part))
2700 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2704 /* If the sub-object is at least word sized, then we know that subregging
2705 will work. This special case is important, since extract_bit_field
2706 wants to operate on integer modes, and there's rarely an OImode to
2707 correspond to TCmode. */
2708 if (ibitsize >= BITS_PER_WORD
2709 /* For hard regs we have exact predicates. Assume we can split
2710 the original object if it spans an even number of hard regs.
2711 This special case is important for SCmode on 64-bit platforms
2712 where the natural size of floating-point regs is 32-bit. */
2713 || (GET_CODE (cplx) == REG
2714 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2715 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)
2716 /* For MEMs we always try to make a "subreg", that is to adjust
2717 the MEM, because extract_bit_field may generate overly
2718 convoluted RTL for sub-word fields. */
2719 || MEM_P (cplx))
2721 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2722 imag_p ? GET_MODE_SIZE (imode) : 0);
2723 if (ret)
2724 return ret;
2725 else
2726 /* simplify_gen_subreg may fail for sub-word MEMs. */
2727 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2730 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2731 true, NULL_RTX, imode, imode);
2734 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2735 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2736 represented in NEW_MODE. If FORCE is true, this will never happen, as
2737 we'll force-create a SUBREG if needed. */
2739 static rtx
2740 emit_move_change_mode (enum machine_mode new_mode,
2741 enum machine_mode old_mode, rtx x, bool force)
2743 rtx ret;
2745 if (reload_in_progress && MEM_P (x))
2747 /* We can't use gen_lowpart here because it may call change_address
2748 which is not appropriate if we were called when a reload was in
2749 progress. We don't have to worry about changing the address since
2750 the size in bytes is supposed to be the same. Copy the MEM to
2751 change the mode and move any substitutions from the old MEM to
2752 the new one. */
2754 ret = adjust_address_nv (x, new_mode, 0);
2755 copy_replacements (x, ret);
2757 else
2759 /* Note that we do want simplify_subreg's behavior of validating
2760 that the new mode is ok for a hard register. If we were to use
2761 simplify_gen_subreg, we would create the subreg, but would
2762 probably run into the target not being able to implement it. */
2763 /* Except, of course, when FORCE is true, when this is exactly what
2764 we want. Which is needed for CCmodes on some targets. */
2765 if (force)
2766 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2767 else
2768 ret = simplify_subreg (new_mode, x, old_mode, 0);
2771 return ret;
2774 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2775 an integer mode of the same size as MODE. Returns the instruction
2776 emitted, or NULL if such a move could not be generated. */
2778 static rtx
2779 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y)
2781 enum machine_mode imode;
2782 enum insn_code code;
2784 /* There must exist a mode of the exact size we require. */
2785 imode = int_mode_for_mode (mode);
2786 if (imode == BLKmode)
2787 return NULL_RTX;
2789 /* The target must support moves in this mode. */
2790 code = mov_optab->handlers[imode].insn_code;
2791 if (code == CODE_FOR_nothing)
2792 return NULL_RTX;
2794 x = emit_move_change_mode (imode, mode, x, false);
2795 if (x == NULL_RTX)
2796 return NULL_RTX;
2797 y = emit_move_change_mode (imode, mode, y, false);
2798 if (y == NULL_RTX)
2799 return NULL_RTX;
2800 return emit_insn (GEN_FCN (code) (x, y));
2803 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2804 Return an equivalent MEM that does not use an auto-increment. */
2806 static rtx
2807 emit_move_resolve_push (enum machine_mode mode, rtx x)
2809 enum rtx_code code = GET_CODE (XEXP (x, 0));
2810 HOST_WIDE_INT adjust;
2811 rtx temp;
2813 adjust = GET_MODE_SIZE (mode);
2814 #ifdef PUSH_ROUNDING
2815 adjust = PUSH_ROUNDING (adjust);
2816 #endif
2817 if (code == PRE_DEC || code == POST_DEC)
2818 adjust = -adjust;
2820 /* Do not use anti_adjust_stack, since we don't want to update
2821 stack_pointer_delta. */
2822 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2823 GEN_INT (adjust), stack_pointer_rtx,
2824 0, OPTAB_LIB_WIDEN);
2825 if (temp != stack_pointer_rtx)
2826 emit_move_insn (stack_pointer_rtx, temp);
2828 switch (code)
2830 case PRE_INC:
2831 case PRE_DEC:
2832 temp = stack_pointer_rtx;
2833 break;
2834 case POST_INC:
2835 temp = plus_constant (stack_pointer_rtx, -GET_MODE_SIZE (mode));
2836 break;
2837 case POST_DEC:
2838 temp = plus_constant (stack_pointer_rtx, GET_MODE_SIZE (mode));
2839 break;
2840 default:
2841 gcc_unreachable ();
2844 return replace_equiv_address (x, temp);
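/* A worked example of the rewrite above: pushing an SImode value through
   (mem:SI (pre_dec:SI (reg sp))) on a downward-growing stack becomes an
   explicit "sp = sp - 4" followed by a plain store to (mem:SI (reg sp)).
   For POST_INC and POST_DEC the returned address instead compensates for
   the adjustment that has already been made.  */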
2847 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2848 X is known to satisfy push_operand, and MODE is known to be complex.
2849 Returns the last instruction emitted. */
2851 static rtx
2852 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2854 enum machine_mode submode = GET_MODE_INNER (mode);
2855 bool imag_first;
2857 #ifdef PUSH_ROUNDING
2858 unsigned int submodesize = GET_MODE_SIZE (submode);
2860 /* In case we output to the stack, but the size is smaller than what
2861 the machine can push exactly, we need to use move instructions. */
2862 if (PUSH_ROUNDING (submodesize) != submodesize)
2864 x = emit_move_resolve_push (mode, x);
2865 return emit_move_insn (x, y);
2867 #endif
2869 /* Note that the real part always precedes the imag part in memory
2870 regardless of the machine's endianness. */
2871 switch (GET_CODE (XEXP (x, 0)))
2873 case PRE_DEC:
2874 case POST_DEC:
2875 imag_first = true;
2876 break;
2877 case PRE_INC:
2878 case POST_INC:
2879 imag_first = false;
2880 break;
2881 default:
2882 gcc_unreachable ();
2885 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2886 read_complex_part (y, imag_first));
2887 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2888 read_complex_part (y, !imag_first));
2891 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2892 MODE is known to be complex. Returns the last instruction emitted. */
2894 static rtx
2895 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
2897 bool try_int;
2899 /* Need to take special care for pushes, to maintain proper ordering
2900 of the data, and possibly extra padding. */
2901 if (push_operand (x, mode))
2902 return emit_move_complex_push (mode, x, y);
2904 /* See if we can coerce the target into moving both values at once. */
2906 /* Move floating point as parts. */
2907 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
2908 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
2909 try_int = false;
2910 /* Not possible if the values are inherently not adjacent. */
2911 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
2912 try_int = false;
2913 /* Is possible if both are registers (or subregs of registers). */
2914 else if (register_operand (x, mode) && register_operand (y, mode))
2915 try_int = true;
2916 /* If one of the operands is a memory, and alignment constraints
2917 are friendly enough, we may be able to do combined memory operations.
2918 We do not attempt this if Y is a constant because that combination is
2919 usually better with the by-parts thing below. */
2920 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
2921 && (!STRICT_ALIGNMENT
2922 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
2923 try_int = true;
2924 else
2925 try_int = false;
2927 if (try_int)
2929 rtx ret;
2931 /* For memory to memory moves, optimal behavior can be had with the
2932 existing block move logic. */
2933 if (MEM_P (x) && MEM_P (y))
2935 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
2936 BLOCK_OP_NO_LIBCALL);
2937 return get_last_insn ();
2940 ret = emit_move_via_integer (mode, x, y);
2941 if (ret)
2942 return ret;
2945 /* Show the output dies here. This is necessary for SUBREGs
2946 of pseudos since we cannot track their lifetimes correctly;
2947 hard regs shouldn't appear here except as return values. */
2948 if (!reload_completed && !reload_in_progress
2949 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
2950 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2952 write_complex_part (x, read_complex_part (y, false), false);
2953 write_complex_part (x, read_complex_part (y, true), true);
2954 return get_last_insn ();
2957 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2958 MODE is known to be MODE_CC. Returns the last instruction emitted. */
2960 static rtx
2961 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
2963 rtx ret;
2965 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
2966 if (mode != CCmode)
2968 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
2969 if (code != CODE_FOR_nothing)
2971 x = emit_move_change_mode (CCmode, mode, x, true);
2972 y = emit_move_change_mode (CCmode, mode, y, true);
2973 return emit_insn (GEN_FCN (code) (x, y));
2977 /* Otherwise, find the MODE_INT mode of the same width. */
2978 ret = emit_move_via_integer (mode, x, y);
2979 gcc_assert (ret != NULL);
2980 return ret;
2983 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2984 MODE is any multi-word or full-word mode that lacks a move_insn
2985 pattern. Note that you will get better code if you define such
2986 patterns, even if they must turn into multiple assembler instructions. */
2988 static rtx
2989 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
2991 rtx last_insn = 0;
2992 rtx seq, inner;
2993 bool need_clobber;
2994 int i;
2996 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
2998 /* If X is a push on the stack, do the push now and replace
2999 X with a reference to the stack pointer. */
3000 if (push_operand (x, mode))
3001 x = emit_move_resolve_push (mode, x);
3003 /* If we are in reload, see if either operand is a MEM whose address
3004 is scheduled for replacement. */
3005 if (reload_in_progress && MEM_P (x)
3006 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3007 x = replace_equiv_address_nv (x, inner);
3008 if (reload_in_progress && MEM_P (y)
3009 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3010 y = replace_equiv_address_nv (y, inner);
3012 start_sequence ();
3014 need_clobber = false;
3015 for (i = 0;
3016 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3017 i++)
3019 rtx xpart = operand_subword (x, i, 1, mode);
3020 rtx ypart = operand_subword (y, i, 1, mode);
3022 /* If we can't get a part of Y, put Y into memory if it is a
3023 constant. Otherwise, force it into a register. Then we must
3024 be able to get a part of Y. */
3025 if (ypart == 0 && CONSTANT_P (y))
3027 y = force_const_mem (mode, y);
3028 ypart = operand_subword (y, i, 1, mode);
3030 else if (ypart == 0)
3031 ypart = operand_subword_force (y, i, mode);
3033 gcc_assert (xpart && ypart);
3035 need_clobber |= (GET_CODE (xpart) == SUBREG);
3037 last_insn = emit_move_insn (xpart, ypart);
3040 seq = get_insns ();
3041 end_sequence ();
3043 /* Show the output dies here. This is necessary for SUBREGs
3044 of pseudos since we cannot track their lifetimes correctly;
3045 hard regs shouldn't appear here except as return values.
3046 We never want to emit such a clobber after reload. */
3047 if (x != y
3048 && ! (reload_in_progress || reload_completed)
3049 && need_clobber != 0)
3050 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3052 emit_insn (seq);
3054 return last_insn;
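/* For illustration: on a 32-bit target with no DImode move pattern,
   emit_move_multi_word copies a DImode value as two SImode word moves
   obtained via operand_subword, emitting a CLOBBER of the destination
   first whenever a written part is a SUBREG so that pseudo lifetimes
   remain trackable.  */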
3057 /* Low level part of emit_move_insn.
3058 Called just like emit_move_insn, but assumes X and Y
3059 are basically valid. */
3062 emit_move_insn_1 (rtx x, rtx y)
3064 enum machine_mode mode = GET_MODE (x);
3065 enum insn_code code;
3067 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3069 code = mov_optab->handlers[mode].insn_code;
3070 if (code != CODE_FOR_nothing)
3071 return emit_insn (GEN_FCN (code) (x, y));
3073 /* Expand complex moves by moving real part and imag part. */
3074 if (COMPLEX_MODE_P (mode))
3075 return emit_move_complex (mode, x, y);
3077 if (GET_MODE_CLASS (mode) == MODE_CC)
3078 return emit_move_ccmode (mode, x, y);
3080 /* Try using a move pattern for the corresponding integer mode. This is
3081 only safe when simplify_subreg can convert MODE constants into integer
3082 constants. At present, it can only do this reliably if the value
3083 fits within a HOST_WIDE_INT. */
3084 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3086 rtx ret = emit_move_via_integer (mode, x, y);
3087 if (ret)
3088 return ret;
3091 return emit_move_multi_word (mode, x, y);
3094 /* Generate code to copy Y into X.
3095 Both Y and X must have the same mode, except that
3096 Y can be a constant with VOIDmode.
3097 This mode cannot be BLKmode; use emit_block_move for that.
3099 Return the last instruction emitted. */
3102 emit_move_insn (rtx x, rtx y)
3104 enum machine_mode mode = GET_MODE (x);
3105 rtx y_cst = NULL_RTX;
3106 rtx last_insn, set;
3108 gcc_assert (mode != BLKmode
3109 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3111 if (CONSTANT_P (y))
3113 if (optimize
3114 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3115 && (last_insn = compress_float_constant (x, y)))
3116 return last_insn;
3118 y_cst = y;
3120 if (!LEGITIMATE_CONSTANT_P (y))
3122 y = force_const_mem (mode, y);
3124 /* If the target's cannot_force_const_mem prevented the spill,
3125 assume that the target's move expanders will also take care
3126 of the non-legitimate constant. */
3127 if (!y)
3128 y = y_cst;
3132 /* If X or Y are memory references, verify that their addresses are valid
3133 for the machine. */
3134 if (MEM_P (x)
3135 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3136 && ! push_operand (x, GET_MODE (x)))
3137 || (flag_force_addr
3138 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3139 x = validize_mem (x);
3141 if (MEM_P (y)
3142 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3143 || (flag_force_addr
3144 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3145 y = validize_mem (y);
3147 gcc_assert (mode != BLKmode);
3149 last_insn = emit_move_insn_1 (x, y);
3151 if (y_cst && REG_P (x)
3152 && (set = single_set (last_insn)) != NULL_RTX
3153 && SET_DEST (set) == x
3154 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3155 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3157 return last_insn;
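/* For illustration, the canonical use of emit_move_insn is simply

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   emit_move_insn legitimizes constants and validates memory addresses as
   shown above, then defers to emit_move_insn_1 for the actual choice of
   move pattern.  */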
3160 /* If Y is representable exactly in a narrower mode, and the target can
3161 perform the extension directly from constant or memory, then emit the
3162 move as an extension. */
3164 static rtx
3165 compress_float_constant (rtx x, rtx y)
3167 enum machine_mode dstmode = GET_MODE (x);
3168 enum machine_mode orig_srcmode = GET_MODE (y);
3169 enum machine_mode srcmode;
3170 REAL_VALUE_TYPE r;
3172 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3174 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3175 srcmode != orig_srcmode;
3176 srcmode = GET_MODE_WIDER_MODE (srcmode))
3178 enum insn_code ic;
3179 rtx trunc_y, last_insn;
3181 /* Skip if the target can't extend this way. */
3182 ic = can_extend_p (dstmode, srcmode, 0);
3183 if (ic == CODE_FOR_nothing)
3184 continue;
3186 /* Skip if the narrowed value isn't exact. */
3187 if (! exact_real_truncate (srcmode, &r))
3188 continue;
3190 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3192 if (LEGITIMATE_CONSTANT_P (trunc_y))
3194 /* Skip if the target needs extra instructions to perform
3195 the extension. */
3196 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3197 continue;
3199 else if (float_extend_from_mem[dstmode][srcmode])
3200 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3201 else
3202 continue;
3204 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3205 last_insn = get_last_insn ();
3207 if (REG_P (x))
3208 set_unique_reg_note (last_insn, REG_EQUAL, y);
3210 return last_insn;
3213 return NULL_RTX;
3216 /* Pushing data onto the stack. */
3218 /* Push a block of length SIZE (perhaps variable)
3219 and return an rtx to address the beginning of the block.
3220 The value may be virtual_outgoing_args_rtx.
3222 EXTRA is the number of bytes of padding to push in addition to SIZE.
3223 BELOW nonzero means this padding comes at low addresses;
3224 otherwise, the padding comes at high addresses. */
3227 push_block (rtx size, int extra, int below)
3229 rtx temp;
3231 size = convert_modes (Pmode, ptr_mode, size, 1);
3232 if (CONSTANT_P (size))
3233 anti_adjust_stack (plus_constant (size, extra));
3234 else if (REG_P (size) && extra == 0)
3235 anti_adjust_stack (size);
3236 else
3238 temp = copy_to_mode_reg (Pmode, size);
3239 if (extra != 0)
3240 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3241 temp, 0, OPTAB_LIB_WIDEN);
3242 anti_adjust_stack (temp);
3245 #ifndef STACK_GROWS_DOWNWARD
3246 if (0)
3247 #else
3248 if (1)
3249 #endif
3251 temp = virtual_outgoing_args_rtx;
3252 if (extra != 0 && below)
3253 temp = plus_constant (temp, extra);
3255 else
3257 if (GET_CODE (size) == CONST_INT)
3258 temp = plus_constant (virtual_outgoing_args_rtx,
3259 -INTVAL (size) - (below ? 0 : extra));
3260 else if (extra != 0 && !below)
3261 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3262 negate_rtx (Pmode, plus_constant (size, extra)));
3263 else
3264 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3265 negate_rtx (Pmode, size));
3268 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3271 #ifdef PUSH_ROUNDING
3273 /* Emit single push insn. */
3275 static void
3276 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3278 rtx dest_addr;
3279 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3280 rtx dest;
3281 enum insn_code icode;
3282 insn_operand_predicate_fn pred;
3284 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3285 /* If there is a push pattern, use it. Otherwise try the old way of
3286 throwing a MEM representing the push operation to the move expander. */
3287 icode = push_optab->handlers[(int) mode].insn_code;
3288 if (icode != CODE_FOR_nothing)
3290 if (((pred = insn_data[(int) icode].operand[0].predicate)
3291 && !((*pred) (x, mode))))
3292 x = force_reg (mode, x);
3293 emit_insn (GEN_FCN (icode) (x));
3294 return;
3296 if (GET_MODE_SIZE (mode) == rounded_size)
3297 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3298 /* If we are to pad downward, adjust the stack pointer first and
3299 then store X into the stack location using an offset. This is
3300 because emit_move_insn does not know how to pad; it does not have
3301 access to the type. */
3302 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3304 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3305 HOST_WIDE_INT offset;
3307 emit_move_insn (stack_pointer_rtx,
3308 expand_binop (Pmode,
3309 #ifdef STACK_GROWS_DOWNWARD
3310 sub_optab,
3311 #else
3312 add_optab,
3313 #endif
3314 stack_pointer_rtx,
3315 GEN_INT (rounded_size),
3316 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3318 offset = (HOST_WIDE_INT) padding_size;
3319 #ifdef STACK_GROWS_DOWNWARD
3320 if (STACK_PUSH_CODE == POST_DEC)
3321 /* We have already decremented the stack pointer, so get the
3322 previous value. */
3323 offset += (HOST_WIDE_INT) rounded_size;
3324 #else
3325 if (STACK_PUSH_CODE == POST_INC)
3326 /* We have already incremented the stack pointer, so get the
3327 previous value. */
3328 offset -= (HOST_WIDE_INT) rounded_size;
3329 #endif
3330 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3332 else
3334 #ifdef STACK_GROWS_DOWNWARD
3335 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3336 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3337 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3338 #else
3339 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3340 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3341 GEN_INT (rounded_size));
3342 #endif
3343 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3346 dest = gen_rtx_MEM (mode, dest_addr);
3348 if (type != 0)
3350 set_mem_attributes (dest, type, 1);
3352 if (flag_optimize_sibling_calls)
3353 /* Function incoming arguments may overlap with sibling call
3354 outgoing arguments and we cannot allow reordering of reads
3355 from function arguments with stores to outgoing arguments
3356 of sibling calls. */
3357 set_mem_alias_set (dest, 0);
3359 emit_move_insn (dest, x);
3361 #endif
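/* For illustration: on a target whose PUSH_ROUNDING rounds every push up
   to 4 bytes, pushing a QImode value gives rounded_size == 4 while
   GET_MODE_SIZE (mode) == 1, so the simple STACK_PUSH_CODE address form
   is not used; emit_single_push_insn instead adjusts the stack pointer
   explicitly (when the argument pads downward) or uses a PRE_MODIFY
   address, and stores the byte at the appropriate offset.  */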
3363 /* Generate code to push X onto the stack, assuming it has mode MODE and
3364 type TYPE.
3365 MODE is redundant except when X is a CONST_INT (since they don't
3366 carry mode info).
3367 SIZE is an rtx for the size of data to be copied (in bytes),
3368 needed only if X is BLKmode.
3370 ALIGN (in bits) is maximum alignment we can assume.
3372 If PARTIAL and REG are both nonzero, then copy that many of the first
3373 bytes of X into registers starting with REG, and push the rest of X.
3374 The amount of space pushed is decreased by PARTIAL bytes.
3375 REG must be a hard register in this case.
3376 If REG is zero but PARTIAL is not, take all other actions for an
3377 argument partially in registers, but do not actually load any
3378 registers.
3380 EXTRA is the amount in bytes of extra space to leave next to this arg.
3381 This is ignored if an argument block has already been allocated.
3383 On a machine that lacks real push insns, ARGS_ADDR is the address of
3384 the bottom of the argument block for this call. We use indexing off there
3385 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3386 argument block has not been preallocated.
3388 ARGS_SO_FAR is the size of args previously pushed for this call.
3390 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3391 for arguments passed in registers. If nonzero, it will be the number
3392 of bytes required. */
3394 void
3395 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3396 unsigned int align, int partial, rtx reg, int extra,
3397 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3398 rtx alignment_pad)
3400 rtx xinner;
3401 enum direction stack_direction
3402 #ifdef STACK_GROWS_DOWNWARD
3403 = downward;
3404 #else
3405 = upward;
3406 #endif
3408 /* Decide where to pad the argument: `downward' for below,
3409 `upward' for above, or `none' for don't pad it.
3410 Default is below for small data on big-endian machines; else above. */
3411 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3413 /* Invert direction if stack is post-decrement.
3414 FIXME: why? */
3415 if (STACK_PUSH_CODE == POST_DEC)
3416 if (where_pad != none)
3417 where_pad = (where_pad == downward ? upward : downward);
3419 xinner = x;
3421 if (mode == BLKmode)
3423 /* Copy a block into the stack, entirely or partially. */
3425 rtx temp;
3426 int used;
3427 int offset;
3428 int skip;
3430 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3431 used = partial - offset;
3433 gcc_assert (size);
3435 /* USED is now the # of bytes we need not copy to the stack
3436 because registers will take care of them. */
3438 if (partial != 0)
3439 xinner = adjust_address (xinner, BLKmode, used);
3441 /* If the partial register-part of the arg counts in its stack size,
3442 skip the part of stack space corresponding to the registers.
3443 Otherwise, start copying to the beginning of the stack space,
3444 by setting SKIP to 0. */
3445 skip = (reg_parm_stack_space == 0) ? 0 : used;
3447 #ifdef PUSH_ROUNDING
3448 /* Do it with several push insns if that doesn't take lots of insns
3449 and if there is no difficulty with push insns that skip bytes
3450 on the stack for alignment purposes. */
3451 if (args_addr == 0
3452 && PUSH_ARGS
3453 && GET_CODE (size) == CONST_INT
3454 && skip == 0
3455 && MEM_ALIGN (xinner) >= align
3456 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3457 /* Here we avoid the case of a structure whose weak alignment
3458 forces many pushes of a small amount of data,
3459 and such small pushes do rounding that causes trouble. */
3460 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3461 || align >= BIGGEST_ALIGNMENT
3462 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3463 == (align / BITS_PER_UNIT)))
3464 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3466 /* Push padding now if padding above and stack grows down,
3467 or if padding below and stack grows up.
3468 But if space already allocated, this has already been done. */
3469 if (extra && args_addr == 0
3470 && where_pad != none && where_pad != stack_direction)
3471 anti_adjust_stack (GEN_INT (extra));
3473 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3475 else
3476 #endif /* PUSH_ROUNDING */
3478 rtx target;
3480 /* Otherwise make space on the stack and copy the data
3481 to the address of that space. */
3483 /* Deduct words put into registers from the size we must copy. */
3484 if (partial != 0)
3486 if (GET_CODE (size) == CONST_INT)
3487 size = GEN_INT (INTVAL (size) - used);
3488 else
3489 size = expand_binop (GET_MODE (size), sub_optab, size,
3490 GEN_INT (used), NULL_RTX, 0,
3491 OPTAB_LIB_WIDEN);
3494 /* Get the address of the stack space.
3495 In this case, we do not deal with EXTRA separately.
3496 A single stack adjust will do. */
3497 if (! args_addr)
3499 temp = push_block (size, extra, where_pad == downward);
3500 extra = 0;
3502 else if (GET_CODE (args_so_far) == CONST_INT)
3503 temp = memory_address (BLKmode,
3504 plus_constant (args_addr,
3505 skip + INTVAL (args_so_far)));
3506 else
3507 temp = memory_address (BLKmode,
3508 plus_constant (gen_rtx_PLUS (Pmode,
3509 args_addr,
3510 args_so_far),
3511 skip));
3513 if (!ACCUMULATE_OUTGOING_ARGS)
3515 /* If the source is referenced relative to the stack pointer,
3516 copy it to another register to stabilize it. We do not need
3517 to do this if we know that we won't be changing sp. */
3519 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3520 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3521 temp = copy_to_reg (temp);
3524 target = gen_rtx_MEM (BLKmode, temp);
3526 /* We do *not* set_mem_attributes here, because incoming arguments
3527 may overlap with sibling call outgoing arguments and we cannot
3528 allow reordering of reads from function arguments with stores
3529 to outgoing arguments of sibling calls. We do, however, want
3530 to record the alignment of the stack slot. */
3531 /* ALIGN may well be better aligned than TYPE, e.g. due to
3532 PARM_BOUNDARY. Assume the caller isn't lying. */
3533 set_mem_align (target, align);
3535 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3538 else if (partial > 0)
3540 /* Scalar partly in registers. */
3542 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3543 int i;
3544 int not_stack;
3545 /* # bytes of start of argument
3546 that we must make space for but need not store. */
3547 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3548 int args_offset = INTVAL (args_so_far);
3549 int skip;
3551 /* Push padding now if padding above and stack grows down,
3552 or if padding below and stack grows up.
3553 But if space already allocated, this has already been done. */
3554 if (extra && args_addr == 0
3555 && where_pad != none && where_pad != stack_direction)
3556 anti_adjust_stack (GEN_INT (extra));
3558 /* If we make space by pushing it, we might as well push
3559 the real data. Otherwise, we can leave OFFSET nonzero
3560 and leave the space uninitialized. */
3561 if (args_addr == 0)
3562 offset = 0;
3564 /* Now NOT_STACK gets the number of words that we don't need to
3565 allocate on the stack. Convert OFFSET to words too. */
3566 not_stack = (partial - offset) / UNITS_PER_WORD;
3567 offset /= UNITS_PER_WORD;
3569 /* If the partial register-part of the arg counts in its stack size,
3570 skip the part of stack space corresponding to the registers.
3571 Otherwise, start copying to the beginning of the stack space,
3572 by setting SKIP to 0. */
3573 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3575 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3576 x = validize_mem (force_const_mem (mode, x));
3578 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3579 SUBREGs of such registers are not allowed. */
3580 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3581 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3582 x = copy_to_reg (x);
3584 /* Loop over all the words allocated on the stack for this arg. */
3585 /* We can do it by words, because any scalar bigger than a word
3586 has a size that is a multiple of a word. */
3587 #ifndef PUSH_ARGS_REVERSED
3588 for (i = not_stack; i < size; i++)
3589 #else
3590 for (i = size - 1; i >= not_stack; i--)
3591 #endif
3592 if (i >= not_stack + offset)
3593 emit_push_insn (operand_subword_force (x, i, mode),
3594 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3595 0, args_addr,
3596 GEN_INT (args_offset + ((i - not_stack + skip)
3597 * UNITS_PER_WORD)),
3598 reg_parm_stack_space, alignment_pad);
3600 else
3602 rtx addr;
3603 rtx dest;
3605 /* Push padding now if padding above and stack grows down,
3606 or if padding below and stack grows up.
3607 But if space already allocated, this has already been done. */
3608 if (extra && args_addr == 0
3609 && where_pad != none && where_pad != stack_direction)
3610 anti_adjust_stack (GEN_INT (extra));
3612 #ifdef PUSH_ROUNDING
3613 if (args_addr == 0 && PUSH_ARGS)
3614 emit_single_push_insn (mode, x, type);
3615 else
3616 #endif
3618 if (GET_CODE (args_so_far) == CONST_INT)
3619 addr
3620 = memory_address (mode,
3621 plus_constant (args_addr,
3622 INTVAL (args_so_far)));
3623 else
3624 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3625 args_so_far));
3626 dest = gen_rtx_MEM (mode, addr);
3628 /* We do *not* set_mem_attributes here, because incoming arguments
3629 may overlap with sibling call outgoing arguments and we cannot
3630 allow reordering of reads from function arguments with stores
3631 to outgoing arguments of sibling calls. We do, however, want
3632 to record the alignment of the stack slot. */
3633 /* ALIGN may well be better aligned than TYPE, e.g. due to
3634 PARM_BOUNDARY. Assume the caller isn't lying. */
3635 set_mem_align (dest, align);
3637 emit_move_insn (dest, x);
3641 /* If part should go in registers, copy that part
3642 into the appropriate registers. Do this now, at the end,
3643 since mem-to-mem copies above may do function calls. */
3644 if (partial > 0 && reg != 0)
3646 /* Handle calls that pass values in multiple non-contiguous locations.
3647 The Irix 6 ABI has examples of this. */
3648 if (GET_CODE (reg) == PARALLEL)
3649 emit_group_load (reg, x, type, -1);
3650 else
3652 gcc_assert (partial % UNITS_PER_WORD == 0);
3653 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3657 if (extra && args_addr == 0 && where_pad == stack_direction)
3658 anti_adjust_stack (GEN_INT (extra));
3660 if (alignment_pad && args_addr == 0)
3661 anti_adjust_stack (alignment_pad);
3664 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3665 operations. */
3667 static rtx
3668 get_subtarget (rtx x)
3670 return (optimize
3671 || x == 0
3672 /* Only registers can be subtargets. */
3673 || !REG_P (x)
3674 /* Don't use hard regs to avoid extending their life. */
3675 || REGNO (x) < FIRST_PSEUDO_REGISTER
3676 ? 0 : x);
3679 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3680 FIELD is a bitfield. Returns true if the optimization was successful,
3681 and there's nothing else to do. */
3683 static bool
3684 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3685 unsigned HOST_WIDE_INT bitpos,
3686 enum machine_mode mode1, rtx str_rtx,
3687 tree to, tree src)
3689 enum machine_mode str_mode = GET_MODE (str_rtx);
3690 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3691 tree op0, op1;
3692 rtx value, result;
3693 optab binop;
3695 if (mode1 != VOIDmode
3696 || bitsize >= BITS_PER_WORD
3697 || str_bitsize > BITS_PER_WORD
3698 || TREE_SIDE_EFFECTS (to)
3699 || TREE_THIS_VOLATILE (to))
3700 return false;
3702 STRIP_NOPS (src);
3703 if (!BINARY_CLASS_P (src)
3704 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3705 return false;
3707 op0 = TREE_OPERAND (src, 0);
3708 op1 = TREE_OPERAND (src, 1);
3709 STRIP_NOPS (op0);
3711 if (!operand_equal_p (to, op0, 0))
3712 return false;
3714 if (MEM_P (str_rtx))
3716 unsigned HOST_WIDE_INT offset1;
3718 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3719 str_mode = word_mode;
3720 str_mode = get_best_mode (bitsize, bitpos,
3721 MEM_ALIGN (str_rtx), str_mode, 0);
3722 if (str_mode == VOIDmode)
3723 return false;
3724 str_bitsize = GET_MODE_BITSIZE (str_mode);
3726 offset1 = bitpos;
3727 bitpos %= str_bitsize;
3728 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3729 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3731 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3732 return false;
3734 /* If the bit field covers the whole REG/MEM, store_field
3735 will likely generate better code. */
3736 if (bitsize >= str_bitsize)
3737 return false;
3739 /* We can't handle fields split across multiple entities. */
3740 if (bitpos + bitsize > str_bitsize)
3741 return false;
3743 if (BYTES_BIG_ENDIAN)
3744 bitpos = str_bitsize - bitpos - bitsize;
3746 switch (TREE_CODE (src))
3748 case PLUS_EXPR:
3749 case MINUS_EXPR:
3750 /* For now, just optimize the case of the topmost bitfield
3751 where we don't need to do any masking and also
3752 1-bit bitfields where xor can be used.
3753 We might win by one instruction for the other bitfields
3754 too if insv/extv instructions aren't used, so that
3755 can be added later. */
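/* Why these two cases are safe: in the topmost case the addend has
   zeros below BITPOS, so carries and borrows propagate only upward
   and out of the word, leaving the lower fields intact; in the
   1-bit case, adding or subtracting a constant is the same
   operation modulo 2, so the bit is either flipped (odd constant)
   or left alone (even constant), which is exactly xor with
   VALUE & 1.  */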
3756 if (bitpos + bitsize != str_bitsize
3757 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3758 break;
3760 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3761 value = convert_modes (str_mode,
3762 TYPE_MODE (TREE_TYPE (op1)), value,
3763 TYPE_UNSIGNED (TREE_TYPE (op1)));
3765 /* We may be accessing data outside the field, which means
3766 we can alias adjacent data. */
3767 if (MEM_P (str_rtx))
3769 str_rtx = shallow_copy_rtx (str_rtx);
3770 set_mem_alias_set (str_rtx, 0);
3771 set_mem_expr (str_rtx, 0);
3774 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3775 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3777 value = expand_and (str_mode, value, const1_rtx, NULL);
3778 binop = xor_optab;
3780 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3781 build_int_cst (NULL_TREE, bitpos),
3782 NULL_RTX, 1);
3783 result = expand_binop (str_mode, binop, str_rtx,
3784 value, str_rtx, 1, OPTAB_WIDEN);
3785 if (result != str_rtx)
3786 emit_move_insn (str_rtx, result);
3787 return true;
3789 case BIT_IOR_EXPR:
3790 case BIT_XOR_EXPR:
3791 if (TREE_CODE (op1) != INTEGER_CST)
3792 break;
3793 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3794 value = convert_modes (GET_MODE (str_rtx),
3795 TYPE_MODE (TREE_TYPE (op1)), value,
3796 TYPE_UNSIGNED (TREE_TYPE (op1)));
3798 /* We may be accessing data outside the field, which means
3799 we can alias adjacent data. */
3800 if (MEM_P (str_rtx))
3802 str_rtx = shallow_copy_rtx (str_rtx);
3803 set_mem_alias_set (str_rtx, 0);
3804 set_mem_expr (str_rtx, 0);
3807 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3808 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3810 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
3811 - 1);
3812 value = expand_and (GET_MODE (str_rtx), value, mask,
3813 NULL_RTX);
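/* E.g. for a 3-bit field the mask is 7; masking VALUE first means
   the shift below can only change the field's own bits of STR_RTX.  */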
3815 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3816 build_int_cst (NULL_TREE, bitpos),
3817 NULL_RTX, 1);
3818 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3819 value, str_rtx, 1, OPTAB_WIDEN);
3820 if (result != str_rtx)
3821 emit_move_insn (str_rtx, result);
3822 return true;
3824 default:
3825 break;
3828 return false;
3832 /* Expand an assignment that stores the value of FROM into TO. */
3834 void
3835 expand_assignment (tree to, tree from)
3837 rtx to_rtx = 0;
3838 rtx result;
3840 /* Don't crash if the lhs of the assignment was erroneous. */
3842 if (TREE_CODE (to) == ERROR_MARK)
3844 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3845 return;
3848 /* Assignment of a structure component needs special treatment
3849 if the structure component's rtx is not simply a MEM.
3850 Assignment of an array element at a constant index, and assignment of
3851 an array element in an unaligned packed structure field, has the same
3852 problem. */
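/* Illustrative: for an assignment such as "s.x.b[2] = v", the branch
   below uses get_inner_reference to reduce the left-hand side to a
   containing object plus a bit position, and then stores through
   optimize_bitfield_assignment_op or store_field.  */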
3853 if (handled_component_p (to)
3854 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3856 enum machine_mode mode1;
3857 HOST_WIDE_INT bitsize, bitpos;
3858 tree offset;
3859 int unsignedp;
3860 int volatilep = 0;
3861 tree tem;
3863 push_temp_slots ();
3864 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3865 &unsignedp, &volatilep, true);
3867 /* If we are going to use store_bit_field and extract_bit_field,
3868 make sure to_rtx will be safe for multiple use. */
3870 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3872 if (offset != 0)
3874 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3876 gcc_assert (MEM_P (to_rtx));
3878 #ifdef POINTERS_EXTEND_UNSIGNED
3879 if (GET_MODE (offset_rtx) != Pmode)
3880 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3881 #else
3882 if (GET_MODE (offset_rtx) != ptr_mode)
3883 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3884 #endif
3886 /* A constant address in TO_RTX can have VOIDmode; we must not
3887 try to call force_reg on it in that case, so avoid it here. */
3888 if (MEM_P (to_rtx)
3889 && GET_MODE (to_rtx) == BLKmode
3890 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3891 && bitsize > 0
3892 && (bitpos % bitsize) == 0
3893 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3894 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3896 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3897 bitpos = 0;
3900 to_rtx = offset_address (to_rtx, offset_rtx,
3901 highest_pow2_factor_for_target (to,
3902 offset));
3905 /* Handle expand_expr of a complex value returning a CONCAT. */
3906 if (GET_CODE (to_rtx) == CONCAT)
3908 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
3910 gcc_assert (bitpos == 0);
3911 result = store_expr (from, to_rtx, false);
3913 else
3915 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
3916 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
3919 else
3921 if (MEM_P (to_rtx))
3923 /* If the field is at offset zero, we could have been given the
3924 DECL_RTX of the parent struct. Don't munge it. */
3925 to_rtx = shallow_copy_rtx (to_rtx);
3927 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3929 /* Deal with volatile and readonly fields. The former is only
3930 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3931 if (volatilep)
3932 MEM_VOLATILE_P (to_rtx) = 1;
3933 if (component_uses_parent_alias_set (to))
3934 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3937 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
3938 to_rtx, to, from))
3939 result = NULL;
3940 else
3941 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3942 TREE_TYPE (tem), get_alias_set (to));
3945 if (result)
3946 preserve_temp_slots (result);
3947 free_temp_slots ();
3948 pop_temp_slots ();
3949 return;
3952 /* If the rhs is a function call and its value is not an aggregate,
3953 call the function before we start to compute the lhs.
3954 This is needed for correct code for cases such as
3955 val = setjmp (buf) on machines where reference to val
3956 requires loading up part of an address in a separate insn.
3958 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3959 since it might be a promoted variable where the zero- or sign-extension
3960 needs to be done. Handling this in the normal way is safe because no
3961 computation is done before the call. */
3962 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3963 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3964 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3965 && REG_P (DECL_RTL (to))))
3967 rtx value;
3969 push_temp_slots ();
3970 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3971 if (to_rtx == 0)
3972 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3974 /* Handle calls that return values in multiple non-contiguous locations.
3975 The Irix 6 ABI has examples of this. */
3976 if (GET_CODE (to_rtx) == PARALLEL)
3977 emit_group_load (to_rtx, value, TREE_TYPE (from),
3978 int_size_in_bytes (TREE_TYPE (from)));
3979 else if (GET_MODE (to_rtx) == BLKmode)
3980 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3981 else
3983 if (POINTER_TYPE_P (TREE_TYPE (to)))
3984 value = convert_memory_address (GET_MODE (to_rtx), value);
3985 emit_move_insn (to_rtx, value);
3987 preserve_temp_slots (to_rtx);
3988 free_temp_slots ();
3989 pop_temp_slots ();
3990 return;
3993 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3994 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3996 if (to_rtx == 0)
3997 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3999 /* Don't move directly into a return register. */
4000 if (TREE_CODE (to) == RESULT_DECL
4001 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4003 rtx temp;
4005 push_temp_slots ();
4006 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4008 if (GET_CODE (to_rtx) == PARALLEL)
4009 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4010 int_size_in_bytes (TREE_TYPE (from)));
4011 else
4012 emit_move_insn (to_rtx, temp);
4014 preserve_temp_slots (to_rtx);
4015 free_temp_slots ();
4016 pop_temp_slots ();
4017 return;
4020 /* In case we are returning the contents of an object which overlaps
4021 the place the value is being stored, use a safe function when copying
4022 a value through a pointer into a structure value return block. */
4023 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4024 && current_function_returns_struct
4025 && !current_function_returns_pcc_struct)
4027 rtx from_rtx, size;
4029 push_temp_slots ();
4030 size = expr_size (from);
4031 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4033 emit_library_call (memmove_libfunc, LCT_NORMAL,
4034 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4035 XEXP (from_rtx, 0), Pmode,
4036 convert_to_mode (TYPE_MODE (sizetype),
4037 size, TYPE_UNSIGNED (sizetype)),
4038 TYPE_MODE (sizetype));
4040 preserve_temp_slots (to_rtx);
4041 free_temp_slots ();
4042 pop_temp_slots ();
4043 return;
4046 /* Compute FROM and store the value in the rtx we got. */
4048 push_temp_slots ();
4049 result = store_expr (from, to_rtx, 0);
4050 preserve_temp_slots (result);
4051 free_temp_slots ();
4052 pop_temp_slots ();
4053 return;
4056 /* Generate code for computing expression EXP,
4057 and storing the value into TARGET.
4059 If the mode is BLKmode then we may return TARGET itself.
4060 It turns out that in BLKmode it doesn't cause a problem,
4061 because C has no operators that could combine two different
4062 assignments into the same BLKmode object with different values
4063 with no sequence point. Will other languages need this to
4064 be more thorough?
4066 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4067 stack, and block moves may need to be treated specially. */
4069 rtx
4070 store_expr (tree exp, rtx target, int call_param_p)
4072 rtx temp;
4073 rtx alt_rtl = NULL_RTX;
4074 int dont_return_target = 0;
4076 if (VOID_TYPE_P (TREE_TYPE (exp)))
4078 /* C++ can generate ?: expressions with a throw expression in one
4079 branch and an rvalue in the other. Here, we resolve attempts to
4080 store the throw expression's nonexistent result. */
4081 gcc_assert (!call_param_p);
4082 expand_expr (exp, const0_rtx, VOIDmode, 0);
4083 return NULL_RTX;
4085 if (TREE_CODE (exp) == COMPOUND_EXPR)
4087 /* Perform first part of compound expression, then assign from second
4088 part. */
4089 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4090 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4091 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4093 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4095 /* For conditional expression, get safe form of the target. Then
4096 test the condition, doing the appropriate assignment on either
4097 side. This avoids the creation of unnecessary temporaries.
4098 For non-BLKmode, it is more efficient not to do this. */
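/* The emitted shape is roughly:

       if (!cond) goto lab1;
       TARGET = <operand 1>;  goto lab2;
     lab1:
       TARGET = <operand 2>;
     lab2:
   */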
4100 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4102 do_pending_stack_adjust ();
4103 NO_DEFER_POP;
4104 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4105 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4106 emit_jump_insn (gen_jump (lab2));
4107 emit_barrier ();
4108 emit_label (lab1);
4109 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4110 emit_label (lab2);
4111 OK_DEFER_POP;
4113 return NULL_RTX;
4115 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4116 /* If this is a scalar in a register that is stored in a wider mode
4117 than the declared mode, compute the result into its declared mode
4118 and then convert to the wider mode. Our value is the computed
4119 expression. */
4121 rtx inner_target = 0;
4123 /* We can do the conversion inside EXP, which will often result
4124 in some optimizations. Do the conversion in two steps: first
4125 change the signedness, if needed, then the extend. But don't
4126 do this if the type of EXP is a subtype of something else
4127 since then the conversion might involve more than just
4128 converting modes. */
4129 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4130 && TREE_TYPE (TREE_TYPE (exp)) == 0
4131 && (!lang_hooks.reduce_bit_field_operations
4132 || (GET_MODE_PRECISION (GET_MODE (target))
4133 == TYPE_PRECISION (TREE_TYPE (exp)))))
4135 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4136 != SUBREG_PROMOTED_UNSIGNED_P (target))
4137 exp = convert
4138 (lang_hooks.types.signed_or_unsigned_type
4139 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4141 exp = convert (lang_hooks.types.type_for_mode
4142 (GET_MODE (SUBREG_REG (target)),
4143 SUBREG_PROMOTED_UNSIGNED_P (target)),
4144 exp);
4146 inner_target = SUBREG_REG (target);
4149 temp = expand_expr (exp, inner_target, VOIDmode,
4150 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4152 /* If TEMP is a VOIDmode constant, use convert_modes to make
4153 sure that we properly convert it. */
4154 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4156 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4157 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4158 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4159 GET_MODE (target), temp,
4160 SUBREG_PROMOTED_UNSIGNED_P (target));
4163 convert_move (SUBREG_REG (target), temp,
4164 SUBREG_PROMOTED_UNSIGNED_P (target));
4166 return NULL_RTX;
4168 else
4170 temp = expand_expr_real (exp, target, GET_MODE (target),
4171 (call_param_p
4172 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4173 &alt_rtl);
4174 /* Return TARGET if it's a specified hardware register.
4175 If TARGET is a volatile mem ref, either return TARGET
4176 or return a reg copied *from* TARGET; ANSI requires this.
4178 Otherwise, if TEMP is not TARGET, return TEMP
4179 if it is constant (for efficiency),
4180 or if we really want the correct value. */
4181 if (!(target && REG_P (target)
4182 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4183 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4184 && ! rtx_equal_p (temp, target)
4185 && CONSTANT_P (temp))
4186 dont_return_target = 1;
4189 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4190 the same as that of TARGET, adjust the constant. This is needed, for
4191 example, in case it is a CONST_DOUBLE and we want only a word-sized
4192 value. */
4193 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4194 && TREE_CODE (exp) != ERROR_MARK
4195 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4196 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4197 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4199 /* If value was not generated in the target, store it there.
4200 Convert the value to TARGET's type first if necessary and emit the
4201 pending incrementations that have been queued when expanding EXP.
4202 Note that we cannot emit the whole queue blindly because this will
4203 effectively disable the POST_INC optimization later.
4205 If TEMP and TARGET compare equal according to rtx_equal_p, but
4206 one or both of them are volatile memory refs, we have to distinguish
4207 two cases:
4208 - expand_expr has used TARGET. In this case, we must not generate
4209 another copy. This can be detected by TARGET being equal to TEMP
4210 under pointer equality (==).
4211 - expand_expr has not used TARGET - that means that the source just
4212 happens to have the same RTX form. Since temp will have been created
4213 by expand_expr, it will compare unequal under pointer equality (==).
4214 We must generate a copy in this case, to reach the correct number
4215 of volatile memory references. */
4217 if ((! rtx_equal_p (temp, target)
4218 || (temp != target && (side_effects_p (temp)
4219 || side_effects_p (target))))
4220 && TREE_CODE (exp) != ERROR_MARK
4221 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4222 but TARGET is not valid memory reference, TEMP will differ
4223 from TARGET although it is really the same location. */
4224 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4225 /* If there's nothing to copy, don't bother. Don't call
4226 expr_size unless necessary, because some front ends' (C++)
4227 expr_size hook must not be given objects that are not
4228 supposed to be bit-copied or bit-initialized. */
4229 && expr_size (exp) != const0_rtx)
4231 if (GET_MODE (temp) != GET_MODE (target)
4232 && GET_MODE (temp) != VOIDmode)
4234 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4235 if (dont_return_target)
4237 /* In this case, we will return TEMP,
4238 so make sure it has the proper mode.
4239 But don't forget to store the value into TARGET. */
4240 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4241 emit_move_insn (target, temp);
4243 else
4244 convert_move (target, temp, unsignedp);
4247 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4249 /* Handle copying a string constant into an array. The string
4250 constant may be shorter than the array. So copy just the string's
4251 actual length, and clear the rest. First get the size of the data
4252 type of the string, which is actually the size of the target. */
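/* For a C initializer like char buf[8] = "hi"; this copies the
   string's 3 bytes (including the terminating NUL) and then clears
   the remaining 5 bytes of the array.  */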
4253 rtx size = expr_size (exp);
4255 if (GET_CODE (size) == CONST_INT
4256 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4257 emit_block_move (target, temp, size,
4258 (call_param_p
4259 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4260 else
4262 /* Compute the size of the data to copy from the string. */
4263 tree copy_size
4264 = size_binop (MIN_EXPR,
4265 make_tree (sizetype, size),
4266 size_int (TREE_STRING_LENGTH (exp)));
4267 rtx copy_size_rtx
4268 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4269 (call_param_p
4270 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4271 rtx label = 0;
4273 /* Copy that much. */
4274 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4275 TYPE_UNSIGNED (sizetype));
4276 emit_block_move (target, temp, copy_size_rtx,
4277 (call_param_p
4278 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4280 /* Figure out how much is left in TARGET that we have to clear.
4281 Do all calculations in ptr_mode. */
4282 if (GET_CODE (copy_size_rtx) == CONST_INT)
4284 size = plus_constant (size, -INTVAL (copy_size_rtx));
4285 target = adjust_address (target, BLKmode,
4286 INTVAL (copy_size_rtx));
4288 else
4290 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4291 copy_size_rtx, NULL_RTX, 0,
4292 OPTAB_LIB_WIDEN);
4294 #ifdef POINTERS_EXTEND_UNSIGNED
4295 if (GET_MODE (copy_size_rtx) != Pmode)
4296 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4297 TYPE_UNSIGNED (sizetype));
4298 #endif
4300 target = offset_address (target, copy_size_rtx,
4301 highest_pow2_factor (copy_size));
4302 label = gen_label_rtx ();
4303 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4304 GET_MODE (size), 0, label);
4307 if (size != const0_rtx)
4308 clear_storage (target, size);
4310 if (label)
4311 emit_label (label);
4314 /* Handle calls that return values in multiple non-contiguous locations.
4315 The Irix 6 ABI has examples of this. */
4316 else if (GET_CODE (target) == PARALLEL)
4317 emit_group_load (target, temp, TREE_TYPE (exp),
4318 int_size_in_bytes (TREE_TYPE (exp)));
4319 else if (GET_MODE (temp) == BLKmode)
4320 emit_block_move (target, temp, expr_size (exp),
4321 (call_param_p
4322 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4323 else
4325 temp = force_operand (temp, target);
4326 if (temp != target)
4327 emit_move_insn (target, temp);
4331 return NULL_RTX;
4334 /* Examine CTOR to discover:
4335 * how many scalar fields are set to nonzero values,
4336 and place it in *P_NZ_ELTS;
4337 * how many scalar fields are set to non-constant values,
4338 and place it in *P_NC_ELTS; and
4339 * how many scalar fields in total are in CTOR,
4340 and place it in *P_ELT_COUNT.
4341 * if a type is a union, and the initializer from the constructor
4342 is not the largest element in the union, then set *p_must_clear. */
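/* Illustrative: for "int a[4] = { 0, 3, x, 0 }" this computes
   *P_NZ_ELTS == 2 (the 3, and x, since a non-constant is counted
   as potentially nonzero), *P_NC_ELTS == 1 (just x) and
   *P_ELT_COUNT == 4.  */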
4344 static void
4345 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4346 HOST_WIDE_INT *p_nc_elts,
4347 HOST_WIDE_INT *p_elt_count,
4348 bool *p_must_clear)
4350 HOST_WIDE_INT nz_elts, nc_elts, elt_count;
4351 tree list;
4353 nz_elts = 0;
4354 nc_elts = 0;
4355 elt_count = 0;
4357 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4359 tree value = TREE_VALUE (list);
4360 tree purpose = TREE_PURPOSE (list);
4361 HOST_WIDE_INT mult;
4363 mult = 1;
4364 if (TREE_CODE (purpose) == RANGE_EXPR)
4366 tree lo_index = TREE_OPERAND (purpose, 0);
4367 tree hi_index = TREE_OPERAND (purpose, 1);
4369 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4370 mult = (tree_low_cst (hi_index, 1)
4371 - tree_low_cst (lo_index, 1) + 1);
4374 switch (TREE_CODE (value))
4376 case CONSTRUCTOR:
4378 HOST_WIDE_INT nz = 0, nc = 0, ic = 0;
4379 categorize_ctor_elements_1 (value, &nz, &nc, &ic, p_must_clear);
4380 nz_elts += mult * nz;
4381 nc_elts += mult * nc;
4382 elt_count += mult * ic;
4384 break;
4386 case INTEGER_CST:
4387 case REAL_CST:
4388 if (!initializer_zerop (value))
4389 nz_elts += mult;
4390 elt_count += mult;
4391 break;
4393 case STRING_CST:
4394 nz_elts += mult * TREE_STRING_LENGTH (value);
4395 elt_count += mult * TREE_STRING_LENGTH (value);
4396 break;
4398 case COMPLEX_CST:
4399 if (!initializer_zerop (TREE_REALPART (value)))
4400 nz_elts += mult;
4401 if (!initializer_zerop (TREE_IMAGPART (value)))
4402 nz_elts += mult;
4403 elt_count += mult;
4404 break;
4406 case VECTOR_CST:
4408 tree v;
4409 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4411 if (!initializer_zerop (TREE_VALUE (v)))
4412 nz_elts += mult;
4413 elt_count += mult;
4416 break;
4418 default:
4419 nz_elts += mult;
4420 elt_count += mult;
4421 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4422 nc_elts += mult;
4423 break;
4427 if (!*p_must_clear
4428 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4429 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4431 tree init_sub_type;
4432 bool clear_this = true;
4434 list = CONSTRUCTOR_ELTS (ctor);
4435 if (list)
4437 /* We don't expect more than one element of the union to be
4438 initialized. Not sure what we should do otherwise... */
4439 gcc_assert (TREE_CHAIN (list) == NULL);
4441 init_sub_type = TREE_TYPE (TREE_VALUE (list));
4443 /* ??? We could look at each element of the union, and find the
4444 largest element. Which would avoid comparing the size of the
4445 initialized element against any tail padding in the union.
4446 Doesn't seem worth the effort... */
4447 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4448 TYPE_SIZE (init_sub_type)) == 1)
4450 /* And now we have to find out if the element itself is fully
4451 constructed. E.g. for union { struct { int a, b; } s; } u
4452 = { .s = { .a = 1 } }. */
4453 if (elt_count == count_type_elements (init_sub_type))
4454 clear_this = false;
4458 *p_must_clear = clear_this;
4461 *p_nz_elts += nz_elts;
4462 *p_nc_elts += nc_elts;
4463 *p_elt_count += elt_count;
4466 void
4467 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4468 HOST_WIDE_INT *p_nc_elts,
4469 HOST_WIDE_INT *p_elt_count,
4470 bool *p_must_clear)
4472 *p_nz_elts = 0;
4473 *p_nc_elts = 0;
4474 *p_elt_count = 0;
4475 *p_must_clear = false;
4476 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count,
4477 p_must_clear);
4480 /* Count the number of scalars in TYPE. Return -1 on overflow or
4481 if TYPE is variable-sized. */
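/* E.g. "struct { int a; int b[3]; }" counts as 1 + 3 = 4 scalars,
   while a union is merely estimated from its size in words.  */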
4483 HOST_WIDE_INT
4484 count_type_elements (tree type)
4486 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4487 switch (TREE_CODE (type))
4489 case ARRAY_TYPE:
4491 tree telts = array_type_nelts (type);
4492 if (telts && host_integerp (telts, 1))
4494 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4495 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4496 if (n == 0)
4497 return 0;
4498 else if (max / n > m)
4499 return n * m;
4501 return -1;
4504 case RECORD_TYPE:
4506 HOST_WIDE_INT n = 0, t;
4507 tree f;
4509 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4510 if (TREE_CODE (f) == FIELD_DECL)
4512 t = count_type_elements (TREE_TYPE (f));
4513 if (t < 0)
4514 return -1;
4515 n += t;
4518 return n;
4521 case UNION_TYPE:
4522 case QUAL_UNION_TYPE:
4524 /* Ho hum. How in the world do we guess here? Clearly it isn't
4525 right to count the fields. Guess based on the number of words. */
4526 HOST_WIDE_INT n = int_size_in_bytes (type);
4527 if (n < 0)
4528 return -1;
4529 return n / UNITS_PER_WORD;
4532 case COMPLEX_TYPE:
4533 return 2;
4535 case VECTOR_TYPE:
4536 return TYPE_VECTOR_SUBPARTS (type);
4538 case INTEGER_TYPE:
4539 case REAL_TYPE:
4540 case ENUMERAL_TYPE:
4541 case BOOLEAN_TYPE:
4542 case CHAR_TYPE:
4543 case POINTER_TYPE:
4544 case OFFSET_TYPE:
4545 case REFERENCE_TYPE:
4546 return 1;
4548 case VOID_TYPE:
4549 case METHOD_TYPE:
4550 case FUNCTION_TYPE:
4551 case LANG_TYPE:
4552 default:
4553 gcc_unreachable ();
4557 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
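/* Illustrative: "int a[8] = { [3] = 1 };" has one nonzero element
   out of eight, below the 1/4 threshold, so the answer is 1.  */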
4559 static int
4560 mostly_zeros_p (tree exp)
4562 if (TREE_CODE (exp) == CONSTRUCTOR)
4565 HOST_WIDE_INT nz_elts, nc_elts, count, elts;
4566 bool must_clear;
4568 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4569 if (must_clear)
4570 return 1;
4572 elts = count_type_elements (TREE_TYPE (exp));
4574 return nz_elts < elts / 4;
4577 return initializer_zerop (exp);
4580 /* Helper function for store_constructor.
4581 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4582 TYPE is the type of the CONSTRUCTOR, not the element type.
4583 CLEARED is as for store_constructor.
4584 ALIAS_SET is the alias set to use for any stores.
4586 This provides a recursive shortcut back to store_constructor when it isn't
4587 necessary to go through store_field. This is so that we can pass through
4588 the cleared field to let store_constructor know that we may not have to
4589 clear a substructure if the outer structure has already been cleared. */
4591 static void
4592 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4593 HOST_WIDE_INT bitpos, enum machine_mode mode,
4594 tree exp, tree type, int cleared, int alias_set)
4596 if (TREE_CODE (exp) == CONSTRUCTOR
4597 /* We can only call store_constructor recursively if the size and
4598 bit position are on a byte boundary. */
4599 && bitpos % BITS_PER_UNIT == 0
4600 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4601 /* If we have a nonzero bitpos for a register target, then we just
4602 let store_field do the bitfield handling. This is unlikely to
4603 generate unnecessary clear instructions anyways. */
4604 && (bitpos == 0 || MEM_P (target)))
4606 if (MEM_P (target))
4607 target
4608 = adjust_address (target,
4609 GET_MODE (target) == BLKmode
4610 || 0 != (bitpos
4611 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4612 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4615 /* Update the alias set, if required. */
4616 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4617 && MEM_ALIAS_SET (target) != 0)
4619 target = copy_rtx (target);
4620 set_mem_alias_set (target, alias_set);
4623 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4625 else
4626 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4629 /* Store the value of constructor EXP into the rtx TARGET.
4630 TARGET is either a REG or a MEM; we know it cannot conflict, since
4631 safe_from_p has been called.
4632 CLEARED is true if TARGET is known to have been zero'd.
4633 SIZE is the number of bytes of TARGET we are allowed to modify: this
4634 may not be the same as the size of EXP if we are assigning to a field
4635 which has been packed to exclude padding bits. */
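/* For instance, "int a[100] = { 1 };" is emitted by clearing the
   whole object (99 of its 100 elements are zero) and then storing
   just the single nonzero element.  */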
4637 static void
4638 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4640 tree type = TREE_TYPE (exp);
4641 #ifdef WORD_REGISTER_OPERATIONS
4642 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4643 #endif
4645 switch (TREE_CODE (type))
4647 case RECORD_TYPE:
4648 case UNION_TYPE:
4649 case QUAL_UNION_TYPE:
4651 tree elt;
4653 /* If size is zero or the target is already cleared, do nothing. */
4654 if (size == 0 || cleared)
4655 cleared = 1;
4656 /* We either clear the aggregate or indicate the value is dead. */
4657 else if ((TREE_CODE (type) == UNION_TYPE
4658 || TREE_CODE (type) == QUAL_UNION_TYPE)
4659 && ! CONSTRUCTOR_ELTS (exp))
4660 /* If the constructor is empty, clear the union. */
4662 clear_storage (target, expr_size (exp));
4663 cleared = 1;
4666 /* If we are building a static constructor into a register,
4667 set the initial value as zero so we can fold the value into
4668 a constant. But if more than one register is involved,
4669 this probably loses. */
4670 else if (REG_P (target) && TREE_STATIC (exp)
4671 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4673 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4674 cleared = 1;
4677 /* If the constructor has fewer fields than the structure or
4678 if we are initializing the structure to mostly zeros, clear
4679 the whole structure first. Don't do this if TARGET is a
4680 register whose mode size isn't equal to SIZE since
4681 clear_storage can't handle this case. */
4682 else if (size > 0
4683 && ((list_length (CONSTRUCTOR_ELTS (exp))
4684 != fields_length (type))
4685 || mostly_zeros_p (exp))
4686 && (!REG_P (target)
4687 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4688 == size)))
4690 clear_storage (target, GEN_INT (size));
4691 cleared = 1;
4694 if (! cleared)
4695 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4697 /* Store each element of the constructor into the
4698 corresponding field of TARGET. */
4700 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4702 tree field = TREE_PURPOSE (elt);
4703 tree value = TREE_VALUE (elt);
4704 enum machine_mode mode;
4705 HOST_WIDE_INT bitsize;
4706 HOST_WIDE_INT bitpos = 0;
4707 tree offset;
4708 rtx to_rtx = target;
4710 /* Just ignore missing fields. We cleared the whole
4711 structure, above, if any fields are missing. */
4712 if (field == 0)
4713 continue;
4715 if (cleared && initializer_zerop (value))
4716 continue;
4718 if (host_integerp (DECL_SIZE (field), 1))
4719 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4720 else
4721 bitsize = -1;
4723 mode = DECL_MODE (field);
4724 if (DECL_BIT_FIELD (field))
4725 mode = VOIDmode;
4727 offset = DECL_FIELD_OFFSET (field);
4728 if (host_integerp (offset, 0)
4729 && host_integerp (bit_position (field), 0))
4731 bitpos = int_bit_position (field);
4732 offset = 0;
4734 else
4735 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4737 if (offset)
4739 rtx offset_rtx;
4741 offset
4742 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4743 make_tree (TREE_TYPE (exp),
4744 target));
4746 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4747 gcc_assert (MEM_P (to_rtx));
4749 #ifdef POINTERS_EXTEND_UNSIGNED
4750 if (GET_MODE (offset_rtx) != Pmode)
4751 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4752 #else
4753 if (GET_MODE (offset_rtx) != ptr_mode)
4754 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4755 #endif
4757 to_rtx = offset_address (to_rtx, offset_rtx,
4758 highest_pow2_factor (offset));
4761 #ifdef WORD_REGISTER_OPERATIONS
4762 /* If this initializes a field that is smaller than a
4763 word, at the start of a word, try to widen it to a full
4764 word. This special case allows us to output C++ member
4765 function initializations in a form that the optimizers
4766 can understand. */
4767 if (REG_P (target)
4768 && bitsize < BITS_PER_WORD
4769 && bitpos % BITS_PER_WORD == 0
4770 && GET_MODE_CLASS (mode) == MODE_INT
4771 && TREE_CODE (value) == INTEGER_CST
4772 && exp_size >= 0
4773 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4775 tree type = TREE_TYPE (value);
4777 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4779 type = lang_hooks.types.type_for_size
4780 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4781 value = convert (type, value);
4784 if (BYTES_BIG_ENDIAN)
4785 value
4786 = fold (build2 (LSHIFT_EXPR, type, value,
4787 build_int_cst (NULL_TREE,
4788 BITS_PER_WORD - bitsize)));
4789 bitsize = BITS_PER_WORD;
4790 mode = word_mode;
4792 #endif
4794 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4795 && DECL_NONADDRESSABLE_P (field))
4797 to_rtx = copy_rtx (to_rtx);
4798 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4801 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4802 value, type, cleared,
4803 get_alias_set (TREE_TYPE (field)));
4805 break;
4807 case ARRAY_TYPE:
4809 tree elt;
4810 int i;
4811 int need_to_clear;
4812 tree domain;
4813 tree elttype = TREE_TYPE (type);
4814 int const_bounds_p;
4815 HOST_WIDE_INT minelt = 0;
4816 HOST_WIDE_INT maxelt = 0;
4818 domain = TYPE_DOMAIN (type);
4819 const_bounds_p = (TYPE_MIN_VALUE (domain)
4820 && TYPE_MAX_VALUE (domain)
4821 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4822 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4824 /* If we have constant bounds for the range of the type, get them. */
4825 if (const_bounds_p)
4827 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4828 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4831 /* If the constructor has fewer elements than the array, clear
4832 the whole array first. Similarly if this is a static
4833 constructor of a non-BLKmode object. */
4834 if (cleared)
4835 need_to_clear = 0;
4836 else if (REG_P (target) && TREE_STATIC (exp))
4837 need_to_clear = 1;
4838 else
4840 HOST_WIDE_INT count = 0, zero_count = 0;
4841 need_to_clear = ! const_bounds_p;
4843 /* This loop is a more accurate version of the loop in
4844 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4845 is also needed to check for missing elements. */
4846 for (elt = CONSTRUCTOR_ELTS (exp);
4847 elt != NULL_TREE && ! need_to_clear;
4848 elt = TREE_CHAIN (elt))
4850 tree index = TREE_PURPOSE (elt);
4851 HOST_WIDE_INT this_node_count;
4853 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4855 tree lo_index = TREE_OPERAND (index, 0);
4856 tree hi_index = TREE_OPERAND (index, 1);
4858 if (! host_integerp (lo_index, 1)
4859 || ! host_integerp (hi_index, 1))
4861 need_to_clear = 1;
4862 break;
4865 this_node_count = (tree_low_cst (hi_index, 1)
4866 - tree_low_cst (lo_index, 1) + 1);
4868 else
4869 this_node_count = 1;
4871 count += this_node_count;
4872 if (mostly_zeros_p (TREE_VALUE (elt)))
4873 zero_count += this_node_count;
4876 /* Clear the entire array first if there are any missing
4877 elements, or if the incidence of zero elements is >=
4878 75%. */
4879 if (! need_to_clear
4880 && (count < maxelt - minelt + 1
4881 || 4 * zero_count >= 3 * count))
4882 need_to_clear = 1;
4885 if (need_to_clear && size > 0)
4887 if (REG_P (target))
4888 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4889 else
4890 clear_storage (target, GEN_INT (size));
4891 cleared = 1;
4894 if (!cleared && REG_P (target))
4895 /* Inform later passes that the old value is dead. */
4896 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4898 /* Store each element of the constructor into the
4899 corresponding element of TARGET, determined by counting the
4900 elements. */
4901 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4902 elt;
4903 elt = TREE_CHAIN (elt), i++)
4905 enum machine_mode mode;
4906 HOST_WIDE_INT bitsize;
4907 HOST_WIDE_INT bitpos;
4908 int unsignedp;
4909 tree value = TREE_VALUE (elt);
4910 tree index = TREE_PURPOSE (elt);
4911 rtx xtarget = target;
4913 if (cleared && initializer_zerop (value))
4914 continue;
4916 unsignedp = TYPE_UNSIGNED (elttype);
4917 mode = TYPE_MODE (elttype);
4918 if (mode == BLKmode)
4919 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4920 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4921 : -1);
4922 else
4923 bitsize = GET_MODE_BITSIZE (mode);
4925 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4927 tree lo_index = TREE_OPERAND (index, 0);
4928 tree hi_index = TREE_OPERAND (index, 1);
4929 rtx index_r, pos_rtx;
4930 HOST_WIDE_INT lo, hi, count;
4931 tree position;
4933 /* If the range is constant and "small", unroll the loop. */
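/* "Small" here means: the target is not in memory, or there are at
   most two iterations, or the range covers at most 40 bytes (40 * 8
   bits) of element data; anything larger gets the runtime loop.  */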
4934 if (const_bounds_p
4935 && host_integerp (lo_index, 0)
4936 && host_integerp (hi_index, 0)
4937 && (lo = tree_low_cst (lo_index, 0),
4938 hi = tree_low_cst (hi_index, 0),
4939 count = hi - lo + 1,
4940 (!MEM_P (target)
4941 || count <= 2
4942 || (host_integerp (TYPE_SIZE (elttype), 1)
4943 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4944 <= 40 * 8)))))
4946 lo -= minelt; hi -= minelt;
4947 for (; lo <= hi; lo++)
4949 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4951 if (MEM_P (target)
4952 && !MEM_KEEP_ALIAS_SET_P (target)
4953 && TREE_CODE (type) == ARRAY_TYPE
4954 && TYPE_NONALIASED_COMPONENT (type))
4956 target = copy_rtx (target);
4957 MEM_KEEP_ALIAS_SET_P (target) = 1;
4960 store_constructor_field
4961 (target, bitsize, bitpos, mode, value, type, cleared,
4962 get_alias_set (elttype));
4965 else
4967 rtx loop_start = gen_label_rtx ();
4968 rtx loop_end = gen_label_rtx ();
4969 tree exit_cond;
4971 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4972 unsignedp = TYPE_UNSIGNED (domain);
4974 index = build_decl (VAR_DECL, NULL_TREE, domain);
4976 index_r
4977 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4978 &unsignedp, 0));
4979 SET_DECL_RTL (index, index_r);
4980 store_expr (lo_index, index_r, 0);
4982 /* Build the head of the loop. */
4983 do_pending_stack_adjust ();
4984 emit_label (loop_start);
4986 /* Assign value to element index. */
4987 position
4988 = convert (ssizetype,
4989 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
4990 index, TYPE_MIN_VALUE (domain))));
4991 position = size_binop (MULT_EXPR, position,
4992 convert (ssizetype,
4993 TYPE_SIZE_UNIT (elttype)));
4995 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4996 xtarget = offset_address (target, pos_rtx,
4997 highest_pow2_factor (position));
4998 xtarget = adjust_address (xtarget, mode, 0);
4999 if (TREE_CODE (value) == CONSTRUCTOR)
5000 store_constructor (value, xtarget, cleared,
5001 bitsize / BITS_PER_UNIT);
5002 else
5003 store_expr (value, xtarget, 0);
5005 /* Generate a conditional jump to exit the loop. */
5006 exit_cond = build2 (LT_EXPR, integer_type_node,
5007 index, hi_index);
5008 jumpif (exit_cond, loop_end);
5010 /* Update the loop counter, and jump to the head of
5011 the loop. */
5012 expand_assignment (index,
5013 build2 (PLUS_EXPR, TREE_TYPE (index),
5014 index, integer_one_node));
5016 emit_jump (loop_start);
5018 /* Build the end of the loop. */
5019 emit_label (loop_end);
5022 else if ((index != 0 && ! host_integerp (index, 0))
5023 || ! host_integerp (TYPE_SIZE (elttype), 1))
5025 tree position;
5027 if (index == 0)
5028 index = ssize_int (1);
5030 if (minelt)
5031 index = fold_convert (ssizetype,
5032 fold (build2 (MINUS_EXPR,
5033 TREE_TYPE (index),
5034 index,
5035 TYPE_MIN_VALUE (domain))));
5037 position = size_binop (MULT_EXPR, index,
5038 convert (ssizetype,
5039 TYPE_SIZE_UNIT (elttype)));
5040 xtarget = offset_address (target,
5041 expand_expr (position, 0, VOIDmode, 0),
5042 highest_pow2_factor (position));
5043 xtarget = adjust_address (xtarget, mode, 0);
5044 store_expr (value, xtarget, 0);
5046 else
5048 if (index != 0)
5049 bitpos = ((tree_low_cst (index, 0) - minelt)
5050 * tree_low_cst (TYPE_SIZE (elttype), 1));
5051 else
5052 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5054 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5055 && TREE_CODE (type) == ARRAY_TYPE
5056 && TYPE_NONALIASED_COMPONENT (type))
5058 target = copy_rtx (target);
5059 MEM_KEEP_ALIAS_SET_P (target) = 1;
5061 store_constructor_field (target, bitsize, bitpos, mode, value,
5062 type, cleared, get_alias_set (elttype));
5065 break;
5068 case VECTOR_TYPE:
5070 tree elt;
5071 int i;
5072 int need_to_clear;
5073 int icode = 0;
5074 tree elttype = TREE_TYPE (type);
5075 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5076 enum machine_mode eltmode = TYPE_MODE (elttype);
5077 HOST_WIDE_INT bitsize;
5078 HOST_WIDE_INT bitpos;
5079 rtvec vector = NULL;
5080 unsigned n_elts;
5082 gcc_assert (eltmode != BLKmode);
5084 n_elts = TYPE_VECTOR_SUBPARTS (type);
5085 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5087 enum machine_mode mode = GET_MODE (target);
5089 icode = (int) vec_init_optab->handlers[mode].insn_code;
5090 if (icode != CODE_FOR_nothing)
5092 unsigned int i;
5094 vector = rtvec_alloc (n_elts);
5095 for (i = 0; i < n_elts; i++)
5096 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5100 /* If the constructor has fewer elements than the vector,
5101 clear the whole array first. Similarly if this is a static
5102 constructor of a non-BLKmode object. */
5103 if (cleared)
5104 need_to_clear = 0;
5105 else if (REG_P (target) && TREE_STATIC (exp))
5106 need_to_clear = 1;
5107 else
5109 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5111 for (elt = CONSTRUCTOR_ELTS (exp);
5112 elt != NULL_TREE;
5113 elt = TREE_CHAIN (elt))
5115 int n_elts_here = tree_low_cst
5116 (int_const_binop (TRUNC_DIV_EXPR,
5117 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
5118 TYPE_SIZE (elttype), 0), 1);
5120 count += n_elts_here;
5121 if (mostly_zeros_p (TREE_VALUE (elt)))
5122 zero_count += n_elts_here;
5125 /* Clear the entire vector first if there are any missing elements,
5126 or if the incidence of zero elements is >= 75%. */
5127 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5130 if (need_to_clear && size > 0 && !vector)
5132 if (REG_P (target))
5133 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5134 else
5135 clear_storage (target, GEN_INT (size));
5136 cleared = 1;
5139 if (!cleared && REG_P (target))
5140 /* Inform later passes that the old value is dead. */
5141 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5143 /* Store each element of the constructor into the corresponding
5144 element of TARGET, determined by counting the elements. */
5145 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5146 elt;
5147 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
5149 tree value = TREE_VALUE (elt);
5150 tree index = TREE_PURPOSE (elt);
5151 HOST_WIDE_INT eltpos;
5153 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5154 if (cleared && initializer_zerop (value))
5155 continue;
5157 if (index != 0)
5158 eltpos = tree_low_cst (index, 1);
5159 else
5160 eltpos = i;
5162 if (vector)
5164 /* Vector CONSTRUCTORs should only be built from smaller
5165 vectors in the case of BLKmode vectors. */
5166 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5167 RTVEC_ELT (vector, eltpos)
5168 = expand_expr (value, NULL_RTX, VOIDmode, 0);
5170 else
5172 enum machine_mode value_mode =
5173 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5174 ? TYPE_MODE (TREE_TYPE (value))
5175 : eltmode;
5176 bitpos = eltpos * elt_size;
5177 store_constructor_field (target, bitsize, bitpos,
5178 value_mode, value, type,
5179 cleared, get_alias_set (elttype));
5183 if (vector)
5184 emit_insn (GEN_FCN (icode)
5185 (target,
5186 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5187 break;
5190 default:
5191 gcc_unreachable ();
5195 /* Store the value of EXP (an expression tree)
5196 into a subfield of TARGET which has mode MODE and occupies
5197 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5198 If MODE is VOIDmode, it means that we are storing into a bit-field.
5200 Always return const0_rtx unless we have something particular to
5201 return.
5203 TYPE is the type of the underlying object,
5205 ALIAS_SET is the alias set for the destination. This value will
5206 (in general) be different from that for TARGET, since TARGET is a
5207 reference to the containing structure. */
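/* Illustrative: storing to B in "struct { int a : 3; int b : 5; }"
   arrives here with MODE == VOIDmode and BITSIZE == 5, so the value
   is inserted with store_bit_field below rather than with a plain
   memory store.  */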
5209 static rtx
5210 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5211 enum machine_mode mode, tree exp, tree type, int alias_set)
5213 HOST_WIDE_INT width_mask = 0;
5215 if (TREE_CODE (exp) == ERROR_MARK)
5216 return const0_rtx;
5218 /* If we have nothing to store, do nothing unless the expression has
5219 side-effects. */
5220 if (bitsize == 0)
5221 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5222 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5223 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5225 /* If we are storing into an unaligned field of an aligned union that is
5226 in a register, we may have the mode of TARGET being an integer mode but
5227 MODE == BLKmode. In that case, get an aligned object whose size and
5228 alignment are the same as TARGET and store TARGET into it (we can avoid
5229 the store if the field being stored is the entire width of TARGET). Then
5230 call ourselves recursively to store the field into a BLKmode version of
5231 that object. Finally, load from the object into TARGET. This is not
5232 very efficient in general, but should only be slightly more expensive
5233 than the otherwise-required unaligned accesses. Perhaps this can be
5234 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5235 twice, once with emit_move_insn and once via store_field. */
5237 if (mode == BLKmode
5238 && (REG_P (target) || GET_CODE (target) == SUBREG))
5240 rtx object = assign_temp (type, 0, 1, 1);
5241 rtx blk_object = adjust_address (object, BLKmode, 0);
5243 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5244 emit_move_insn (object, target);
5246 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5248 emit_move_insn (target, object);
5250 /* We want to return the BLKmode version of the data. */
5251 return blk_object;
5254 if (GET_CODE (target) == CONCAT)
5256 /* We're storing into a struct containing a single __complex. */
5258 gcc_assert (!bitpos);
5259 return store_expr (exp, target, 0);
5262 /* If the structure is in a register or if the component
5263 is a bit field, we cannot use addressing to access it.
5264 Use bit-field techniques or SUBREG to store in it. */
5266 if (mode == VOIDmode
5267 || (mode != BLKmode && ! direct_store[(int) mode]
5268 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5269 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5270 || REG_P (target)
5271 || GET_CODE (target) == SUBREG
5272 /* If the field isn't aligned enough to store as an ordinary memref,
5273 store it as a bit field. */
5274 || (mode != BLKmode
5275 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5276 || bitpos % GET_MODE_ALIGNMENT (mode))
5277 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5278 || (bitpos % BITS_PER_UNIT != 0)))
5279 /* If the RHS and field are a constant size and the size of the
5280 RHS isn't the same size as the bitfield, we must use bitfield
5281 operations. */
5282 || (bitsize >= 0
5283 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5284 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5286 rtx temp;
5288 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5289 implies a mask operation. If the precision is the same size as
5290 the field we're storing into, that mask is redundant. This is
5291 particularly common with bit field assignments generated by the
5292 C front end. */
5293 if (TREE_CODE (exp) == NOP_EXPR)
5295 tree type = TREE_TYPE (exp);
5296 if (INTEGRAL_TYPE_P (type)
5297 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5298 && bitsize == TYPE_PRECISION (type))
5300 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5301 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5302 exp = TREE_OPERAND (exp, 0);
5306 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5308 /* If BITSIZE is narrower than the size of the type of EXP
5309 we will be narrowing TEMP. Normally, what's wanted are the
5310 low-order bits. However, if EXP's type is a record and this is
5311 a big-endian machine, we want the upper BITSIZE bits.
5312 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5313 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5314 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5315 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5316 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5317 - bitsize),
5318 NULL_RTX, 1);
5320 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5321 MODE. */
5322 if (mode != VOIDmode && mode != BLKmode
5323 && mode != TYPE_MODE (TREE_TYPE (exp)))
5324 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5326 /* If the modes of TARGET and TEMP are both BLKmode, both
5327 must be in memory and BITPOS must be aligned on a byte
5328 boundary. If so, we simply do a block copy. */
5329 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5331 gcc_assert (MEM_P (target) && MEM_P (temp)
5332 && !(bitpos % BITS_PER_UNIT));
5334 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5335 emit_block_move (target, temp,
5336 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5337 / BITS_PER_UNIT),
5338 BLOCK_OP_NORMAL);
5340 return const0_rtx;
5343 /* Store the value in the bitfield. */
5344 store_bit_field (target, bitsize, bitpos, mode, temp);
5346 return const0_rtx;
5348 else
5350 /* Now build a reference to just the desired component. */
5351 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5353 if (to_rtx == target)
5354 to_rtx = copy_rtx (to_rtx);
5356 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5357 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5358 set_mem_alias_set (to_rtx, alias_set);
5360 return store_expr (exp, to_rtx, 0);
5364 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5365 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5366 codes and find the ultimate containing object, which we return.
5368 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5369 bit position, and *PUNSIGNEDP to the signedness of the field.
5370 If the position of the field is variable, we store a tree
5371 giving the variable offset (in units) in *POFFSET.
5372 This offset is in addition to the bit position.
5373 If the position is not variable, we store 0 in *POFFSET.
5375 If any of the extraction expressions is volatile,
5376 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5378 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5379 is a mode that can be used to access the field. In that case, *PBITSIZE
5380 is redundant.
5382 If the field describes a variable-sized object, *PMODE is set to
5383 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5384 this case, but the address of the object can be found.
5386 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5387 look through nodes that serve as markers of a greater alignment than
5388 the one that can be deduced from the expression. These nodes make it
5389 possible for front-ends to prevent temporaries from being created by
5390 the middle-end on alignment considerations. For that purpose, the
5391 normal operating mode at high-level is to always pass FALSE so that
5392 the ultimate containing object is really returned; moreover, the
5393 associated predicate handled_component_p will always return TRUE
5394 on these nodes, thus indicating that they are essentially handled
5395 by get_inner_reference. TRUE should only be passed when the caller
5396 is scanning the expression in order to build another representation
5397 and specifically knows how to handle these nodes; as such, this is
5398 the normal operating mode in the RTL expanders. */
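/* A worked example: for "s.f[i].g" the loop below peels the
   COMPONENT_REF for G, the ARRAY_REF for [i] and the COMPONENT_REF
   for F in turn, accumulating the byte offsets (including the
   variable part i * element-size) in OFFSET and the bit offsets in
   BIT_OFFSET; the containing object returned is "s".  */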
5400 tree
5401 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5402 HOST_WIDE_INT *pbitpos, tree *poffset,
5403 enum machine_mode *pmode, int *punsignedp,
5404 int *pvolatilep, bool keep_aligning)
5406 tree size_tree = 0;
5407 enum machine_mode mode = VOIDmode;
5408 tree offset = size_zero_node;
5409 tree bit_offset = bitsize_zero_node;
5410 tree tem;
5412 /* First get the mode, signedness, and size. We do this from just the
5413 outermost expression. */
5414 if (TREE_CODE (exp) == COMPONENT_REF)
5416 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5417 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5418 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5420 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5422 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5424 size_tree = TREE_OPERAND (exp, 1);
5425 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5427 else
5429 mode = TYPE_MODE (TREE_TYPE (exp));
5430 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5432 if (mode == BLKmode)
5433 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5434 else
5435 *pbitsize = GET_MODE_BITSIZE (mode);
5438 if (size_tree != 0)
5440 if (! host_integerp (size_tree, 1))
5441 mode = BLKmode, *pbitsize = -1;
5442 else
5443 *pbitsize = tree_low_cst (size_tree, 1);
5446 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5447 and find the ultimate containing object. */
5448 while (1)
5450 switch (TREE_CODE (exp))
5452 case BIT_FIELD_REF:
5453 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5454 TREE_OPERAND (exp, 2));
5455 break;
5457 case COMPONENT_REF:
5459 tree field = TREE_OPERAND (exp, 1);
5460 tree this_offset = component_ref_field_offset (exp);
5462 /* If this field hasn't been filled in yet, don't go past it.
5463 This should only happen when folding expressions made during
5464 type construction. */
5465 if (this_offset == 0)
5466 break;
5468 offset = size_binop (PLUS_EXPR, offset, this_offset);
5469 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5470 DECL_FIELD_BIT_OFFSET (field));
5472 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5474 break;
5476 case ARRAY_REF:
5477 case ARRAY_RANGE_REF:
5479 tree index = TREE_OPERAND (exp, 1);
5480 tree low_bound = array_ref_low_bound (exp);
5481 tree unit_size = array_ref_element_size (exp);
5483 /* We assume all arrays have sizes that are a multiple of a byte.
5484 First subtract the lower bound, if any, in the type of the
5485 index, then convert to sizetype and multiply by the size of
5486 the array element. */
5487 if (! integer_zerop (low_bound))
5488 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
5489 index, low_bound));
5491 offset = size_binop (PLUS_EXPR, offset,
5492 size_binop (MULT_EXPR,
5493 convert (sizetype, index),
5494 unit_size));
5496 break;
5498 case REALPART_EXPR:
5499 break;
5501 case IMAGPART_EXPR:
5502 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5503 bitsize_int (*pbitsize));
5504 break;
5506 case VIEW_CONVERT_EXPR:
5507 if (keep_aligning && STRICT_ALIGNMENT
5508 && (TYPE_ALIGN (TREE_TYPE (exp))
5509 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5510 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5511 < BIGGEST_ALIGNMENT)
5512 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5513 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5514 goto done;
5515 break;
5517 default:
5518 goto done;
5521 /* If any reference in the chain is volatile, the effect is volatile. */
5522 if (TREE_THIS_VOLATILE (exp))
5523 *pvolatilep = 1;
5525 exp = TREE_OPERAND (exp, 0);
5527 done:
5529 /* If OFFSET is constant, see if we can return the whole thing as a
5530 constant bit position. Otherwise, split it up. */
5531 if (host_integerp (offset, 0)
5532 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5533 bitsize_unit_node))
5534 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5535 && host_integerp (tem, 0))
5536 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5537 else
5538 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5540 *pmode = mode;
5541 return exp;
5544 /* Return a tree of sizetype representing the size, in bytes, of the element
5545 of EXP, an ARRAY_REF. */
5547 tree
5548 array_ref_element_size (tree exp)
5550 tree aligned_size = TREE_OPERAND (exp, 3);
5551 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5553 /* If a size was specified in the ARRAY_REF, it's the size measured
5554 in alignment units of the element type. So multiply by that value. */
5555 if (aligned_size)
5557 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5558 sizetype from another type of the same width and signedness. */
5559 if (TREE_TYPE (aligned_size) != sizetype)
5560 aligned_size = fold_convert (sizetype, aligned_size);
5561 return size_binop (MULT_EXPR, aligned_size,
5562 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5565 /* Otherwise, take the size from that of the element type. Substitute
5566 any PLACEHOLDER_EXPR that we have. */
5567 else
5568 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5571 /* Return a tree representing the lower bound of the array mentioned in
5572 EXP, an ARRAY_REF. */
5574 tree
5575 array_ref_low_bound (tree exp)
5577 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5579 /* If a lower bound is specified in EXP, use it. */
5580 if (TREE_OPERAND (exp, 2))
5581 return TREE_OPERAND (exp, 2);
5583 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5584 substituting for a PLACEHOLDER_EXPR as needed. */
5585 if (domain_type && TYPE_MIN_VALUE (domain_type))
5586 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5588 /* Otherwise, return a zero of the appropriate type. */
5589 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5592 /* Return a tree representing the upper bound of the array mentioned in
5593 EXP, an ARRAY_REF. */
5595 tree
5596 array_ref_up_bound (tree exp)
5598 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5600 /* If there is a domain type and it has an upper bound, use it, substituting
5601 for a PLACEHOLDER_EXPR as needed. */
5602 if (domain_type && TYPE_MAX_VALUE (domain_type))
5603 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5605 /* Otherwise fail. */
5606 return NULL_TREE;
5609 /* Return a tree representing the offset, in bytes, of the field referenced
5610 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5612 tree
5613 component_ref_field_offset (tree exp)
5615 tree aligned_offset = TREE_OPERAND (exp, 2);
5616 tree field = TREE_OPERAND (exp, 1);
5618 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5619 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5620 value. */
5621 if (aligned_offset)
5623 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5624 sizetype from another type of the same width and signedness. */
5625 if (TREE_TYPE (aligned_offset) != sizetype)
5626 aligned_offset = fold_convert (sizetype, aligned_offset);
5627 return size_binop (MULT_EXPR, aligned_offset,
5628 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5631 /* Otherwise, take the offset from that of the field. Substitute
5632 any PLACEHOLDER_EXPR that we have. */
5633 else
5634 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5637 /* Return 1 if T is an expression that get_inner_reference handles. */
5639 int
5640 handled_component_p (tree t)
5642 switch (TREE_CODE (t))
5644 case BIT_FIELD_REF:
5645 case COMPONENT_REF:
5646 case ARRAY_REF:
5647 case ARRAY_RANGE_REF:
5648 case VIEW_CONVERT_EXPR:
5649 case REALPART_EXPR:
5650 case IMAGPART_EXPR:
5651 return 1;
5653 default:
5654 return 0;
5658 /* Given an rtx VALUE that may contain additions and multiplications, return
5659 an equivalent value that just refers to a register, memory, or constant.
5660 This is done by generating instructions to perform the arithmetic and
5661 returning a pseudo-register containing the value.
5663 The returned value may be a REG, SUBREG, MEM or constant. */
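/* E.g., given (plus (reg 60) (const_int 8)), an add insn is emitted and
a pseudo register holding the sum is returned; a value that is already
a REG, MEM or constant comes back unchanged. */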
5665 rtx
5666 force_operand (rtx value, rtx target)
5668 rtx op1, op2;
5669 /* Use subtarget as the target for operand 0 of a binary operation. */
5670 rtx subtarget = get_subtarget (target);
5671 enum rtx_code code = GET_CODE (value);
5673 /* Check for subreg applied to an expression produced by loop optimizer. */
5674 if (code == SUBREG
5675 && !REG_P (SUBREG_REG (value))
5676 && !MEM_P (SUBREG_REG (value)))
5678 value = simplify_gen_subreg (GET_MODE (value),
5679 force_reg (GET_MODE (SUBREG_REG (value)),
5680 force_operand (SUBREG_REG (value),
5681 NULL_RTX)),
5682 GET_MODE (SUBREG_REG (value)),
5683 SUBREG_BYTE (value));
5684 code = GET_CODE (value);
5687 /* Check for a PIC address load. */
5688 if ((code == PLUS || code == MINUS)
5689 && XEXP (value, 0) == pic_offset_table_rtx
5690 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5691 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5692 || GET_CODE (XEXP (value, 1)) == CONST))
5694 if (!subtarget)
5695 subtarget = gen_reg_rtx (GET_MODE (value));
5696 emit_move_insn (subtarget, value);
5697 return subtarget;
5700 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5702 if (!target)
5703 target = gen_reg_rtx (GET_MODE (value));
5704 convert_move (target, force_operand (XEXP (value, 0), NULL),
5705 code == ZERO_EXTEND);
5706 return target;
5709 if (ARITHMETIC_P (value))
5711 op2 = XEXP (value, 1);
5712 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5713 subtarget = 0;
5714 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5716 code = PLUS;
5717 op2 = negate_rtx (GET_MODE (value), op2);
5720 /* Check for an addition with OP2 a constant integer and our first
5721 operand a PLUS of a virtual register and something else. In that
5722 case, we want to emit the sum of the virtual register and the
5723 constant first and then add the other value. This allows virtual
5724 register instantiation to simply modify the constant rather than
5725 creating another one around this addition. */
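/* E.g., (plus (plus (reg virtual-stack-vars) (reg 60)) (const_int 8))
is emitted as virtual-stack-vars + 8 first, then + reg 60, so that
instantiation can fold the 8 into the soft frame offset. */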
5726 if (code == PLUS && GET_CODE (op2) == CONST_INT
5727 && GET_CODE (XEXP (value, 0)) == PLUS
5728 && REG_P (XEXP (XEXP (value, 0), 0))
5729 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5730 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5732 rtx temp = expand_simple_binop (GET_MODE (value), code,
5733 XEXP (XEXP (value, 0), 0), op2,
5734 subtarget, 0, OPTAB_LIB_WIDEN);
5735 return expand_simple_binop (GET_MODE (value), code, temp,
5736 force_operand (XEXP (XEXP (value,
5737 0), 1), 0),
5738 target, 0, OPTAB_LIB_WIDEN);
5741 op1 = force_operand (XEXP (value, 0), subtarget);
5742 op2 = force_operand (op2, NULL_RTX);
5743 switch (code)
5745 case MULT:
5746 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5747 case DIV:
5748 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5749 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5750 target, 1, OPTAB_LIB_WIDEN);
5751 else
5752 return expand_divmod (0,
5753 FLOAT_MODE_P (GET_MODE (value))
5754 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5755 GET_MODE (value), op1, op2, target, 0);
5756 break;
5757 case MOD:
5758 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5759 target, 0);
5760 break;
5761 case UDIV:
5762 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5763 target, 1);
5764 break;
5765 case UMOD:
5766 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5767 target, 1);
5768 break;
5769 case ASHIFTRT:
5770 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5771 target, 0, OPTAB_LIB_WIDEN);
5772 break;
5773 default:
5774 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5775 target, 1, OPTAB_LIB_WIDEN);
5778 if (UNARY_P (value))
5780 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5781 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5784 #ifdef INSN_SCHEDULING
5785 /* On machines that have insn scheduling, we want all memory references to be
5786 explicit, so we need to deal with such paradoxical SUBREGs. */
5787 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5788 && (GET_MODE_SIZE (GET_MODE (value))
5789 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5790 value
5791 = simplify_gen_subreg (GET_MODE (value),
5792 force_reg (GET_MODE (SUBREG_REG (value)),
5793 force_operand (SUBREG_REG (value),
5794 NULL_RTX)),
5795 GET_MODE (SUBREG_REG (value)),
5796 SUBREG_BYTE (value));
5797 #endif
5799 return value;
5802 /* Subroutine of expand_expr: return nonzero iff there is no way that
5803 EXP can reference X, which is being modified. TOP_P is nonzero if this
5804 call is going to be used to determine whether we need a temporary
5805 for EXP, as opposed to a recursive call to this function.
5807 It is always safe for this routine to return zero since it merely
5808 searches for optimization opportunities. */
5810 static int
5811 safe_from_p (rtx x, tree exp, int top_p)
5813 rtx exp_rtl = 0;
5814 int i, nops;
5816 if (x == 0
5817 /* If EXP has varying size, we MUST use a target since we currently
5818 have no way of allocating temporaries of variable size
5819 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5820 So we assume here that something at a higher level has prevented a
5821 clash. This is somewhat bogus, but the best we can do. Only
5822 do this when X is BLKmode and when we are at the top level. */
5823 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5824 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5825 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5826 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5827 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5828 != INTEGER_CST)
5829 && GET_MODE (x) == BLKmode)
5830 /* If X is in the outgoing argument area, it is always safe. */
5831 || (MEM_P (x)
5832 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5833 || (GET_CODE (XEXP (x, 0)) == PLUS
5834 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5835 return 1;
5837 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5838 find the underlying pseudo. */
5839 if (GET_CODE (x) == SUBREG)
5841 x = SUBREG_REG (x);
5842 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5843 return 0;
5846 /* Now look at our tree code and possibly recurse. */
5847 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5849 case tcc_declaration:
5850 exp_rtl = DECL_RTL_IF_SET (exp);
5851 break;
5853 case tcc_constant:
5854 return 1;
5856 case tcc_exceptional:
5857 if (TREE_CODE (exp) == TREE_LIST)
5859 while (1)
5861 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5862 return 0;
5863 exp = TREE_CHAIN (exp);
5864 if (!exp)
5865 return 1;
5866 if (TREE_CODE (exp) != TREE_LIST)
5867 return safe_from_p (x, exp, 0);
5870 else if (TREE_CODE (exp) == ERROR_MARK)
5871 return 1; /* An already-visited SAVE_EXPR? */
5872 else
5873 return 0;
5875 case tcc_statement:
5876 /* The only case we look at here is the DECL_INITIAL inside a
5877 DECL_EXPR. */
5878 return (TREE_CODE (exp) != DECL_EXPR
5879 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5880 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5881 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5883 case tcc_binary:
5884 case tcc_comparison:
5885 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5886 return 0;
5887 /* Fall through. */
5889 case tcc_unary:
5890 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5892 case tcc_expression:
5893 case tcc_reference:
5894 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5895 the expression. If it is set, we conflict iff we are that rtx or
5896 both are in memory. Otherwise, we check all operands of the
5897 expression recursively. */
5899 switch (TREE_CODE (exp))
5901 case ADDR_EXPR:
5902 /* If the operand is static or we are static, we can't conflict.
5903 Likewise if we don't conflict with the operand at all. */
5904 if (staticp (TREE_OPERAND (exp, 0))
5905 || TREE_STATIC (exp)
5906 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5907 return 1;
5909 /* Otherwise, the only way this can conflict is if we are taking
5910 the address of a DECL whose address is part of X, which is
5911 very rare. */
5912 exp = TREE_OPERAND (exp, 0);
5913 if (DECL_P (exp))
5915 if (!DECL_RTL_SET_P (exp)
5916 || !MEM_P (DECL_RTL (exp)))
5917 return 0;
5918 else
5919 exp_rtl = XEXP (DECL_RTL (exp), 0);
5921 break;
5923 case MISALIGNED_INDIRECT_REF:
5924 case ALIGN_INDIRECT_REF:
5925 case INDIRECT_REF:
5926 if (MEM_P (x)
5927 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5928 get_alias_set (exp)))
5929 return 0;
5930 break;
5932 case CALL_EXPR:
5933 /* Assume that the call will clobber all hard registers and
5934 all of memory. */
5935 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5936 || MEM_P (x))
5937 return 0;
5938 break;
5940 case WITH_CLEANUP_EXPR:
5941 case CLEANUP_POINT_EXPR:
5942 /* Lowered by gimplify.c. */
5943 gcc_unreachable ();
5945 case SAVE_EXPR:
5946 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5948 default:
5949 break;
5952 /* If we have an rtx, we do not need to scan our operands. */
5953 if (exp_rtl)
5954 break;
5956 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
5957 for (i = 0; i < nops; i++)
5958 if (TREE_OPERAND (exp, i) != 0
5959 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5960 return 0;
5962 /* If this is a language-specific tree code, it may require
5963 special handling. */
5964 if ((unsigned int) TREE_CODE (exp)
5965 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5966 && !lang_hooks.safe_from_p (x, exp))
5967 return 0;
5968 break;
5970 case tcc_type:
5971 /* Should never get a type here. */
5972 gcc_unreachable ();
5975 /* If we have an rtl, find any enclosed object. Then see if we conflict
5976 with it. */
5977 if (exp_rtl)
5979 if (GET_CODE (exp_rtl) == SUBREG)
5981 exp_rtl = SUBREG_REG (exp_rtl);
5982 if (REG_P (exp_rtl)
5983 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5984 return 0;
5987 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5988 are memory and they conflict. */
5989 return ! (rtx_equal_p (x, exp_rtl)
5990 || (MEM_P (x) && MEM_P (exp_rtl)
5991 && true_dependence (exp_rtl, VOIDmode, x,
5992 rtx_addr_varies_p)));
5995 /* If we reach here, it is safe. */
5996 return 1;
6000 /* Return the highest power of two that EXP is known to be a multiple of.
6001 This is used in updating alignment of MEMs in array references. */
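/* E.g., for i * 4 + 16 the result is MIN (4, 16) == 4; for a term with
no known factor, such as a plain variable, the conservative answer is 1. */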
6003 static unsigned HOST_WIDE_INT
6004 highest_pow2_factor (tree exp)
6006 unsigned HOST_WIDE_INT c0, c1;
6008 switch (TREE_CODE (exp))
6010 case INTEGER_CST:
6011 /* We can find the lowest bit that's a one. If the low
6012 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6013 We need to handle this case since we can find it in a COND_EXPR,
6014 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6015 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6016 later ICE. */
6017 if (TREE_CONSTANT_OVERFLOW (exp))
6018 return BIGGEST_ALIGNMENT;
6019 else
6021 /* Note: tree_low_cst is intentionally not used here,
6022 we don't care about the upper bits. */
6023 c0 = TREE_INT_CST_LOW (exp);
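/* C0 & -C0 isolates the lowest set bit, e.g. 24 & -24 == 8. */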
6024 c0 &= -c0;
6025 return c0 ? c0 : BIGGEST_ALIGNMENT;
6027 break;
6029 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6030 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6031 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6032 return MIN (c0, c1);
6034 case MULT_EXPR:
6035 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6036 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6037 return c0 * c1;
6039 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6040 case CEIL_DIV_EXPR:
6041 if (integer_pow2p (TREE_OPERAND (exp, 1))
6042 && host_integerp (TREE_OPERAND (exp, 1), 1))
6044 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6045 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6046 return MAX (1, c0 / c1);
6048 break;
6050 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6051 case SAVE_EXPR:
6052 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6054 case COMPOUND_EXPR:
6055 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6057 case COND_EXPR:
6058 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6059 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6060 return MIN (c0, c1);
6062 default:
6063 break;
6066 return 1;
6069 /* Similar, except that the alignment requirements of TARGET are
6070 taken into account. Assume it is at least as aligned as its
6071 type, unless it is a COMPONENT_REF in which case the layout of
6072 the structure gives the alignment. */
6074 static unsigned HOST_WIDE_INT
6075 highest_pow2_factor_for_target (tree target, tree exp)
6077 unsigned HOST_WIDE_INT target_align, factor;
6079 factor = highest_pow2_factor (exp);
6080 if (TREE_CODE (target) == COMPONENT_REF)
6081 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6082 else
6083 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6084 return MAX (factor, target_align);
6087 /* Expands variable VAR. */
6089 void
6090 expand_var (tree var)
6092 if (DECL_EXTERNAL (var))
6093 return;
6095 if (TREE_STATIC (var))
6096 /* If this is an inlined copy of a static local variable,
6097 look up the original decl. */
6098 var = DECL_ORIGIN (var);
6100 if (TREE_STATIC (var)
6101 ? !TREE_ASM_WRITTEN (var)
6102 : !DECL_RTL_SET_P (var))
6104 if (TREE_CODE (var) == VAR_DECL && DECL_VALUE_EXPR (var))
6105 /* Should be ignored. */;
6106 else if (lang_hooks.expand_decl (var))
6107 /* OK. */;
6108 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6109 expand_decl (var);
6110 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6111 rest_of_decl_compilation (var, 0, 0);
6112 else
6113 /* No expansion needed. */
6114 gcc_assert (TREE_CODE (var) == TYPE_DECL
6115 || TREE_CODE (var) == CONST_DECL
6116 || TREE_CODE (var) == FUNCTION_DECL
6117 || TREE_CODE (var) == LABEL_DECL);
6121 /* Subroutine of expand_expr. Expand the two operands of a binary
6122 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6123 The value may be stored in TARGET if TARGET is nonzero. The
6124 MODIFIER argument is as documented by expand_expr. */
6126 static void
6127 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6128 enum expand_modifier modifier)
6130 if (! safe_from_p (target, exp1, 1))
6131 target = 0;
6132 if (operand_equal_p (exp0, exp1, 0))
6134 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6135 *op1 = copy_rtx (*op0);
6137 else
6139 /* If we need to preserve evaluation order, copy exp0 into its own
6140 temporary variable so that it can't be clobbered by exp1. */
6141 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6142 exp0 = save_expr (exp0);
6143 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6144 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6149 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6150 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6152 static rtx
6153 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6154 enum expand_modifier modifier)
6156 rtx result, subtarget;
6157 tree inner, offset;
6158 HOST_WIDE_INT bitsize, bitpos;
6159 int volatilep, unsignedp;
6160 enum machine_mode mode1;
6162 /* If we are taking the address of a constant and are at the top level,
6163 we have to use output_constant_def since we can't call force_const_mem
6164 at top level. */
6165 /* ??? This should be considered a front-end bug. We should not be
6166 generating ADDR_EXPR of something that isn't an LVALUE. The only
6167 exception here is STRING_CST. */
6168 if (TREE_CODE (exp) == CONSTRUCTOR
6169 || CONSTANT_CLASS_P (exp))
6170 return XEXP (output_constant_def (exp, 0), 0);
6172 /* Everything must be something allowed by is_gimple_addressable. */
6173 switch (TREE_CODE (exp))
6175 case INDIRECT_REF:
6176 /* This case will happen via recursion for &a->b. */
6177 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
6179 case CONST_DECL:
6180 /* Recurse and make the output_constant_def clause above handle this. */
6181 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6182 tmode, modifier);
6184 case REALPART_EXPR:
6185 /* The real part of the complex number is always first; therefore
6186 the address is the same as the address of the parent object. */
6187 offset = 0;
6188 bitpos = 0;
6189 inner = TREE_OPERAND (exp, 0);
6190 break;
6192 case IMAGPART_EXPR:
6193 /* The imaginary part of the complex number is always second.
6194 The expression is therefore always offset by the size of the
6195 scalar type. */
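/* E.g., for a _Complex double the offset is GET_MODE_BITSIZE (DFmode),
typically 64 bits. */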
6196 offset = 0;
6197 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6198 inner = TREE_OPERAND (exp, 0);
6199 break;
6201 default:
6202 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6203 expand_expr, as that can have various side effects; LABEL_DECLs for
6204 example, may not have their DECL_RTL set yet. Assume language
6205 specific tree nodes can be expanded in some interesting way. */
6206 if (DECL_P (exp)
6207 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6209 result = expand_expr (exp, target, tmode,
6210 modifier == EXPAND_INITIALIZER
6211 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6213 /* If the DECL isn't in memory, then the DECL wasn't properly
6214 marked TREE_ADDRESSABLE, which will be either a front-end
6215 or a tree optimizer bug. */
6216 gcc_assert (GET_CODE (result) == MEM);
6217 result = XEXP (result, 0);
6219 /* ??? Is this needed anymore? */
6220 if (DECL_P (exp) && ! TREE_USED (exp))
6222 assemble_external (exp);
6223 TREE_USED (exp) = 1;
6226 if (modifier != EXPAND_INITIALIZER
6227 && modifier != EXPAND_CONST_ADDRESS)
6228 result = force_operand (result, target);
6229 return result;
6232 /* Pass FALSE as the last argument to get_inner_reference although
6233 we are expanding to RTL. The rationale is that we know how to
6234 handle "aligning nodes" here: we can just bypass them because
6235 they won't change the final object whose address will be returned
6236 (they actually exist only for that purpose). */
6237 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6238 &mode1, &unsignedp, &volatilep, false);
6239 break;
6242 /* We must have made progress. */
6243 gcc_assert (inner != exp);
6245 subtarget = offset || bitpos ? NULL_RTX : target;
6246 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6248 if (offset)
6250 rtx tmp;
6252 if (modifier != EXPAND_NORMAL)
6253 result = force_operand (result, NULL);
6254 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6256 result = convert_memory_address (tmode, result);
6257 tmp = convert_memory_address (tmode, tmp);
6259 if (modifier == EXPAND_SUM)
6260 result = gen_rtx_PLUS (tmode, result, tmp);
6261 else
6263 subtarget = bitpos ? NULL_RTX : target;
6264 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6265 1, OPTAB_LIB_WIDEN);
6269 if (bitpos)
6271 /* Someone beforehand should have rejected taking the address
6272 of such an object. */
6273 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6275 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6276 if (modifier < EXPAND_SUM)
6277 result = force_operand (result, target);
6280 return result;
6283 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6284 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6286 static rtx
6287 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6288 enum expand_modifier modifier)
6290 enum machine_mode rmode;
6291 rtx result;
6293 /* Target mode of VOIDmode says "whatever's natural". */
6294 if (tmode == VOIDmode)
6295 tmode = TYPE_MODE (TREE_TYPE (exp));
6297 /* We can get called with some Weird Things if the user does silliness
6298 like "(short) &a". In that case, convert_memory_address won't do
6299 the right thing, so ignore the given target mode. */
6300 if (tmode != Pmode && tmode != ptr_mode)
6301 tmode = Pmode;
6303 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6304 tmode, modifier);
6306 /* Despite expand_expr's claims concerning ignoring TMODE when not
6307 strictly convenient, stuff breaks if we don't honor it. Note
6308 that combined with the above, we only do this for pointer modes. */
6309 rmode = GET_MODE (result);
6310 if (rmode == VOIDmode)
6311 rmode = tmode;
6312 if (rmode != tmode)
6313 result = convert_memory_address (tmode, result);
6315 return result;
6319 /* expand_expr: generate code for computing expression EXP.
6320 An rtx for the computed value is returned. The value is never null.
6321 In the case of a void EXP, const0_rtx is returned.
6323 The value may be stored in TARGET if TARGET is nonzero.
6324 TARGET is just a suggestion; callers must assume that
6325 the rtx returned may not be the same as TARGET.
6327 If TARGET is CONST0_RTX, it means that the value will be ignored.
6329 If TMODE is not VOIDmode, it suggests generating the
6330 result in mode TMODE. But this is done only when convenient.
6331 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6332 TMODE is just a suggestion; callers must assume that
6333 the rtx returned may not have mode TMODE.
6335 Note that TARGET may have neither TMODE nor MODE. In that case, it
6336 probably will not be used.
6338 If MODIFIER is EXPAND_SUM then when EXP is an addition
6339 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6340 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6341 products as above, or REG or MEM, or constant.
6342 Ordinarily in such cases we would output mul or add instructions
6343 and then return a pseudo reg containing the sum.
6345 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6346 it also marks a label as absolutely required (it can't be dead).
6347 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6348 This is used for outputting expressions used in initializers.
6350 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6351 with a constant address even if that address is not normally legitimate.
6352 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6354 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6355 a call parameter. Such targets require special care as we haven't yet
6356 marked TARGET so that it's safe from being trashed by libcalls. We
6357 don't want to use TARGET for anything but the final result;
6358 intermediate values must go elsewhere. Additionally, calls to
6359 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6361 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6362 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6363 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6364 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6365 recursively. */
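/* For example, when expanding an address computation such as a + i * 4
under EXPAND_SUM, the result may come back as
(plus (mult (reg 60) (const_int 4)) (symbol_ref a))
with no add or mul insns emitted, leaving the caller free to fold the
whole form into an addressing mode. */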
6367 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6368 enum expand_modifier, rtx *);
6370 rtx
6371 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6372 enum expand_modifier modifier, rtx *alt_rtl)
6374 int rn = -1;
6375 rtx ret, last = NULL;
6377 /* Handle ERROR_MARK before anybody tries to access its type. */
6378 if (TREE_CODE (exp) == ERROR_MARK
6379 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6381 ret = CONST0_RTX (tmode);
6382 return ret ? ret : const0_rtx;
6385 if (flag_non_call_exceptions)
6387 rn = lookup_stmt_eh_region (exp);
6388 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6389 if (rn >= 0)
6390 last = get_last_insn ();
6393 /* If this is an expression of some kind and it has an associated line
6394 number, then emit the line number before expanding the expression.
6396 We need to save and restore the file and line information so that
6397 errors discovered during expansion are emitted with the right
6398 information. It would be better if the diagnostic routines
6399 used the file/line information embedded in the tree nodes rather
6400 than globals. */
6401 if (cfun && EXPR_HAS_LOCATION (exp))
6403 location_t saved_location = input_location;
6404 input_location = EXPR_LOCATION (exp);
6405 emit_line_note (input_location);
6407 /* Record where the insns produced belong. */
6408 record_block_change (TREE_BLOCK (exp));
6410 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6412 input_location = saved_location;
6414 else
6416 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6419 /* If using non-call exceptions, mark all insns that may trap.
6420 expand_call() will mark CALL_INSNs before we get to this code,
6421 but it doesn't handle libcalls, and these may trap. */
6422 if (rn >= 0)
6424 rtx insn;
6425 for (insn = next_real_insn (last); insn;
6426 insn = next_real_insn (insn))
6428 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6429 /* If we want exceptions for non-call insns, any
6430 may_trap_p instruction may throw. */
6431 && GET_CODE (PATTERN (insn)) != CLOBBER
6432 && GET_CODE (PATTERN (insn)) != USE
6433 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6435 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6436 REG_NOTES (insn));
6441 return ret;
6444 static rtx
6445 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6446 enum expand_modifier modifier, rtx *alt_rtl)
6448 rtx op0, op1, temp;
6449 tree type = TREE_TYPE (exp);
6450 int unsignedp;
6451 enum machine_mode mode;
6452 enum tree_code code = TREE_CODE (exp);
6453 optab this_optab;
6454 rtx subtarget, original_target;
6455 int ignore;
6456 tree context;
6457 bool reduce_bit_field = false;
6458 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6459 ? reduce_to_bit_field_precision ((expr), \
6460 target, \
6461 type) \
6462 : (expr))
6464 mode = TYPE_MODE (type);
6465 unsignedp = TYPE_UNSIGNED (type);
6466 if (lang_hooks.reduce_bit_field_operations
6467 && TREE_CODE (type) == INTEGER_TYPE
6468 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6470 /* An operation in what may be a bit-field type needs the
6471 result to be reduced to the precision of the bit-field type,
6472 which is narrower than that of the type's mode. */
6473 reduce_bit_field = true;
6474 if (modifier == EXPAND_STACK_PARM)
6475 target = 0;
6478 /* Use subtarget as the target for operand 0 of a binary operation. */
6479 subtarget = get_subtarget (target);
6480 original_target = target;
6481 ignore = (target == const0_rtx
6482 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6483 || code == CONVERT_EXPR || code == COND_EXPR
6484 || code == VIEW_CONVERT_EXPR)
6485 && TREE_CODE (type) == VOID_TYPE));
6487 /* If we are going to ignore this result, we need only do something
6488 if there is a side-effect somewhere in the expression. If there
6489 is, short-circuit the most common cases here. Note that we must
6490 not call expand_expr with anything but const0_rtx in case this
6491 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6493 if (ignore)
6495 if (! TREE_SIDE_EFFECTS (exp))
6496 return const0_rtx;
6498 /* Ensure we reference a volatile object even if value is ignored, but
6499 don't do this if all we are doing is taking its address. */
6500 if (TREE_THIS_VOLATILE (exp)
6501 && TREE_CODE (exp) != FUNCTION_DECL
6502 && mode != VOIDmode && mode != BLKmode
6503 && modifier != EXPAND_CONST_ADDRESS)
6505 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6506 if (MEM_P (temp))
6507 temp = copy_to_reg (temp);
6508 return const0_rtx;
6511 if (TREE_CODE_CLASS (code) == tcc_unary
6512 || code == COMPONENT_REF || code == INDIRECT_REF)
6513 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6514 modifier);
6516 else if (TREE_CODE_CLASS (code) == tcc_binary
6517 || TREE_CODE_CLASS (code) == tcc_comparison
6518 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6520 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6521 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6522 return const0_rtx;
6524 else if (code == BIT_FIELD_REF)
6526 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6527 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6528 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6529 return const0_rtx;
6532 target = 0;
6535 /* If we will do cse, generate all results into pseudo registers
6536 since 1) that allows cse to find more things
6537 and 2) otherwise cse could produce an insn the machine
6538 cannot support. An exception is a CONSTRUCTOR into a multi-word
6539 MEM: that's much more likely to be most efficient into the MEM.
6540 Another is a CALL_EXPR which must return in memory. */
6542 if (! cse_not_expected && mode != BLKmode && target
6543 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6544 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6545 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6546 target = 0;
6548 switch (code)
6550 case LABEL_DECL:
6552 tree function = decl_function_context (exp);
6554 temp = label_rtx (exp);
6555 temp = gen_rtx_LABEL_REF (Pmode, temp);
6557 if (function != current_function_decl
6558 && function != 0)
6559 LABEL_REF_NONLOCAL_P (temp) = 1;
6561 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6562 return temp;
6565 case SSA_NAME:
6566 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6567 NULL);
6569 case PARM_DECL:
6570 case VAR_DECL:
6571 /* If a static var's type was incomplete when the decl was written,
6572 but the type is complete now, lay out the decl now. */
6573 if (DECL_SIZE (exp) == 0
6574 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6575 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6576 layout_decl (exp, 0);
6578 /* ... fall through ... */
6580 case FUNCTION_DECL:
6581 case RESULT_DECL:
6582 gcc_assert (DECL_RTL (exp));
6584 /* Ensure the variable is marked as used even if it doesn't go through
6585 a parser. If it hasn't been used yet, write out an external
6586 definition. */
6587 if (! TREE_USED (exp))
6589 assemble_external (exp);
6590 TREE_USED (exp) = 1;
6593 /* Show we haven't gotten RTL for this yet. */
6594 temp = 0;
6596 /* Variables inherited from containing functions should have
6597 been lowered by this point. */
6598 context = decl_function_context (exp);
6599 gcc_assert (!context
6600 || context == current_function_decl
6601 || TREE_STATIC (exp)
6602 /* ??? C++ creates functions that are not TREE_STATIC. */
6603 || TREE_CODE (exp) == FUNCTION_DECL);
6605 /* This is the case of an array whose size is to be determined
6606 from its initializer, while the initializer is still being parsed.
6607 See expand_decl. */
6609 if (MEM_P (DECL_RTL (exp))
6610 && REG_P (XEXP (DECL_RTL (exp), 0)))
6611 temp = validize_mem (DECL_RTL (exp));
6613 /* If DECL_RTL is memory, we are in the normal case: if either
6614 the address is not valid, or it is not a register and -fforce-addr
6615 is specified, get the address into a register. */
6617 else if (MEM_P (DECL_RTL (exp))
6618 && modifier != EXPAND_CONST_ADDRESS
6619 && modifier != EXPAND_SUM
6620 && modifier != EXPAND_INITIALIZER
6621 && (! memory_address_p (DECL_MODE (exp),
6622 XEXP (DECL_RTL (exp), 0))
6623 || (flag_force_addr
6624 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6626 if (alt_rtl)
6627 *alt_rtl = DECL_RTL (exp);
6628 temp = replace_equiv_address (DECL_RTL (exp),
6629 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6632 /* If we got something, return it. But first, set the alignment
6633 if the address is a register. */
6634 if (temp != 0)
6636 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6637 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6639 return temp;
6642 /* If the mode of DECL_RTL does not match that of the decl, it
6643 must be a promoted value. We return a SUBREG of the wanted mode,
6644 but mark it so that we know that it was already extended. */
6646 if (REG_P (DECL_RTL (exp))
6647 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6649 enum machine_mode pmode;
6651 /* Get the signedness used for this variable. Ensure we get the
6652 same mode we got when the variable was declared. */
6653 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6654 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6655 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6657 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6658 SUBREG_PROMOTED_VAR_P (temp) = 1;
6659 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6660 return temp;
6663 return DECL_RTL (exp);
6665 case INTEGER_CST:
6666 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6667 TREE_INT_CST_HIGH (exp), mode);
6669 /* ??? If overflow is set, fold will have done an incomplete job,
6670 which can result in (plus xx (const_int 0)), which can get
6671 simplified by validate_replace_rtx during virtual register
6672 instantiation, which can result in unrecognizable insns.
6673 Avoid this by forcing all overflows into registers. */
6674 if (TREE_CONSTANT_OVERFLOW (exp)
6675 && modifier != EXPAND_INITIALIZER)
6676 temp = force_reg (mode, temp);
6678 return temp;
6680 case VECTOR_CST:
6681 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6682 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6683 return const_vector_from_tree (exp);
6684 else
6685 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6686 TREE_VECTOR_CST_ELTS (exp)),
6687 ignore ? const0_rtx : target, tmode, modifier);
6689 case CONST_DECL:
6690 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6692 case REAL_CST:
6693 /* If optimized, generate immediate CONST_DOUBLE
6694 which will be turned into memory by reload if necessary.
6696 We used to force a register so that loop.c could see it. But
6697 this does not allow gen_* patterns to perform optimizations with
6698 the constants. It also produces two insns in cases like "x = 1.0;".
6699 On most machines, floating-point constants are not permitted in
6700 many insns, so we'd end up copying it to a register in any case.
6702 Now, we do the copying in expand_binop, if appropriate. */
6703 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6704 TYPE_MODE (TREE_TYPE (exp)));
6706 case COMPLEX_CST:
6707 /* Handle evaluating a complex constant in a CONCAT target. */
6708 if (original_target && GET_CODE (original_target) == CONCAT)
6710 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6711 rtx rtarg, itarg;
6713 rtarg = XEXP (original_target, 0);
6714 itarg = XEXP (original_target, 1);
6716 /* Move the real and imaginary parts separately. */
6717 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6718 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6720 if (op0 != rtarg)
6721 emit_move_insn (rtarg, op0);
6722 if (op1 != itarg)
6723 emit_move_insn (itarg, op1);
6725 return original_target;
6728 /* ... fall through ... */
6730 case STRING_CST:
6731 temp = output_constant_def (exp, 1);
6733 /* temp contains a constant address.
6734 On RISC machines where a constant address isn't valid,
6735 make some insns to get that address into a register. */
6736 if (modifier != EXPAND_CONST_ADDRESS
6737 && modifier != EXPAND_INITIALIZER
6738 && modifier != EXPAND_SUM
6739 && (! memory_address_p (mode, XEXP (temp, 0))
6740 || flag_force_addr))
6741 return replace_equiv_address (temp,
6742 copy_rtx (XEXP (temp, 0)));
6743 return temp;
6745 case SAVE_EXPR:
6747 tree val = TREE_OPERAND (exp, 0);
6748 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6750 if (!SAVE_EXPR_RESOLVED_P (exp))
6752 /* We can indeed still hit this case, typically via builtin
6753 expanders calling save_expr immediately before expanding
6754 something. Assume this means that we only have to deal
6755 with non-BLKmode values. */
6756 gcc_assert (GET_MODE (ret) != BLKmode);
6758 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6759 DECL_ARTIFICIAL (val) = 1;
6760 DECL_IGNORED_P (val) = 1;
6761 TREE_OPERAND (exp, 0) = val;
6762 SAVE_EXPR_RESOLVED_P (exp) = 1;
6764 if (!CONSTANT_P (ret))
6765 ret = copy_to_reg (ret);
6766 SET_DECL_RTL (val, ret);
6769 return ret;
6772 case GOTO_EXPR:
6773 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6774 expand_goto (TREE_OPERAND (exp, 0));
6775 else
6776 expand_computed_goto (TREE_OPERAND (exp, 0));
6777 return const0_rtx;
6779 case CONSTRUCTOR:
6780 /* If we don't need the result, just ensure we evaluate any
6781 subexpressions. */
6782 if (ignore)
6784 tree elt;
6786 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6787 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6789 return const0_rtx;
6792 /* All elts simple constants => refer to a constant in memory. But
6793 if this is a non-BLKmode mode, let it store a field at a time
6794 since that should make a CONST_INT or CONST_DOUBLE when we
6795 fold. Likewise, if we have a target we can use, it is best to
6796 store directly into the target unless the type is large enough
6797 that memcpy will be used. If we are making an initializer and
6798 all operands are constant, put it in memory as well.
6800 FIXME: Avoid trying to fill vector constructors piecemeal.
6801 Output them with output_constant_def below unless we're sure
6802 they're zeros. This should go away when vector initializers
6803 are treated like VECTOR_CST instead of arrays. */
6805 else if ((TREE_STATIC (exp)
6806 && ((mode == BLKmode
6807 && ! (target != 0 && safe_from_p (target, exp, 1)))
6808 || TREE_ADDRESSABLE (exp)
6809 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6810 && (! MOVE_BY_PIECES_P
6811 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6812 TYPE_ALIGN (type)))
6813 && ! mostly_zeros_p (exp))))
6814 || ((modifier == EXPAND_INITIALIZER
6815 || modifier == EXPAND_CONST_ADDRESS)
6816 && TREE_CONSTANT (exp)))
6818 rtx constructor = output_constant_def (exp, 1);
6820 if (modifier != EXPAND_CONST_ADDRESS
6821 && modifier != EXPAND_INITIALIZER
6822 && modifier != EXPAND_SUM)
6823 constructor = validize_mem (constructor);
6825 return constructor;
6827 else
6829 /* Handle calls that pass values in multiple non-contiguous
6830 locations. The Irix 6 ABI has examples of this. */
6831 if (target == 0 || ! safe_from_p (target, exp, 1)
6832 || GET_CODE (target) == PARALLEL
6833 || modifier == EXPAND_STACK_PARM)
6834 target
6835 = assign_temp (build_qualified_type (type,
6836 (TYPE_QUALS (type)
6837 | (TREE_READONLY (exp)
6838 * TYPE_QUAL_CONST))),
6839 0, TREE_ADDRESSABLE (exp), 1);
6841 store_constructor (exp, target, 0, int_expr_size (exp));
6842 return target;
6845 case MISALIGNED_INDIRECT_REF:
6846 case ALIGN_INDIRECT_REF:
6847 case INDIRECT_REF:
6849 tree exp1 = TREE_OPERAND (exp, 0);
6850 tree orig;
6852 if (modifier != EXPAND_WRITE)
6854 tree t;
6856 t = fold_read_from_constant_string (exp);
6857 if (t)
6858 return expand_expr (t, target, tmode, modifier);
6861 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6862 op0 = memory_address (mode, op0);
6864 if (code == ALIGN_INDIRECT_REF)
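/* Clear the low-order address bits to force the access down to an
alignment boundary, e.g. addr & -16 for a 16-byte aligned type. */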
6866 int align = TYPE_ALIGN_UNIT (type);
6867 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6868 op0 = memory_address (mode, op0);
6871 temp = gen_rtx_MEM (mode, op0);
6873 orig = REF_ORIGINAL (exp);
6874 if (!orig)
6875 orig = exp;
6876 set_mem_attributes (temp, orig, 0);
6878 /* Resolve the misalignment now, so that we don't have to remember
6879 to resolve it later. Of course, this only works for reads. */
6880 /* ??? When we get around to supporting writes, we'll have to handle
6881 this in store_expr directly. The vectorizer isn't generating
6882 those yet, however. */
6883 if (code == MISALIGNED_INDIRECT_REF)
6885 int icode;
6886 rtx reg, insn;
6888 gcc_assert (modifier == EXPAND_NORMAL);
6890 /* The vectorizer should have already checked the mode. */
6891 icode = movmisalign_optab->handlers[mode].insn_code;
6892 gcc_assert (icode != CODE_FOR_nothing);
6894 /* We've already validated the memory, and we're creating a
6895 new pseudo destination. The predicates really can't fail. */
6896 reg = gen_reg_rtx (mode);
6898 /* Nor can the insn generator. */
6899 insn = GEN_FCN (icode) (reg, temp);
6900 emit_insn (insn);
6902 return reg;
6905 return temp;
6908 case ARRAY_REF:
6911 tree array = TREE_OPERAND (exp, 0);
6912 tree index = TREE_OPERAND (exp, 1);
6914 /* Fold an expression like: "foo"[2].
6915 This is not done in fold so it won't happen inside &.
6916 Don't fold if this is for wide characters since it's too
6917 difficult to do correctly and this is a very rare case. */
6919 if (modifier != EXPAND_CONST_ADDRESS
6920 && modifier != EXPAND_INITIALIZER
6921 && modifier != EXPAND_MEMORY)
6923 tree t = fold_read_from_constant_string (exp);
6925 if (t)
6926 return expand_expr (t, target, tmode, modifier);
6929 /* If this is a constant index into a constant array,
6930 just get the value from the array. Handle both the cases when
6931 we have an explicit constructor and when our operand is a variable
6932 that was declared const. */
6934 if (modifier != EXPAND_CONST_ADDRESS
6935 && modifier != EXPAND_INITIALIZER
6936 && modifier != EXPAND_MEMORY
6937 && TREE_CODE (array) == CONSTRUCTOR
6938 && ! TREE_SIDE_EFFECTS (array)
6939 && TREE_CODE (index) == INTEGER_CST)
6941 tree elem;
6943 for (elem = CONSTRUCTOR_ELTS (array);
6944 (elem && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6945 elem = TREE_CHAIN (elem))
6946 ;
6948 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6949 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6950 modifier);
6953 else if (optimize >= 1
6954 && modifier != EXPAND_CONST_ADDRESS
6955 && modifier != EXPAND_INITIALIZER
6956 && modifier != EXPAND_MEMORY
6957 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6958 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6959 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6960 && targetm.binds_local_p (array))
6962 if (TREE_CODE (index) == INTEGER_CST)
6964 tree init = DECL_INITIAL (array);
6966 if (TREE_CODE (init) == CONSTRUCTOR)
6968 tree elem;
6970 for (elem = CONSTRUCTOR_ELTS (init);
6971 (elem
6972 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6973 elem = TREE_CHAIN (elem))
6974 ;
6976 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6977 return expand_expr (fold (TREE_VALUE (elem)), target,
6978 tmode, modifier);
6980 else if (TREE_CODE (init) == STRING_CST
6981 && 0 > compare_tree_int (index,
6982 TREE_STRING_LENGTH (init)))
6984 tree type = TREE_TYPE (TREE_TYPE (init));
6985 enum machine_mode mode = TYPE_MODE (type);
6987 if (GET_MODE_CLASS (mode) == MODE_INT
6988 && GET_MODE_SIZE (mode) == 1)
6989 return gen_int_mode (TREE_STRING_POINTER (init)
6990 [TREE_INT_CST_LOW (index)], mode);
6995 goto normal_inner_ref;
6997 case COMPONENT_REF:
6998 /* If the operand is a CONSTRUCTOR, we can just extract the
6999 appropriate field if it is present. */
7000 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7002 tree elt;
7004 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7005 elt = TREE_CHAIN (elt))
7006 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7007 /* We can normally use the value of the field in the
7008 CONSTRUCTOR. However, if this is a bitfield in
7009 an integral mode that we can fit in a HOST_WIDE_INT,
7010 we must mask only the number of bits in the bitfield,
7011 since this is done implicitly by the constructor. If
7012 the bitfield does not meet either of those conditions,
7013 we can't do this optimization. */
7014 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7015 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7016 == MODE_INT)
7017 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7018 <= HOST_BITS_PER_WIDE_INT))))
7020 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7021 && modifier == EXPAND_STACK_PARM)
7022 target = 0;
7023 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7024 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7026 HOST_WIDE_INT bitsize
7027 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7028 enum machine_mode imode
7029 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7031 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7033 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7034 op0 = expand_and (imode, op0, op1, target);
7036 else
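/* Sign-extend the BITSIZE low-order bits: shift left so the field's
sign bit becomes the mode's sign bit, then arithmetic-shift right by
the same amount. */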
7038 tree count
7039 = build_int_cst (NULL_TREE,
7040 GET_MODE_BITSIZE (imode) - bitsize);
7042 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7043 target, 0);
7044 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7045 target, 0);
7049 return op0;
7052 goto normal_inner_ref;
7054 case BIT_FIELD_REF:
7055 case ARRAY_RANGE_REF:
7056 normal_inner_ref:
7058 enum machine_mode mode1;
7059 HOST_WIDE_INT bitsize, bitpos;
7060 tree offset;
7061 int volatilep = 0;
7062 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7063 &mode1, &unsignedp, &volatilep, true);
7064 rtx orig_op0;
7066 /* If we got back the original object, something is wrong. Perhaps
7067 we are evaluating an expression too early. In any event, don't
7068 infinitely recurse. */
7069 gcc_assert (tem != exp);
7071 /* If TEM's type is a union of variable size, pass TARGET to the inner
7072 computation, since it will need a temporary and TARGET is known
7073 to be safe to use. This occurs in unchecked conversion in Ada. */
7075 orig_op0 = op0
7076 = expand_expr (tem,
7077 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7078 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7079 != INTEGER_CST)
7080 && modifier != EXPAND_STACK_PARM
7081 ? target : NULL_RTX),
7082 VOIDmode,
7083 (modifier == EXPAND_INITIALIZER
7084 || modifier == EXPAND_CONST_ADDRESS
7085 || modifier == EXPAND_STACK_PARM)
7086 ? modifier : EXPAND_NORMAL);
7088 /* If this is a constant, put it into a register if it is a
7089 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7090 if (CONSTANT_P (op0))
7092 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7093 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7094 && offset == 0)
7095 op0 = force_reg (mode, op0);
7096 else
7097 op0 = validize_mem (force_const_mem (mode, op0));
7100 /* Otherwise, if this object is not in memory and we either have an
7101 offset or a BLKmode result, put it there. This case can't occur in
7102 C, but can in Ada if we have unchecked conversion of an expression
7103 from a scalar type to an array or record type or for an
7104 ARRAY_RANGE_REF whose type is BLKmode. */
7105 else if (!MEM_P (op0)
7106 && (offset != 0
7107 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7109 tree nt = build_qualified_type (TREE_TYPE (tem),
7110 (TYPE_QUALS (TREE_TYPE (tem))
7111 | TYPE_QUAL_CONST));
7112 rtx memloc = assign_temp (nt, 1, 1, 1);
7114 emit_move_insn (memloc, op0);
7115 op0 = memloc;
7118 if (offset != 0)
7120 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7121 EXPAND_SUM);
7123 gcc_assert (MEM_P (op0));
7125 #ifdef POINTERS_EXTEND_UNSIGNED
7126 if (GET_MODE (offset_rtx) != Pmode)
7127 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7128 #else
7129 if (GET_MODE (offset_rtx) != ptr_mode)
7130 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7131 #endif
7133 if (GET_MODE (op0) == BLKmode
7134 /* A constant address in OP0 can have VOIDmode; we must
7135 not try to call force_reg in that case. */
7136 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7137 && bitsize != 0
7138 && (bitpos % bitsize) == 0
7139 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7140 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7142 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7143 bitpos = 0;
7146 op0 = offset_address (op0, offset_rtx,
7147 highest_pow2_factor (offset));
7150 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7151 record its alignment as BIGGEST_ALIGNMENT. */
7152 if (MEM_P (op0) && bitpos == 0 && offset != 0
7153 && is_aligning_offset (offset, tem))
7154 set_mem_align (op0, BIGGEST_ALIGNMENT);
7156 /* Don't forget about volatility even if this is a bitfield. */
7157 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7159 if (op0 == orig_op0)
7160 op0 = copy_rtx (op0);
7162 MEM_VOLATILE_P (op0) = 1;
7165 /* The following code doesn't handle CONCAT.
7166 Assume only bitpos == 0 can be used for CONCAT, due to
7167 one-element arrays having the same mode as their element. */
7168 if (GET_CODE (op0) == CONCAT)
7170 gcc_assert (bitpos == 0
7171 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7172 return op0;
7175 /* In cases where an aligned union has an unaligned object
7176 as a field, we might be extracting a BLKmode value from
7177 an integer-mode (e.g., SImode) object. Handle this case
7178 by doing the extract into an object as wide as the field
7179 (which we know to be the width of a basic mode), then
7180 storing into memory, and changing the mode to BLKmode. */
7181 if (mode1 == VOIDmode
7182 || REG_P (op0) || GET_CODE (op0) == SUBREG
7183 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7184 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7185 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7186 && modifier != EXPAND_CONST_ADDRESS
7187 && modifier != EXPAND_INITIALIZER)
7188 /* If the field isn't aligned enough to fetch as a memref,
7189 fetch it as a bit field. */
7190 || (mode1 != BLKmode
7191 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7192 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7193 || (MEM_P (op0)
7194 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7195 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7196 && ((modifier == EXPAND_CONST_ADDRESS
7197 || modifier == EXPAND_INITIALIZER)
7198 ? STRICT_ALIGNMENT
7199 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7200 || (bitpos % BITS_PER_UNIT != 0)))
7201 /* If the type and the field are a constant size and the
7202 size of the type isn't the same size as the bitfield,
7203 we must use bitfield operations. */
7204 || (bitsize >= 0
7205 && TYPE_SIZE (TREE_TYPE (exp))
7206 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7207 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7208 bitsize)))
7210 enum machine_mode ext_mode = mode;
7212 if (ext_mode == BLKmode
7213 && ! (target != 0 && MEM_P (op0)
7214 && MEM_P (target)
7215 && bitpos % BITS_PER_UNIT == 0))
7216 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7218 if (ext_mode == BLKmode)
7220 if (target == 0)
7221 target = assign_temp (type, 0, 1, 1);
7223 if (bitsize == 0)
7224 return target;
7226 /* In this case, BITPOS must start at a byte boundary and
7227 TARGET, if specified, must be a MEM. */
7228 gcc_assert (MEM_P (op0)
7229 && (!target || MEM_P (target))
7230 && !(bitpos % BITS_PER_UNIT));
7232 emit_block_move (target,
7233 adjust_address (op0, VOIDmode,
7234 bitpos / BITS_PER_UNIT),
7235 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7236 / BITS_PER_UNIT),
7237 (modifier == EXPAND_STACK_PARM
7238 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7240 return target;
7243 op0 = validize_mem (op0);
7245 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7246 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7248 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7249 (modifier == EXPAND_STACK_PARM
7250 ? NULL_RTX : target),
7251 ext_mode, ext_mode);
7253 /* If the result is a record type and BITSIZE is narrower than
7254 the mode of OP0, an integral mode, and this is a big endian
7255 machine, we must put the field into the high-order bits. */
7256 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7257 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7258 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7259 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7260 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7261 - bitsize),
7262 op0, 1);
7264 /* If the result type is BLKmode, store the data into a temporary
7265 of the appropriate type, but with the mode corresponding to the
7266 mode for the data we have (op0's mode). It's tempting to make
7267 this a constant type, since we know it's only being stored once,
7268 but that can cause problems if we are taking the address of this
7269 COMPONENT_REF because the MEM of any reference via that address
7270 will have flags corresponding to the type, which will not
7271 necessarily be constant. */
7272 if (mode == BLKmode)
7274 rtx new
7275 = assign_stack_temp_for_type
7276 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7278 emit_move_insn (new, op0);
7279 op0 = copy_rtx (new);
7280 PUT_MODE (op0, BLKmode);
7281 set_mem_attributes (op0, exp, 1);
7284 return op0;
7287 /* If the result is BLKmode, use that to access the object
7288 now as well. */
7289 if (mode == BLKmode)
7290 mode1 = BLKmode;
7292 /* Get a reference to just this component. */
7293 if (modifier == EXPAND_CONST_ADDRESS
7294 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7295 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7296 else
7297 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7299 if (op0 == orig_op0)
7300 op0 = copy_rtx (op0);
7302 set_mem_attributes (op0, exp, 0);
7303 if (REG_P (XEXP (op0, 0)))
7304 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7306 MEM_VOLATILE_P (op0) |= volatilep;
7307 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7308 || modifier == EXPAND_CONST_ADDRESS
7309 || modifier == EXPAND_INITIALIZER)
7310 return op0;
7311 else if (target == 0)
7312 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7314 convert_move (target, op0, unsignedp);
7315 return target;
7318 case OBJ_TYPE_REF:
7319 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7321 case CALL_EXPR:
7322 /* Check for a built-in function. */
7323 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7324 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7325 == FUNCTION_DECL)
7326 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7328 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7329 == BUILT_IN_FRONTEND)
7330 return lang_hooks.expand_expr (exp, original_target,
7331 tmode, modifier,
7332 alt_rtl);
7333 else
7334 return expand_builtin (exp, target, subtarget, tmode, ignore);
7337 return expand_call (exp, target, ignore);
7339 case NON_LVALUE_EXPR:
7340 case NOP_EXPR:
7341 case CONVERT_EXPR:
7342 if (TREE_OPERAND (exp, 0) == error_mark_node)
7343 return const0_rtx;
7345 if (TREE_CODE (type) == UNION_TYPE)
7347 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7349 /* If both input and output are BLKmode, this conversion isn't doing
7350 anything except possibly changing the memory attributes. */
7351 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7353 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7354 modifier);
7356 result = copy_rtx (result);
7357 set_mem_attributes (result, exp, 0);
7358 return result;
7361 if (target == 0)
7363 if (TYPE_MODE (type) != BLKmode)
7364 target = gen_reg_rtx (TYPE_MODE (type));
7365 else
7366 target = assign_temp (type, 0, 1, 1);
7369 if (MEM_P (target))
7370 /* Store data into beginning of memory target. */
7371 store_expr (TREE_OPERAND (exp, 0),
7372 adjust_address (target, TYPE_MODE (valtype), 0),
7373 modifier == EXPAND_STACK_PARM);
7375 else
7377 gcc_assert (REG_P (target));
7379 /* Store this field into a union of the proper type. */
7380 store_field (target,
7381 MIN ((int_size_in_bytes (TREE_TYPE
7382 (TREE_OPERAND (exp, 0)))
7383 * BITS_PER_UNIT),
7384 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7385 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7386 type, 0);
7389 /* Return the entire union. */
7390 return target;
7393 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7395 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7396 modifier);
7398 /* If the signedness of the conversion differs and OP0 is
7399 a promoted SUBREG, clear that indication since we now
7400 have to do the proper extension. */
7401 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7402 && GET_CODE (op0) == SUBREG)
7403 SUBREG_PROMOTED_VAR_P (op0) = 0;
7405 return REDUCE_BIT_FIELD (op0);
7408 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7409 if (GET_MODE (op0) == mode)
7412 /* If OP0 is a constant, just convert it into the proper mode. */
7413 else if (CONSTANT_P (op0))
7415 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7416 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7418 if (modifier == EXPAND_INITIALIZER)
7419 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7420 subreg_lowpart_offset (mode,
7421 inner_mode));
7422 else
7423 op0 = convert_modes (mode, inner_mode, op0,
7424 TYPE_UNSIGNED (inner_type));
7427 else if (modifier == EXPAND_INITIALIZER)
7428 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7430 else if (target == 0)
7431 op0 = convert_to_mode (mode, op0,
7432 TYPE_UNSIGNED (TREE_TYPE
7433 (TREE_OPERAND (exp, 0))));
7434 else
7436 convert_move (target, op0,
7437 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7438 op0 = target;
7441 return REDUCE_BIT_FIELD (op0);
7443 case VIEW_CONVERT_EXPR:
7444 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7446 /* If the input and output modes are both the same, we are done.
7447 Otherwise, if neither mode is BLKmode and both are integral and within
7448 a word, we can use gen_lowpart. If neither is true, make sure the
7449 operand is in memory and convert the MEM to the new mode. */
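/* Illustrative examples (not from the original source): a view of an
   int as an unsigned int keeps the same mode and falls straight
   through; a view of a float as an int fails the MODE_INT test, so
   the operand is forced into memory and re-read in the new mode. */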
7450 if (TYPE_MODE (type) == GET_MODE (op0))
7452 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7453 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7454 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7455 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7456 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7457 op0 = gen_lowpart (TYPE_MODE (type), op0);
7458 else if (!MEM_P (op0))
7460 /* If the operand is not a MEM, force it into memory. Since we
7461 are going to be changing the mode of the MEM, don't call
7462 force_const_mem for constants because we don't allow pool
7463 constants to change mode. */
7464 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7466 gcc_assert (!TREE_ADDRESSABLE (exp));
7468 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7469 target
7470 = assign_stack_temp_for_type
7471 (TYPE_MODE (inner_type),
7472 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7474 emit_move_insn (target, op0);
7475 op0 = target;
7478 /* At this point, OP0 is in the correct mode. If the output type is such
7479 that the operand is known to be aligned, indicate that it is.
7480 Otherwise, we need only be concerned about alignment for non-BLKmode
7481 results. */
7482 if (MEM_P (op0))
7484 op0 = copy_rtx (op0);
7486 if (TYPE_ALIGN_OK (type))
7487 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7488 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7489 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7491 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7492 HOST_WIDE_INT temp_size
7493 = MAX (int_size_in_bytes (inner_type),
7494 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7495 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7496 temp_size, 0, type);
7497 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7499 gcc_assert (!TREE_ADDRESSABLE (exp));
7501 if (GET_MODE (op0) == BLKmode)
7502 emit_block_move (new_with_op0_mode, op0,
7503 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7504 (modifier == EXPAND_STACK_PARM
7505 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7506 else
7507 emit_move_insn (new_with_op0_mode, op0);
7509 op0 = new;
7512 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7515 return op0;
7517 case PLUS_EXPR:
7518 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7519 something else, make sure we add the register to the constant and
7520 then to the other thing. This case can occur during strength
7521 reduction and doing it this way will produce better code if the
7522 frame pointer or argument pointer is eliminated.
7524 fold-const.c will ensure that the constant is always in the inner
7525 PLUS_EXPR, so the only case we need to do anything about is if
7526 sp, ap, or fp is our second argument, in which case we must swap
7527 the innermost first argument and our second argument. */
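/* Illustrative example (not from the original source): given the tree
   (x + 4) + fp, the swap below rewrites it as (fp + 4) + x, so that
   when the frame pointer is eliminated the constant 4 can be folded
   into the replacement address. */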
7529 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7530 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7531 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7532 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7533 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7534 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7536 tree t = TREE_OPERAND (exp, 1);
7538 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7539 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7542 /* If the result is to be ptr_mode and we are adding an integer to
7543 something, we might be forming a constant. So try to use
7544 plus_constant. If it produces a sum and we can't accept it,
7545 use force_operand. This allows P = &ARR[const] to generate
7546 efficient code on machines where a SYMBOL_REF is not a valid
7547 address.
7549 If this is an EXPAND_SUM call, always return the sum. */
7550 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7551 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7553 if (modifier == EXPAND_STACK_PARM)
7554 target = 0;
7555 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7556 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7557 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7559 rtx constant_part;
7561 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7562 EXPAND_SUM);
7563 /* Use immed_double_const to ensure that the constant is
7564 truncated according to the mode of OP1, then sign extended
7565 to a HOST_WIDE_INT. Using the constant directly can result
7566 in non-canonical RTL in a 64x32 cross compile. */
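/* Concrete case (illustrative): on a host with 64-bit HOST_WIDE_INT
   targeting 32-bit SImode, a constant whose low word is 0xffffffff
   must be represented as (const_int -1); using the unextended value
   would produce the non-canonical (const_int 0xffffffff). */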
7567 constant_part
7568 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7569 (HOST_WIDE_INT) 0,
7570 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7571 op1 = plus_constant (op1, INTVAL (constant_part));
7572 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7573 op1 = force_operand (op1, target);
7574 return REDUCE_BIT_FIELD (op1);
7577 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7578 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7579 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7581 rtx constant_part;
7583 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7584 (modifier == EXPAND_INITIALIZER
7585 ? EXPAND_INITIALIZER : EXPAND_SUM));
7586 if (! CONSTANT_P (op0))
7588 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7589 VOIDmode, modifier);
7590 /* Return a PLUS if modifier says it's OK. */
7591 if (modifier == EXPAND_SUM
7592 || modifier == EXPAND_INITIALIZER)
7593 return simplify_gen_binary (PLUS, mode, op0, op1);
7594 goto binop2;
7596 /* Use immed_double_const to ensure that the constant is
7597 truncated according to the mode of OP1, then sign extended
7598 to a HOST_WIDE_INT. Using the constant directly can result
7599 in non-canonical RTL in a 64x32 cross compile. */
7600 constant_part
7601 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7602 (HOST_WIDE_INT) 0,
7603 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7604 op0 = plus_constant (op0, INTVAL (constant_part));
7605 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7606 op0 = force_operand (op0, target);
7607 return REDUCE_BIT_FIELD (op0);
7611 /* No sense saving up arithmetic to be done
7612 if it's all in the wrong mode to form part of an address.
7613 And force_operand won't know whether to sign-extend or
7614 zero-extend. */
7615 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7616 || mode != ptr_mode)
7618 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7619 subtarget, &op0, &op1, 0);
7620 if (op0 == const0_rtx)
7621 return op1;
7622 if (op1 == const0_rtx)
7623 return op0;
7624 goto binop2;
7627 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7628 subtarget, &op0, &op1, modifier);
7629 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7631 case MINUS_EXPR:
7632 /* For initializers, we are allowed to return a MINUS of two
7633 symbolic constants. Here we handle all cases in which both
7634 operands are constant, for the sake of such initializers. */
7637 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7638 && really_constant_p (TREE_OPERAND (exp, 0))
7639 && really_constant_p (TREE_OPERAND (exp, 1)))
7641 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7642 NULL_RTX, &op0, &op1, modifier);
7644 /* If the last operand is a CONST_INT, use plus_constant of
7645 the negated constant. Else make the MINUS. */
7646 if (GET_CODE (op1) == CONST_INT)
7647 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7648 else
7649 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7652 /* No sense saving up arithmetic to be done
7653 if it's all in the wrong mode to form part of an address.
7654 And force_operand won't know whether to sign-extend or
7655 zero-extend. */
7656 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7657 || mode != ptr_mode)
7658 goto binop;
7660 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7661 subtarget, &op0, &op1, modifier);
7663 /* Convert A - const to A + (-const). */
7664 if (GET_CODE (op1) == CONST_INT)
7666 op1 = negate_rtx (mode, op1);
7667 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7670 goto binop2;
7672 case MULT_EXPR:
7673 /* If the first operand is constant, swap the operands.
7674 Thus the following special-case checks need only
7675 check the second operand. */
7676 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7678 tree t1 = TREE_OPERAND (exp, 0);
7679 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7680 TREE_OPERAND (exp, 1) = t1;
7683 /* Attempt to return something suitable for generating an
7684 indexed address, for machines that support that. */
7686 if (modifier == EXPAND_SUM && mode == ptr_mode
7687 && host_integerp (TREE_OPERAND (exp, 1), 0))
7689 tree exp1 = TREE_OPERAND (exp, 1);
7691 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7692 EXPAND_SUM);
7694 if (!REG_P (op0))
7695 op0 = force_operand (op0, NULL_RTX);
7696 if (!REG_P (op0))
7697 op0 = copy_to_mode_reg (mode, op0);
7699 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7700 gen_int_mode (tree_low_cst (exp1, 0),
7701 TYPE_MODE (TREE_TYPE (exp1)))));
7704 if (modifier == EXPAND_STACK_PARM)
7705 target = 0;
7707 /* Check for multiplying things that have been extended
7708 from a narrower type. If this machine supports multiplying
7709 in that narrower type with a result in the desired type,
7710 do it that way, and avoid the explicit type-conversion. */
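/* Sketch of the case handled below (illustrative): for short a, b;
   the expression (int) a * (int) b can be expanded as a single
   widening multiply (e.g. a mulhisi3 pattern when HImode widens to
   SImode) instead of two sign extensions followed by a full SImode
   multiply. */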
7711 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7712 && TREE_CODE (type) == INTEGER_TYPE
7713 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7714 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7715 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7716 && int_fits_type_p (TREE_OPERAND (exp, 1),
7717 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7718 /* Don't use a widening multiply if a shift will do. */
7719 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7720 > HOST_BITS_PER_WIDE_INT)
7721 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7722 ||
7723 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7724 && (TYPE_PRECISION (TREE_TYPE
7725 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7726 == TYPE_PRECISION (TREE_TYPE
7727 (TREE_OPERAND
7728 (TREE_OPERAND (exp, 0), 0))))
7729 /* If both operands are extended, they must either both
7730 be zero-extended or both be sign-extended. */
7731 && (TYPE_UNSIGNED (TREE_TYPE
7732 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7733 == TYPE_UNSIGNED (TREE_TYPE
7734 (TREE_OPERAND
7735 (TREE_OPERAND (exp, 0), 0)))))))
7737 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7738 enum machine_mode innermode = TYPE_MODE (op0type);
7739 bool zextend_p = TYPE_UNSIGNED (op0type);
7740 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7741 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7743 if (mode == GET_MODE_WIDER_MODE (innermode))
7745 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7747 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7748 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7749 TREE_OPERAND (exp, 1),
7750 NULL_RTX, &op0, &op1, 0);
7751 else
7752 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7753 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7754 NULL_RTX, &op0, &op1, 0);
7755 goto binop3;
7757 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7758 && innermode == word_mode)
7760 rtx htem, hipart;
7761 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7762 NULL_RTX, VOIDmode, 0);
7763 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7764 op1 = convert_modes (innermode, mode,
7765 expand_expr (TREE_OPERAND (exp, 1),
7766 NULL_RTX, VOIDmode, 0),
7767 unsignedp);
7768 else
7769 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7770 NULL_RTX, VOIDmode, 0);
7771 temp = expand_binop (mode, other_optab, op0, op1, target,
7772 unsignedp, OPTAB_LIB_WIDEN);
7773 hipart = gen_highpart (innermode, temp);
7774 htem = expand_mult_highpart_adjust (innermode, hipart,
7775 op0, op1, hipart,
7776 zextend_p);
7777 if (htem != hipart)
7778 emit_move_insn (hipart, htem);
7779 return REDUCE_BIT_FIELD (temp);
7783 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7784 subtarget, &op0, &op1, 0);
7785 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7787 case TRUNC_DIV_EXPR:
7788 case FLOOR_DIV_EXPR:
7789 case CEIL_DIV_EXPR:
7790 case ROUND_DIV_EXPR:
7791 case EXACT_DIV_EXPR:
7792 if (modifier == EXPAND_STACK_PARM)
7793 target = 0;
7794 /* Possible optimization: compute the dividend with EXPAND_SUM;
7795 then, if the divisor is constant, we can optimize the case
7796 where some terms of the dividend have coefficients divisible by it. */
7797 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7798 subtarget, &op0, &op1, 0);
7799 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7801 case RDIV_EXPR:
7802 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
7803 saving an expensive divide. If not, combine will rebuild the
7804 original computation. */
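/* For example (illustrative): with this transformation both x/d and
   y/d become x*(1/d) and y*(1/d), so CSE can share the single
   reciprocal 1/d, trading two divides for one divide and two
   multiplies. */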
7805 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7806 && TREE_CODE (type) == REAL_TYPE
7807 && !real_onep (TREE_OPERAND (exp, 0)))
7808 return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7809 build2 (RDIV_EXPR, type,
7810 build_real (type, dconst1),
7811 TREE_OPERAND (exp, 1))),
7812 target, tmode, modifier);
7814 goto binop;
7816 case TRUNC_MOD_EXPR:
7817 case FLOOR_MOD_EXPR:
7818 case CEIL_MOD_EXPR:
7819 case ROUND_MOD_EXPR:
7820 if (modifier == EXPAND_STACK_PARM)
7821 target = 0;
7822 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7823 subtarget, &op0, &op1, 0);
7824 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7826 case FIX_ROUND_EXPR:
7827 case FIX_FLOOR_EXPR:
7828 case FIX_CEIL_EXPR:
7829 gcc_unreachable (); /* Not used for C. */
7831 case FIX_TRUNC_EXPR:
7832 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7833 if (target == 0 || modifier == EXPAND_STACK_PARM)
7834 target = gen_reg_rtx (mode);
7835 expand_fix (target, op0, unsignedp);
7836 return target;
7838 case FLOAT_EXPR:
7839 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7840 if (target == 0 || modifier == EXPAND_STACK_PARM)
7841 target = gen_reg_rtx (mode);
7842 /* expand_float can't figure out what to do if FROM has VOIDmode.
7843 So give it the correct mode. With -O, cse will optimize this. */
7844 if (GET_MODE (op0) == VOIDmode)
7845 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7846 op0);
7847 expand_float (target, op0,
7848 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7849 return target;
7851 case NEGATE_EXPR:
7852 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7853 if (modifier == EXPAND_STACK_PARM)
7854 target = 0;
7855 temp = expand_unop (mode,
7856 optab_for_tree_code (NEGATE_EXPR, type),
7857 op0, target, 0);
7858 gcc_assert (temp);
7859 return REDUCE_BIT_FIELD (temp);
7861 case ABS_EXPR:
7862 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7863 if (modifier == EXPAND_STACK_PARM)
7864 target = 0;
7866 /* ABS_EXPR is not valid for complex arguments. */
7867 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7868 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7870 /* Unsigned abs is simply the operand. Testing here means we don't
7871 risk generating incorrect code below. */
7872 if (TYPE_UNSIGNED (type))
7873 return op0;
7875 return expand_abs (mode, op0, target, unsignedp,
7876 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7878 case MAX_EXPR:
7879 case MIN_EXPR:
7880 target = original_target;
7881 if (target == 0
7882 || modifier == EXPAND_STACK_PARM
7883 || (MEM_P (target) && MEM_VOLATILE_P (target))
7884 || GET_MODE (target) != mode
7885 || (REG_P (target)
7886 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7887 target = gen_reg_rtx (mode);
7888 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7889 target, &op0, &op1, 0);
7891 /* First try to do it with a special MIN or MAX instruction.
7892 If that does not win, use a conditional jump to select the proper
7893 value. */
7894 this_optab = optab_for_tree_code (code, type);
7895 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7896 OPTAB_WIDEN);
7897 if (temp != 0)
7898 return temp;
7900 /* At this point, a MEM target is no longer useful; we will get better
7901 code without it. */
7903 if (! REG_P (target))
7904 target = gen_reg_rtx (mode);
7906 /* If op1 was placed in target, swap op0 and op1. */
7907 if (target != op0 && target == op1)
7909 temp = op0;
7910 op0 = op1;
7911 op1 = temp;
7914 /* We generate better code and avoid problems with op1 mentioning
7915 target by forcing op1 into a pseudo if it isn't a constant. */
7916 if (! CONSTANT_P (op1))
7917 op1 = force_reg (mode, op1);
7919 #ifdef HAVE_conditional_move
7920 /* Use a conditional move if possible. */
7921 if (can_conditionally_move_p (mode))
7923 enum rtx_code comparison_code;
7924 rtx insn;
7926 if (code == MAX_EXPR)
7927 comparison_code = unsignedp ? GEU : GE;
7928 else
7929 comparison_code = unsignedp ? LEU : LE;
7931 /* ??? Same problem as in expmed.c: emit_conditional_move
7932 forces a stack adjustment via compare_from_rtx, and we
7933 lose the stack adjustment if the sequence we are about
7934 to create is discarded. */
7935 do_pending_stack_adjust ();
7937 start_sequence ();
7939 /* Try to emit the conditional move. */
7940 insn = emit_conditional_move (target, comparison_code,
7941 op0, op1, mode,
7942 op0, op1, mode,
7943 unsignedp);
7945 /* If we could do the conditional move, emit the sequence,
7946 and return. */
7947 if (insn)
7949 rtx seq = get_insns ();
7950 end_sequence ();
7951 emit_insn (seq);
7952 return target;
7955 /* Otherwise discard the sequence and fall back to code with
7956 branches. */
7957 end_sequence ();
7959 #endif
7960 if (target != op0)
7961 emit_move_insn (target, op0);
7963 temp = gen_label_rtx ();
7965 /* If this mode is an integer too wide to compare properly,
7966 compare word by word. Rely on cse to optimize constant cases. */
7967 if (GET_MODE_CLASS (mode) == MODE_INT
7968 && ! can_compare_p (GE, mode, ccp_jump))
7970 if (code == MAX_EXPR)
7971 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7972 NULL_RTX, temp);
7973 else
7974 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7975 NULL_RTX, temp);
7977 else
7979 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7980 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
7982 emit_move_insn (target, op1);
7983 emit_label (temp);
7984 return target;
7986 case BIT_NOT_EXPR:
7987 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7988 if (modifier == EXPAND_STACK_PARM)
7989 target = 0;
7990 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7991 gcc_assert (temp);
7992 return temp;
7994 /* ??? Can optimize bitwise operations with one arg constant.
7995 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7996 and (a bitwise1 b) bitwise2 b (etc)
7997 but that is probably not worthwhile. */
7999 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8000 boolean values when we want in all cases to compute both of them. In
8001 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8002 as actual zero-or-1 values and then bitwise anding. In cases where
8003 there cannot be any side effects, better code would be made by
8004 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8005 how to recognize those cases. */
8007 case TRUTH_AND_EXPR:
8008 code = BIT_AND_EXPR;
8009 case BIT_AND_EXPR:
8010 goto binop;
8012 case TRUTH_OR_EXPR:
8013 code = BIT_IOR_EXPR;
8014 case BIT_IOR_EXPR:
8015 goto binop;
8017 case TRUTH_XOR_EXPR:
8018 code = BIT_XOR_EXPR;
8019 case BIT_XOR_EXPR:
8020 goto binop;
8022 case LSHIFT_EXPR:
8023 case RSHIFT_EXPR:
8024 case LROTATE_EXPR:
8025 case RROTATE_EXPR:
8026 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8027 subtarget = 0;
8028 if (modifier == EXPAND_STACK_PARM)
8029 target = 0;
8030 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8031 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8032 unsignedp);
8034 /* Could determine the answer when only additive constants differ. Also,
8035 the addition of one can be handled by changing the condition. */
8036 case LT_EXPR:
8037 case LE_EXPR:
8038 case GT_EXPR:
8039 case GE_EXPR:
8040 case EQ_EXPR:
8041 case NE_EXPR:
8042 case UNORDERED_EXPR:
8043 case ORDERED_EXPR:
8044 case UNLT_EXPR:
8045 case UNLE_EXPR:
8046 case UNGT_EXPR:
8047 case UNGE_EXPR:
8048 case UNEQ_EXPR:
8049 case LTGT_EXPR:
8050 temp = do_store_flag (exp,
8051 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8052 tmode != VOIDmode ? tmode : mode, 0);
8053 if (temp != 0)
8054 return temp;
8056 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8057 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8058 && original_target
8059 && REG_P (original_target)
8060 && (GET_MODE (original_target)
8061 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8063 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8064 VOIDmode, 0);
8066 /* If temp is constant, we can just compute the result. */
8067 if (GET_CODE (temp) == CONST_INT)
8069 if (INTVAL (temp) != 0)
8070 emit_move_insn (target, const1_rtx);
8071 else
8072 emit_move_insn (target, const0_rtx);
8074 return target;
8077 if (temp != original_target)
8079 enum machine_mode mode1 = GET_MODE (temp);
8080 if (mode1 == VOIDmode)
8081 mode1 = tmode != VOIDmode ? tmode : mode;
8083 temp = copy_to_mode_reg (mode1, temp);
8086 op1 = gen_label_rtx ();
8087 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8088 GET_MODE (temp), unsignedp, op1);
8089 emit_move_insn (temp, const1_rtx);
8090 emit_label (op1);
8091 return temp;
8094 /* If no set-flag instruction, must generate a conditional store
8095 into a temporary variable. Drop through and handle this
8096 like && and ||. */
8098 if (! ignore
8099 && (target == 0
8100 || modifier == EXPAND_STACK_PARM
8101 || ! safe_from_p (target, exp, 1)
8102 /* Make sure we don't have a hard reg (such as function's return
8103 value) live across basic blocks, if not optimizing. */
8104 || (!optimize && REG_P (target)
8105 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8106 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8108 if (target)
8109 emit_move_insn (target, const0_rtx);
8111 op1 = gen_label_rtx ();
8112 jumpifnot (exp, op1);
8114 if (target)
8115 emit_move_insn (target, const1_rtx);
8117 emit_label (op1);
8118 return ignore ? const0_rtx : target;
8120 case TRUTH_NOT_EXPR:
8121 if (modifier == EXPAND_STACK_PARM)
8122 target = 0;
8123 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8124 /* The parser is careful to generate TRUTH_NOT_EXPR
8125 only with operands that are always zero or one. */
8126 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8127 target, 1, OPTAB_LIB_WIDEN);
8128 gcc_assert (temp);
8129 return temp;
8131 case STATEMENT_LIST:
8133 tree_stmt_iterator iter;
8135 gcc_assert (ignore);
8137 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8138 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8140 return const0_rtx;
8142 case COND_EXPR:
8143 /* A COND_EXPR with its type being VOID_TYPE represents a
8144 conditional jump and is handled in
8145 expand_gimple_cond_expr. */
8146 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8148 /* Note that COND_EXPRs whose type is a structure or union
8149 are required to be constructed to contain assignments of
8150 a temporary variable, so that we can evaluate them here
8151 for side effect only. If type is void, we must do likewise. */
8153 gcc_assert (!TREE_ADDRESSABLE (type)
8154 && !ignore
8155 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8156 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8158 /* If we are not to produce a result, we have no target. Otherwise,
8159 if a target was specified use it; it will not be used as an
8160 intermediate target unless it is safe. If no target, use a
8161 temporary. */
8163 if (modifier != EXPAND_STACK_PARM
8164 && original_target
8165 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8166 && GET_MODE (original_target) == mode
8167 #ifdef HAVE_conditional_move
8168 && (! can_conditionally_move_p (mode)
8169 || REG_P (original_target))
8170 #endif
8171 && !MEM_P (original_target))
8172 temp = original_target;
8173 else
8174 temp = assign_temp (type, 0, 0, 1);
8176 do_pending_stack_adjust ();
8177 NO_DEFER_POP;
8178 op0 = gen_label_rtx ();
8179 op1 = gen_label_rtx ();
8180 jumpifnot (TREE_OPERAND (exp, 0), op0);
8181 store_expr (TREE_OPERAND (exp, 1), temp,
8182 modifier == EXPAND_STACK_PARM);
8184 emit_jump_insn (gen_jump (op1));
8185 emit_barrier ();
8186 emit_label (op0);
8187 store_expr (TREE_OPERAND (exp, 2), temp,
8188 modifier == EXPAND_STACK_PARM);
8190 emit_label (op1);
8191 OK_DEFER_POP;
8192 return temp;
8194 case VEC_COND_EXPR:
8195 target = expand_vec_cond_expr (exp, target);
8196 return target;
8198 case MODIFY_EXPR:
8200 tree lhs = TREE_OPERAND (exp, 0);
8201 tree rhs = TREE_OPERAND (exp, 1);
8203 gcc_assert (ignore);
8205 /* Check for |= or &= of a bitfield of size 1 into another bitfield
8206 of size 1. In this case (unless we need the result of the
8207 assignment) we can do this more efficiently with a
8208 test followed by an assignment, if necessary.
8210 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8211 things change so we do, this code should be enhanced to
8212 support it. */
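/* Illustrative example (not from the original source): with 1-bit
   fields, s.a |= s.b becomes "if (s.b) s.a = 1;" and s.a &= s.b
   becomes "if (!s.b) s.a = 0;", avoiding a read-modify-write of
   s.a. */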
8213 if (TREE_CODE (lhs) == COMPONENT_REF
8214 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8215 || TREE_CODE (rhs) == BIT_AND_EXPR)
8216 && TREE_OPERAND (rhs, 0) == lhs
8217 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8218 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8219 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8221 rtx label = gen_label_rtx ();
8223 do_jump (TREE_OPERAND (rhs, 1),
8224 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8225 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8226 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8227 (TREE_CODE (rhs) == BIT_IOR_EXPR
8228 ? integer_one_node
8229 : integer_zero_node)));
8230 do_pending_stack_adjust ();
8231 emit_label (label);
8232 return const0_rtx;
8235 expand_assignment (lhs, rhs);
8237 return const0_rtx;
8240 case RETURN_EXPR:
8241 if (!TREE_OPERAND (exp, 0))
8242 expand_null_return ();
8243 else
8244 expand_return (TREE_OPERAND (exp, 0));
8245 return const0_rtx;
8247 case ADDR_EXPR:
8248 return expand_expr_addr_expr (exp, target, tmode, modifier);
8250 case COMPLEX_EXPR:
8251 /* Get the rtx values of the operands. */
8252 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8253 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8255 if (!target)
8256 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8258 /* Move the real (op0) and imaginary (op1) parts to their location. */
8259 write_complex_part (target, op0, false);
8260 write_complex_part (target, op1, true);
8262 return target;
8264 case REALPART_EXPR:
8265 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8266 return read_complex_part (op0, false);
8268 case IMAGPART_EXPR:
8269 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8270 return read_complex_part (op0, true);
8272 case RESX_EXPR:
8273 expand_resx_expr (exp);
8274 return const0_rtx;
8276 case TRY_CATCH_EXPR:
8277 case CATCH_EXPR:
8278 case EH_FILTER_EXPR:
8279 case TRY_FINALLY_EXPR:
8280 /* Lowered by tree-eh.c. */
8281 gcc_unreachable ();
8283 case WITH_CLEANUP_EXPR:
8284 case CLEANUP_POINT_EXPR:
8285 case TARGET_EXPR:
8286 case CASE_LABEL_EXPR:
8287 case VA_ARG_EXPR:
8288 case BIND_EXPR:
8289 case INIT_EXPR:
8290 case CONJ_EXPR:
8291 case COMPOUND_EXPR:
8292 case PREINCREMENT_EXPR:
8293 case PREDECREMENT_EXPR:
8294 case POSTINCREMENT_EXPR:
8295 case POSTDECREMENT_EXPR:
8296 case LOOP_EXPR:
8297 case EXIT_EXPR:
8298 case TRUTH_ANDIF_EXPR:
8299 case TRUTH_ORIF_EXPR:
8300 /* Lowered by gimplify.c. */
8301 gcc_unreachable ();
8303 case EXC_PTR_EXPR:
8304 return get_exception_pointer (cfun);
8306 case FILTER_EXPR:
8307 return get_exception_filter (cfun);
8309 case FDESC_EXPR:
8310 /* Function descriptors are not valid except as
8311 initialization constants, and should not be expanded. */
8312 gcc_unreachable ();
8314 case SWITCH_EXPR:
8315 expand_case (exp);
8316 return const0_rtx;
8318 case LABEL_EXPR:
8319 expand_label (TREE_OPERAND (exp, 0));
8320 return const0_rtx;
8322 case ASM_EXPR:
8323 expand_asm_expr (exp);
8324 return const0_rtx;
8326 case WITH_SIZE_EXPR:
8327 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8328 have pulled out the size to use in whatever context it needed. */
8329 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8330 modifier, alt_rtl);
8332 case REALIGN_LOAD_EXPR:
8334 tree oprnd0 = TREE_OPERAND (exp, 0);
8335 tree oprnd1 = TREE_OPERAND (exp, 1);
8336 tree oprnd2 = TREE_OPERAND (exp, 2);
8337 rtx op2;
8339 this_optab = optab_for_tree_code (code, type);
8340 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8341 op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
8342 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8343 target, unsignedp);
8344 gcc_assert (temp);
8345 return temp;
8349 default:
8350 return lang_hooks.expand_expr (exp, original_target, tmode,
8351 modifier, alt_rtl);
8354 /* Here to do an ordinary binary operator. */
8355 binop:
8356 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8357 subtarget, &op0, &op1, 0);
8358 binop2:
8359 this_optab = optab_for_tree_code (code, type);
8360 binop3:
8361 if (modifier == EXPAND_STACK_PARM)
8362 target = 0;
8363 temp = expand_binop (mode, this_optab, op0, op1, target,
8364 unsignedp, OPTAB_LIB_WIDEN);
8365 gcc_assert (temp);
8366 return REDUCE_BIT_FIELD (temp);
8368 #undef REDUCE_BIT_FIELD
8370 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8371 signedness of TYPE), possibly returning the result in TARGET. */
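/* Example (illustrative): reducing a value to a 3-bit unsigned type
   held in 32-bit SImode masks it with 7 below; for a 3-bit signed
   type the value is shifted left 29 bits and then arithmetically
   shifted right 29 bits, replicating the sign bit. */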
8372 static rtx
8373 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8375 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8376 if (target && GET_MODE (target) != GET_MODE (exp))
8377 target = 0;
8378 if (TYPE_UNSIGNED (type))
8380 rtx mask;
8381 if (prec < HOST_BITS_PER_WIDE_INT)
8382 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8383 GET_MODE (exp));
8384 else
8385 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8386 ((unsigned HOST_WIDE_INT) 1
8387 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8388 GET_MODE (exp));
8389 return expand_and (GET_MODE (exp), exp, mask, target);
8391 else
8393 tree count = build_int_cst (NULL_TREE,
8394 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8395 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8396 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8400 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8401 when applied to the address of EXP produces an address known to be
8402 aligned more than BIGGEST_ALIGNMENT. */
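/* Sketch of the idiom recognized here (illustrative, documentation
   only): an OFFSET of the source form (-(intptr_t) &EXP) & MASK,
   where MASK is one less than a power of 2 N and exceeds
   BIGGEST_ALIGNMENT / BITS_PER_UNIT; adding such an offset to the
   address of EXP rounds it up to an N-byte boundary. */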
8404 static int
8405 is_aligning_offset (tree offset, tree exp)
8407 /* Strip off any conversions. */
8408 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8409 || TREE_CODE (offset) == NOP_EXPR
8410 || TREE_CODE (offset) == CONVERT_EXPR)
8411 offset = TREE_OPERAND (offset, 0);
8413 /* We must now have a BIT_AND_EXPR with a constant that is one less
8414 than a power of 2 and larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT. */
8415 if (TREE_CODE (offset) != BIT_AND_EXPR
8416 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8417 || compare_tree_int (TREE_OPERAND (offset, 1),
8418 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8419 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8420 return 0;
8422 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8423 It must be NEGATE_EXPR. Then strip any more conversions. */
8424 offset = TREE_OPERAND (offset, 0);
8425 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8426 || TREE_CODE (offset) == NOP_EXPR
8427 || TREE_CODE (offset) == CONVERT_EXPR)
8428 offset = TREE_OPERAND (offset, 0);
8430 if (TREE_CODE (offset) != NEGATE_EXPR)
8431 return 0;
8433 offset = TREE_OPERAND (offset, 0);
8434 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8435 || TREE_CODE (offset) == NOP_EXPR
8436 || TREE_CODE (offset) == CONVERT_EXPR)
8437 offset = TREE_OPERAND (offset, 0);
8439 /* This must now be the address of EXP. */
8440 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8443 /* Return the tree node if ARG corresponds to a string constant or zero
8444 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8445 in bytes within the string that ARG is accessing. The type of the
8446 offset will be `sizetype'. */
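/* Example (illustrative): for ARG of the form &"hello"[2], or the
   equivalent "hello" + 2, this returns the STRING_CST "hello" and
   sets *PTR_OFFSET to (sizetype) 2. */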
8448 tree
8449 string_constant (tree arg, tree *ptr_offset)
8451 tree array, offset;
8452 STRIP_NOPS (arg);
8454 if (TREE_CODE (arg) == ADDR_EXPR)
8456 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8458 *ptr_offset = size_zero_node;
8459 return TREE_OPERAND (arg, 0);
8461 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8463 array = TREE_OPERAND (arg, 0);
8464 offset = size_zero_node;
8466 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8468 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8469 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8470 if (TREE_CODE (array) != STRING_CST
8471 && TREE_CODE (array) != VAR_DECL)
8472 return 0;
8474 else
8475 return 0;
8477 else if (TREE_CODE (arg) == PLUS_EXPR)
8479 tree arg0 = TREE_OPERAND (arg, 0);
8480 tree arg1 = TREE_OPERAND (arg, 1);
8482 STRIP_NOPS (arg0);
8483 STRIP_NOPS (arg1);
8485 if (TREE_CODE (arg0) == ADDR_EXPR
8486 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8487 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8489 array = TREE_OPERAND (arg0, 0);
8490 offset = arg1;
8492 else if (TREE_CODE (arg1) == ADDR_EXPR
8493 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8494 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8496 array = TREE_OPERAND (arg1, 0);
8497 offset = arg0;
8499 else
8500 return 0;
8502 else
8503 return 0;
8505 if (TREE_CODE (array) == STRING_CST)
8507 *ptr_offset = convert (sizetype, offset);
8508 return array;
8510 else if (TREE_CODE (array) == VAR_DECL)
8512 int length;
8514 /* Variables initialized to string literals can be handled too. */
8515 if (DECL_INITIAL (array) == NULL_TREE
8516 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8517 return 0;
8519 /* But only if they are read-only, non-volatile, and bind locally. */
8520 if (! TREE_READONLY (array)
8521 || TREE_SIDE_EFFECTS (array)
8522 || ! targetm.binds_local_p (array))
8523 return 0;
8525 /* Avoid const char foo[4] = "abcde"; */
8526 if (DECL_SIZE_UNIT (array) == NULL_TREE
8527 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8528 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8529 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8530 return 0;
8532 /* If the variable is bigger than the string literal, OFFSET must be
8533 constant and within the bounds of the string literal. */
8534 offset = convert (sizetype, offset);
8535 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8536 && (! host_integerp (offset, 1)
8537 || compare_tree_int (offset, length) >= 0))
8538 return 0;
8540 *ptr_offset = offset;
8541 return DECL_INITIAL (array);
8544 return 0;
8547 /* Generate code to calculate EXP using a store-flag instruction
8548 and return an rtx for the result. EXP is either a comparison
8549 or a TRUTH_NOT_EXPR whose operand is a comparison.
8551 If TARGET is nonzero, store the result there if convenient.
8553 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
8554 cheap.
8556 Return zero if there is no suitable set-flag instruction
8557 available on this machine.
8559 Once expand_expr has been called on the arguments of the comparison,
8560 we are committed to doing the store flag, since it is not safe to
8561 re-evaluate the expression. We emit the store-flag insn by calling
8562 emit_store_flag, but only expand the arguments if we have a reason
8563 to believe that emit_store_flag will be successful. If we think that
8564 it will, but it isn't, we have to simulate the store-flag with a
8565 set/jump/set sequence. */
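/* Illustrative example (not from the original source): for EXP
   representing x > 4 on a machine with a suitable scc insn, the
   operands are expanded and emit_store_flag produces a single
   store-flag insn; otherwise the fallback stores 1, conditionally
   jumps over a store of 0, and returns the target. */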
8567 static rtx
8568 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8570 enum rtx_code code;
8571 tree arg0, arg1, type;
8572 tree tem;
8573 enum machine_mode operand_mode;
8574 int invert = 0;
8575 int unsignedp;
8576 rtx op0, op1;
8577 enum insn_code icode;
8578 rtx subtarget = target;
8579 rtx result, label;
8581 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8582 result at the end. We can't simply invert the test since it would
8583 have already been inverted if it were valid. This case occurs for
8584 some floating-point comparisons. */
8586 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8587 invert = 1, exp = TREE_OPERAND (exp, 0);
8589 arg0 = TREE_OPERAND (exp, 0);
8590 arg1 = TREE_OPERAND (exp, 1);
8592 /* Don't crash if the comparison was erroneous. */
8593 if (arg0 == error_mark_node || arg1 == error_mark_node)
8594 return const0_rtx;
8596 type = TREE_TYPE (arg0);
8597 operand_mode = TYPE_MODE (type);
8598 unsignedp = TYPE_UNSIGNED (type);
8600 /* We won't bother with BLKmode store-flag operations because it would mean
8601 passing a lot of information to emit_store_flag. */
8602 if (operand_mode == BLKmode)
8603 return 0;
8605 /* We won't bother with store-flag operations involving function pointers
8606 when function pointers must be canonicalized before comparisons. */
8607 #ifdef HAVE_canonicalize_funcptr_for_compare
8608 if (HAVE_canonicalize_funcptr_for_compare
8609 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8610 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8611 == FUNCTION_TYPE))
8612 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8613 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8614 == FUNCTION_TYPE))))
8615 return 0;
8616 #endif
8618 STRIP_NOPS (arg0);
8619 STRIP_NOPS (arg1);
8621 /* Get the rtx comparison code to use. We know that EXP is a comparison
8622 operation of some type. Some comparisons against 1 and -1 can be
8623 converted to comparisons with zero. Do so here so that the tests
8624 below will be aware that we have a comparison with zero. These
8625 tests will not catch constants in the first operand, but constants
8626 are rarely passed as the first operand. */
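/* For example (illustrative): a signed x <= -1 becomes x < 0 and
   x >= 1 becomes x > 0, so the zero-comparison special cases below
   catch them. */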
8628 switch (TREE_CODE (exp))
8630 case EQ_EXPR:
8631 code = EQ;
8632 break;
8633 case NE_EXPR:
8634 code = NE;
8635 break;
8636 case LT_EXPR:
8637 if (integer_onep (arg1))
8638 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8639 else
8640 code = unsignedp ? LTU : LT;
8641 break;
8642 case LE_EXPR:
8643 if (! unsignedp && integer_all_onesp (arg1))
8644 arg1 = integer_zero_node, code = LT;
8645 else
8646 code = unsignedp ? LEU : LE;
8647 break;
8648 case GT_EXPR:
8649 if (! unsignedp && integer_all_onesp (arg1))
8650 arg1 = integer_zero_node, code = GE;
8651 else
8652 code = unsignedp ? GTU : GT;
8653 break;
8654 case GE_EXPR:
8655 if (integer_onep (arg1))
8656 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8657 else
8658 code = unsignedp ? GEU : GE;
8659 break;
8661 case UNORDERED_EXPR:
8662 code = UNORDERED;
8663 break;
8664 case ORDERED_EXPR:
8665 code = ORDERED;
8666 break;
8667 case UNLT_EXPR:
8668 code = UNLT;
8669 break;
8670 case UNLE_EXPR:
8671 code = UNLE;
8672 break;
8673 case UNGT_EXPR:
8674 code = UNGT;
8675 break;
8676 case UNGE_EXPR:
8677 code = UNGE;
8678 break;
8679 case UNEQ_EXPR:
8680 code = UNEQ;
8681 break;
8682 case LTGT_EXPR:
8683 code = LTGT;
8684 break;
8686 default:
8687 gcc_unreachable ();
8690 /* Put a constant second. */
8691 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8693 tem = arg0; arg0 = arg1; arg1 = tem;
8694 code = swap_condition (code);
8697 /* If this is an equality or inequality test of a single bit, we can
8698 do this by shifting the bit being tested to the low-order bit and
8699 masking the result with the constant 1. If the condition was EQ,
8700 we xor it with 1. This does not require an scc insn and is faster
8701 than an scc insn even if we have it.
8703 The code to make this transformation was moved into fold_single_bit_test,
8704 so we just call into the folder and expand its result. */
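/* E.g. (illustrative): (x & 8) != 0 folds to (x >> 3) & 1, and
   (x & 8) == 0 folds to ((x >> 3) & 1) ^ 1; no scc insn is
   required. */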
8706 if ((code == NE || code == EQ)
8707 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8708 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8710 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
8711 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
8712 arg0, arg1, type),
8713 target, VOIDmode, EXPAND_NORMAL);
8716 /* Now see if we are likely to be able to do this. Return if not. */
8717 if (! can_compare_p (code, operand_mode, ccp_store_flag))
8718 return 0;
8720 icode = setcc_gen_code[(int) code];
8721 if (icode == CODE_FOR_nothing
8722 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
8724 /* We can only do this if it is one of the special cases that
8725 can be handled without an scc insn. */
8726 if ((code == LT && integer_zerop (arg1))
8727 || (! only_cheap && code == GE && integer_zerop (arg1)))
8729 else if (BRANCH_COST >= 0
8730 && ! only_cheap && (code == NE || code == EQ)
8731 && TREE_CODE (type) != REAL_TYPE
8732 && ((abs_optab->handlers[(int) operand_mode].insn_code
8733 != CODE_FOR_nothing)
8734 || (ffs_optab->handlers[(int) operand_mode].insn_code
8735 != CODE_FOR_nothing)))
8737 else
8738 return 0;
8741 if (! get_subtarget (target)
8742 || GET_MODE (subtarget) != operand_mode)
8743 subtarget = 0;
8745 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
8747 if (target == 0)
8748 target = gen_reg_rtx (mode);
8750 result = emit_store_flag (target, code, op0, op1,
8751 operand_mode, unsignedp, 1);
8753 if (result)
8755 if (invert)
8756 result = expand_binop (mode, xor_optab, result, const1_rtx,
8757 result, 0, OPTAB_LIB_WIDEN);
8758 return result;
8761 /* If this failed, we have to do this with set/compare/jump/set code. */
8762 if (!REG_P (target)
8763 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8764 target = gen_reg_rtx (GET_MODE (target));
8766 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8767 result = compare_from_rtx (op0, op1, code, unsignedp,
8768 operand_mode, NULL_RTX);
8769 if (GET_CODE (result) == CONST_INT)
8770 return (((result == const0_rtx && ! invert)
8771 || (result != const0_rtx && invert))
8772 ? const0_rtx : const1_rtx);
8774 /* The code of RESULT may not match CODE if compare_from_rtx
8775 decided to swap its operands and reverse the original code.
8777 We know that compare_from_rtx returns either a CONST_INT or
8778 a new comparison code, so it is safe to just extract the
8779 code from RESULT. */
8780 code = GET_CODE (result);
8782 label = gen_label_rtx ();
8783 gcc_assert (bcc_gen_fctn[(int) code]);
8785 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8786 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
8787 emit_label (label);
8789 return target;
8793 /* Stubs in case we haven't got a casesi insn. */
8794 #ifndef HAVE_casesi
8795 # define HAVE_casesi 0
8796 # define gen_casesi(a, b, c, d, e) (0)
8797 # define CODE_FOR_casesi CODE_FOR_nothing
8798 #endif
8800 /* If the machine does not have a case insn that compares the bounds,
8801 this means extra overhead for dispatch tables, which raises the
8802 threshold for using them. */
8803 #ifndef CASE_VALUES_THRESHOLD
8804 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
8805 #endif /* CASE_VALUES_THRESHOLD */
8807 unsigned int
8808 case_values_threshold (void)
8810 return CASE_VALUES_THRESHOLD;
8813 /* Attempt to generate a casesi instruction. Returns 1 if successful,
8814 0 otherwise (i.e. if there is no casesi instruction). */
8815 int
8816 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
8817 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
8819 enum machine_mode index_mode = SImode;
8820 int index_bits = GET_MODE_BITSIZE (index_mode);
8821 rtx op1, op2, index;
8822 enum machine_mode op_mode;
8824 if (! HAVE_casesi)
8825 return 0;
8827 /* Convert the index to SImode. */
8828 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
8830 enum machine_mode omode = TYPE_MODE (index_type);
8831 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
8833 /* We must handle the endpoints in the original mode. */
8834 index_expr = build2 (MINUS_EXPR, index_type,
8835 index_expr, minval);
8836 minval = integer_zero_node;
8837 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8838 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
8839 omode, 1, default_label);
8840 /* Now we can safely truncate. */
8841 index = convert_to_mode (index_mode, index, 0);
8843 else
8845 if (TYPE_MODE (index_type) != index_mode)
8847 index_expr = convert (lang_hooks.types.type_for_size
8848 (index_bits, 0), index_expr);
8849 index_type = TREE_TYPE (index_expr);
8852 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8855 do_pending_stack_adjust ();
8857 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
8858 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
8859 (index, op_mode))
8860 index = copy_to_mode_reg (op_mode, index);
8862 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
8864 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
8865 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
8866 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
8867 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
8868 (op1, op_mode))
8869 op1 = copy_to_mode_reg (op_mode, op1);
8871 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
8873 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
8874 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
8875 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
8876 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
8877 (op2, op_mode))
8878 op2 = copy_to_mode_reg (op_mode, op2);
8880 emit_jump_insn (gen_casesi (index, op1, op2,
8881 table_label, default_label));
8882 return 1;
8885 /* Attempt to generate a tablejump instruction; same concept. */
8886 #ifndef HAVE_tablejump
8887 #define HAVE_tablejump 0
8888 #define gen_tablejump(x, y) (0)
8889 #endif
8891 /* Subroutine of the next function.
8893 INDEX is the value being switched on, with the lowest value
8894 in the table already subtracted.
8895 MODE is its expected mode (needed if INDEX is constant).
8896 RANGE is the length of the jump table.
8897 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
8899 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
8900 index value is out of range. */
8902 static void
8903 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
8904 rtx default_label)
8906 rtx temp, vector;
8908 if (INTVAL (range) > cfun->max_jumptable_ents)
8909 cfun->max_jumptable_ents = INTVAL (range);
8911 /* Do an unsigned comparison (in the proper mode) between the index
8912 expression and the value which represents the length of the range.
8913 Since we just finished subtracting the lower bound of the range
8914 from the index expression, this comparison allows us to simultaneously
8915 check that the original index expression value is both greater than
8916 or equal to the minimum value of the range and less than or equal to
8917 the maximum value of the range. */
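/* Concrete example (illustrative): for case labels 5 through 10 the
   caller passes INDEX = i - 5 and RANGE = 5; the single unsigned
   comparison INDEX > RANGE jumps to DEFAULT_LABEL both when i < 5
   (the subtraction wraps to a large unsigned value) and when
   i > 10. */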
8919 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
8920 default_label);
8922 /* If index is in range, it must fit in Pmode.
8923 Convert to Pmode so we can index with it. */
8924 if (mode != Pmode)
8925 index = convert_to_mode (Pmode, index, 1);
8927 /* Don't let a MEM slip through, because then INDEX that comes
8928 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
8929 and break_out_memory_refs will go to work on it and mess it up. */
8930 #ifdef PIC_CASE_VECTOR_ADDRESS
8931 if (flag_pic && !REG_P (index))
8932 index = copy_to_mode_reg (Pmode, index);
8933 #endif
8935 /* If flag_force_addr were to affect this address
8936 it could interfere with the tricky assumptions made
8937 about addresses that contain label-refs,
8938 which may be valid only very near the tablejump itself. */
8939 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
8940 GET_MODE_SIZE, because this indicates how large insns are. The other
8941 uses should all be Pmode, because they are addresses. This code
8942 could fail if addresses and insns are not the same size. */
8943 index = gen_rtx_PLUS (Pmode,
8944 gen_rtx_MULT (Pmode, index,
8945 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
8946 gen_rtx_LABEL_REF (Pmode, table_label));
8947 #ifdef PIC_CASE_VECTOR_ADDRESS
8948 if (flag_pic)
8949 index = PIC_CASE_VECTOR_ADDRESS (index);
8950 else
8951 #endif
8952 index = memory_address_noforce (CASE_VECTOR_MODE, index);
8953 temp = gen_reg_rtx (CASE_VECTOR_MODE);
8954 vector = gen_const_mem (CASE_VECTOR_MODE, index);
8955 convert_move (temp, vector, 0);
8957 emit_jump_insn (gen_tablejump (temp, table_label));
8959 /* If we are generating PIC code or if the table is PC-relative, the
8960 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
8961 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
8962 emit_barrier ();
8965 int
8966 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
8967 rtx table_label, rtx default_label)
8969 rtx index;
8971 if (! HAVE_tablejump)
8972 return 0;
8974 index_expr = fold (build2 (MINUS_EXPR, index_type,
8975 convert (index_type, index_expr),
8976 convert (index_type, minval)));
8977 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8978 do_pending_stack_adjust ();
8980 do_tablejump (index, TYPE_MODE (index_type),
8981 convert_modes (TYPE_MODE (index_type),
8982 TYPE_MODE (TREE_TYPE (range)),
8983 expand_expr (range, NULL_RTX,
8984 VOIDmode, 0),
8985 TYPE_UNSIGNED (TREE_TYPE (range))),
8986 table_label, default_label);
8987 return 1;
8990 /* Nonzero if the mode is a valid vector mode for this architecture.
8991 This returns nonzero even if there is no hardware support for the
8992 vector mode, but we can emulate with narrower modes. */
8994 int
8995 vector_mode_valid_p (enum machine_mode mode)
8997 enum mode_class class = GET_MODE_CLASS (mode);
8998 enum machine_mode innermode;
9000 /* Doh! What's going on? */
9001 if (class != MODE_VECTOR_INT
9002 && class != MODE_VECTOR_FLOAT)
9003 return 0;
9005 /* Hardware support. Woo hoo! */
9006 if (targetm.vector_mode_supported_p (mode))
9007 return 1;
9009 innermode = GET_MODE_INNER (mode);
9011 /* We should probably return 1 if requesting V4DI and we have no DI
9012 but do have V2DI; however, that case is probably very unlikely. */
9014 /* If we have support for the inner mode, we can safely emulate it.
9015 We may not have V2DI, but we can emulate with a pair of DIs. */
9016 return targetm.scalar_mode_supported_p (innermode);
9019 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9020 static rtx
9021 const_vector_from_tree (tree exp)
9023 rtvec v;
9024 int units, i;
9025 tree link, elt;
9026 enum machine_mode inner, mode;
9028 mode = TYPE_MODE (TREE_TYPE (exp));
9030 if (initializer_zerop (exp))
9031 return CONST0_RTX (mode);
9033 units = GET_MODE_NUNITS (mode);
9034 inner = GET_MODE_INNER (mode);
9036 v = rtvec_alloc (units);
9038 link = TREE_VECTOR_CST_ELTS (exp);
9039 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9041 elt = TREE_VALUE (link);
9043 if (TREE_CODE (elt) == REAL_CST)
9044 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9045 inner);
9046 else
9047 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9048 TREE_INT_CST_HIGH (elt),
9049 inner);
9052 /* Initialize remaining elements to 0. */
9053 for (; i < units; ++i)
9054 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9056 return gen_rtx_CONST_VECTOR (mode, v);
9058 #include "gt-expr.h"