/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"
#include "df.h"
#include "diagnostic.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
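
/* Illustration (not in the original source): on a typical
   STACK_GROWS_DOWNWARD target the definition above makes a push of a
   word X expand to an auto-modify store of the form

     (set (mem:SI (pre_dec:SI (reg:SI sp))) X)

   whereas an upward-growing stack uses PRE_INC instead.  */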

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};

static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
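
/* Worked example (illustrative, with made-up target values): if
   MOVE_MAX_PIECES is 8 and MOVE_RATIO is 3, a 16-byte copy that is
   aligned enough for DImode needs move_by_pieces_ninsns (16, 64, 9)
   == 2 insns, so MOVE_BY_PIECES_P holds and the copy is expanded
   inline; a 32-byte copy needs 4 insns and is left to a movmem
   pattern or a libcall instead.  */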

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
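
/* Example (illustrative): on a STRICT_ALIGNMENT target this default
   makes SLOW_UNALIGNED_ACCESS (SImode, 8) nonzero, so the by-pieces
   routines below avoid SImode accesses on memory known only to be
   byte-aligned.  */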

/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    SET_REGNO (reg, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
	      ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
	 we won't saturate the result.
	 Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
	expand_fixed_convert (to, from, 0, 0);
      else
	expand_fixed_convert (to, from, 0, 1);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  rtx word_to = gen_reg_rtx (word_mode);
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (word_to, from, unsignedp);
	  emit_unop_insn (code, to, word_to, equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
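
/* Usage sketch (illustrative, not part of the original file): to
   sign-extend a SImode pseudo SRC into a fresh DImode pseudo one
   would write

     rtx dst = gen_reg_rtx (DImode);
     convert_move (dst, src, 0);

   where UNSIGNEDP == 0 selects SIGN_EXTEND; a nonzero UNSIGNEDP
   selects ZERO_EXTEND instead.  */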

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
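
/* Worked example (illustrative): converting the QImode constant -1 to
   SImode with UNSIGNEDP nonzero takes the gen_int_mode path above:
   VAL is masked down to 0xff and, being unsigned, is not sign-extended
   again, so the result is (const_int 255) rather than (const_int -1).  */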

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
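
/* Arithmetic example (illustrative): with MOVE_MAX_PIECES == 8 on a
   host where HOST_WIDE_INT is 64 bits, STORE_MAX_PIECES is
   MIN (8, 16) == 8 bytes.  */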

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
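
/* Decomposition example (illustrative): on a target whose widest
   piece is DImode, a well-aligned 15-byte copy is expanded mode by
   mode as one DImode, one SImode, one HImode and one QImode move,
   leaving data.len == 0 as the assertion above requires.  */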

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
				       expected_align, expected_size))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
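
/* Usage sketch (illustrative): given two BLKmode MEMs DST and SRC, a
   caller copies N constant bytes with

     emit_block_move (dst, src, GEN_INT (n), BLOCK_OP_NORMAL);

   after which the strategy order above applies: move_by_pieces for
   small constant sizes, then a movmem pattern, then the memcpy
   libcall, and an explicit loop as the last resort.  */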

/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}

/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
			    unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  if (insn_data[(int) code].n_operands == 4)
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  else
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign,
					GEN_INT (expected_align),
					GEN_INT (expected_size));
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
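
/* For reference: targets provide these patterns under names such as
   "movmemsi"; the four mandatory operands are the destination MEM,
   source MEM, length and alignment, and the six-operand variant adds
   the expected-alignment and expected-size hints passed above.  */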

/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}

/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}

/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}

/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
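
/* Shape of such a group (illustrative): a value passed or returned
   partly in two registers might be represented as

     (parallel [(expr_list (reg:DI 0) (const_int 0))
		(expr_list (reg:DI 1) (const_int 8))])

   where each element pairs a register with the byte offset of the
   piece it carries; a NULL first entry means part of the value also
   lives on the stack.  */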

/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
	{
	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

	  if (len == ssize)
	    tmps[i] = src;
	  else
	    {
	      rtx first, second;

	      gcc_assert (2 * len == ssize);
	      split_double (src, &first, &second);
	      if (i)
		tmps[i] = second;
	      else
		tmps[i] = first;
	    }
	}
      else if (REG_P (src) && GET_MODE (src) == mode)
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}

/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}
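
/* Usage sketch (illustrative): to load a 16-byte struct SRC into the
   two-register group shown in the gen_group_rtx comment above, a
   caller writes

     emit_group_load (parallel, src, type, 16);

   and each register in the group receives its 8-byte piece.  */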

/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
1867 /* Emit code to move a block SRC to block DST, where SRC and DST are
1868 non-consecutive groups of registers, each represented by a PARALLEL. */
1870 void
1871 emit_group_move (rtx dst, rtx src)
1873 int i;
1875 gcc_assert (GET_CODE (src) == PARALLEL
1876 && GET_CODE (dst) == PARALLEL
1877 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1879 /* Skip first entry if NULL. */
1880 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1881 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1882 XEXP (XVECEXP (src, 0, i), 0));
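/* Editorial sketch: emit_group_move is the PARALLEL-to-PARALLEL
   analogue of a plain move; a caller that first loaded into pseudos
   can finish with

     rtx temps = emit_group_load_into_temps (real, src, type, ssize);
     ...
     emit_group_move (real, temps);

   since both PARALLELs have the same shape by construction.  */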
1885 /* Move a group of registers represented by a PARALLEL into pseudos. */
rtx
1888 emit_group_move_into_temps (rtx src)
1890 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1891 int i;
1893 for (i = 0; i < XVECLEN (src, 0); i++)
1895 rtx e = XVECEXP (src, 0, i);
1896 rtx d = XEXP (e, 0);
1898 if (d)
1899 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1900 RTVEC_ELT (vec, i) = e;
1903 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1906 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1907 where SRC is non-consecutive registers represented by a PARALLEL.
1908 SSIZE represents the total size of block ORIG_DST, or -1 if not
1909 known. */
1911 void
1912 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1914 rtx *tmps, dst;
1915 int start, finish, i;
1916 enum machine_mode m = GET_MODE (orig_dst);
1918 gcc_assert (GET_CODE (src) == PARALLEL);
1920 if (!SCALAR_INT_MODE_P (m)
1921 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1923 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1924 if (imode == BLKmode)
1925 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1926 else
1927 dst = gen_reg_rtx (imode);
1928 emit_group_store (dst, src, type, ssize);
1929 if (imode != BLKmode)
1930 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1931 emit_move_insn (orig_dst, dst);
1932 return;
1935 /* Check for a NULL entry, used to indicate that the parameter goes
1936 both on the stack and in registers. */
1937 if (XEXP (XVECEXP (src, 0, 0), 0))
1938 start = 0;
1939 else
1940 start = 1;
1941 finish = XVECLEN (src, 0);
1943 tmps = alloca (sizeof (rtx) * finish);
1945 /* Copy the (probable) hard regs into pseudos. */
1946 for (i = start; i < finish; i++)
1948 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1949 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1951 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1952 emit_move_insn (tmps[i], reg);
1954 else
1955 tmps[i] = reg;
1958 /* If we won't be storing directly into memory, protect the real destination
1959 from strange tricks we might play. */
1960 dst = orig_dst;
1961 if (GET_CODE (dst) == PARALLEL)
1963 rtx temp;
1965 /* We can get a PARALLEL dst if there is a conditional expression in
1966 a return statement. In that case, the dst and src are the same,
1967 so no action is necessary. */
1968 if (rtx_equal_p (dst, src))
1969 return;
1971 /* It is unclear if we can ever reach here, but we may as well handle
1972 it. Allocate a temporary, and split this into a store/load to/from
1973 the temporary. */
1975 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1976 emit_group_store (temp, src, type, ssize);
1977 emit_group_load (dst, temp, type, ssize);
1978 return;
1980 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1982 enum machine_mode outer = GET_MODE (dst);
1983 enum machine_mode inner;
1984 HOST_WIDE_INT bytepos;
1985 bool done = false;
1986 rtx temp;
1988 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1989 dst = gen_reg_rtx (outer);
1991 /* Make life a bit easier for combine. */
1992 /* If the first element of the vector is the low part
1993 of the destination mode, use a paradoxical subreg to
1994 initialize the destination. */
1995 if (start < finish)
1997 inner = GET_MODE (tmps[start]);
1998 bytepos = subreg_lowpart_offset (inner, outer);
1999 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
2001 temp = simplify_gen_subreg (outer, tmps[start],
2002 inner, 0);
2003 if (temp)
2005 emit_move_insn (dst, temp);
2006 done = true;
2007 start++;
2012 /* If the first element wasn't the low part, try the last. */
2013 if (!done
2014 && start < finish - 1)
2016 inner = GET_MODE (tmps[finish - 1]);
2017 bytepos = subreg_lowpart_offset (inner, outer);
2018 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2020 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2021 inner, 0);
2022 if (temp)
2024 emit_move_insn (dst, temp);
2025 done = true;
2026 finish--;
2031 /* Otherwise, simply initialize the result to zero. */
2032 if (!done)
2033 emit_move_insn (dst, CONST0_RTX (outer));
2036 /* Process the pieces. */
2037 for (i = start; i < finish; i++)
2039 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2040 enum machine_mode mode = GET_MODE (tmps[i]);
2041 unsigned int bytelen = GET_MODE_SIZE (mode);
2042 rtx dest = dst;
2044 /* Handle trailing fragments that run over the size of the struct. */
2045 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2047 /* store_bit_field always takes its value from the lsb.
2048 Move the fragment to the lsb if it's not already there. */
2049 if (
2050 #ifdef BLOCK_REG_PADDING
2051 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2052 == (BYTES_BIG_ENDIAN ? upward : downward)
2053 #else
2054 BYTES_BIG_ENDIAN
2055 #endif
2058 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2059 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2060 build_int_cst (NULL_TREE, shift),
2061 tmps[i], 0);
2063 bytelen = ssize - bytepos;
2066 if (GET_CODE (dst) == CONCAT)
2068 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2069 dest = XEXP (dst, 0);
2070 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2072 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2073 dest = XEXP (dst, 1);
2075 else
2077 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2078 dest = assign_stack_temp (GET_MODE (dest),
2079 GET_MODE_SIZE (GET_MODE (dest)), 0);
2080 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2081 tmps[i]);
2082 dst = dest;
2083 break;
2087 /* Optimize the access just a bit. */
2088 if (MEM_P (dest)
2089 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2090 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2091 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2092 && bytelen == GET_MODE_SIZE (mode))
2093 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2094 else
2095 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2096 mode, tmps[i]);
2099 /* Copy from the pseudo into the (probable) hard reg. */
2100 if (orig_dst != dst)
2101 emit_move_insn (orig_dst, dst);
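/* Editorial sketch: emit_group_store is the inverse operation; with
   SRC a PARALLEL like the one sketched after emit_group_load above,

     emit_group_store (dest_mem, src, type, 16);

   scatters the registers back into the 16-byte destination, copying
   hard regs through pseudos first as done above.  DEST_MEM is a
   hypothetical BLKmode MEM.  */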
2104 /* Generate code to copy a BLKmode object of TYPE out of a
2105 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2106 is null, a stack temporary is created. TGTBLK is returned.
2108 The purpose of this routine is to handle functions that return
2109 BLKmode structures in registers. Some machines (the PA for example)
2110 want to return all small structures in registers regardless of the
2111 structure's alignment. */
rtx
2114 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2116 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2117 rtx src = NULL, dst = NULL;
2118 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2119 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2120 enum machine_mode copy_mode;
2122 if (tgtblk == 0)
2124 tgtblk = assign_temp (build_qualified_type (type,
2125 (TYPE_QUALS (type)
2126 | TYPE_QUAL_CONST)),
2127 0, 1, 1);
2128 preserve_temp_slots (tgtblk);
2131 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2132 into a new pseudo which is a full word. */
2134 if (GET_MODE (srcreg) != BLKmode
2135 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2136 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2138 /* If the structure doesn't take up a whole number of words, see whether
2139 SRCREG is padded on the left or on the right. If it's on the left,
2140 set PADDING_CORRECTION to the number of bits to skip.
2142 In most ABIs, the structure will be returned at the least significant end of
2143 the register, which translates to right padding on little-endian
2144 targets and left padding on big-endian targets. The opposite
2145 holds if the structure is returned at the most significant
2146 end of the register. */
2147 if (bytes % UNITS_PER_WORD != 0
2148 && (targetm.calls.return_in_msb (type)
2149 ? !BYTES_BIG_ENDIAN
2150 : BYTES_BIG_ENDIAN))
2151 padding_correction
2152 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2154 /* Copy the structure BITSIZE bits at a time. If the target lives in
2155 memory, take care of not reading/writing past its end by selecting
2156 a copy mode suited to BITSIZE. This should always be possible given
2157 how it is computed.
2159 We could probably emit more efficient code for machines which do not use
2160 strict alignment, but it doesn't seem worth the effort at the current
2161 time. */
2163 copy_mode = word_mode;
2164 if (MEM_P (tgtblk))
2166 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2167 if (mem_mode != BLKmode)
2168 copy_mode = mem_mode;
2171 for (bitpos = 0, xbitpos = padding_correction;
2172 bitpos < bytes * BITS_PER_UNIT;
2173 bitpos += bitsize, xbitpos += bitsize)
2175 /* We need a new source operand each time xbitpos is on a
2176 word boundary and when xbitpos == padding_correction
2177 (the first time through). */
2178 if (xbitpos % BITS_PER_WORD == 0
2179 || xbitpos == padding_correction)
2180 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2181 GET_MODE (srcreg));
2183 /* We need a new destination operand each time bitpos is on
2184 a word boundary. */
2185 if (bitpos % BITS_PER_WORD == 0)
2186 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2188 /* Use xbitpos for the source extraction (right justified) and
2189 bitpos for the destination store (left justified). */
2190 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2191 extract_bit_field (src, bitsize,
2192 xbitpos % BITS_PER_WORD, 1,
2193 NULL_RTX, copy_mode, copy_mode));
2196 return tgtblk;
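/* Editorial sketch: a caller expanding a call to a function that
   returns a small BLKmode struct in a register might use

     rtx blk = copy_blkmode_from_reg (NULL_RTX, return_reg, type);

   letting this routine allocate the stack temporary and apply the
   padding correction computed above.  RETURN_REG is a hypothetical
   name for the hard return-value register.  */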
2199 /* Add a USE expression for REG to the (possibly empty) list pointed
2200 to by CALL_FUSAGE. REG must denote a hard register. */
2202 void
2203 use_reg (rtx *call_fusage, rtx reg)
2205 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2207 *call_fusage
2208 = gen_rtx_EXPR_LIST (VOIDmode,
2209 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2212 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2213 starting at REGNO. All of these registers must be hard registers. */
2215 void
2216 use_regs (rtx *call_fusage, int regno, int nregs)
2218 int i;
2220 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2222 for (i = 0; i < nregs; i++)
2223 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2226 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2227 PARALLEL REGS. This is for calls that pass values in multiple
2228 non-contiguous locations. The Irix 6 ABI has examples of this. */
2230 void
2231 use_group_regs (rtx *call_fusage, rtx regs)
2233 int i;
2235 for (i = 0; i < XVECLEN (regs, 0); i++)
2237 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2239 /* A NULL entry means the parameter goes both on the stack and in
2240 registers. This can also be a MEM for targets that pass values
2241 partially on the stack and partially in registers. */
2242 if (reg != 0 && REG_P (reg))
2243 use_reg (call_fusage, reg);
2248 /* Determine whether the LEN bytes generated by CONSTFUN can be
2249 stored to memory using several move instructions. CONSTFUNDATA is
2250 a pointer which will be passed as an argument in every CONSTFUN call.
2251 ALIGN is the maximum alignment we can assume.  MEMSETP is true if this is
2252 a memset operation and false if it's a copy of a constant string.
2253 Return nonzero if a call to store_by_pieces should succeed.  */
int
2256 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2257 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2258 void *constfundata, unsigned int align, bool memsetp)
2260 unsigned HOST_WIDE_INT l;
2261 unsigned int max_size;
2262 HOST_WIDE_INT offset = 0;
2263 enum machine_mode mode, tmode;
2264 enum insn_code icode;
2265 int reverse;
2266 rtx cst;
2268 if (len == 0)
2269 return 1;
2271 if (! (memsetp
2272 ? SET_BY_PIECES_P (len, align)
2273 : STORE_BY_PIECES_P (len, align)))
2274 return 0;
2276 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2277 if (align >= GET_MODE_ALIGNMENT (tmode))
2278 align = GET_MODE_ALIGNMENT (tmode);
2279 else
2281 enum machine_mode xmode;
2283 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2284 tmode != VOIDmode;
2285 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2286 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2287 || SLOW_UNALIGNED_ACCESS (tmode, align))
2288 break;
2290 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2293 /* We would first store what we can in the largest integer mode, then go to
2294 successively smaller modes. */
2296 for (reverse = 0;
2297 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2298 reverse++)
2300 l = len;
2301 mode = VOIDmode;
2302 max_size = STORE_MAX_PIECES + 1;
2303 while (max_size > 1)
2305 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2306 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2307 if (GET_MODE_SIZE (tmode) < max_size)
2308 mode = tmode;
2310 if (mode == VOIDmode)
2311 break;
2313 icode = optab_handler (mov_optab, mode)->insn_code;
2314 if (icode != CODE_FOR_nothing
2315 && align >= GET_MODE_ALIGNMENT (mode))
2317 unsigned int size = GET_MODE_SIZE (mode);
2319 while (l >= size)
2321 if (reverse)
2322 offset -= size;
2324 cst = (*constfun) (constfundata, offset, mode);
2325 if (!LEGITIMATE_CONSTANT_P (cst))
2326 return 0;
2328 if (!reverse)
2329 offset += size;
2331 l -= size;
2335 max_size = GET_MODE_SIZE (mode);
2338 /* The code above should have handled everything. */
2339 gcc_assert (!l);
2342 return 1;
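/* Editorial sketch: builtin expanders typically pair this predicate
   with store_by_pieces below, along the lines of

     if (can_store_by_pieces (len, builtin_strncpy_read_str,
			      (void *) p, align, false))
       store_by_pieces (dest_mem, len, builtin_strncpy_read_str,
			(void *) p, align, false, 0);

   where builtin_strncpy_read_str is the CONSTFUN builtins.c uses to
   hand back successive pieces of the constant string P.  */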
2345 /* Generate several move instructions to store LEN bytes generated by
2346 CONSTFUN to block TO (a MEM rtx with BLKmode).  CONSTFUNDATA is a
2347 pointer which will be passed as an argument in every CONSTFUN call.
2348 ALIGN is the maximum alignment we can assume.  MEMSETP is true if this is
2349 a memset operation and false if it's a copy of a constant string.
2350 If ENDP is 0, return TO; if ENDP is 1, return the memory at the end,
2351 a la mempcpy; and if ENDP is 2, return the memory at the end minus one
2352 byte, a la stpcpy.  */
rtx
2355 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2356 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2357 void *constfundata, unsigned int align, bool memsetp, int endp)
2359 struct store_by_pieces data;
2361 if (len == 0)
2363 gcc_assert (endp != 2);
2364 return to;
2367 gcc_assert (memsetp
2368 ? SET_BY_PIECES_P (len, align)
2369 : STORE_BY_PIECES_P (len, align));
2370 data.constfun = constfun;
2371 data.constfundata = constfundata;
2372 data.len = len;
2373 data.to = to;
2374 store_by_pieces_1 (&data, align);
2375 if (endp)
2377 rtx to1;
2379 gcc_assert (!data.reverse);
2380 if (data.autinc_to)
2382 if (endp == 2)
2384 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2385 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2386 else
2387 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2388 -1));
2390 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2391 data.offset);
2393 else
2395 if (endp == 2)
2396 --data.offset;
2397 to1 = adjust_address (data.to, QImode, data.offset);
2399 return to1;
2401 else
2402 return data.to;
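/* Editorial note (sketch): the ENDP convention mirrors the C library.
   For instance, an expander for mempcpy from a constant string wants
   the address one past the last byte written, so it would call

     result = store_by_pieces (dest, len, constfun, data, align,
			       false, 1);

   while an stpcpy-style expander passes 2 to get the address of the
   trailing nul.  CONSTFUN and DATA are placeholders here.  */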
2405 /* Generate several move instructions to clear LEN bytes of block TO
2406 (a MEM rtx with BLKmode).  ALIGN is the maximum alignment we can assume. */
2408 static void
2409 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2411 struct store_by_pieces data;
2413 if (len == 0)
2414 return;
2416 data.constfun = clear_by_pieces_1;
2417 data.constfundata = NULL;
2418 data.len = len;
2419 data.to = to;
2420 store_by_pieces_1 (&data, align);
2423 /* Callback routine for clear_by_pieces.
2424 Return const0_rtx unconditionally. */
2426 static rtx
2427 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2428 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2429 enum machine_mode mode ATTRIBUTE_UNUSED)
2431 return const0_rtx;
2434 /* Subroutine of clear_by_pieces and store_by_pieces.
2435 Generate several move instructions to store LEN bytes of block TO
2436 (a MEM rtx with BLKmode).  ALIGN is the maximum alignment we can assume. */
2438 static void
2439 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2440 unsigned int align ATTRIBUTE_UNUSED)
2442 rtx to_addr = XEXP (data->to, 0);
2443 unsigned int max_size = STORE_MAX_PIECES + 1;
2444 enum machine_mode mode = VOIDmode, tmode;
2445 enum insn_code icode;
2447 data->offset = 0;
2448 data->to_addr = to_addr;
2449 data->autinc_to
2450 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2451 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2453 data->explicit_inc_to = 0;
2454 data->reverse
2455 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2456 if (data->reverse)
2457 data->offset = data->len;
2459 /* If storing requires more than two move insns,
2460 copy addresses to registers (to make displacements shorter)
2461 and use post-increment if available. */
2462 if (!data->autinc_to
2463 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2465 /* Determine the main mode we'll be using. */
2466 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2467 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2468 if (GET_MODE_SIZE (tmode) < max_size)
2469 mode = tmode;
2471 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2473 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2474 data->autinc_to = 1;
2475 data->explicit_inc_to = -1;
2478 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2479 && ! data->autinc_to)
2481 data->to_addr = copy_addr_to_reg (to_addr);
2482 data->autinc_to = 1;
2483 data->explicit_inc_to = 1;
2486 if ( !data->autinc_to && CONSTANT_P (to_addr))
2487 data->to_addr = copy_addr_to_reg (to_addr);
2490 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2491 if (align >= GET_MODE_ALIGNMENT (tmode))
2492 align = GET_MODE_ALIGNMENT (tmode);
2493 else
2495 enum machine_mode xmode;
2497 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2498 tmode != VOIDmode;
2499 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2500 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2501 || SLOW_UNALIGNED_ACCESS (tmode, align))
2502 break;
2504 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2507 /* First store what we can in the largest integer mode, then go to
2508 successively smaller modes. */
2510 while (max_size > 1)
2512 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2513 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2514 if (GET_MODE_SIZE (tmode) < max_size)
2515 mode = tmode;
2517 if (mode == VOIDmode)
2518 break;
2520 icode = optab_handler (mov_optab, mode)->insn_code;
2521 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2522 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2524 max_size = GET_MODE_SIZE (mode);
2527 /* The code above should have handled everything. */
2528 gcc_assert (!data->len);
2531 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2532 with move instructions for mode MODE. GENFUN is the gen_... function
2533 to make a move insn for that mode. DATA has all the other info. */
2535 static void
2536 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2537 struct store_by_pieces *data)
2539 unsigned int size = GET_MODE_SIZE (mode);
2540 rtx to1, cst;
2542 while (data->len >= size)
2544 if (data->reverse)
2545 data->offset -= size;
2547 if (data->autinc_to)
2548 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2549 data->offset);
2550 else
2551 to1 = adjust_address (data->to, mode, data->offset);
2553 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2554 emit_insn (gen_add2_insn (data->to_addr,
2555 GEN_INT (-(HOST_WIDE_INT) size)));
2557 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2558 emit_insn ((*genfun) (to1, cst));
2560 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2561 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2563 if (! data->reverse)
2564 data->offset += size;
2566 data->len -= size;
2570 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2571 its length in bytes. */
rtx
2574 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2575 unsigned int expected_align, HOST_WIDE_INT expected_size)
2577 enum machine_mode mode = GET_MODE (object);
2578 unsigned int align;
2580 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2582 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2583 just move a zero. Otherwise, do this a piece at a time. */
2584 if (mode != BLKmode
2585 && GET_CODE (size) == CONST_INT
2586 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2588 rtx zero = CONST0_RTX (mode);
2589 if (zero != NULL)
2591 emit_move_insn (object, zero);
2592 return NULL;
2595 if (COMPLEX_MODE_P (mode))
2597 zero = CONST0_RTX (GET_MODE_INNER (mode));
2598 if (zero != NULL)
2600 write_complex_part (object, zero, 0);
2601 write_complex_part (object, zero, 1);
2602 return NULL;
2607 if (size == const0_rtx)
2608 return NULL;
2610 align = MEM_ALIGN (object);
2612 if (GET_CODE (size) == CONST_INT
2613 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2614 clear_by_pieces (object, INTVAL (size), align);
2615 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2616 expected_align, expected_size))
2618 else
2619 return set_storage_via_libcall (object, size, const0_rtx,
2620 method == BLOCK_OP_TAILCALL);
2622 return NULL;
rtx
2626 clear_storage (rtx object, rtx size, enum block_op_methods method)
2628 return clear_storage_hints (object, size, method, 0, -1);
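/* Editorial sketch: zeroing a fixed-size BLKmode object reduces to

     clear_storage (mem, GEN_INT (32), BLOCK_OP_NORMAL);

   which tries clear_by_pieces, then a setmem pattern, and finally the
   memset libcall, in the order implemented above.  The 32-byte size
   is arbitrary.  */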
2632 /* A subroutine of clear_storage. Expand a call to memset.
2633 Return the return value of memset, 0 otherwise. */
rtx
2636 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2638 tree call_expr, fn, object_tree, size_tree, val_tree;
2639 enum machine_mode size_mode;
2640 rtx retval;
2642 /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
2643 place those new pseudos into a VAR_DECL and use them later. */
2645 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2647 size_mode = TYPE_MODE (sizetype);
2648 size = convert_to_mode (size_mode, size, 1);
2649 size = copy_to_mode_reg (size_mode, size);
2651 /* It is incorrect to use the libcall calling conventions to call
2652 memset in this context. This could be a user call to memset and
2653 the user may wish to examine the return value from memset. For
2654 targets where libcalls and normal calls have different conventions
2655 for returning pointers, we could end up generating incorrect code. */
2657 object_tree = make_tree (ptr_type_node, object);
2658 if (GET_CODE (val) != CONST_INT)
2659 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2660 size_tree = make_tree (sizetype, size);
2661 val_tree = make_tree (integer_type_node, val);
2663 fn = clear_storage_libcall_fn (true);
2664 call_expr = build_call_expr (fn, 3,
2665 object_tree, val_tree, size_tree);
2666 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2668 retval = expand_normal (call_expr);
2670 return retval;
2673 /* A subroutine of set_storage_via_libcall. Create the tree node
2674 for the function we use for block clears. The first time FOR_CALL
2675 is true, we call assemble_external. */
2677 static GTY(()) tree block_clear_fn;
2679 void
2680 init_block_clear_fn (const char *asmspec)
2682 if (!block_clear_fn)
2684 tree fn, args;
2686 fn = get_identifier ("memset");
2687 args = build_function_type_list (ptr_type_node, ptr_type_node,
2688 integer_type_node, sizetype,
2689 NULL_TREE);
2691 fn = build_decl (FUNCTION_DECL, fn, args);
2692 DECL_EXTERNAL (fn) = 1;
2693 TREE_PUBLIC (fn) = 1;
2694 DECL_ARTIFICIAL (fn) = 1;
2695 TREE_NOTHROW (fn) = 1;
2696 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2697 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2699 block_clear_fn = fn;
2702 if (asmspec)
2703 set_user_assembler_name (block_clear_fn, asmspec);
2706 static tree
2707 clear_storage_libcall_fn (int for_call)
2709 static bool emitted_extern;
2711 if (!block_clear_fn)
2712 init_block_clear_fn (NULL);
2714 if (for_call && !emitted_extern)
2716 emitted_extern = true;
2717 make_decl_rtl (block_clear_fn);
2718 assemble_external (block_clear_fn);
2721 return block_clear_fn;
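/* Editorial note (sketch): init_block_clear_fn is also the hook for
   renaming the clear function; e.g. code honoring a user-specified
   assembler name for memset could call

     init_block_clear_fn ("__my_memset");

   where "__my_memset" is a hypothetical assembler name recorded via
   set_user_assembler_name above.  */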
2724 /* Expand a setmem pattern; return true if successful. */
2726 bool
2727 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2728 unsigned int expected_align, HOST_WIDE_INT expected_size)
2730 /* Try the most limited insn first, because there's no point
2731 including more than one in the machine description unless
2732 the more limited one has some advantage. */
2734 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2735 enum machine_mode mode;
2737 if (expected_align < align)
2738 expected_align = align;
2740 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2741 mode = GET_MODE_WIDER_MODE (mode))
2743 enum insn_code code = setmem_optab[(int) mode];
2744 insn_operand_predicate_fn pred;
2746 if (code != CODE_FOR_nothing
2747 /* We don't need MODE to be narrower than
2748 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2749 the mode mask, as it is returned by the macro, it will
2750 definitely be less than the actual mode mask. */
2751 && ((GET_CODE (size) == CONST_INT
2752 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2753 <= (GET_MODE_MASK (mode) >> 1)))
2754 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2755 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2756 || (*pred) (object, BLKmode))
2757 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2758 || (*pred) (opalign, VOIDmode)))
2760 rtx opsize, opchar;
2761 enum machine_mode char_mode;
2762 rtx last = get_last_insn ();
2763 rtx pat;
2765 opsize = convert_to_mode (mode, size, 1);
2766 pred = insn_data[(int) code].operand[1].predicate;
2767 if (pred != 0 && ! (*pred) (opsize, mode))
2768 opsize = copy_to_mode_reg (mode, opsize);
2770 opchar = val;
2771 char_mode = insn_data[(int) code].operand[2].mode;
2772 if (char_mode != VOIDmode)
2774 opchar = convert_to_mode (char_mode, opchar, 1);
2775 pred = insn_data[(int) code].operand[2].predicate;
2776 if (pred != 0 && ! (*pred) (opchar, char_mode))
2777 opchar = copy_to_mode_reg (char_mode, opchar);
2780 if (insn_data[(int) code].n_operands == 4)
2781 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2782 else
2783 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2784 GEN_INT (expected_align),
2785 GEN_INT (expected_size));
2786 if (pat)
2788 emit_insn (pat);
2789 return true;
2791 else
2792 delete_insns_since (last);
2796 return false;
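/* Editorial note (sketch): the setmem patterns probed above come from
   the machine description and look roughly like

     (define_expand "setmemsi"
       [(use (match_operand:BLK 0 "memory_operand" ""))
	(use (match_operand:SI 1 "nonmemory_operand" ""))
	(use (match_operand 2 "general_operand" ""))
	(use (match_operand 3 "const_int_operand" ""))]
       "" "...")

   operand 0 is the destination, 1 the byte count, 2 the fill value and
   3 the alignment; six-operand variants also take the two hints.  */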
2800 /* Write to one of the components of the complex value CPLX. Write VAL to
2801 the real part if IMAG_P is false, and the imaginary part if it's true. */
2803 static void
2804 write_complex_part (rtx cplx, rtx val, bool imag_p)
2806 enum machine_mode cmode;
2807 enum machine_mode imode;
2808 unsigned ibitsize;
2810 if (GET_CODE (cplx) == CONCAT)
2812 emit_move_insn (XEXP (cplx, imag_p), val);
2813 return;
2816 cmode = GET_MODE (cplx);
2817 imode = GET_MODE_INNER (cmode);
2818 ibitsize = GET_MODE_BITSIZE (imode);
2820 /* For MEMs simplify_gen_subreg may generate an invalid new address
2821 because, e.g., the original address is considered mode-dependent
2822 by the target, which restricts simplify_subreg from invoking
2823 adjust_address_nv. Instead of preparing fallback support for an
2824 invalid address, we call adjust_address_nv directly. */
2825 if (MEM_P (cplx))
2827 emit_move_insn (adjust_address_nv (cplx, imode,
2828 imag_p ? GET_MODE_SIZE (imode) : 0),
2829 val);
2830 return;
2833 /* If the sub-object is at least word sized, then we know that subregging
2834 will work. This special case is important, since store_bit_field
2835 wants to operate on integer modes, and there's rarely an OImode to
2836 correspond to TCmode. */
2837 if (ibitsize >= BITS_PER_WORD
2838 /* For hard regs we have exact predicates. Assume we can split
2839 the original object if it spans an even number of hard regs.
2840 This special case is important for SCmode on 64-bit platforms
2841 where the natural size of floating-point regs is 32-bit. */
2842 || (REG_P (cplx)
2843 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2844 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2846 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2847 imag_p ? GET_MODE_SIZE (imode) : 0);
2848 if (part)
2850 emit_move_insn (part, val);
2851 return;
2853 else
2854 /* simplify_gen_subreg may fail for sub-word MEMs. */
2855 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2858 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2861 /* Extract one of the components of the complex value CPLX. Extract the
2862 real part if IMAG_P is false, and the imaginary part if it's true. */
2864 static rtx
2865 read_complex_part (rtx cplx, bool imag_p)
2867 enum machine_mode cmode, imode;
2868 unsigned ibitsize;
2870 if (GET_CODE (cplx) == CONCAT)
2871 return XEXP (cplx, imag_p);
2873 cmode = GET_MODE (cplx);
2874 imode = GET_MODE_INNER (cmode);
2875 ibitsize = GET_MODE_BITSIZE (imode);
2877 /* Special case reads from complex constants that got spilled to memory. */
2878 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2880 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2881 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2883 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2884 if (CONSTANT_CLASS_P (part))
2885 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2889 /* For MEMs simplify_gen_subreg may generate an invalid new address
2890 because, e.g., the original address is considered mode-dependent
2891 by the target, which restricts simplify_subreg from invoking
2892 adjust_address_nv. Instead of preparing fallback support for an
2893 invalid address, we call adjust_address_nv directly. */
2894 if (MEM_P (cplx))
2895 return adjust_address_nv (cplx, imode,
2896 imag_p ? GET_MODE_SIZE (imode) : 0);
2898 /* If the sub-object is at least word sized, then we know that subregging
2899 will work. This special case is important, since extract_bit_field
2900 wants to operate on integer modes, and there's rarely an OImode to
2901 correspond to TCmode. */
2902 if (ibitsize >= BITS_PER_WORD
2903 /* For hard regs we have exact predicates. Assume we can split
2904 the original object if it spans an even number of hard regs.
2905 This special case is important for SCmode on 64-bit platforms
2906 where the natural size of floating-point regs is 32-bit. */
2907 || (REG_P (cplx)
2908 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2909 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2911 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2912 imag_p ? GET_MODE_SIZE (imode) : 0);
2913 if (ret)
2914 return ret;
2915 else
2916 /* simplify_gen_subreg may fail for sub-word MEMs. */
2917 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2920 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2921 true, NULL_RTX, imode, imode);
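/* Editorial sketch: together the two helpers give representation-
   independent component access; for example

     write_complex_part (y, read_complex_part (x, false), false);
     write_complex_part (y, read_complex_part (x, true), true);

   copies X to Y piecewise whether the operands are CONCATs, MEMs or
   hard register pairs (this is exactly what emit_move_complex_parts
   below does).  */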
2924 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2925 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2926 represented in NEW_MODE. If FORCE is true, this will never happen, as
2927 we'll force-create a SUBREG if needed. */
2929 static rtx
2930 emit_move_change_mode (enum machine_mode new_mode,
2931 enum machine_mode old_mode, rtx x, bool force)
2933 rtx ret;
2935 if (push_operand (x, GET_MODE (x)))
2937 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2938 MEM_COPY_ATTRIBUTES (ret, x);
2940 else if (MEM_P (x))
2942 /* We don't have to worry about changing the address since the
2943 size in bytes is supposed to be the same. */
2944 if (reload_in_progress)
2946 /* Copy the MEM to change the mode and move any
2947 substitutions from the old MEM to the new one. */
2948 ret = adjust_address_nv (x, new_mode, 0);
2949 copy_replacements (x, ret);
2951 else
2952 ret = adjust_address (x, new_mode, 0);
2954 else
2956 /* Note that we do want simplify_subreg's behavior of validating
2957 that the new mode is ok for a hard register. If we were to use
2958 simplify_gen_subreg, we would create the subreg, but would
2959 probably run into the target not being able to implement it. */
2960 /* Except, of course, when FORCE is true, in which case this is exactly
2961 what we want; that is needed for CCmodes on some targets. */
2962 if (force)
2963 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2964 else
2965 ret = simplify_subreg (new_mode, x, old_mode, 0);
2968 return ret;
2971 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2972 an integer mode of the same size as MODE. Returns the instruction
2973 emitted, or NULL if such a move could not be generated. */
2975 static rtx
2976 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2978 enum machine_mode imode;
2979 enum insn_code code;
2981 /* There must exist a mode of the exact size we require. */
2982 imode = int_mode_for_mode (mode);
2983 if (imode == BLKmode)
2984 return NULL_RTX;
2986 /* The target must support moves in this mode. */
2987 code = optab_handler (mov_optab, imode)->insn_code;
2988 if (code == CODE_FOR_nothing)
2989 return NULL_RTX;
2991 x = emit_move_change_mode (imode, mode, x, force);
2992 if (x == NULL_RTX)
2993 return NULL_RTX;
2994 y = emit_move_change_mode (imode, mode, y, force);
2995 if (y == NULL_RTX)
2996 return NULL_RTX;
2997 return emit_insn (GEN_FCN (code) (x, y));
3000 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3001 Return an equivalent MEM that does not use an auto-increment. */
3003 static rtx
3004 emit_move_resolve_push (enum machine_mode mode, rtx x)
3006 enum rtx_code code = GET_CODE (XEXP (x, 0));
3007 HOST_WIDE_INT adjust;
3008 rtx temp;
3010 adjust = GET_MODE_SIZE (mode);
3011 #ifdef PUSH_ROUNDING
3012 adjust = PUSH_ROUNDING (adjust);
3013 #endif
3014 if (code == PRE_DEC || code == POST_DEC)
3015 adjust = -adjust;
3016 else if (code == PRE_MODIFY || code == POST_MODIFY)
3018 rtx expr = XEXP (XEXP (x, 0), 1);
3019 HOST_WIDE_INT val;
3021 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3022 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
3023 val = INTVAL (XEXP (expr, 1));
3024 if (GET_CODE (expr) == MINUS)
3025 val = -val;
3026 gcc_assert (adjust == val || adjust == -val);
3027 adjust = val;
3030 /* Do not use anti_adjust_stack, since we don't want to update
3031 stack_pointer_delta. */
3032 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3033 GEN_INT (adjust), stack_pointer_rtx,
3034 0, OPTAB_LIB_WIDEN);
3035 if (temp != stack_pointer_rtx)
3036 emit_move_insn (stack_pointer_rtx, temp);
3038 switch (code)
3040 case PRE_INC:
3041 case PRE_DEC:
3042 case PRE_MODIFY:
3043 temp = stack_pointer_rtx;
3044 break;
3045 case POST_INC:
3046 case POST_DEC:
3047 case POST_MODIFY:
3048 temp = plus_constant (stack_pointer_rtx, -adjust);
3049 break;
3050 default:
3051 gcc_unreachable ();
3054 return replace_equiv_address (x, temp);
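/* Editorial worked example (sketch): for (mem:SI (pre_dec (reg sp)))
   with a 4-byte rounded push, ADJUST becomes -4, the stack pointer is
   decremented explicitly, and the returned MEM addresses the new sp;
   for the post_dec form the returned address is sp + 4, the location
   the auto-decrement variant would have written.  */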
3057 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3058 X is known to satisfy push_operand, and MODE is known to be complex.
3059 Returns the last instruction emitted. */
static rtx
3062 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3064 enum machine_mode submode = GET_MODE_INNER (mode);
3065 bool imag_first;
3067 #ifdef PUSH_ROUNDING
3068 unsigned int submodesize = GET_MODE_SIZE (submode);
3070 /* If we push to the stack, but the size is not one the machine can
3071 push exactly, we need to use move instructions. */
3072 if (PUSH_ROUNDING (submodesize) != submodesize)
3074 x = emit_move_resolve_push (mode, x);
3075 return emit_move_insn (x, y);
3077 #endif
3079 /* Note that the real part always precedes the imag part in memory
3080 regardless of the machine's endianness. */
3081 switch (GET_CODE (XEXP (x, 0)))
3083 case PRE_DEC:
3084 case POST_DEC:
3085 imag_first = true;
3086 break;
3087 case PRE_INC:
3088 case POST_INC:
3089 imag_first = false;
3090 break;
3091 default:
3092 gcc_unreachable ();
3095 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3096 read_complex_part (y, imag_first));
3097 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3098 read_complex_part (y, !imag_first));
3101 /* A subroutine of emit_move_complex. Perform the move from Y to X
3102 via two moves of the parts. Returns the last instruction emitted. */
static rtx
3105 emit_move_complex_parts (rtx x, rtx y)
3107 /* Show the output dies here. This is necessary for SUBREGs
3108 of pseudos since we cannot track their lifetimes correctly;
3109 hard regs shouldn't appear here except as return values. */
3110 if (!reload_completed && !reload_in_progress
3111 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3112 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3114 write_complex_part (x, read_complex_part (y, false), false);
3115 write_complex_part (x, read_complex_part (y, true), true);
3117 return get_last_insn ();
3120 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3121 MODE is known to be complex. Returns the last instruction emitted. */
3123 static rtx
3124 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3126 bool try_int;
3128 /* Need to take special care for pushes, to maintain proper ordering
3129 of the data, and possibly extra padding. */
3130 if (push_operand (x, mode))
3131 return emit_move_complex_push (mode, x, y);
3133 /* See if we can coerce the target into moving both values at once. */
3135 /* Move floating point as parts. */
3136 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3137 && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
3138 try_int = false;
3139 /* Not possible if the values are inherently not adjacent. */
3140 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3141 try_int = false;
3142 /* Is possible if both are registers (or subregs of registers). */
3143 else if (register_operand (x, mode) && register_operand (y, mode))
3144 try_int = true;
3145 /* If one of the operands is a memory, and alignment constraints
3146 are friendly enough, we may be able to do combined memory operations.
3147 We do not attempt this if Y is a constant because that combination is
3148 usually better with the by-parts thing below. */
3149 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3150 && (!STRICT_ALIGNMENT
3151 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3152 try_int = true;
3153 else
3154 try_int = false;
3156 if (try_int)
3158 rtx ret;
3160 /* For memory to memory moves, optimal behavior can be had with the
3161 existing block move logic. */
3162 if (MEM_P (x) && MEM_P (y))
3164 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3165 BLOCK_OP_NO_LIBCALL);
3166 return get_last_insn ();
3169 ret = emit_move_via_integer (mode, x, y, true);
3170 if (ret)
3171 return ret;
3174 return emit_move_complex_parts (x, y);
3177 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3178 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3180 static rtx
3181 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3183 rtx ret;
3185 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3186 if (mode != CCmode)
3188 enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3189 if (code != CODE_FOR_nothing)
3191 x = emit_move_change_mode (CCmode, mode, x, true);
3192 y = emit_move_change_mode (CCmode, mode, y, true);
3193 return emit_insn (GEN_FCN (code) (x, y));
3197 /* Otherwise, find the MODE_INT mode of the same width. */
3198 ret = emit_move_via_integer (mode, x, y, false);
3199 gcc_assert (ret != NULL);
3200 return ret;
3203 /* Return true if word I of OP lies entirely in the
3204 undefined bits of a paradoxical subreg. */
3206 static bool
3207 undefined_operand_subword_p (const_rtx op, int i)
3209 enum machine_mode innermode, innermostmode;
3210 int offset;
3211 if (GET_CODE (op) != SUBREG)
3212 return false;
3213 innermode = GET_MODE (op);
3214 innermostmode = GET_MODE (SUBREG_REG (op));
3215 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3216 /* The SUBREG_BYTE represents offset, as if the value were stored in
3217 memory, except for a paradoxical subreg where we define
3218 SUBREG_BYTE to be 0; undo this exception as in
3219 simplify_subreg. */
3220 if (SUBREG_BYTE (op) == 0
3221 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3223 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3224 if (WORDS_BIG_ENDIAN)
3225 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3226 if (BYTES_BIG_ENDIAN)
3227 offset += difference % UNITS_PER_WORD;
3229 if (offset >= GET_MODE_SIZE (innermostmode)
3230 || offset <= -GET_MODE_SIZE (word_mode))
3231 return true;
3232 return false;
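/* Editorial worked example (sketch): on a 32-bit little-endian
   target, (subreg:DI (reg:SI n) 0) is paradoxical; word 0 carries the
   SImode value and word 1 is undefined, so this predicate returns
   true for I == 1 and emit_move_multi_word below skips that word's
   move entirely.  */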
3235 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3236 MODE is any multi-word or full-word mode that lacks a move_insn
3237 pattern. Note that you will get better code if you define such
3238 patterns, even if they must turn into multiple assembler instructions. */
3240 static rtx
3241 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3243 rtx last_insn = 0;
3244 rtx seq, inner;
3245 bool need_clobber;
3246 int i;
3248 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3250 /* If X is a push on the stack, do the push now and replace
3251 X with a reference to the stack pointer. */
3252 if (push_operand (x, mode))
3253 x = emit_move_resolve_push (mode, x);
3255 /* If we are in reload, see if either operand is a MEM whose address
3256 is scheduled for replacement. */
3257 if (reload_in_progress && MEM_P (x)
3258 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3259 x = replace_equiv_address_nv (x, inner);
3260 if (reload_in_progress && MEM_P (y)
3261 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3262 y = replace_equiv_address_nv (y, inner);
3264 start_sequence ();
3266 need_clobber = false;
3267 for (i = 0;
3268 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3269 i++)
3271 rtx xpart = operand_subword (x, i, 1, mode);
3272 rtx ypart;
3274 /* Do not generate code for a move if it would come entirely
3275 from the undefined bits of a paradoxical subreg. */
3276 if (undefined_operand_subword_p (y, i))
3277 continue;
3279 ypart = operand_subword (y, i, 1, mode);
3281 /* If we can't get a part of Y, put Y into memory if it is a
3282 constant. Otherwise, force it into a register. Then we must
3283 be able to get a part of Y. */
3284 if (ypart == 0 && CONSTANT_P (y))
3286 y = use_anchored_address (force_const_mem (mode, y));
3287 ypart = operand_subword (y, i, 1, mode);
3289 else if (ypart == 0)
3290 ypart = operand_subword_force (y, i, mode);
3292 gcc_assert (xpart && ypart);
3294 need_clobber |= (GET_CODE (xpart) == SUBREG);
3296 last_insn = emit_move_insn (xpart, ypart);
3299 seq = get_insns ();
3300 end_sequence ();
3302 /* Show the output dies here. This is necessary for SUBREGs
3303 of pseudos since we cannot track their lifetimes correctly;
3304 hard regs shouldn't appear here except as return values.
3305 We never want to emit such a clobber after reload. */
3306 if (x != y
3307 && ! (reload_in_progress || reload_completed)
3308 && need_clobber != 0)
3309 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3311 emit_insn (seq);
3313 return last_insn;
3316 /* Low level part of emit_move_insn.
3317 Called just like emit_move_insn, but assumes X and Y
3318 are basically valid. */
rtx
3321 emit_move_insn_1 (rtx x, rtx y)
3323 enum machine_mode mode = GET_MODE (x);
3324 enum insn_code code;
3326 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3328 code = optab_handler (mov_optab, mode)->insn_code;
3329 if (code != CODE_FOR_nothing)
3330 return emit_insn (GEN_FCN (code) (x, y));
3332 /* Expand complex moves by moving real part and imag part. */
3333 if (COMPLEX_MODE_P (mode))
3334 return emit_move_complex (mode, x, y);
3336 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3337 || ALL_FIXED_POINT_MODE_P (mode))
3339 rtx result = emit_move_via_integer (mode, x, y, true);
3341 /* If we can't find an integer mode, use multi words. */
3342 if (result)
3343 return result;
3344 else
3345 return emit_move_multi_word (mode, x, y);
3348 if (GET_MODE_CLASS (mode) == MODE_CC)
3349 return emit_move_ccmode (mode, x, y);
3351 /* Try using a move pattern for the corresponding integer mode. This is
3352 only safe when simplify_subreg can convert MODE constants into integer
3353 constants. At present, it can only do this reliably if the value
3354 fits within a HOST_WIDE_INT. */
3355 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3357 rtx ret = emit_move_via_integer (mode, x, y, false);
3358 if (ret)
3359 return ret;
3362 return emit_move_multi_word (mode, x, y);
3365 /* Generate code to copy Y into X.
3366 Both Y and X must have the same mode, except that
3367 Y can be a constant with VOIDmode.
3368 This mode cannot be BLKmode; use emit_block_move for that.
3370 Return the last instruction emitted. */
rtx
3373 emit_move_insn (rtx x, rtx y)
3375 enum machine_mode mode = GET_MODE (x);
3376 rtx y_cst = NULL_RTX;
3377 rtx last_insn, set;
3379 gcc_assert (mode != BLKmode
3380 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3382 if (CONSTANT_P (y))
3384 if (optimize
3385 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3386 && (last_insn = compress_float_constant (x, y)))
3387 return last_insn;
3389 y_cst = y;
3391 if (!LEGITIMATE_CONSTANT_P (y))
3393 y = force_const_mem (mode, y);
3395 /* If the target's cannot_force_const_mem prevented the spill,
3396 assume that the target's move expanders will also take care
3397 of the non-legitimate constant. */
3398 if (!y)
3399 y = y_cst;
3400 else
3401 y = use_anchored_address (y);
3405 /* If X or Y are memory references, verify that their addresses are valid
3406 for the machine. */
3407 if (MEM_P (x)
3408 && (! memory_address_p (GET_MODE (x), XEXP (x, 0))
3409 && ! push_operand (x, GET_MODE (x))))
3410 x = validize_mem (x);
3412 if (MEM_P (y)
3413 && ! memory_address_p (GET_MODE (y), XEXP (y, 0)))
3414 y = validize_mem (y);
3416 gcc_assert (mode != BLKmode);
3418 last_insn = emit_move_insn_1 (x, y);
3420 if (y_cst && REG_P (x)
3421 && (set = single_set (last_insn)) != NULL_RTX
3422 && SET_DEST (set) == x
3423 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3424 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3426 return last_insn;
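/* Editorial sketch: this is the standard entry point for copies
   during RTL expansion, e.g.

     rtx tmp = gen_reg_rtx (SImode);
     emit_move_insn (tmp, GEN_INT (42));

   A constant that fails LEGITIMATE_CONSTANT_P is first spilled to the
   constant pool, and when the stored source no longer matches the
   original constant a REG_EQUAL note naming it is attached, as above.  */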
3429 /* If Y is representable exactly in a narrower mode, and the target can
3430 perform the extension directly from constant or memory, then emit the
3431 move as an extension. */
3433 static rtx
3434 compress_float_constant (rtx x, rtx y)
3436 enum machine_mode dstmode = GET_MODE (x);
3437 enum machine_mode orig_srcmode = GET_MODE (y);
3438 enum machine_mode srcmode;
3439 REAL_VALUE_TYPE r;
3440 int oldcost, newcost;
3442 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3444 if (LEGITIMATE_CONSTANT_P (y))
3445 oldcost = rtx_cost (y, SET);
3446 else
3447 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3449 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3450 srcmode != orig_srcmode;
3451 srcmode = GET_MODE_WIDER_MODE (srcmode))
3453 enum insn_code ic;
3454 rtx trunc_y, last_insn;
3456 /* Skip if the target can't extend this way. */
3457 ic = can_extend_p (dstmode, srcmode, 0);
3458 if (ic == CODE_FOR_nothing)
3459 continue;
3461 /* Skip if the narrowed value isn't exact. */
3462 if (! exact_real_truncate (srcmode, &r))
3463 continue;
3465 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3467 if (LEGITIMATE_CONSTANT_P (trunc_y))
3469 /* Skip if the target needs extra instructions to perform
3470 the extension. */
3471 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3472 continue;
3473 /* This is valid, but may not be cheaper than the original. */
3474 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3475 if (oldcost < newcost)
3476 continue;
3478 else if (float_extend_from_mem[dstmode][srcmode])
3480 trunc_y = force_const_mem (srcmode, trunc_y);
3481 /* This is valid, but may not be cheaper than the original. */
3482 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3483 if (oldcost < newcost)
3484 continue;
3485 trunc_y = validize_mem (trunc_y);
3487 else
3488 continue;
3490 /* For CSE's benefit, force the compressed constant pool entry
3491 into a new pseudo. This constant may be used in different modes,
3492 and if not, combine will put things back together for us. */
3493 trunc_y = force_reg (srcmode, trunc_y);
3494 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3495 last_insn = get_last_insn ();
3497 if (REG_P (x))
3498 set_unique_reg_note (last_insn, REG_EQUAL, y);
3500 return last_insn;
3503 return NULL_RTX;
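/* Editorial worked example (sketch): a DFmode move of 1.5 can be
   compressed, since 1.5 is exact in SFmode; given a cheap SF->DF
   extension the emitted move is roughly

     (set (reg:DF d) (float_extend:DF (mem/u:SF (symbol_ref ...))))

   halving the constant pool entry.  0.1 is not exact in SFmode, so
   exact_real_truncate rejects it and no compression happens.  */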
3506 /* Pushing data onto the stack. */
3508 /* Push a block of length SIZE (perhaps variable)
3509 and return an rtx to address the beginning of the block.
3510 The value may be virtual_outgoing_args_rtx.
3512 EXTRA is the number of bytes of padding to push in addition to SIZE.
3513 BELOW nonzero means this padding comes at low addresses;
3514 otherwise, the padding comes at high addresses. */
rtx
3517 push_block (rtx size, int extra, int below)
3519 rtx temp;
3521 size = convert_modes (Pmode, ptr_mode, size, 1);
3522 if (CONSTANT_P (size))
3523 anti_adjust_stack (plus_constant (size, extra));
3524 else if (REG_P (size) && extra == 0)
3525 anti_adjust_stack (size);
3526 else
3528 temp = copy_to_mode_reg (Pmode, size);
3529 if (extra != 0)
3530 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3531 temp, 0, OPTAB_LIB_WIDEN);
3532 anti_adjust_stack (temp);
3535 #ifndef STACK_GROWS_DOWNWARD
3536 if (0)
3537 #else
3538 if (1)
3539 #endif
3541 temp = virtual_outgoing_args_rtx;
3542 if (extra != 0 && below)
3543 temp = plus_constant (temp, extra);
3545 else
3547 if (GET_CODE (size) == CONST_INT)
3548 temp = plus_constant (virtual_outgoing_args_rtx,
3549 -INTVAL (size) - (below ? 0 : extra));
3550 else if (extra != 0 && !below)
3551 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3552 negate_rtx (Pmode, plus_constant (size, extra)));
3553 else
3554 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3555 negate_rtx (Pmode, size));
3558 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3561 #ifdef PUSH_ROUNDING
3563 /* Emit single push insn. */
3565 static void
3566 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3568 rtx dest_addr;
3569 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3570 rtx dest;
3571 enum insn_code icode;
3572 insn_operand_predicate_fn pred;
3574 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3575 /* If there is a push pattern, use it.  Otherwise try the old way of
3576 throwing a MEM representing the push operation to the move expander. */
3577 icode = optab_handler (push_optab, mode)->insn_code;
3578 if (icode != CODE_FOR_nothing)
3580 if (((pred = insn_data[(int) icode].operand[0].predicate)
3581 && !((*pred) (x, mode))))
3582 x = force_reg (mode, x);
3583 emit_insn (GEN_FCN (icode) (x));
3584 return;
3586 if (GET_MODE_SIZE (mode) == rounded_size)
3587 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3588 /* If we are to pad downward, adjust the stack pointer first and
3589 then store X into the stack location using an offset. This is
3590 because emit_move_insn does not know how to pad; it does not have
3591 access to type. */
3592 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3594 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3595 HOST_WIDE_INT offset;
3597 emit_move_insn (stack_pointer_rtx,
3598 expand_binop (Pmode,
3599 #ifdef STACK_GROWS_DOWNWARD
3600 sub_optab,
3601 #else
3602 add_optab,
3603 #endif
3604 stack_pointer_rtx,
3605 GEN_INT (rounded_size),
3606 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3608 offset = (HOST_WIDE_INT) padding_size;
3609 #ifdef STACK_GROWS_DOWNWARD
3610 if (STACK_PUSH_CODE == POST_DEC)
3611 /* We have already decremented the stack pointer, so get the
3612 previous value. */
3613 offset += (HOST_WIDE_INT) rounded_size;
3614 #else
3615 if (STACK_PUSH_CODE == POST_INC)
3616 /* We have already incremented the stack pointer, so get the
3617 previous value. */
3618 offset -= (HOST_WIDE_INT) rounded_size;
3619 #endif
3620 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3622 else
3624 #ifdef STACK_GROWS_DOWNWARD
3625 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3626 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3627 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3628 #else
3629 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3630 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3631 GEN_INT (rounded_size));
3632 #endif
3633 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3636 dest = gen_rtx_MEM (mode, dest_addr);
3638 if (type != 0)
3640 set_mem_attributes (dest, type, 1);
3642 if (flag_optimize_sibling_calls)
3643 /* Function incoming arguments may overlap with sibling call
3644 outgoing arguments and we cannot allow reordering of reads
3645 from function arguments with stores to outgoing arguments
3646 of sibling calls. */
3647 set_mem_alias_set (dest, 0);
3649 emit_move_insn (dest, x);
3651 #endif
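/* Editorial sketch: on a STACK_GROWS_DOWNWARD target with
   STACK_PUSH_CODE == PRE_DEC and no push pattern, the common case
   above yields a store of the form

     (set (mem:SI (pre_dec:P (reg sp))) (reg:SI x))

   where P stands for Pmode; padding-downward targets instead adjust
   sp first and store at an offset, as handled explicitly above.  */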
3653 /* Generate code to push X onto the stack, assuming it has mode MODE and
3654 type TYPE.
3655 MODE is redundant except when X is a CONST_INT (since they don't
3656 carry mode info).
3657 SIZE is an rtx for the size of data to be copied (in bytes),
3658 needed only if X is BLKmode.
3660 ALIGN (in bits) is maximum alignment we can assume.
3662 If PARTIAL and REG are both nonzero, then copy that many of the first
3663 bytes of X into registers starting with REG, and push the rest of X.
3664 The amount of space pushed is decreased by PARTIAL bytes.
3665 REG must be a hard register in this case.
3666 If REG is zero but PARTIAL is not, take all other actions for an
3667 argument partially in registers, but do not actually load any
3668 registers.
3670 EXTRA is the amount in bytes of extra space to leave next to this arg.
3671 This is ignored if an argument block has already been allocated.
3673 On a machine that lacks real push insns, ARGS_ADDR is the address of
3674 the bottom of the argument block for this call. We use indexing off there
3675 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3676 argument block has not been preallocated.
3678 ARGS_SO_FAR is the size of args previously pushed for this call.
3680 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3681 for arguments passed in registers. If nonzero, it will be the number
3682 of bytes required. */
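/* Editorial note (sketch): calls.c invokes this once per argument; a
   representative call resembles

     emit_push_insn (arg_rtx, mode, type, size_rtx, align, partial,
		     reg, extra, args_addr, args_so_far,
		     reg_parm_stack_space, alignment_pad);

   with all names hypothetical stand-ins for the caller's locals.  */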
3684 void
3685 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3686 unsigned int align, int partial, rtx reg, int extra,
3687 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3688 rtx alignment_pad)
3690 rtx xinner;
3691 enum direction stack_direction
3692 #ifdef STACK_GROWS_DOWNWARD
3693 = downward;
3694 #else
3695 = upward;
3696 #endif
3698 /* Decide where to pad the argument: `downward' for below,
3699 `upward' for above, or `none' for don't pad it.
3700 Default is below for small data on big-endian machines; else above. */
3701 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3703 /* Invert direction if stack is post-decrement.
3704 FIXME: why? */
3705 if (STACK_PUSH_CODE == POST_DEC)
3706 if (where_pad != none)
3707 where_pad = (where_pad == downward ? upward : downward);
3709 xinner = x;
3711 if (mode == BLKmode
3712 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3714 /* Copy a block into the stack, entirely or partially. */
3716 rtx temp;
3717 int used;
3718 int offset;
3719 int skip;
3721 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3722 used = partial - offset;
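/* Illustrative arithmetic, with hypothetical numbers not taken from any
   particular target: if PARM_BOUNDARY is 64 (8-byte units), PARTIAL == 12
   gives OFFSET == 4 and USED == 8; only the eight bytes that fill whole
   parm-boundary units are skipped below, while the four straddling bytes
   are still copied to the stack.  */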
3724 if (mode != BLKmode)
3726 /* A value is to be stored in an insufficiently aligned
3727 stack slot; copy via a suitably aligned slot if
3728 necessary. */
3729 size = GEN_INT (GET_MODE_SIZE (mode));
3730 if (!MEM_P (xinner))
3732 temp = assign_temp (type, 0, 1, 1);
3733 emit_move_insn (temp, xinner);
3734 xinner = temp;
3738 gcc_assert (size);
3740 /* USED is now the # of bytes we need not copy to the stack
3741 because registers will take care of them. */
3743 if (partial != 0)
3744 xinner = adjust_address (xinner, BLKmode, used);
3746 /* If the partial register-part of the arg counts in its stack size,
3747 skip the part of stack space corresponding to the registers.
3748 Otherwise, start copying to the beginning of the stack space,
3749 by setting SKIP to 0. */
3750 skip = (reg_parm_stack_space == 0) ? 0 : used;
3752 #ifdef PUSH_ROUNDING
3753 /* Do it with several push insns if that doesn't take lots of insns
3754 and if there is no difficulty with push insns that skip bytes
3755 on the stack for alignment purposes. */
3756 if (args_addr == 0
3757 && PUSH_ARGS
3758 && GET_CODE (size) == CONST_INT
3759 && skip == 0
3760 && MEM_ALIGN (xinner) >= align
3761 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3762 /* Here we avoid the case of a structure whose weak alignment
3763 forces many pushes of a small amount of data,
3764 where such small pushes do rounding that causes trouble. */
3765 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3766 || align >= BIGGEST_ALIGNMENT
3767 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3768 == (align / BITS_PER_UNIT)))
3769 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3771 /* Push padding now if padding above and stack grows down,
3772 or if padding below and stack grows up.
3773 But if space already allocated, this has already been done. */
3774 if (extra && args_addr == 0
3775 && where_pad != none && where_pad != stack_direction)
3776 anti_adjust_stack (GEN_INT (extra));
3778 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3780 else
3781 #endif /* PUSH_ROUNDING */
3783 rtx target;
3785 /* Otherwise make space on the stack and copy the data
3786 to the address of that space. */
3788 /* Deduct words put into registers from the size we must copy. */
3789 if (partial != 0)
3791 if (GET_CODE (size) == CONST_INT)
3792 size = GEN_INT (INTVAL (size) - used);
3793 else
3794 size = expand_binop (GET_MODE (size), sub_optab, size,
3795 GEN_INT (used), NULL_RTX, 0,
3796 OPTAB_LIB_WIDEN);
3799 /* Get the address of the stack space.
3800 In this case, we do not deal with EXTRA separately.
3801 A single stack adjust will do. */
3802 if (! args_addr)
3804 temp = push_block (size, extra, where_pad == downward);
3805 extra = 0;
3807 else if (GET_CODE (args_so_far) == CONST_INT)
3808 temp = memory_address (BLKmode,
3809 plus_constant (args_addr,
3810 skip + INTVAL (args_so_far)));
3811 else
3812 temp = memory_address (BLKmode,
3813 plus_constant (gen_rtx_PLUS (Pmode,
3814 args_addr,
3815 args_so_far),
3816 skip));
3818 if (!ACCUMULATE_OUTGOING_ARGS)
3820 /* If the source is referenced relative to the stack pointer,
3821 copy it to another register to stabilize it. We do not need
3822 to do this if we know that we won't be changing sp. */
3824 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3825 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3826 temp = copy_to_reg (temp);
3829 target = gen_rtx_MEM (BLKmode, temp);
3831 /* We do *not* set_mem_attributes here, because incoming arguments
3832 may overlap with sibling call outgoing arguments and we cannot
3833 allow reordering of reads from function arguments with stores
3834 to outgoing arguments of sibling calls. We do, however, want
3835 to record the alignment of the stack slot. */
3836 /* ALIGN may well be stricter than TYPE's own alignment, e.g. due to
3837 PARM_BOUNDARY. Assume the caller isn't lying. */
3838 set_mem_align (target, align);
3840 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3843 else if (partial > 0)
3845 /* Scalar partly in registers. */
3847 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3848 int i;
3849 int not_stack;
3850 /* # bytes of start of argument
3851 that we must make space for but need not store. */
3852 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3853 int args_offset = INTVAL (args_so_far);
3854 int skip;
3856 /* Push padding now if padding above and stack grows down,
3857 or if padding below and stack grows up.
3858 But if space already allocated, this has already been done. */
3859 if (extra && args_addr == 0
3860 && where_pad != none && where_pad != stack_direction)
3861 anti_adjust_stack (GEN_INT (extra));
3863 /* If we make space by pushing it, we might as well push
3864 the real data. Otherwise, we can leave OFFSET nonzero
3865 and leave the space uninitialized. */
3866 if (args_addr == 0)
3867 offset = 0;
3869 /* Now NOT_STACK gets the number of words that we don't need to
3870 allocate on the stack. Convert OFFSET to words too. */
3871 not_stack = (partial - offset) / UNITS_PER_WORD;
3872 offset /= UNITS_PER_WORD;
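/* Illustrative arithmetic, with hypothetical numbers: if UNITS_PER_WORD
   is 4 and PARTIAL is 8, NOT_STACK is 2, i.e. the first two words travel
   in registers and are not pushed; SKIP below decides whether their
   stack slots must nevertheless be accounted for.  */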
3874 /* If the partial register-part of the arg counts in its stack size,
3875 skip the part of stack space corresponding to the registers.
3876 Otherwise, start copying to the beginning of the stack space,
3877 by setting SKIP to 0. */
3878 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3880 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3881 x = validize_mem (force_const_mem (mode, x));
3883 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3884 SUBREGs of such registers are not allowed. */
3885 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3886 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3887 x = copy_to_reg (x);
3889 /* Loop over all the words allocated on the stack for this arg. */
3890 /* We can do it by words, because any scalar bigger than a word
3891 has a size that is a multiple of a word. */
3892 #ifndef PUSH_ARGS_REVERSED
3893 for (i = not_stack; i < size; i++)
3894 #else
3895 for (i = size - 1; i >= not_stack; i--)
3896 #endif
3897 if (i >= not_stack + offset)
3898 emit_push_insn (operand_subword_force (x, i, mode),
3899 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3900 0, args_addr,
3901 GEN_INT (args_offset + ((i - not_stack + skip)
3902 * UNITS_PER_WORD)),
3903 reg_parm_stack_space, alignment_pad);
3905 else
3907 rtx addr;
3908 rtx dest;
3910 /* Push padding now if padding above and stack grows down,
3911 or if padding below and stack grows up.
3912 But if space already allocated, this has already been done. */
3913 if (extra && args_addr == 0
3914 && where_pad != none && where_pad != stack_direction)
3915 anti_adjust_stack (GEN_INT (extra));
3917 #ifdef PUSH_ROUNDING
3918 if (args_addr == 0 && PUSH_ARGS)
3919 emit_single_push_insn (mode, x, type);
3920 else
3921 #endif
3923 if (GET_CODE (args_so_far) == CONST_INT)
3924 addr
3925 = memory_address (mode,
3926 plus_constant (args_addr,
3927 INTVAL (args_so_far)));
3928 else
3929 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3930 args_so_far));
3931 dest = gen_rtx_MEM (mode, addr);
3933 /* We do *not* set_mem_attributes here, because incoming arguments
3934 may overlap with sibling call outgoing arguments and we cannot
3935 allow reordering of reads from function arguments with stores
3936 to outgoing arguments of sibling calls. We do, however, want
3937 to record the alignment of the stack slot. */
3938 /* ALIGN may well be stricter than TYPE's own alignment, e.g. due to
3939 PARM_BOUNDARY. Assume the caller isn't lying. */
3940 set_mem_align (dest, align);
3942 emit_move_insn (dest, x);
3946 /* If part should go in registers, copy that part
3947 into the appropriate registers. Do this now, at the end,
3948 since mem-to-mem copies above may do function calls. */
3949 if (partial > 0 && reg != 0)
3951 /* Handle calls that pass values in multiple non-contiguous locations.
3952 The Irix 6 ABI has examples of this. */
3953 if (GET_CODE (reg) == PARALLEL)
3954 emit_group_load (reg, x, type, -1);
3955 else
3957 gcc_assert (partial % UNITS_PER_WORD == 0);
3958 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3962 if (extra && args_addr == 0 && where_pad == stack_direction)
3963 anti_adjust_stack (GEN_INT (extra));
3965 if (alignment_pad && args_addr == 0)
3966 anti_adjust_stack (alignment_pad);
3969 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3970 operations. */
3972 static rtx
3973 get_subtarget (rtx x)
3975 return (optimize
3976 || x == 0
3977 /* Only registers can be subtargets. */
3978 || !REG_P (x)
3979 /* Don't use hard regs to avoid extending their life. */
3980 || REGNO (x) < FIRST_PSEUDO_REGISTER
3981 ? 0 : x);
3984 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3985 FIELD is a bitfield. Returns true if the optimization was successful,
3986 and there's nothing else to do. */
3988 static bool
3989 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3990 unsigned HOST_WIDE_INT bitpos,
3991 enum machine_mode mode1, rtx str_rtx,
3992 tree to, tree src)
3994 enum machine_mode str_mode = GET_MODE (str_rtx);
3995 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3996 tree op0, op1;
3997 rtx value, result;
3998 optab binop;
4000 if (mode1 != VOIDmode
4001 || bitsize >= BITS_PER_WORD
4002 || str_bitsize > BITS_PER_WORD
4003 || TREE_SIDE_EFFECTS (to)
4004 || TREE_THIS_VOLATILE (to))
4005 return false;
4007 STRIP_NOPS (src);
4008 if (!BINARY_CLASS_P (src)
4009 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4010 return false;
4012 op0 = TREE_OPERAND (src, 0);
4013 op1 = TREE_OPERAND (src, 1);
4014 STRIP_NOPS (op0);
4016 if (!operand_equal_p (to, op0, 0))
4017 return false;
4019 if (MEM_P (str_rtx))
4021 unsigned HOST_WIDE_INT offset1;
4023 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4024 str_mode = word_mode;
4025 str_mode = get_best_mode (bitsize, bitpos,
4026 MEM_ALIGN (str_rtx), str_mode, 0);
4027 if (str_mode == VOIDmode)
4028 return false;
4029 str_bitsize = GET_MODE_BITSIZE (str_mode);
4031 offset1 = bitpos;
4032 bitpos %= str_bitsize;
4033 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4034 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4036 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4037 return false;
4039 /* If the bit field covers the whole REG/MEM, store_field
4040 will likely generate better code. */
4041 if (bitsize >= str_bitsize)
4042 return false;
4044 /* We can't handle fields split across multiple entities. */
4045 if (bitpos + bitsize > str_bitsize)
4046 return false;
4048 if (BYTES_BIG_ENDIAN)
4049 bitpos = str_bitsize - bitpos - bitsize;
4051 switch (TREE_CODE (src))
4053 case PLUS_EXPR:
4054 case MINUS_EXPR:
4055 /* For now, just optimize the case of the topmost bitfield,
4056 where no masking is needed, and the case of 1-bit
4057 bitfields, where xor can be used.
4058 We might win by one instruction for the other bitfields
4059 too if insv/extv instructions aren't used, so that
4060 optimization can be added later. */
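/* Illustrative example, not from the original sources: given
     struct S { unsigned lo : 29; unsigned hi : 3; } s;
   on a target where HI lands in the topmost bits of the word,
   s.hi += 1 can be compiled as adding 1 << 29 to the containing
   word, because any carry out of the top three bits simply falls
   off; for a 1-bit field, adding or subtracting a constant reduces
   to an xor of that bit.  */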
4061 if (bitpos + bitsize != str_bitsize
4062 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4063 break;
4065 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4066 value = convert_modes (str_mode,
4067 TYPE_MODE (TREE_TYPE (op1)), value,
4068 TYPE_UNSIGNED (TREE_TYPE (op1)));
4070 /* We may be accessing data outside the field, which means
4071 we can alias adjacent data. */
4072 if (MEM_P (str_rtx))
4074 str_rtx = shallow_copy_rtx (str_rtx);
4075 set_mem_alias_set (str_rtx, 0);
4076 set_mem_expr (str_rtx, 0);
4079 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4080 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4082 value = expand_and (str_mode, value, const1_rtx, NULL);
4083 binop = xor_optab;
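/* Addition and subtraction agree modulo 2, so once VALUE has been
   reduced to its low bit, flipping the field bit with xor is correct
   for both PLUS_EXPR and MINUS_EXPR.  */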
4085 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4086 build_int_cst (NULL_TREE, bitpos),
4087 NULL_RTX, 1);
4088 result = expand_binop (str_mode, binop, str_rtx,
4089 value, str_rtx, 1, OPTAB_WIDEN);
4090 if (result != str_rtx)
4091 emit_move_insn (str_rtx, result);
4092 return true;
4094 case BIT_IOR_EXPR:
4095 case BIT_XOR_EXPR:
4096 if (TREE_CODE (op1) != INTEGER_CST)
4097 break;
4098 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4099 value = convert_modes (GET_MODE (str_rtx),
4100 TYPE_MODE (TREE_TYPE (op1)), value,
4101 TYPE_UNSIGNED (TREE_TYPE (op1)));
4103 /* We may be accessing data outside the field, which means
4104 we can alias adjacent data. */
4105 if (MEM_P (str_rtx))
4107 str_rtx = shallow_copy_rtx (str_rtx);
4108 set_mem_alias_set (str_rtx, 0);
4109 set_mem_expr (str_rtx, 0);
4112 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
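/* When the field does not reach the top of the word, VALUE must first
   be masked down to BITSIZE bits so the ior/xor cannot disturb bits
   outside the field; if the field is topmost, the left shift below
   pushes any excess bits out of the word anyway.  */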
4113 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4115 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4116 - 1);
4117 value = expand_and (GET_MODE (str_rtx), value, mask,
4118 NULL_RTX);
4120 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4121 build_int_cst (NULL_TREE, bitpos),
4122 NULL_RTX, 1);
4123 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4124 value, str_rtx, 1, OPTAB_WIDEN);
4125 if (result != str_rtx)
4126 emit_move_insn (str_rtx, result);
4127 return true;
4129 default:
4130 break;
4133 return false;
4137 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4138 is true, try generating a nontemporal store. */
4140 void
4141 expand_assignment (tree to, tree from, bool nontemporal)
4143 rtx to_rtx = 0;
4144 rtx result;
4146 /* Don't crash if the lhs of the assignment was erroneous. */
4147 if (TREE_CODE (to) == ERROR_MARK)
4149 result = expand_normal (from);
4150 return;
4153 /* Optimize away no-op moves without side-effects. */
4154 if (operand_equal_p (to, from, 0))
4155 return;
4157 /* Assignment of a structure component needs special treatment
4158 if the structure component's rtx is not simply a MEM.
4159 Assignment of an array element at a constant index, and assignment of
4160 an array element in an unaligned packed structure field, have the same
4161 problem. */
4162 if (handled_component_p (to)
4163 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4165 enum machine_mode mode1;
4166 HOST_WIDE_INT bitsize, bitpos;
4167 tree offset;
4168 int unsignedp;
4169 int volatilep = 0;
4170 tree tem;
4172 push_temp_slots ();
4173 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4174 &unsignedp, &volatilep, true);
4176 /* If we are going to use store_bit_field and extract_bit_field,
4177 make sure to_rtx will be safe for multiple use. */
4179 to_rtx = expand_normal (tem);
4181 if (offset != 0)
4183 rtx offset_rtx;
4185 if (!MEM_P (to_rtx))
4187 /* We can get constant negative offsets into arrays with broken
4188 user code. Translate this to a trap instead of ICEing. */
4189 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4190 expand_builtin_trap ();
4191 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4194 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4195 #ifdef POINTERS_EXTEND_UNSIGNED
4196 if (GET_MODE (offset_rtx) != Pmode)
4197 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4198 #else
4199 if (GET_MODE (offset_rtx) != ptr_mode)
4200 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4201 #endif
4203 /* A constant address in TO_RTX can have VOIDmode; we must not try
4204 to call force_reg in that case, so avoid it. */
4205 if (MEM_P (to_rtx)
4206 && GET_MODE (to_rtx) == BLKmode
4207 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4208 && bitsize > 0
4209 && (bitpos % bitsize) == 0
4210 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4211 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4213 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4214 bitpos = 0;
4217 to_rtx = offset_address (to_rtx, offset_rtx,
4218 highest_pow2_factor_for_target (to,
4219 offset));
4222 /* Handle expand_expr of a complex value returning a CONCAT. */
4223 if (GET_CODE (to_rtx) == CONCAT)
4225 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4227 gcc_assert (bitpos == 0);
4228 result = store_expr (from, to_rtx, false, nontemporal);
4230 else
4232 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4233 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4234 nontemporal);
4237 else
4239 if (MEM_P (to_rtx))
4241 /* If the field is at offset zero, we could have been given the
4242 DECL_RTX of the parent struct. Don't munge it. */
4243 to_rtx = shallow_copy_rtx (to_rtx);
4245 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4247 /* Deal with volatile and readonly fields. The former is only
4248 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4249 if (volatilep)
4250 MEM_VOLATILE_P (to_rtx) = 1;
4251 if (component_uses_parent_alias_set (to))
4252 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4255 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4256 to_rtx, to, from))
4257 result = NULL;
4258 else
4259 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4260 TREE_TYPE (tem), get_alias_set (to),
4261 nontemporal);
4264 if (result)
4265 preserve_temp_slots (result);
4266 free_temp_slots ();
4267 pop_temp_slots ();
4268 return;
4271 /* If the rhs is a function call and its value is not an aggregate,
4272 call the function before we start to compute the lhs.
4273 This is needed for correct code for cases such as
4274 val = setjmp (buf) on machines where a reference to val
4275 requires loading up part of an address in a separate insn.
4277 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4278 since it might be a promoted variable where the zero- or sign- extension
4279 needs to be done. Handling this in the normal way is safe because no
4280 computation is done before the call. */
4281 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4282 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4283 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4284 && REG_P (DECL_RTL (to))))
4286 rtx value;
4288 push_temp_slots ();
4289 value = expand_normal (from);
4290 if (to_rtx == 0)
4291 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4293 /* Handle calls that return values in multiple non-contiguous locations.
4294 The Irix 6 ABI has examples of this. */
4295 if (GET_CODE (to_rtx) == PARALLEL)
4296 emit_group_load (to_rtx, value, TREE_TYPE (from),
4297 int_size_in_bytes (TREE_TYPE (from)));
4298 else if (GET_MODE (to_rtx) == BLKmode)
4299 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4300 else
4302 if (POINTER_TYPE_P (TREE_TYPE (to)))
4303 value = convert_memory_address (GET_MODE (to_rtx), value);
4304 emit_move_insn (to_rtx, value);
4306 preserve_temp_slots (to_rtx);
4307 free_temp_slots ();
4308 pop_temp_slots ();
4309 return;
4312 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4313 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4315 if (to_rtx == 0)
4316 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4318 /* Don't move directly into a return register. */
4319 if (TREE_CODE (to) == RESULT_DECL
4320 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4322 rtx temp;
4324 push_temp_slots ();
4325 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4327 if (GET_CODE (to_rtx) == PARALLEL)
4328 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4329 int_size_in_bytes (TREE_TYPE (from)));
4330 else
4331 emit_move_insn (to_rtx, temp);
4333 preserve_temp_slots (to_rtx);
4334 free_temp_slots ();
4335 pop_temp_slots ();
4336 return;
4339 /* In case we are returning the contents of an object which overlaps
4340 the place the value is being stored, use a safe function when copying
4341 a value through a pointer into a structure value return block. */
4342 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4343 && cfun->returns_struct
4344 && !cfun->returns_pcc_struct)
4346 rtx from_rtx, size;
4348 push_temp_slots ();
4349 size = expr_size (from);
4350 from_rtx = expand_normal (from);
4352 emit_library_call (memmove_libfunc, LCT_NORMAL,
4353 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4354 XEXP (from_rtx, 0), Pmode,
4355 convert_to_mode (TYPE_MODE (sizetype),
4356 size, TYPE_UNSIGNED (sizetype)),
4357 TYPE_MODE (sizetype));
4359 preserve_temp_slots (to_rtx);
4360 free_temp_slots ();
4361 pop_temp_slots ();
4362 return;
4365 /* Compute FROM and store the value in the rtx we got. */
4367 push_temp_slots ();
4368 result = store_expr (from, to_rtx, 0, nontemporal);
4369 preserve_temp_slots (result);
4370 free_temp_slots ();
4371 pop_temp_slots ();
4372 return;
4375 /* Emits a nontemporal store insn that moves FROM to TO. Returns true if this
4376 succeeded, false otherwise. */
4378 static bool
4379 emit_storent_insn (rtx to, rtx from)
4381 enum machine_mode mode = GET_MODE (to), imode;
4382 enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4383 rtx pattern;
4385 if (code == CODE_FOR_nothing)
4386 return false;
4388 imode = insn_data[code].operand[0].mode;
4389 if (!insn_data[code].operand[0].predicate (to, imode))
4390 return false;
4392 imode = insn_data[code].operand[1].mode;
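/* The source may fail the predicate as-is (e.g. a MEM when the
   pattern wants a register); force it into a register of the mode
   the insn expects and check once more.  */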
4393 if (!insn_data[code].operand[1].predicate (from, imode))
4395 from = copy_to_mode_reg (imode, from);
4396 if (!insn_data[code].operand[1].predicate (from, imode))
4397 return false;
4400 pattern = GEN_FCN (code) (to, from);
4401 if (pattern == NULL_RTX)
4402 return false;
4404 emit_insn (pattern);
4405 return true;
4408 /* Generate code for computing expression EXP,
4409 and storing the value into TARGET.
4411 If the mode is BLKmode then we may return TARGET itself.
4412 It turns out that in BLKmode it doesn't cause a problem,
4413 because C has no operators that could combine two different
4414 assignments into the same BLKmode object with different values
4415 with no sequence point. Will other languages need this to
4416 be more thorough?
4418 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4419 stack, and block moves may need to be treated specially.
4421 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4423 rtx
4424 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4426 rtx temp;
4427 rtx alt_rtl = NULL_RTX;
4428 int dont_return_target = 0;
4430 if (VOID_TYPE_P (TREE_TYPE (exp)))
4432 /* C++ can generate ?: expressions with a throw expression in one
4433 branch and an rvalue in the other. Here, we resolve attempts to
4434 store the throw expression's nonexistent result. */
4435 gcc_assert (!call_param_p);
4436 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4437 return NULL_RTX;
4439 if (TREE_CODE (exp) == COMPOUND_EXPR)
4441 /* Perform first part of compound expression, then assign from second
4442 part. */
4443 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4444 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4445 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4446 nontemporal);
4448 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4450 /* For conditional expression, get safe form of the target. Then
4451 test the condition, doing the appropriate assignment on either
4452 side. This avoids the creation of unnecessary temporaries.
4453 For non-BLKmode, it is more efficient not to do this. */
4455 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4457 do_pending_stack_adjust ();
4458 NO_DEFER_POP;
4459 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4460 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4461 nontemporal);
4462 emit_jump_insn (gen_jump (lab2));
4463 emit_barrier ();
4464 emit_label (lab1);
4465 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4466 nontemporal);
4467 emit_label (lab2);
4468 OK_DEFER_POP;
4470 return NULL_RTX;
4472 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4473 /* If this is a scalar in a register that is stored in a wider mode
4474 than the declared mode, compute the result into its declared mode
4475 and then convert to the wider mode. Our value is the computed
4476 expression. */
4478 rtx inner_target = 0;
4480 /* We can do the conversion inside EXP, which will often result
4481 in some optimizations. Do the conversion in two steps: first
4482 change the signedness, if needed, then the extend. But don't
4483 do this if the type of EXP is a subtype of something else
4484 since then the conversion might involve more than just
4485 converting modes. */
4486 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4487 && TREE_TYPE (TREE_TYPE (exp)) == 0
4488 && GET_MODE_PRECISION (GET_MODE (target))
4489 == TYPE_PRECISION (TREE_TYPE (exp)))
4491 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4492 != SUBREG_PROMOTED_UNSIGNED_P (target))
4494 /* Some types, e.g. Fortran's logical*4, won't have a signed
4495 version, so use the mode instead. */
4496 tree ntype
4497 = (signed_or_unsigned_type_for
4498 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4499 if (ntype == NULL)
4500 ntype = lang_hooks.types.type_for_mode
4501 (TYPE_MODE (TREE_TYPE (exp)),
4502 SUBREG_PROMOTED_UNSIGNED_P (target));
4504 exp = fold_convert (ntype, exp);
4507 exp = fold_convert (lang_hooks.types.type_for_mode
4508 (GET_MODE (SUBREG_REG (target)),
4509 SUBREG_PROMOTED_UNSIGNED_P (target)),
4510 exp);
4512 inner_target = SUBREG_REG (target);
4515 temp = expand_expr (exp, inner_target, VOIDmode,
4516 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4518 /* If TEMP is a VOIDmode constant, use convert_modes to make
4519 sure that we properly convert it. */
4520 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4522 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4523 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4524 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4525 GET_MODE (target), temp,
4526 SUBREG_PROMOTED_UNSIGNED_P (target));
4529 convert_move (SUBREG_REG (target), temp,
4530 SUBREG_PROMOTED_UNSIGNED_P (target));
4532 return NULL_RTX;
4534 else if (TREE_CODE (exp) == STRING_CST
4535 && !nontemporal && !call_param_p
4536 && TREE_STRING_LENGTH (exp) > 0
4537 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4539 /* Optimize initialization of an array with a STRING_CST. */
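/* Illustrative case, not from the original sources:
     char buf[8] = "abc";
   the STRING_CST supplies the leading bytes via store_by_pieces
   below, and any remaining bytes of the array are zeroed by the
   clear_storage call.  */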
4540 HOST_WIDE_INT exp_len, str_copy_len;
4541 rtx dest_mem;
4543 exp_len = int_expr_size (exp);
4544 if (exp_len <= 0)
4545 goto normal_expr;
4547 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4548 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4549 goto normal_expr;
4551 str_copy_len = TREE_STRING_LENGTH (exp);
4552 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4554 str_copy_len += STORE_MAX_PIECES - 1;
4555 str_copy_len &= ~(STORE_MAX_PIECES - 1);
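/* The power-of-two check above lets the two preceding lines round
   STR_COPY_LEN up to a multiple of STORE_MAX_PIECES; the MIN against
   EXP_LEN below keeps the copy inside the object.  */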
4557 str_copy_len = MIN (str_copy_len, exp_len);
4558 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4559 (void *) TREE_STRING_POINTER (exp),
4560 MEM_ALIGN (target), false))
4561 goto normal_expr;
4563 dest_mem = target;
4565 dest_mem = store_by_pieces (dest_mem,
4566 str_copy_len, builtin_strncpy_read_str,
4567 (void *) TREE_STRING_POINTER (exp),
4568 MEM_ALIGN (target), false,
4569 exp_len > str_copy_len ? 1 : 0);
4570 if (exp_len > str_copy_len)
4571 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4572 GEN_INT (exp_len - str_copy_len),
4573 BLOCK_OP_NORMAL);
4574 return NULL_RTX;
4576 else
4578 rtx tmp_target;
4580 normal_expr:
4581 /* If we want to use a nontemporal store, force the value to
4582 register first. */
4583 tmp_target = nontemporal ? NULL_RTX : target;
4584 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4585 (call_param_p
4586 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4587 &alt_rtl);
4588 /* Return TARGET if it's a specified hardware register.
4589 If TARGET is a volatile mem ref, either return TARGET
4590 or return a reg copied *from* TARGET; ANSI requires this.
4592 Otherwise, if TEMP is not TARGET, return TEMP
4593 if it is constant (for efficiency),
4594 or if we really want the correct value. */
4595 if (!(target && REG_P (target)
4596 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4597 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4598 && ! rtx_equal_p (temp, target)
4599 && CONSTANT_P (temp))
4600 dont_return_target = 1;
4603 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4604 the same as that of TARGET, adjust the constant. This is needed, for
4605 example, in case it is a CONST_DOUBLE and we want only a word-sized
4606 value. */
4607 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4608 && TREE_CODE (exp) != ERROR_MARK
4609 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4610 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4611 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4613 /* If value was not generated in the target, store it there.
4614 Convert the value to TARGET's type first if necessary and emit the
4615 pending incrementations that have been queued when expanding EXP.
4616 Note that we cannot emit the whole queue blindly because this will
4617 effectively disable the POST_INC optimization later.
4619 If TEMP and TARGET compare equal according to rtx_equal_p, but
4620 one or both of them are volatile memory refs, we have to distinguish
4621 two cases:
4622 - expand_expr has used TARGET. In this case, we must not generate
4623 another copy. This can be detected by TARGET being equal according
4624 to == .
4625 - expand_expr has not used TARGET - that means that the source just
4626 happens to have the same RTX form. Since temp will have been created
4627 by expand_expr, it will compare unequal according to == .
4628 We must generate a copy in this case, to reach the correct number
4629 of volatile memory references. */
4631 if ((! rtx_equal_p (temp, target)
4632 || (temp != target && (side_effects_p (temp)
4633 || side_effects_p (target))))
4634 && TREE_CODE (exp) != ERROR_MARK
4635 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4636 but TARGET is not valid memory reference, TEMP will differ
4637 from TARGET although it is really the same location. */
4638 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4639 /* If there's nothing to copy, don't bother. Don't call
4640 expr_size unless necessary, because some front-ends' (e.g. C++)
4641 expr_size hook must not be given objects that are not
4642 supposed to be bit-copied or bit-initialized. */
4643 && expr_size (exp) != const0_rtx)
4645 if (GET_MODE (temp) != GET_MODE (target)
4646 && GET_MODE (temp) != VOIDmode)
4648 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4649 if (dont_return_target)
4651 /* In this case, we will return TEMP,
4652 so make sure it has the proper mode.
4653 But don't forget to store the value into TARGET. */
4654 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4655 emit_move_insn (target, temp);
4657 else if (GET_MODE (target) == BLKmode
4658 || GET_MODE (temp) == BLKmode)
4659 emit_block_move (target, temp, expr_size (exp),
4660 (call_param_p
4661 ? BLOCK_OP_CALL_PARM
4662 : BLOCK_OP_NORMAL));
4663 else
4664 convert_move (target, temp, unsignedp);
4667 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4669 /* Handle copying a string constant into an array. The string
4670 constant may be shorter than the array. So copy just the string's
4671 actual length, and clear the rest. First get the size of the data
4672 type of the string, which is actually the size of the target. */
4673 rtx size = expr_size (exp);
4675 if (GET_CODE (size) == CONST_INT
4676 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4677 emit_block_move (target, temp, size,
4678 (call_param_p
4679 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4680 else
4682 /* Compute the size of the data to copy from the string. */
4683 tree copy_size
4684 = size_binop (MIN_EXPR,
4685 make_tree (sizetype, size),
4686 size_int (TREE_STRING_LENGTH (exp)));
4687 rtx copy_size_rtx
4688 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4689 (call_param_p
4690 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4691 rtx label = 0;
4693 /* Copy that much. */
4694 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4695 TYPE_UNSIGNED (sizetype));
4696 emit_block_move (target, temp, copy_size_rtx,
4697 (call_param_p
4698 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4700 /* Figure out how much is left in TARGET that we have to clear.
4701 Do all calculations in ptr_mode. */
4702 if (GET_CODE (copy_size_rtx) == CONST_INT)
4704 size = plus_constant (size, -INTVAL (copy_size_rtx));
4705 target = adjust_address (target, BLKmode,
4706 INTVAL (copy_size_rtx));
4708 else
4710 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4711 copy_size_rtx, NULL_RTX, 0,
4712 OPTAB_LIB_WIDEN);
4714 #ifdef POINTERS_EXTEND_UNSIGNED
4715 if (GET_MODE (copy_size_rtx) != Pmode)
4716 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4717 TYPE_UNSIGNED (sizetype));
4718 #endif
4720 target = offset_address (target, copy_size_rtx,
4721 highest_pow2_factor (copy_size));
4722 label = gen_label_rtx ();
4723 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4724 GET_MODE (size), 0, label);
4727 if (size != const0_rtx)
4728 clear_storage (target, size, BLOCK_OP_NORMAL);
4730 if (label)
4731 emit_label (label);
4734 /* Handle calls that return values in multiple non-contiguous locations.
4735 The Irix 6 ABI has examples of this. */
4736 else if (GET_CODE (target) == PARALLEL)
4737 emit_group_load (target, temp, TREE_TYPE (exp),
4738 int_size_in_bytes (TREE_TYPE (exp)));
4739 else if (GET_MODE (temp) == BLKmode)
4740 emit_block_move (target, temp, expr_size (exp),
4741 (call_param_p
4742 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4743 else if (nontemporal
4744 && emit_storent_insn (target, temp))
4745 /* If we managed to emit a nontemporal store, there is nothing else to
4746 do. */
4747 ;
4748 else
4750 temp = force_operand (temp, target);
4751 if (temp != target)
4752 emit_move_insn (target, temp);
4756 return NULL_RTX;
4759 /* Helper for categorize_ctor_elements. Identical interface. */
4761 static bool
4762 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4763 HOST_WIDE_INT *p_elt_count,
4764 bool *p_must_clear)
4766 unsigned HOST_WIDE_INT idx;
4767 HOST_WIDE_INT nz_elts, elt_count;
4768 tree value, purpose;
4770 /* Whether CTOR is a valid constant initializer, in accordance with what
4771 initializer_constant_valid_p does. If inferred from the constructor
4772 elements, true until proven otherwise. */
4773 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4774 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4776 nz_elts = 0;
4777 elt_count = 0;
4779 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4781 HOST_WIDE_INT mult;
4783 mult = 1;
4784 if (TREE_CODE (purpose) == RANGE_EXPR)
4786 tree lo_index = TREE_OPERAND (purpose, 0);
4787 tree hi_index = TREE_OPERAND (purpose, 1);
4789 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4790 mult = (tree_low_cst (hi_index, 1)
4791 - tree_low_cst (lo_index, 1) + 1);
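/* For instance, the GNU range initializer
     int a[8] = { [2 ... 5] = 1 };
   reaches here with a RANGE_EXPR and contributes with MULT == 4.  */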
4794 switch (TREE_CODE (value))
4796 case CONSTRUCTOR:
4798 HOST_WIDE_INT nz = 0, ic = 0;
4800 bool const_elt_p
4801 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4803 nz_elts += mult * nz;
4804 elt_count += mult * ic;
4806 if (const_from_elts_p && const_p)
4807 const_p = const_elt_p;
4809 break;
4811 case INTEGER_CST:
4812 case REAL_CST:
4813 case FIXED_CST:
4814 if (!initializer_zerop (value))
4815 nz_elts += mult;
4816 elt_count += mult;
4817 break;
4819 case STRING_CST:
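/* Conservatively treat every byte of the string as nonzero.  */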
4820 nz_elts += mult * TREE_STRING_LENGTH (value);
4821 elt_count += mult * TREE_STRING_LENGTH (value);
4822 break;
4824 case COMPLEX_CST:
4825 if (!initializer_zerop (TREE_REALPART (value)))
4826 nz_elts += mult;
4827 if (!initializer_zerop (TREE_IMAGPART (value)))
4828 nz_elts += mult;
4829 elt_count += mult;
4830 break;
4832 case VECTOR_CST:
4834 tree v;
4835 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4837 if (!initializer_zerop (TREE_VALUE (v)))
4838 nz_elts += mult;
4839 elt_count += mult;
4842 break;
4844 default:
4845 nz_elts += mult;
4846 elt_count += mult;
4848 if (const_from_elts_p && const_p)
4849 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4850 != NULL_TREE;
4851 break;
4855 if (!*p_must_clear
4856 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4857 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4859 tree init_sub_type;
4860 bool clear_this = true;
4862 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4864 /* We don't expect more than one element of the union to be
4865 initialized. Not sure what we should do otherwise... */
4866 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4867 == 1);
4869 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4870 CONSTRUCTOR_ELTS (ctor),
4871 0)->value);
4873 /* ??? We could look at each element of the union, and find the
4874 largest element, which would avoid comparing the size of the
4875 initialized element against any tail padding in the union.
4876 Doesn't seem worth the effort... */
4877 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4878 TYPE_SIZE (init_sub_type)) == 1)
4880 /* And now we have to find out if the element itself is fully
4881 constructed. E.g. for union { struct { int a, b; } s; } u
4882 = { .s = { .a = 1 } }. */
4883 if (elt_count == count_type_elements (init_sub_type, false))
4884 clear_this = false;
4888 *p_must_clear = clear_this;
4891 *p_nz_elts += nz_elts;
4892 *p_elt_count += elt_count;
4894 return const_p;
4897 /* Examine CTOR to discover:
4898 * how many scalar fields are set to nonzero values,
4899 and place it in *P_NZ_ELTS;
4900 * how many scalar fields in total are in CTOR,
4901 and place it in *P_ELT_COUNT.
4902 * if the type is a union, and the initializer from the constructor
4903 is not the largest element in the union, then set *p_must_clear.
4905 Return whether or not CTOR is a valid static constant initializer, the same
4906 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4908 bool
4909 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4910 HOST_WIDE_INT *p_elt_count,
4911 bool *p_must_clear)
4913 *p_nz_elts = 0;
4914 *p_elt_count = 0;
4915 *p_must_clear = false;
4917 return
4918 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4921 /* Count the number of scalars in TYPE. Return -1 on overflow or
4922 if TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count
4923 a flexible array member at the end of the structure. */
4925 HOST_WIDE_INT
4926 count_type_elements (const_tree type, bool allow_flexarr)
4928 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
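/* MAX is the largest positive HOST_WIDE_INT; it guards the
   multiplication in the ARRAY_TYPE case below against overflow.  */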
4929 switch (TREE_CODE (type))
4931 case ARRAY_TYPE:
4933 tree telts = array_type_nelts (type);
4934 if (telts && host_integerp (telts, 1))
4936 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4937 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4938 if (n == 0)
4939 return 0;
4940 else if (max / n > m)
4941 return n * m;
4943 return -1;
4946 case RECORD_TYPE:
4948 HOST_WIDE_INT n = 0, t;
4949 tree f;
4951 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4952 if (TREE_CODE (f) == FIELD_DECL)
4954 t = count_type_elements (TREE_TYPE (f), false);
4955 if (t < 0)
4957 /* Check for structures with flexible array member. */
4958 tree tf = TREE_TYPE (f);
4959 if (allow_flexarr
4960 && TREE_CHAIN (f) == NULL
4961 && TREE_CODE (tf) == ARRAY_TYPE
4962 && TYPE_DOMAIN (tf)
4963 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4964 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4965 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4966 && int_size_in_bytes (type) >= 0)
4967 break;
4969 return -1;
4971 n += t;
4974 return n;
4977 case UNION_TYPE:
4978 case QUAL_UNION_TYPE:
4979 return -1;
4981 case COMPLEX_TYPE:
4982 return 2;
4984 case VECTOR_TYPE:
4985 return TYPE_VECTOR_SUBPARTS (type);
4987 case INTEGER_TYPE:
4988 case REAL_TYPE:
4989 case FIXED_POINT_TYPE:
4990 case ENUMERAL_TYPE:
4991 case BOOLEAN_TYPE:
4992 case POINTER_TYPE:
4993 case OFFSET_TYPE:
4994 case REFERENCE_TYPE:
4995 return 1;
4997 case VOID_TYPE:
4998 case METHOD_TYPE:
4999 case FUNCTION_TYPE:
5000 case LANG_TYPE:
5001 default:
5002 gcc_unreachable ();
5006 /* Return 1 if EXP contains mostly (3/4) zeros. */
5008 static int
5009 mostly_zeros_p (const_tree exp)
5011 if (TREE_CODE (exp) == CONSTRUCTOR)
5014 HOST_WIDE_INT nz_elts, count, elts;
5015 bool must_clear;
5017 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5018 if (must_clear)
5019 return 1;
5021 elts = count_type_elements (TREE_TYPE (exp), false);
5023 return nz_elts < elts / 4;
5026 return initializer_zerop (exp);
5029 /* Return 1 if EXP contains all zeros. */
5031 static int
5032 all_zeros_p (const_tree exp)
5034 if (TREE_CODE (exp) == CONSTRUCTOR)
5037 HOST_WIDE_INT nz_elts, count;
5038 bool must_clear;
5040 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5041 return nz_elts == 0;
5044 return initializer_zerop (exp);
5047 /* Helper function for store_constructor.
5048 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5049 TYPE is the type of the CONSTRUCTOR, not the element type.
5050 CLEARED is as for store_constructor.
5051 ALIAS_SET is the alias set to use for any stores.
5053 This provides a recursive shortcut back to store_constructor when it isn't
5054 necessary to go through store_field. This is so that we can pass through
5055 the cleared field to let store_constructor know that we may not have to
5056 clear a substructure if the outer structure has already been cleared. */
5058 static void
5059 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5060 HOST_WIDE_INT bitpos, enum machine_mode mode,
5061 tree exp, tree type, int cleared,
5062 alias_set_type alias_set)
5064 if (TREE_CODE (exp) == CONSTRUCTOR
5065 /* We can only call store_constructor recursively if the size and
5066 bit position are on a byte boundary. */
5067 && bitpos % BITS_PER_UNIT == 0
5068 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5069 /* If we have a nonzero bitpos for a register target, then we just
5070 let store_field do the bitfield handling. This is unlikely to
5071 generate unnecessary clear instructions anyway. */
5072 && (bitpos == 0 || MEM_P (target)))
5074 if (MEM_P (target))
5075 target
5076 = adjust_address (target,
5077 GET_MODE (target) == BLKmode
5078 || 0 != (bitpos
5079 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5080 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5083 /* Update the alias set, if required. */
5084 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5085 && MEM_ALIAS_SET (target) != 0)
5087 target = copy_rtx (target);
5088 set_mem_alias_set (target, alias_set);
5091 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5093 else
5094 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5097 /* Store the value of constructor EXP into the rtx TARGET.
5098 TARGET is either a REG or a MEM; we know it cannot conflict, since
5099 safe_from_p has been called.
5100 CLEARED is true if TARGET is known to have been zero'd.
5101 SIZE is the number of bytes of TARGET we are allowed to modify: this
5102 may not be the same as the size of EXP if we are assigning to a field
5103 which has been packed to exclude padding bits. */
5105 static void
5106 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5108 tree type = TREE_TYPE (exp);
5109 #ifdef WORD_REGISTER_OPERATIONS
5110 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5111 #endif
5113 switch (TREE_CODE (type))
5115 case RECORD_TYPE:
5116 case UNION_TYPE:
5117 case QUAL_UNION_TYPE:
5119 unsigned HOST_WIDE_INT idx;
5120 tree field, value;
5122 /* If size is zero or the target is already cleared, do nothing. */
5123 if (size == 0 || cleared)
5124 cleared = 1;
5125 /* We either clear the aggregate or indicate the value is dead. */
5126 else if ((TREE_CODE (type) == UNION_TYPE
5127 || TREE_CODE (type) == QUAL_UNION_TYPE)
5128 && ! CONSTRUCTOR_ELTS (exp))
5129 /* If the constructor is empty, clear the union. */
5131 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5132 cleared = 1;
5135 /* If we are building a static constructor into a register,
5136 set the initial value to zero so we can fold the value into
5137 a constant. But if more than one register is involved,
5138 this probably loses. */
5139 else if (REG_P (target) && TREE_STATIC (exp)
5140 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5142 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5143 cleared = 1;
5146 /* If the constructor has fewer fields than the structure or
5147 if we are initializing the structure to mostly zeros, clear
5148 the whole structure first. Don't do this if TARGET is a
5149 register whose mode size isn't equal to SIZE since
5150 clear_storage can't handle this case. */
5151 else if (size > 0
5152 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5153 != fields_length (type))
5154 || mostly_zeros_p (exp))
5155 && (!REG_P (target)
5156 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5157 == size)))
5159 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5160 cleared = 1;
5163 if (REG_P (target) && !cleared)
5164 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5166 /* Store each element of the constructor into the
5167 corresponding field of TARGET. */
5168 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5170 enum machine_mode mode;
5171 HOST_WIDE_INT bitsize;
5172 HOST_WIDE_INT bitpos = 0;
5173 tree offset;
5174 rtx to_rtx = target;
5176 /* Just ignore missing fields. We cleared the whole
5177 structure, above, if any fields are missing. */
5178 if (field == 0)
5179 continue;
5181 if (cleared && initializer_zerop (value))
5182 continue;
5184 if (host_integerp (DECL_SIZE (field), 1))
5185 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5186 else
5187 bitsize = -1;
5189 mode = DECL_MODE (field);
5190 if (DECL_BIT_FIELD (field))
5191 mode = VOIDmode;
5193 offset = DECL_FIELD_OFFSET (field);
5194 if (host_integerp (offset, 0)
5195 && host_integerp (bit_position (field), 0))
5197 bitpos = int_bit_position (field);
5198 offset = 0;
5200 else
5201 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5203 if (offset)
5205 rtx offset_rtx;
5207 offset
5208 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5209 make_tree (TREE_TYPE (exp),
5210 target));
5212 offset_rtx = expand_normal (offset);
5213 gcc_assert (MEM_P (to_rtx));
5215 #ifdef POINTERS_EXTEND_UNSIGNED
5216 if (GET_MODE (offset_rtx) != Pmode)
5217 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5218 #else
5219 if (GET_MODE (offset_rtx) != ptr_mode)
5220 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5221 #endif
5223 to_rtx = offset_address (to_rtx, offset_rtx,
5224 highest_pow2_factor (offset));
5227 #ifdef WORD_REGISTER_OPERATIONS
5228 /* If this initializes a field that is smaller than a
5229 word, at the start of a word, try to widen it to a full
5230 word. This special case allows us to output C++ member
5231 function initializations in a form that the optimizers
5232 can understand. */
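/* Illustrative example, not from the original sources: a QImode
   INTEGER_CST stored at bit 0 of a word-sized register is rewritten
   below as a full-word store, with the value shifted to the top on
   big-endian targets, so later RTL passes see whole-word moves
   rather than bitfield inserts.  */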
5233 if (REG_P (target)
5234 && bitsize < BITS_PER_WORD
5235 && bitpos % BITS_PER_WORD == 0
5236 && GET_MODE_CLASS (mode) == MODE_INT
5237 && TREE_CODE (value) == INTEGER_CST
5238 && exp_size >= 0
5239 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5241 tree type = TREE_TYPE (value);
5243 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5245 type = lang_hooks.types.type_for_size
5246 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5247 value = fold_convert (type, value);
5250 if (BYTES_BIG_ENDIAN)
5251 value
5252 = fold_build2 (LSHIFT_EXPR, type, value,
5253 build_int_cst (type,
5254 BITS_PER_WORD - bitsize));
5255 bitsize = BITS_PER_WORD;
5256 mode = word_mode;
5258 #endif
5260 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5261 && DECL_NONADDRESSABLE_P (field))
5263 to_rtx = copy_rtx (to_rtx);
5264 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5267 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5268 value, type, cleared,
5269 get_alias_set (TREE_TYPE (field)));
5271 break;
5273 case ARRAY_TYPE:
5275 tree value, index;
5276 unsigned HOST_WIDE_INT i;
5277 int need_to_clear;
5278 tree domain;
5279 tree elttype = TREE_TYPE (type);
5280 int const_bounds_p;
5281 HOST_WIDE_INT minelt = 0;
5282 HOST_WIDE_INT maxelt = 0;
5284 domain = TYPE_DOMAIN (type);
5285 const_bounds_p = (TYPE_MIN_VALUE (domain)
5286 && TYPE_MAX_VALUE (domain)
5287 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5288 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5290 /* If we have constant bounds for the range of the type, get them. */
5291 if (const_bounds_p)
5293 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5294 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5297 /* If the constructor has fewer elements than the array, clear
5298 the whole array first. Similarly if this is a static
5299 constructor of a non-BLKmode object. */
5300 if (cleared)
5301 need_to_clear = 0;
5302 else if (REG_P (target) && TREE_STATIC (exp))
5303 need_to_clear = 1;
5304 else
5306 unsigned HOST_WIDE_INT idx;
5307 tree index, value;
5308 HOST_WIDE_INT count = 0, zero_count = 0;
5309 need_to_clear = ! const_bounds_p;
5311 /* This loop is a more accurate version of the loop in
5312 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5313 is also needed to check for missing elements. */
5314 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5316 HOST_WIDE_INT this_node_count;
5318 if (need_to_clear)
5319 break;
5321 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5323 tree lo_index = TREE_OPERAND (index, 0);
5324 tree hi_index = TREE_OPERAND (index, 1);
5326 if (! host_integerp (lo_index, 1)
5327 || ! host_integerp (hi_index, 1))
5329 need_to_clear = 1;
5330 break;
5333 this_node_count = (tree_low_cst (hi_index, 1)
5334 - tree_low_cst (lo_index, 1) + 1);
5336 else
5337 this_node_count = 1;
5339 count += this_node_count;
5340 if (mostly_zeros_p (value))
5341 zero_count += this_node_count;
5344 /* Clear the entire array first if there are any missing
5345 elements, or if the incidence of zero elements is >=
5346 75%. */
5347 if (! need_to_clear
5348 && (count < maxelt - minelt + 1
5349 || 4 * zero_count >= 3 * count))
5350 need_to_clear = 1;
5353 if (need_to_clear && size > 0)
5355 if (REG_P (target))
5356 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5357 else
5358 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5359 cleared = 1;
5362 if (!cleared && REG_P (target))
5363 /* Inform later passes that the old value is dead. */
5364 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5366 /* Store each element of the constructor into the
5367 corresponding element of TARGET, determined by counting the
5368 elements. */
5369 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5371 enum machine_mode mode;
5372 HOST_WIDE_INT bitsize;
5373 HOST_WIDE_INT bitpos;
5374 int unsignedp;
5375 rtx xtarget = target;
5377 if (cleared && initializer_zerop (value))
5378 continue;
5380 unsignedp = TYPE_UNSIGNED (elttype);
5381 mode = TYPE_MODE (elttype);
5382 if (mode == BLKmode)
5383 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5384 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5385 : -1);
5386 else
5387 bitsize = GET_MODE_BITSIZE (mode);
5389 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5391 tree lo_index = TREE_OPERAND (index, 0);
5392 tree hi_index = TREE_OPERAND (index, 1);
5393 rtx index_r, pos_rtx;
5394 HOST_WIDE_INT lo, hi, count;
5395 tree position;
5397 /* If the range is constant and "small", unroll the loop. */
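/* Illustrative case, not from the original sources:
     int a[8] = { [0 ... 3] = v };
   with constant bounds and a small total size, the four element
   stores are emitted inline instead of the runtime loop in the
   else branch.  */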
5398 if (const_bounds_p
5399 && host_integerp (lo_index, 0)
5400 && host_integerp (hi_index, 0)
5401 && (lo = tree_low_cst (lo_index, 0),
5402 hi = tree_low_cst (hi_index, 0),
5403 count = hi - lo + 1,
5404 (!MEM_P (target)
5405 || count <= 2
5406 || (host_integerp (TYPE_SIZE (elttype), 1)
5407 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5408 <= 40 * 8)))))
5410 lo -= minelt; hi -= minelt;
5411 for (; lo <= hi; lo++)
5413 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5415 if (MEM_P (target)
5416 && !MEM_KEEP_ALIAS_SET_P (target)
5417 && TREE_CODE (type) == ARRAY_TYPE
5418 && TYPE_NONALIASED_COMPONENT (type))
5420 target = copy_rtx (target);
5421 MEM_KEEP_ALIAS_SET_P (target) = 1;
5424 store_constructor_field
5425 (target, bitsize, bitpos, mode, value, type, cleared,
5426 get_alias_set (elttype));
5429 else
5431 rtx loop_start = gen_label_rtx ();
5432 rtx loop_end = gen_label_rtx ();
5433 tree exit_cond;
5435 expand_normal (hi_index);
5436 unsignedp = TYPE_UNSIGNED (domain);
5438 index = build_decl (VAR_DECL, NULL_TREE, domain);
5440 index_r
5441 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5442 &unsignedp, 0));
5443 SET_DECL_RTL (index, index_r);
5444 store_expr (lo_index, index_r, 0, false);
5446 /* Build the head of the loop. */
5447 do_pending_stack_adjust ();
5448 emit_label (loop_start);
5450 /* Assign value to element index. */
5451 position =
5452 fold_convert (ssizetype,
5453 fold_build2 (MINUS_EXPR,
5454 TREE_TYPE (index),
5455 index,
5456 TYPE_MIN_VALUE (domain)));
5458 position =
5459 size_binop (MULT_EXPR, position,
5460 fold_convert (ssizetype,
5461 TYPE_SIZE_UNIT (elttype)));
5463 pos_rtx = expand_normal (position);
5464 xtarget = offset_address (target, pos_rtx,
5465 highest_pow2_factor (position));
5466 xtarget = adjust_address (xtarget, mode, 0);
5467 if (TREE_CODE (value) == CONSTRUCTOR)
5468 store_constructor (value, xtarget, cleared,
5469 bitsize / BITS_PER_UNIT);
5470 else
5471 store_expr (value, xtarget, 0, false);
5473 /* Generate a conditional jump to exit the loop. */
5474 exit_cond = build2 (LT_EXPR, integer_type_node,
5475 index, hi_index);
5476 jumpif (exit_cond, loop_end);
5478 /* Update the loop counter, and jump to the head of
5479 the loop. */
5480 expand_assignment (index,
5481 build2 (PLUS_EXPR, TREE_TYPE (index),
5482 index, integer_one_node),
5483 false);
5485 emit_jump (loop_start);
5487 /* Build the end of the loop. */
5488 emit_label (loop_end);
5491 else if ((index != 0 && ! host_integerp (index, 0))
5492 || ! host_integerp (TYPE_SIZE (elttype), 1))
5494 tree position;
5496 if (index == 0)
5497 index = ssize_int (1);
5499 if (minelt)
5500 index = fold_convert (ssizetype,
5501 fold_build2 (MINUS_EXPR,
5502 TREE_TYPE (index),
5503 index,
5504 TYPE_MIN_VALUE (domain)));
5506 position =
5507 size_binop (MULT_EXPR, index,
5508 fold_convert (ssizetype,
5509 TYPE_SIZE_UNIT (elttype)));
5510 xtarget = offset_address (target,
5511 expand_normal (position),
5512 highest_pow2_factor (position));
5513 xtarget = adjust_address (xtarget, mode, 0);
5514 store_expr (value, xtarget, 0, false);
5516 else
5518 if (index != 0)
5519 bitpos = ((tree_low_cst (index, 0) - minelt)
5520 * tree_low_cst (TYPE_SIZE (elttype), 1));
5521 else
5522 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5524 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5525 && TREE_CODE (type) == ARRAY_TYPE
5526 && TYPE_NONALIASED_COMPONENT (type))
5528 target = copy_rtx (target);
5529 MEM_KEEP_ALIAS_SET_P (target) = 1;
5531 store_constructor_field (target, bitsize, bitpos, mode, value,
5532 type, cleared, get_alias_set (elttype));
5535 break;
5538 case VECTOR_TYPE:
5540 unsigned HOST_WIDE_INT idx;
5541 constructor_elt *ce;
5542 int i;
5543 int need_to_clear;
5544 int icode = 0;
5545 tree elttype = TREE_TYPE (type);
5546 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5547 enum machine_mode eltmode = TYPE_MODE (elttype);
5548 HOST_WIDE_INT bitsize;
5549 HOST_WIDE_INT bitpos;
5550 rtvec vector = NULL;
5551 unsigned n_elts;
5553 gcc_assert (eltmode != BLKmode);
5555 n_elts = TYPE_VECTOR_SUBPARTS (type);
5556 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5558 enum machine_mode mode = GET_MODE (target);
5560 icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5561 if (icode != CODE_FOR_nothing)
5563 unsigned int i;
5565 vector = rtvec_alloc (n_elts);
5566 for (i = 0; i < n_elts; i++)
5567 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5571 /* If the constructor has fewer elements than the vector,
5572 clear the whole array first. Similarly if this is a static
5573 constructor of a non-BLKmode object. */
5574 if (cleared)
5575 need_to_clear = 0;
5576 else if (REG_P (target) && TREE_STATIC (exp))
5577 need_to_clear = 1;
5578 else
5580 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5581 tree value;
5583 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5585 int n_elts_here = tree_low_cst
5586 (int_const_binop (TRUNC_DIV_EXPR,
5587 TYPE_SIZE (TREE_TYPE (value)),
5588 TYPE_SIZE (elttype), 0), 1);
5590 count += n_elts_here;
5591 if (mostly_zeros_p (value))
5592 zero_count += n_elts_here;
5595 /* Clear the entire vector first if there are any missing elements,
5596 or if the incidence of zero elements is >= 75%. */
5597 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5600 if (need_to_clear && size > 0 && !vector)
5602 if (REG_P (target))
5603 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5604 else
5605 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5606 cleared = 1;
5609 /* Inform later passes that the old value is dead. */
5610 if (!cleared && !vector && REG_P (target))
5611 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5613 /* Store each element of the constructor into the corresponding
5614 element of TARGET, determined by counting the elements. */
5615 for (idx = 0, i = 0;
5616 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5617 idx++, i += bitsize / elt_size)
5619 HOST_WIDE_INT eltpos;
5620 tree value = ce->value;
5622 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5623 if (cleared && initializer_zerop (value))
5624 continue;
5626 if (ce->index)
5627 eltpos = tree_low_cst (ce->index, 1);
5628 else
5629 eltpos = i;
5631 if (vector)
5633 /* Vector CONSTRUCTORs should only be built from smaller
5634 vectors in the case of BLKmode vectors. */
5635 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5636 RTVEC_ELT (vector, eltpos)
5637 = expand_normal (value);
5639 else
5641 enum machine_mode value_mode =
5642 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5643 ? TYPE_MODE (TREE_TYPE (value))
5644 : eltmode;
5645 bitpos = eltpos * elt_size;
5646 store_constructor_field (target, bitsize, bitpos,
5647 value_mode, value, type,
5648 cleared, get_alias_set (elttype));
5652 if (vector)
5653 emit_insn (GEN_FCN (icode)
5654 (target,
5655 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5656 break;
5659 default:
5660 gcc_unreachable ();
5664 /* Store the value of EXP (an expression tree)
5665 into a subfield of TARGET which has mode MODE and occupies
5666 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5667 If MODE is VOIDmode, it means that we are storing into a bit-field.
5669 Always return const0_rtx unless we have something particular to
5670 return.
5672 TYPE is the type of the underlying object,
5674 ALIAS_SET is the alias set for the destination. This value will
5675 (in general) be different from that for TARGET, since TARGET is a
5676 reference to the containing structure.
5678 If NONTEMPORAL is true, try generating a nontemporal store. */
5680 static rtx
5681 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5682 enum machine_mode mode, tree exp, tree type,
5683 alias_set_type alias_set, bool nontemporal)
5685 HOST_WIDE_INT width_mask = 0;
5687 if (TREE_CODE (exp) == ERROR_MARK)
5688 return const0_rtx;
5690 /* If we have nothing to store, do nothing unless the expression has
5691 side-effects. */
5692 if (bitsize == 0)
5693 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5694 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5695 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5697 /* If we are storing into an unaligned field of an aligned union that is
5698 in a register, we may have the mode of TARGET being an integer mode but
5699 MODE == BLKmode. In that case, get an aligned object whose size and
5700 alignment are the same as TARGET and store TARGET into it (we can avoid
5701 the store if the field being stored is the entire width of TARGET). Then
5702 call ourselves recursively to store the field into a BLKmode version of
5703 that object. Finally, load from the object into TARGET. This is not
5704 very efficient in general, but should only be slightly more expensive
5705 than the otherwise-required unaligned accesses. Perhaps this can be
5706 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5707 twice, once with emit_move_insn and once via store_field. */
5709 if (mode == BLKmode
5710 && (REG_P (target) || GET_CODE (target) == SUBREG))
5712 rtx object = assign_temp (type, 0, 1, 1);
5713 rtx blk_object = adjust_address (object, BLKmode, 0);
5715 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5716 emit_move_insn (object, target);
5718 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5719 nontemporal);
5721 emit_move_insn (target, object);
5723 /* We want to return the BLKmode version of the data. */
5724 return blk_object;
5727 if (GET_CODE (target) == CONCAT)
5729 /* We're storing into a struct containing a single __complex. */
5731 gcc_assert (!bitpos);
5732 return store_expr (exp, target, 0, nontemporal);
5735 /* If the structure is in a register or if the component
5736 is a bit field, we cannot use addressing to access it.
5737 Use bit-field techniques or SUBREG to store in it. */
5739 if (mode == VOIDmode
5740 || (mode != BLKmode && ! direct_store[(int) mode]
5741 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5742 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5743 || REG_P (target)
5744 || GET_CODE (target) == SUBREG
5745 /* If the field isn't aligned enough to store as an ordinary memref,
5746 store it as a bit field. */
5747 || (mode != BLKmode
5748 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5749 || bitpos % GET_MODE_ALIGNMENT (mode))
5750 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5751 || (bitpos % BITS_PER_UNIT != 0)))
5752 /* If the RHS and field are a constant size and the size of the
5753 RHS isn't the same size as the bitfield, we must use bitfield
5754 operations. */
5755 || (bitsize >= 0
5756 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5757 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5759 rtx temp;
5761 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5762 implies a mask operation. If the precision is the same size as
5763 the field we're storing into, that mask is redundant. This is
5764 particularly common with bit field assignments generated by the
5765 C front end. */
5766 if (TREE_CODE (exp) == NOP_EXPR)
5768 tree type = TREE_TYPE (exp);
5769 if (INTEGRAL_TYPE_P (type)
5770 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5771 && bitsize == TYPE_PRECISION (type))
5773 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5774 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5775 exp = TREE_OPERAND (exp, 0);
5779 temp = expand_normal (exp);
5781 /* If BITSIZE is narrower than the size of the type of EXP
5782 we will be narrowing TEMP. Normally, what's wanted are the
5783 low-order bits. However, if EXP's type is a record and this is
5784 a big-endian machine, we want the upper BITSIZE bits. */
5785 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5786 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5787 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5788 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5789 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5790 - bitsize),
5791 NULL_RTX, 1);
5793 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5794 MODE. */
5795 if (mode != VOIDmode && mode != BLKmode
5796 && mode != TYPE_MODE (TREE_TYPE (exp)))
5797 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5799 /* If the modes of TARGET and TEMP are both BLKmode, both
5800 must be in memory and BITPOS must be aligned on a byte
5801 boundary. If so, we simply do a block copy. */
5802 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5804 gcc_assert (MEM_P (target) && MEM_P (temp)
5805 && !(bitpos % BITS_PER_UNIT));
5807 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5808 emit_block_move (target, temp,
5809 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5810 / BITS_PER_UNIT),
5811 BLOCK_OP_NORMAL);
5813 return const0_rtx;
5816 /* Store the value in the bitfield. */
5817 store_bit_field (target, bitsize, bitpos, mode, temp);
5819 return const0_rtx;
5821 else
5823 /* Now build a reference to just the desired component. */
5824 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5826 if (to_rtx == target)
5827 to_rtx = copy_rtx (to_rtx);
5829 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5830 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5831 set_mem_alias_set (to_rtx, alias_set);
5833 return store_expr (exp, to_rtx, 0, nontemporal);
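
/* Editorial sketch, not part of the original source: how a caller
   might use store_field to store the value of a tree expression VAL
   into an 11-bit field starting at bit 5 of the memory reference TO.
   Passing VOIDmode requests bit-field handling; the example_ name and
   the chosen field geometry are hypothetical.  */

static rtx ATTRIBUTE_UNUSED
example_store_bitfield (rtx to, tree val)
{
  /* The alias set of VAL's type stands in for the field's alias set.  */
  return store_field (to, /*bitsize=*/11, /*bitpos=*/5, VOIDmode,
		      val, TREE_TYPE (val),
		      get_alias_set (TREE_TYPE (val)),
		      /*nontemporal=*/false);
}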
5837 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5838 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5839 codes and find the ultimate containing object, which we return.
5841 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5842 bit position, and *PUNSIGNEDP to the signedness of the field.
5843 If the position of the field is variable, we store a tree
5844 giving the variable offset (in units) in *POFFSET.
5845 This offset is in addition to the bit position.
5846 If the position is not variable, we store 0 in *POFFSET.
5848 If any of the extraction expressions is volatile,
5849 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5851 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5852 is a mode that can be used to access the field. In that case, *PBITSIZE
5853 is redundant.
5855 If the field describes a variable-sized object, *PMODE is set to
5856 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5857 this case, but the address of the object can be found.
5859 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5860 look through nodes that serve as markers of a greater alignment than
5861 the one that can be deduced from the expression. These nodes make it
5862 possible for front-ends to prevent temporaries from being created by
5863 the middle-end on alignment considerations. For that purpose, the
5864 normal operating mode at the high level is to always pass FALSE so that
5865 the ultimate containing object is really returned; moreover, the
5866 associated predicate handled_component_p will always return TRUE
5867 on these nodes, thus indicating that they are essentially handled
5868 by get_inner_reference. TRUE should only be passed when the caller
5869 is scanning the expression in order to build another representation
5870 and specifically knows how to handle these nodes; as such, this is
5871 the normal operating mode in the RTL expanders. */
5873 tree
5874 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5875 HOST_WIDE_INT *pbitpos, tree *poffset,
5876 enum machine_mode *pmode, int *punsignedp,
5877 int *pvolatilep, bool keep_aligning)
5879 tree size_tree = 0;
5880 enum machine_mode mode = VOIDmode;
5881 tree offset = size_zero_node;
5882 tree bit_offset = bitsize_zero_node;
5884 /* First get the mode, signedness, and size. We do this from just the
5885 outermost expression. */
5886 if (TREE_CODE (exp) == COMPONENT_REF)
5888 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5889 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5890 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5892 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5894 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5896 size_tree = TREE_OPERAND (exp, 1);
5897 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
5898 || TYPE_UNSIGNED (TREE_TYPE (exp)));
5900 /* For vector types, if the access has the correct size, use the
5901 mode of the inner type. */
5902 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5903 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5904 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5905 mode = TYPE_MODE (TREE_TYPE (exp));
5907 else
5909 mode = TYPE_MODE (TREE_TYPE (exp));
5910 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5912 if (mode == BLKmode)
5913 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5914 else
5915 *pbitsize = GET_MODE_BITSIZE (mode);
5918 if (size_tree != 0)
5920 if (! host_integerp (size_tree, 1))
5921 mode = BLKmode, *pbitsize = -1;
5922 else
5923 *pbitsize = tree_low_cst (size_tree, 1);
5926 *pmode = mode;
5928 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5929 and find the ultimate containing object. */
5930 while (1)
5932 switch (TREE_CODE (exp))
5934 case BIT_FIELD_REF:
5935 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5936 TREE_OPERAND (exp, 2));
5937 break;
5939 case COMPONENT_REF:
5941 tree field = TREE_OPERAND (exp, 1);
5942 tree this_offset = component_ref_field_offset (exp);
5944 /* If this field hasn't been filled in yet, don't go past it.
5945 This should only happen when folding expressions made during
5946 type construction. */
5947 if (this_offset == 0)
5948 break;
5950 offset = size_binop (PLUS_EXPR, offset, this_offset);
5951 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5952 DECL_FIELD_BIT_OFFSET (field));
5954 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5956 break;
5958 case ARRAY_REF:
5959 case ARRAY_RANGE_REF:
5961 tree index = TREE_OPERAND (exp, 1);
5962 tree low_bound = array_ref_low_bound (exp);
5963 tree unit_size = array_ref_element_size (exp);
5965 /* We assume all arrays have sizes that are a multiple of a byte.
5966 First subtract the lower bound, if any, in the type of the
5967 index, then convert to sizetype and multiply by the size of
5968 the array element. */
5969 if (! integer_zerop (low_bound))
5970 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5971 index, low_bound);
5973 offset = size_binop (PLUS_EXPR, offset,
5974 size_binop (MULT_EXPR,
5975 fold_convert (sizetype, index),
5976 unit_size));
5978 break;
5980 case REALPART_EXPR:
5981 break;
5983 case IMAGPART_EXPR:
5984 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5985 bitsize_int (*pbitsize));
5986 break;
5988 case VIEW_CONVERT_EXPR:
5989 if (keep_aligning && STRICT_ALIGNMENT
5990 && (TYPE_ALIGN (TREE_TYPE (exp))
5991 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5992 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5993 < BIGGEST_ALIGNMENT)
5994 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5995 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5996 goto done;
5997 break;
5999 default:
6000 goto done;
6003 /* If any reference in the chain is volatile, the effect is volatile. */
6004 if (TREE_THIS_VOLATILE (exp))
6005 *pvolatilep = 1;
6007 exp = TREE_OPERAND (exp, 0);
6009 done:
6011 /* If OFFSET is constant, see if we can return the whole thing as a
6012 constant bit position. Make sure to handle overflow during
6013 this conversion. */
6014 if (host_integerp (offset, 0))
6016 double_int tem = double_int_mul (tree_to_double_int (offset),
6017 uhwi_to_double_int (BITS_PER_UNIT));
6018 tem = double_int_add (tem, tree_to_double_int (bit_offset));
6019 if (double_int_fits_in_shwi_p (tem))
6021 *pbitpos = double_int_to_shwi (tem);
6022 *poffset = NULL_TREE;
6023 return exp;
6027 /* Otherwise, split it up. */
6028 *pbitpos = tree_low_cst (bit_offset, 0);
6029 *poffset = offset;
6031 return exp;
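
/* Editorial sketch, not part of the original source: the typical
   calling pattern for get_inner_reference.  The example_ name is
   hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_decompose_reference (tree ref)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep = 0;
  /* BASE is the ultimate containing object; the access covers BITSIZE
     bits starting BITPOS bits into BASE, plus OFFSET bytes when the
     position is variable (OFFSET is null otherwise).  */
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &offset,
				   &mode, &unsignedp, &volatilep, false);
  return base;
}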
6034 /* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
6035 look for whether EXP or any nested component-refs within EXP is marked
6036 as PACKED. */
6038 bool
6039 contains_packed_reference (const_tree exp)
6041 bool packed_p = false;
6043 while (1)
6045 switch (TREE_CODE (exp))
6047 case COMPONENT_REF:
6049 tree field = TREE_OPERAND (exp, 1);
6050 packed_p = DECL_PACKED (field)
6051 || TYPE_PACKED (TREE_TYPE (field))
6052 || TYPE_PACKED (TREE_TYPE (exp));
6053 if (packed_p)
6054 goto done;
6056 break;
6058 case BIT_FIELD_REF:
6059 case ARRAY_REF:
6060 case ARRAY_RANGE_REF:
6061 case REALPART_EXPR:
6062 case IMAGPART_EXPR:
6063 case VIEW_CONVERT_EXPR:
6064 break;
6066 default:
6067 goto done;
6069 exp = TREE_OPERAND (exp, 0);
6071 done:
6072 return packed_p;
6075 /* Return a tree of sizetype representing the size, in bytes, of the element
6076 of EXP, an ARRAY_REF. */
6078 tree
6079 array_ref_element_size (tree exp)
6081 tree aligned_size = TREE_OPERAND (exp, 3);
6082 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6084 /* If a size was specified in the ARRAY_REF, it's the size measured
6085 in alignment units of the element type. So multiply by that value. */
6086 if (aligned_size)
6088 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6089 sizetype from another type of the same width and signedness. */
6090 if (TREE_TYPE (aligned_size) != sizetype)
6091 aligned_size = fold_convert (sizetype, aligned_size);
6092 return size_binop (MULT_EXPR, aligned_size,
6093 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6096 /* Otherwise, take the size from that of the element type. Substitute
6097 any PLACEHOLDER_EXPR that we have. */
6098 else
6099 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6102 /* Return a tree representing the lower bound of the array mentioned in
6103 EXP, an ARRAY_REF. */
6105 tree
6106 array_ref_low_bound (tree exp)
6108 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6110 /* If a lower bound is specified in EXP, use it. */
6111 if (TREE_OPERAND (exp, 2))
6112 return TREE_OPERAND (exp, 2);
6114 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6115 substituting for a PLACEHOLDER_EXPR as needed. */
6116 if (domain_type && TYPE_MIN_VALUE (domain_type))
6117 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6119 /* Otherwise, return a zero of the appropriate type. */
6120 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6123 /* Return a tree representing the upper bound of the array mentioned in
6124 EXP, an ARRAY_REF. */
6126 tree
6127 array_ref_up_bound (tree exp)
6129 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6131 /* If there is a domain type and it has an upper bound, use it, substituting
6132 for a PLACEHOLDER_EXPR as needed. */
6133 if (domain_type && TYPE_MAX_VALUE (domain_type))
6134 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6136 /* Otherwise fail. */
6137 return NULL_TREE;
6140 /* Return a tree representing the offset, in bytes, of the field referenced
6141 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6143 tree
6144 component_ref_field_offset (tree exp)
6146 tree aligned_offset = TREE_OPERAND (exp, 2);
6147 tree field = TREE_OPERAND (exp, 1);
6149 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6150 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6151 value. */
6152 if (aligned_offset)
6154 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6155 sizetype from another type of the same width and signedness. */
6156 if (TREE_TYPE (aligned_offset) != sizetype)
6157 aligned_offset = fold_convert (sizetype, aligned_offset);
6158 return size_binop (MULT_EXPR, aligned_offset,
6159 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
6162 /* Otherwise, take the offset from that of the field. Substitute
6163 any PLACEHOLDER_EXPR that we have. */
6164 else
6165 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
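
/* Editorial sketch, not part of the original source: combining the
   ARRAY_REF accessors above to rebuild the byte offset of an array
   element, mirroring the ARRAY_REF case of get_inner_reference.  The
   example_ name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_array_byte_offset (tree array_ref)
{
  tree index = TREE_OPERAND (array_ref, 1);
  tree low_bound = array_ref_low_bound (array_ref);
  tree unit_size = array_ref_element_size (array_ref);

  /* Subtract the lower bound in the index type, then convert to
     sizetype and scale by the element size.  */
  if (! integer_zerop (low_bound))
    index = fold_build2 (MINUS_EXPR, TREE_TYPE (index), index, low_bound);
  return size_binop (MULT_EXPR, fold_convert (sizetype, index), unit_size);
}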
6168 /* Return 1 if T is an expression that get_inner_reference handles. */
6170 int
6171 handled_component_p (const_tree t)
6173 switch (TREE_CODE (t))
6175 case BIT_FIELD_REF:
6176 case COMPONENT_REF:
6177 case ARRAY_REF:
6178 case ARRAY_RANGE_REF:
6179 case VIEW_CONVERT_EXPR:
6180 case REALPART_EXPR:
6181 case IMAGPART_EXPR:
6182 return 1;
6184 default:
6185 return 0;
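
/* Editorial sketch, not part of the original source: the usual idiom
   built on handled_component_p -- strip reference components until the
   base object is reached, as contains_packed_reference does above.
   The example_ name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_reference_base (tree t)
{
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);
  return t;
}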
6189 /* Given an rtx VALUE that may contain additions and multiplications, return
6190 an equivalent value that just refers to a register, memory, or constant.
6191 This is done by generating instructions to perform the arithmetic and
6192 returning a pseudo-register containing the value.
6194 The returned value may be a REG, SUBREG, MEM or constant. */
6196 rtx
6197 force_operand (rtx value, rtx target)
6199 rtx op1, op2;
6200 /* Use subtarget as the target for operand 0 of a binary operation. */
6201 rtx subtarget = get_subtarget (target);
6202 enum rtx_code code = GET_CODE (value);
6204 /* Check for subreg applied to an expression produced by loop optimizer. */
6205 if (code == SUBREG
6206 && !REG_P (SUBREG_REG (value))
6207 && !MEM_P (SUBREG_REG (value)))
6209 value
6210 = simplify_gen_subreg (GET_MODE (value),
6211 force_reg (GET_MODE (SUBREG_REG (value)),
6212 force_operand (SUBREG_REG (value),
6213 NULL_RTX)),
6214 GET_MODE (SUBREG_REG (value)),
6215 SUBREG_BYTE (value));
6216 code = GET_CODE (value);
6219 /* Check for a PIC address load. */
6220 if ((code == PLUS || code == MINUS)
6221 && XEXP (value, 0) == pic_offset_table_rtx
6222 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6223 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6224 || GET_CODE (XEXP (value, 1)) == CONST))
6226 if (!subtarget)
6227 subtarget = gen_reg_rtx (GET_MODE (value));
6228 emit_move_insn (subtarget, value);
6229 return subtarget;
6232 if (ARITHMETIC_P (value))
6234 op2 = XEXP (value, 1);
6235 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6236 subtarget = 0;
6237 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6239 code = PLUS;
6240 op2 = negate_rtx (GET_MODE (value), op2);
6243 /* Check for an addition with OP2 a constant integer and our first
6244 operand a PLUS of a virtual register and something else. In that
6245 case, we want to emit the sum of the virtual register and the
6246 constant first and then add the other value. This allows virtual
6247 register instantiation to simply modify the constant rather than
6248 creating another one around this addition. */
6249 if (code == PLUS && GET_CODE (op2) == CONST_INT
6250 && GET_CODE (XEXP (value, 0)) == PLUS
6251 && REG_P (XEXP (XEXP (value, 0), 0))
6252 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6253 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6255 rtx temp = expand_simple_binop (GET_MODE (value), code,
6256 XEXP (XEXP (value, 0), 0), op2,
6257 subtarget, 0, OPTAB_LIB_WIDEN);
6258 return expand_simple_binop (GET_MODE (value), code, temp,
6259 force_operand (XEXP (XEXP (value,
6260 0), 1), 0),
6261 target, 0, OPTAB_LIB_WIDEN);
6264 op1 = force_operand (XEXP (value, 0), subtarget);
6265 op2 = force_operand (op2, NULL_RTX);
6266 switch (code)
6268 case MULT:
6269 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6270 case DIV:
6271 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6272 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6273 target, 1, OPTAB_LIB_WIDEN);
6274 else
6275 return expand_divmod (0,
6276 FLOAT_MODE_P (GET_MODE (value))
6277 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6278 GET_MODE (value), op1, op2, target, 0);
6279 case MOD:
6280 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6281 target, 0);
6282 case UDIV:
6283 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6284 target, 1);
6285 case UMOD:
6286 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6287 target, 1);
6288 case ASHIFTRT:
6289 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6290 target, 0, OPTAB_LIB_WIDEN);
6291 default:
6292 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6293 target, 1, OPTAB_LIB_WIDEN);
6296 if (UNARY_P (value))
6298 if (!target)
6299 target = gen_reg_rtx (GET_MODE (value));
6300 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6301 switch (code)
6303 case ZERO_EXTEND:
6304 case SIGN_EXTEND:
6305 case TRUNCATE:
6306 case FLOAT_EXTEND:
6307 case FLOAT_TRUNCATE:
6308 convert_move (target, op1, code == ZERO_EXTEND);
6309 return target;
6311 case FIX:
6312 case UNSIGNED_FIX:
6313 expand_fix (target, op1, code == UNSIGNED_FIX);
6314 return target;
6316 case FLOAT:
6317 case UNSIGNED_FLOAT:
6318 expand_float (target, op1, code == UNSIGNED_FLOAT);
6319 return target;
6321 default:
6322 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6326 #ifdef INSN_SCHEDULING
6327 /* On machines that have insn scheduling, we want all memory references to be
6328 explicit, so we need to deal with such paradoxical SUBREGs. */
6329 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6330 && (GET_MODE_SIZE (GET_MODE (value))
6331 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6332 value
6333 = simplify_gen_subreg (GET_MODE (value),
6334 force_reg (GET_MODE (SUBREG_REG (value)),
6335 force_operand (SUBREG_REG (value),
6336 NULL_RTX)),
6337 GET_MODE (SUBREG_REG (value)),
6338 SUBREG_BYTE (value));
6339 #endif
6341 return value;
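
/* Editorial sketch, not part of the original source: a typical
   force_operand call site.  Build (plus REG disp) and let
   force_operand emit whatever arithmetic is needed; with a null
   target the sum lands in a fresh pseudo.  The example_ name is
   hypothetical.  */

static rtx ATTRIBUTE_UNUSED
example_force_displacement (rtx reg, HOST_WIDE_INT disp)
{
  rtx sum = gen_rtx_PLUS (GET_MODE (reg), reg, GEN_INT (disp));
  return force_operand (sum, NULL_RTX);
}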
6344 /* Subroutine of expand_expr: return nonzero iff there is no way that
6345 EXP can reference X, which is being modified. TOP_P is nonzero if this
6346 call is going to be used to determine whether we need a temporary
6347 for EXP, as opposed to a recursive call to this function.
6349 It is always safe for this routine to return zero since it merely
6350 searches for optimization opportunities. */
6352 static int
6353 safe_from_p (const_rtx x, tree exp, int top_p)
6355 rtx exp_rtl = 0;
6356 int i, nops;
6358 if (x == 0
6359 /* If EXP has varying size, we MUST use a target since we currently
6360 have no way of allocating temporaries of variable size
6361 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6362 So we assume here that something at a higher level has prevented a
6363 clash. This is somewhat bogus, but the best we can do. Only
6364 do this when X is BLKmode and when we are at the top level. */
6365 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6366 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6367 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6368 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6369 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6370 != INTEGER_CST)
6371 && GET_MODE (x) == BLKmode)
6372 /* If X is in the outgoing argument area, it is always safe. */
6373 || (MEM_P (x)
6374 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6375 || (GET_CODE (XEXP (x, 0)) == PLUS
6376 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6377 return 1;
6379 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6380 find the underlying pseudo. */
6381 if (GET_CODE (x) == SUBREG)
6383 x = SUBREG_REG (x);
6384 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6385 return 0;
6388 /* Now look at our tree code and possibly recurse. */
6389 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6391 case tcc_declaration:
6392 exp_rtl = DECL_RTL_IF_SET (exp);
6393 break;
6395 case tcc_constant:
6396 return 1;
6398 case tcc_exceptional:
6399 if (TREE_CODE (exp) == TREE_LIST)
6401 while (1)
6403 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6404 return 0;
6405 exp = TREE_CHAIN (exp);
6406 if (!exp)
6407 return 1;
6408 if (TREE_CODE (exp) != TREE_LIST)
6409 return safe_from_p (x, exp, 0);
6412 else if (TREE_CODE (exp) == CONSTRUCTOR)
6414 constructor_elt *ce;
6415 unsigned HOST_WIDE_INT idx;
6417 for (idx = 0;
6418 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6419 idx++)
6420 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6421 || !safe_from_p (x, ce->value, 0))
6422 return 0;
6423 return 1;
6425 else if (TREE_CODE (exp) == ERROR_MARK)
6426 return 1; /* An already-visited SAVE_EXPR? */
6427 else
6428 return 0;
6430 case tcc_statement:
6431 /* The only case we look at here is the DECL_INITIAL inside a
6432 DECL_EXPR. */
6433 return (TREE_CODE (exp) != DECL_EXPR
6434 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6435 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6436 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6438 case tcc_binary:
6439 case tcc_comparison:
6440 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6441 return 0;
6442 /* Fall through. */
6444 case tcc_unary:
6445 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6447 case tcc_expression:
6448 case tcc_reference:
6449 case tcc_vl_exp:
6450 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6451 the expression. If it is set, we conflict iff we are that rtx or
6452 both are in memory. Otherwise, we check all operands of the
6453 expression recursively. */
6455 switch (TREE_CODE (exp))
6457 case ADDR_EXPR:
6458 /* If the operand is static or we are static, we can't conflict.
6459 Likewise if we don't conflict with the operand at all. */
6460 if (staticp (TREE_OPERAND (exp, 0))
6461 || TREE_STATIC (exp)
6462 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6463 return 1;
6465 /* Otherwise, the only way this can conflict is if we are taking
6466 the address of a DECL whose address is part of X, which is
6467 very rare. */
6468 exp = TREE_OPERAND (exp, 0);
6469 if (DECL_P (exp))
6471 if (!DECL_RTL_SET_P (exp)
6472 || !MEM_P (DECL_RTL (exp)))
6473 return 0;
6474 else
6475 exp_rtl = XEXP (DECL_RTL (exp), 0);
6477 break;
6479 case MISALIGNED_INDIRECT_REF:
6480 case ALIGN_INDIRECT_REF:
6481 case INDIRECT_REF:
6482 if (MEM_P (x)
6483 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6484 get_alias_set (exp)))
6485 return 0;
6486 break;
6488 case CALL_EXPR:
6489 /* Assume that the call will clobber all hard registers and
6490 all of memory. */
6491 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6492 || MEM_P (x))
6493 return 0;
6494 break;
6496 case WITH_CLEANUP_EXPR:
6497 case CLEANUP_POINT_EXPR:
6498 /* Lowered by gimplify.c. */
6499 gcc_unreachable ();
6501 case SAVE_EXPR:
6502 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6504 default:
6505 break;
6508 /* If we have an rtx, we do not need to scan our operands. */
6509 if (exp_rtl)
6510 break;
6512 nops = TREE_OPERAND_LENGTH (exp);
6513 for (i = 0; i < nops; i++)
6514 if (TREE_OPERAND (exp, i) != 0
6515 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6516 return 0;
6518 break;
6520 case tcc_type:
6521 /* Should never get a type here. */
6522 gcc_unreachable ();
6524 case tcc_gimple_stmt:
6525 gcc_unreachable ();
6528 /* If we have an rtl, find any enclosed object. Then see if we conflict
6529 with it. */
6530 if (exp_rtl)
6532 if (GET_CODE (exp_rtl) == SUBREG)
6534 exp_rtl = SUBREG_REG (exp_rtl);
6535 if (REG_P (exp_rtl)
6536 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6537 return 0;
6540 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6541 are memory and they conflict. */
6542 return ! (rtx_equal_p (x, exp_rtl)
6543 || (MEM_P (x) && MEM_P (exp_rtl)
6544 && true_dependence (exp_rtl, VOIDmode, x,
6545 rtx_addr_varies_p)));
6548 /* If we reach here, it is safe. */
6549 return 1;
6553 /* Return the highest power of two that EXP is known to be a multiple of.
6554 This is used in updating alignment of MEMs in array references. */
6556 unsigned HOST_WIDE_INT
6557 highest_pow2_factor (const_tree exp)
6559 unsigned HOST_WIDE_INT c0, c1;
6561 switch (TREE_CODE (exp))
6563 case INTEGER_CST:
6564 /* We can find the lowest bit that's a one. If the low
6565 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6566 We need to handle this case since we can find it in a COND_EXPR,
6567 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6568 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6569 later ICE. */
6570 if (TREE_OVERFLOW (exp))
6571 return BIGGEST_ALIGNMENT;
6572 else
6574 /* Note: tree_low_cst is intentionally not used here;
6575 we don't care about the upper bits. */
6576 c0 = TREE_INT_CST_LOW (exp);
6577 c0 &= -c0;
6578 return c0 ? c0 : BIGGEST_ALIGNMENT;
6580 break;
6582 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6583 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6584 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6585 return MIN (c0, c1);
6587 case MULT_EXPR:
6588 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6589 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6590 return c0 * c1;
6592 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6593 case CEIL_DIV_EXPR:
6594 if (integer_pow2p (TREE_OPERAND (exp, 1))
6595 && host_integerp (TREE_OPERAND (exp, 1), 1))
6597 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6598 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6599 return MAX (1, c0 / c1);
6601 break;
6603 case NOP_EXPR: case CONVERT_EXPR:
6604 case SAVE_EXPR:
6605 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6607 case COMPOUND_EXPR:
6608 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6610 case COND_EXPR:
6611 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6612 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6613 return MIN (c0, c1);
6615 default:
6616 break;
6619 return 1;
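
/* Editorial sketch, not part of the original source: a worked example
   of the recursion above.  The example_ name is hypothetical.  */

static unsigned HOST_WIDE_INT ATTRIBUTE_UNUSED
example_known_pow2_factor (tree exp)
{
  /* For EXP = i * 12 + 8: the MULT_EXPR contributes 1 * 4 = 4 (4 is
     the largest power of two dividing 12), the INTEGER_CST 8
     contributes 8, and the PLUS_EXPR takes MIN (4, 8) = 4, so the
     whole expression is known to be a multiple of 4.  */
  return highest_pow2_factor (exp);
}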
6622 /* Similar, except that the alignment requirements of TARGET are
6623 taken into account. Assume it is at least as aligned as its
6624 type, unless it is a COMPONENT_REF in which case the layout of
6625 the structure gives the alignment. */
6627 static unsigned HOST_WIDE_INT
6628 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6630 unsigned HOST_WIDE_INT target_align, factor;
6632 factor = highest_pow2_factor (exp);
6633 if (TREE_CODE (target) == COMPONENT_REF)
6634 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6635 else
6636 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6637 return MAX (factor, target_align);
6640 /* Return &VAR expression for emulated thread local VAR. */
6642 static tree
6643 emutls_var_address (tree var)
6645 tree emuvar = emutls_decl (var);
6646 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6647 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6648 tree arglist = build_tree_list (NULL_TREE, arg);
6649 tree call = build_function_call_expr (fn, arglist);
6650 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
6654 /* Subroutine of expand_expr. Expand the two operands of a binary
6655 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6656 The value may be stored in TARGET if TARGET is nonzero. The
6657 MODIFIER argument is as documented by expand_expr. */
6659 static void
6660 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6661 enum expand_modifier modifier)
6663 if (! safe_from_p (target, exp1, 1))
6664 target = 0;
6665 if (operand_equal_p (exp0, exp1, 0))
6667 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6668 *op1 = copy_rtx (*op0);
6670 else
6672 /* If we need to preserve evaluation order, copy exp0 into its own
6673 temporary variable so that it can't be clobbered by exp1. */
6674 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6675 exp0 = save_expr (exp0);
6676 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6677 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
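
/* Editorial sketch, not part of the original source: a typical
   binary-operator expansion built on expand_operands -- both operands
   are evaluated with the clobber and evaluation-order guards handled
   centrally, then the operation is emitted.  The example_ name is
   hypothetical.  */

static rtx ATTRIBUTE_UNUSED
example_expand_plus (tree exp, rtx target)
{
  rtx op0, op1;

  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   target, &op0, &op1, EXPAND_NORMAL);
  return expand_simple_binop (TYPE_MODE (TREE_TYPE (exp)), PLUS,
			      op0, op1, target, 0, OPTAB_LIB_WIDEN);
}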
6682 /* Return a MEM that contains constant EXP. DEFER is as for
6683 output_constant_def and MODIFIER is as for expand_expr. */
6685 static rtx
6686 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6688 rtx mem;
6690 mem = output_constant_def (exp, defer);
6691 if (modifier != EXPAND_INITIALIZER)
6692 mem = use_anchored_address (mem);
6693 return mem;
6696 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6697 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6699 static rtx
6700 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6701 enum expand_modifier modifier)
6703 rtx result, subtarget;
6704 tree inner, offset;
6705 HOST_WIDE_INT bitsize, bitpos;
6706 int volatilep, unsignedp;
6707 enum machine_mode mode1;
6709 /* If we are taking the address of a constant and are at the top level,
6710 we have to use output_constant_def since we can't call force_const_mem
6711 at top level. */
6712 /* ??? This should be considered a front-end bug. We should not be
6713 generating an ADDR_EXPR of something that isn't an LVALUE. The only
6714 exception here is STRING_CST. */
6715 if (CONSTANT_CLASS_P (exp))
6716 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6718 /* Everything must be something allowed by is_gimple_addressable. */
6719 switch (TREE_CODE (exp))
6721 case INDIRECT_REF:
6722 /* This case will happen via recursion for &a->b. */
6723 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6725 case CONST_DECL:
6726 /* Recurse and make the output_constant_def clause above handle this. */
6727 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6728 tmode, modifier);
6730 case REALPART_EXPR:
6731 /* The real part of the complex number is always first; therefore
6732 the address is the same as the address of the parent object. */
6733 offset = 0;
6734 bitpos = 0;
6735 inner = TREE_OPERAND (exp, 0);
6736 break;
6738 case IMAGPART_EXPR:
6739 /* The imaginary part of the complex number is always second.
6740 The expression is therefore always offset by the size of the
6741 scalar type. */
6742 offset = 0;
6743 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6744 inner = TREE_OPERAND (exp, 0);
6745 break;
6747 case VAR_DECL:
6748 /* TLS emulation hook - replace __thread VAR's &VAR with
6749 __emutls_get_address (&_emutls.VAR). */
6750 if (! targetm.have_tls
6751 && TREE_CODE (exp) == VAR_DECL
6752 && DECL_THREAD_LOCAL_P (exp))
6754 exp = emutls_var_address (exp);
6755 return expand_expr (exp, target, tmode, modifier);
6757 /* Fall through. */
6759 default:
6760 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6761 expand_expr, as that can have various side effects; LABEL_DECLs, for
6762 example, may not have their DECL_RTL set yet. Expand the rtl of
6763 CONSTRUCTORs too, which should yield a memory reference for the
6764 constructor's contents. Assume language specific tree nodes can
6765 be expanded in some interesting way. */
6766 if (DECL_P (exp)
6767 || TREE_CODE (exp) == CONSTRUCTOR
6768 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6770 result = expand_expr (exp, target, tmode,
6771 modifier == EXPAND_INITIALIZER
6772 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6774 /* If the DECL isn't in memory, then the DECL wasn't properly
6775 marked TREE_ADDRESSABLE, which will be either a front-end
6776 or a tree optimizer bug. */
6777 gcc_assert (MEM_P (result));
6778 result = XEXP (result, 0);
6780 /* ??? Is this needed anymore? */
6781 if (DECL_P (exp) && ! TREE_USED (exp))
6783 assemble_external (exp);
6784 TREE_USED (exp) = 1;
6787 if (modifier != EXPAND_INITIALIZER
6788 && modifier != EXPAND_CONST_ADDRESS)
6789 result = force_operand (result, target);
6790 return result;
6793 /* Pass FALSE as the last argument to get_inner_reference although
6794 we are expanding to RTL. The rationale is that we know how to
6795 handle "aligning nodes" here: we can just bypass them because
6796 they won't change the final object whose address will be returned
6797 (they actually exist only for that purpose). */
6798 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6799 &mode1, &unsignedp, &volatilep, false);
6800 break;
6803 /* We must have made progress. */
6804 gcc_assert (inner != exp);
6806 subtarget = offset || bitpos ? NULL_RTX : target;
6807 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6809 if (offset)
6811 rtx tmp;
6813 if (modifier != EXPAND_NORMAL)
6814 result = force_operand (result, NULL);
6815 tmp = expand_expr (offset, NULL_RTX, tmode,
6816 modifier == EXPAND_INITIALIZER
6817 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6819 result = convert_memory_address (tmode, result);
6820 tmp = convert_memory_address (tmode, tmp);
6822 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6823 result = gen_rtx_PLUS (tmode, result, tmp);
6824 else
6826 subtarget = bitpos ? NULL_RTX : target;
6827 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6828 1, OPTAB_LIB_WIDEN);
6832 if (bitpos)
6834 /* Someone beforehand should have rejected taking the address
6835 of such an object. */
6836 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6838 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6839 if (modifier < EXPAND_SUM)
6840 result = force_operand (result, target);
6843 return result;
6846 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6847 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6849 static rtx
6850 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6851 enum expand_modifier modifier)
6853 enum machine_mode rmode;
6854 rtx result;
6856 /* Target mode of VOIDmode says "whatever's natural". */
6857 if (tmode == VOIDmode)
6858 tmode = TYPE_MODE (TREE_TYPE (exp));
6860 /* We can get called with some Weird Things if the user does silliness
6861 like "(short) &a". In that case, convert_memory_address won't do
6862 the right thing, so ignore the given target mode. */
6863 if (tmode != Pmode && tmode != ptr_mode)
6864 tmode = Pmode;
6866 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6867 tmode, modifier);
6869 /* Despite expand_expr's claims about ignoring TMODE when not
6870 strictly convenient, things break if we don't honor it. Note
6871 that combined with the above, we only do this for pointer modes. */
6872 rmode = GET_MODE (result);
6873 if (rmode == VOIDmode)
6874 rmode = tmode;
6875 if (rmode != tmode)
6876 result = convert_memory_address (tmode, result);
6878 return result;
6881 /* Generate code for computing CONSTRUCTOR EXP.
6882 An rtx for the computed value is returned. If AVOID_TEMP_MEM
6883 is TRUE, instead of creating a temporary variable in memory
6884 NULL is returned and the caller needs to handle it differently. */
6886 static rtx
6887 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
6888 bool avoid_temp_mem)
6890 tree type = TREE_TYPE (exp);
6891 enum machine_mode mode = TYPE_MODE (type);
6893 /* Try to avoid creating a temporary at all. This is possible
6894 if all of the initializer is zero.
6895 FIXME: try to handle all the [0..255] initializers that
6896 memset can handle. */
6897 if (TREE_STATIC (exp)
6898 && !TREE_ADDRESSABLE (exp)
6899 && target != 0 && mode == BLKmode
6900 && all_zeros_p (exp))
6902 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6903 return target;
6906 /* All elts simple constants => refer to a constant in memory. But
6907 if this is a non-BLKmode mode, let it store a field at a time
6908 since that should make a CONST_INT or CONST_DOUBLE when we
6909 fold. Likewise, if we have a target we can use, it is best to
6910 store directly into the target unless the type is large enough
6911 that memcpy will be used. If we are making an initializer and
6912 all operands are constant, put it in memory as well.
6914 FIXME: Avoid trying to fill vector constructors piece-meal.
6915 Output them with output_constant_def below unless we're sure
6916 they're zeros. This should go away when vector initializers
6917 are treated like VECTOR_CST instead of arrays. */
6918 if ((TREE_STATIC (exp)
6919 && ((mode == BLKmode
6920 && ! (target != 0 && safe_from_p (target, exp, 1)))
6921 || TREE_ADDRESSABLE (exp)
6922 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6923 && (! MOVE_BY_PIECES_P
6924 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6925 TYPE_ALIGN (type)))
6926 && ! mostly_zeros_p (exp))))
6927 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
6928 && TREE_CONSTANT (exp)))
6930 rtx constructor;
6932 if (avoid_temp_mem)
6933 return NULL_RTX;
6935 constructor = expand_expr_constant (exp, 1, modifier);
6937 if (modifier != EXPAND_CONST_ADDRESS
6938 && modifier != EXPAND_INITIALIZER
6939 && modifier != EXPAND_SUM)
6940 constructor = validize_mem (constructor);
6942 return constructor;
6945 /* Handle calls that pass values in multiple non-contiguous
6946 locations. The Irix 6 ABI has examples of this. */
6947 if (target == 0 || ! safe_from_p (target, exp, 1)
6948 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
6950 if (avoid_temp_mem)
6951 return NULL_RTX;
6953 target
6954 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
6955 | (TREE_READONLY (exp)
6956 * TYPE_QUAL_CONST))),
6957 0, TREE_ADDRESSABLE (exp), 1);
6960 store_constructor (exp, target, 0, int_expr_size (exp));
6961 return target;
6965 /* expand_expr: generate code for computing expression EXP.
6966 An rtx for the computed value is returned. The value is never null.
6967 In the case of a void EXP, const0_rtx is returned.
6969 The value may be stored in TARGET if TARGET is nonzero.
6970 TARGET is just a suggestion; callers must assume that
6971 the rtx returned may not be the same as TARGET.
6973 If TARGET is CONST0_RTX, it means that the value will be ignored.
6975 If TMODE is not VOIDmode, it suggests generating the
6976 result in mode TMODE. But this is done only when convenient.
6977 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6978 TMODE is just a suggestion; callers must assume that
6979 the rtx returned may not have mode TMODE.
6981 Note that TARGET may have neither TMODE nor MODE. In that case, it
6982 probably will not be used.
6984 If MODIFIER is EXPAND_SUM then when EXP is an addition
6985 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6986 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6987 products as above, or REG or MEM, or constant.
6988 Ordinarily in such cases we would output mul or add instructions
6989 and then return a pseudo reg containing the sum.
6991 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6992 it also marks a label as absolutely required (it can't be dead).
6993 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6994 This is used for outputting expressions used in initializers.
6996 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6997 with a constant address even if that address is not normally legitimate.
6998 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7000 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7001 a call parameter. Such targets require special care as we haven't yet
7002 marked TARGET so that it's safe from being trashed by libcalls. We
7003 don't want to use TARGET for anything but the final result;
7004 intermediate values must go elsewhere. Additionally, calls to
7005 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7007 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7008 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7009 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7010 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7011 recursively. */
7013 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
7014 enum expand_modifier, rtx *);
7016 rtx
7017 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7018 enum expand_modifier modifier, rtx *alt_rtl)
7020 int rn = -1;
7021 rtx ret, last = NULL;
7023 /* Handle ERROR_MARK before anybody tries to access its type. */
7024 if (TREE_CODE (exp) == ERROR_MARK
7025 || TREE_CODE (exp) == PREDICT_EXPR
7026 || (!GIMPLE_TUPLE_P (exp) && TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7028 ret = CONST0_RTX (tmode);
7029 return ret ? ret : const0_rtx;
7032 if (flag_non_call_exceptions)
7034 rn = lookup_stmt_eh_region (exp);
7035 /* If rn < 0, then either (1) tree-ssa is not used or (2) the expression doesn't throw. */
7036 if (rn >= 0)
7037 last = get_last_insn ();
7040 /* If this is an expression of some kind and it has an associated line
7041 number, then emit the line number before expanding the expression.
7043 We need to save and restore the file and line information so that
7044 errors discovered during expansion are emitted with the right
7045 information. It would be better if the diagnostic routines
7046 used the file/line information embedded in the tree nodes rather
7047 than globals. */
7048 if (cfun && EXPR_HAS_LOCATION (exp))
7050 location_t saved_location = input_location;
7051 input_location = EXPR_LOCATION (exp);
7052 set_curr_insn_source_location (input_location);
7054 /* Record where the insns produced belong. */
7055 set_curr_insn_block (TREE_BLOCK (exp));
7057 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7059 input_location = saved_location;
7061 else
7063 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7066 /* If using non-call exceptions, mark all insns that may trap.
7067 expand_call() will mark CALL_INSNs before we get to this code,
7068 but it doesn't handle libcalls, and these may trap. */
7069 if (rn >= 0)
7071 rtx insn;
7072 for (insn = next_real_insn (last); insn;
7073 insn = next_real_insn (insn))
7075 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
7076 /* If we want exceptions for non-call insns, any
7077 may_trap_p instruction may throw. */
7078 && GET_CODE (PATTERN (insn)) != CLOBBER
7079 && GET_CODE (PATTERN (insn)) != USE
7080 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
7082 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
7083 REG_NOTES (insn));
7088 return ret;
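
/* Editorial sketch, not part of the original source: expand_expr (see
   expr.h) is a thin wrapper around expand_expr_real with a null
   ALT_RTL; a typical call expands a tree in its natural mode and
   copies a non-register, non-constant result into a pseudo.  The
   example_ name is hypothetical.  */

static rtx ATTRIBUTE_UNUSED
example_expand_to_reg (tree exp)
{
  rtx val = expand_expr_real (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL,
			      NULL);
  if (!CONSTANT_P (val) && !REG_P (val))
    val = copy_to_reg (val);
  return val;
}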
7091 static rtx
7092 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
7093 enum expand_modifier modifier, rtx *alt_rtl)
7095 rtx op0, op1, op2, temp, decl_rtl;
7096 tree type;
7097 int unsignedp;
7098 enum machine_mode mode;
7099 enum tree_code code = TREE_CODE (exp);
7100 optab this_optab;
7101 rtx subtarget, original_target;
7102 int ignore;
7103 tree context, subexp0, subexp1;
7104 bool reduce_bit_field;
7105 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7106 ? reduce_to_bit_field_precision ((expr), \
7107 target, \
7108 type) \
7109 : (expr))
7111 if (GIMPLE_STMT_P (exp))
7113 type = void_type_node;
7114 mode = VOIDmode;
7115 unsignedp = 0;
7117 else
7119 type = TREE_TYPE (exp);
7120 mode = TYPE_MODE (type);
7121 unsignedp = TYPE_UNSIGNED (type);
7124 ignore = (target == const0_rtx
7125 || ((code == NOP_EXPR || code == CONVERT_EXPR
7126 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7127 && TREE_CODE (type) == VOID_TYPE));
7129 /* An operation in what may be a bit-field type needs the
7130 result to be reduced to the precision of the bit-field type,
7131 which is narrower than that of the type's mode. */
7132 reduce_bit_field = (!ignore
7133 && TREE_CODE (type) == INTEGER_TYPE
7134 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7136 /* If we are going to ignore this result, we need only do something
7137 if there is a side-effect somewhere in the expression. If there
7138 is, short-circuit the most common cases here. Note that we must
7139 not call expand_expr with anything but const0_rtx in case this
7140 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
7142 if (ignore)
7144 if (! TREE_SIDE_EFFECTS (exp))
7145 return const0_rtx;
7147 /* Ensure we reference a volatile object even if value is ignored, but
7148 don't do this if all we are doing is taking its address. */
7149 if (TREE_THIS_VOLATILE (exp)
7150 && TREE_CODE (exp) != FUNCTION_DECL
7151 && mode != VOIDmode && mode != BLKmode
7152 && modifier != EXPAND_CONST_ADDRESS)
7154 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
7155 if (MEM_P (temp))
7156 temp = copy_to_reg (temp);
7157 return const0_rtx;
7160 if (TREE_CODE_CLASS (code) == tcc_unary
7161 || code == COMPONENT_REF || code == INDIRECT_REF)
7162 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7163 modifier);
7165 else if (TREE_CODE_CLASS (code) == tcc_binary
7166 || TREE_CODE_CLASS (code) == tcc_comparison
7167 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
7169 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7170 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7171 return const0_rtx;
7173 else if (code == BIT_FIELD_REF)
7175 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7176 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7177 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
7178 return const0_rtx;
7181 target = 0;
7184 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7185 target = 0;
7187 /* Use subtarget as the target for operand 0 of a binary operation. */
7188 subtarget = get_subtarget (target);
7189 original_target = target;
7191 switch (code)
7193 case LABEL_DECL:
7195 tree function = decl_function_context (exp);
7197 temp = label_rtx (exp);
7198 temp = gen_rtx_LABEL_REF (Pmode, temp);
7200 if (function != current_function_decl
7201 && function != 0)
7202 LABEL_REF_NONLOCAL_P (temp) = 1;
7204 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
7205 return temp;
7208 case SSA_NAME:
7209 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
7210 NULL);
7212 case PARM_DECL:
7213 case VAR_DECL:
7214 /* If a static var's type was incomplete when the decl was written,
7215 but the type is complete now, lay out the decl now. */
7216 if (DECL_SIZE (exp) == 0
7217 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
7218 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
7219 layout_decl (exp, 0);
7221 /* TLS emulation hook - replace __thread vars with
7222 *__emutls_get_address (&_emutls.var). */
7223 if (! targetm.have_tls
7224 && TREE_CODE (exp) == VAR_DECL
7225 && DECL_THREAD_LOCAL_P (exp))
7227 exp = build_fold_indirect_ref (emutls_var_address (exp));
7228 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
7231 /* ... fall through ... */
7233 case FUNCTION_DECL:
7234 case RESULT_DECL:
7235 decl_rtl = DECL_RTL (exp);
7236 gcc_assert (decl_rtl);
7237 decl_rtl = copy_rtx (decl_rtl);
7239 /* Ensure the variable is marked as used even if it doesn't go
7240 through a parser. If it hasn't been used yet, write out an
7241 external definition. */
7242 if (! TREE_USED (exp))
7244 assemble_external (exp);
7245 TREE_USED (exp) = 1;
7248 /* Show we haven't gotten RTL for this yet. */
7249 temp = 0;
7251 /* Variables inherited from containing functions should have
7252 been lowered by this point. */
7253 context = decl_function_context (exp);
7254 gcc_assert (!context
7255 || context == current_function_decl
7256 || TREE_STATIC (exp)
7257 /* ??? C++ creates functions that are not TREE_STATIC. */
7258 || TREE_CODE (exp) == FUNCTION_DECL);
7260 /* This is the case of an array whose size is to be determined
7261 from its initializer, while the initializer is still being parsed.
7262 See expand_decl. */
7264 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
7265 temp = validize_mem (decl_rtl);
7267 /* If DECL_RTL is memory, we are in the normal case and the
7268 address is not valid, get the address into a register. */
7270 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
7272 if (alt_rtl)
7273 *alt_rtl = decl_rtl;
7274 decl_rtl = use_anchored_address (decl_rtl);
7275 if (modifier != EXPAND_CONST_ADDRESS
7276 && modifier != EXPAND_SUM
7277 && !memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0)))
7278 temp = replace_equiv_address (decl_rtl,
7279 copy_rtx (XEXP (decl_rtl, 0)));
7282 /* If we got something, return it. But first, set the alignment
7283 if the address is a register. */
7284 if (temp != 0)
7286 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
7287 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
7289 return temp;
7292 /* If the mode of DECL_RTL does not match that of the decl, it
7293 must be a promoted value. We return a SUBREG of the wanted mode,
7294 but mark it so that we know that it was already extended. */
7296 if (REG_P (decl_rtl)
7297 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7299 enum machine_mode pmode;
7301 /* Get the signedness used for this variable. Ensure we get the
7302 same mode we got when the variable was declared. */
7303 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7304 (TREE_CODE (exp) == RESULT_DECL
7305 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7306 gcc_assert (GET_MODE (decl_rtl) == pmode);
7308 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7309 SUBREG_PROMOTED_VAR_P (temp) = 1;
7310 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7311 return temp;
7314 return decl_rtl;
7316 case INTEGER_CST:
7317 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7318 TREE_INT_CST_HIGH (exp), mode);
7320 return temp;
7322 case VECTOR_CST:
7324 tree tmp = NULL_TREE;
7325 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7326 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
7327 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
7328 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
7329 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
7330 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
7331 return const_vector_from_tree (exp);
7332 if (GET_MODE_CLASS (mode) == MODE_INT)
7334 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7335 if (type_for_mode)
7336 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7338 if (!tmp)
7339 tmp = build_constructor_from_list (type,
7340 TREE_VECTOR_CST_ELTS (exp));
7341 return expand_expr (tmp, ignore ? const0_rtx : target,
7342 tmode, modifier);
7345 case CONST_DECL:
7346 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7348 case REAL_CST:
7349 /* If optimized, generate immediate CONST_DOUBLE
7350 which will be turned into memory by reload if necessary.
7352 We used to force a register so that loop.c could see it. But
7353 this does not allow gen_* patterns to perform optimizations with
7354 the constants. It also produces two insns in cases like "x = 1.0;".
7355 On most machines, floating-point constants are not permitted in
7356 many insns, so we'd end up copying it to a register in any case.
7358 Now, we do the copying in expand_binop, if appropriate. */
7359 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7360 TYPE_MODE (TREE_TYPE (exp)));
7362 case FIXED_CST:
7363 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
7364 TYPE_MODE (TREE_TYPE (exp)));
7366 case COMPLEX_CST:
7367 /* Handle evaluating a complex constant in a CONCAT target. */
7368 if (original_target && GET_CODE (original_target) == CONCAT)
7370 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7371 rtx rtarg, itarg;
7373 rtarg = XEXP (original_target, 0);
7374 itarg = XEXP (original_target, 1);
7376 /* Move the real and imaginary parts separately. */
7377 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
7378 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
7380 if (op0 != rtarg)
7381 emit_move_insn (rtarg, op0);
7382 if (op1 != itarg)
7383 emit_move_insn (itarg, op1);
7385 return original_target;
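/* Illustrative example: for something like

     __complex__ double c = 1.0 + 2.0i;

   expanded into a CONCAT target, the 1.0 and 2.0 halves are moved into
   the CONCAT's two parts independently, with no round trip through
   memory.  */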
7388 /* ... fall through ... */
7390 case STRING_CST:
7391 temp = expand_expr_constant (exp, 1, modifier);
7393 /* temp contains a constant address.
7394 On RISC machines where a constant address isn't valid,
7395 make some insns to get that address into a register. */
7396 if (modifier != EXPAND_CONST_ADDRESS
7397 && modifier != EXPAND_INITIALIZER
7398 && modifier != EXPAND_SUM
7399 && ! memory_address_p (mode, XEXP (temp, 0)))
7400 return replace_equiv_address (temp,
7401 copy_rtx (XEXP (temp, 0)));
7402 return temp;
7404 case SAVE_EXPR:
7406 tree val = TREE_OPERAND (exp, 0);
7407 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7409 if (!SAVE_EXPR_RESOLVED_P (exp))
7411 /* We can indeed still hit this case, typically via builtin
7412 expanders calling save_expr immediately before expanding
7413 something. Assume this means that we only have to deal
7414 with non-BLKmode values. */
7415 gcc_assert (GET_MODE (ret) != BLKmode);
7417 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7418 DECL_ARTIFICIAL (val) = 1;
7419 DECL_IGNORED_P (val) = 1;
7420 TREE_OPERAND (exp, 0) = val;
7421 SAVE_EXPR_RESOLVED_P (exp) = 1;
7423 if (!CONSTANT_P (ret))
7424 ret = copy_to_reg (ret);
7425 SET_DECL_RTL (val, ret);
7428 return ret;
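/* Illustrative note: the first expansion of an unresolved SAVE_EXPR
   copies the value into a pseudo and records it in an artificial
   VAR_DECL, so re-expanding the same SAVE_EXPR later reuses that
   register instead of re-evaluating the operand (and its side
   effects).  */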
7431 case GOTO_EXPR:
7432 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7433 expand_goto (TREE_OPERAND (exp, 0));
7434 else
7435 expand_computed_goto (TREE_OPERAND (exp, 0));
7436 return const0_rtx;
7438 case CONSTRUCTOR:
7439 /* If we don't need the result, just ensure we evaluate any
7440 subexpressions. */
7441 if (ignore)
7443 unsigned HOST_WIDE_INT idx;
7444 tree value;
7446 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7447 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
7449 return const0_rtx;
7452 return expand_constructor (exp, target, modifier, false);
7454 case MISALIGNED_INDIRECT_REF:
7455 case ALIGN_INDIRECT_REF:
7456 case INDIRECT_REF:
7458 tree exp1 = TREE_OPERAND (exp, 0);
7460 if (modifier != EXPAND_WRITE)
7462 tree t;
7464 t = fold_read_from_constant_string (exp);
7465 if (t)
7466 return expand_expr (t, target, tmode, modifier);
7469 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7470 op0 = memory_address (mode, op0);
7472 if (code == ALIGN_INDIRECT_REF)
7474 int align = TYPE_ALIGN_UNIT (type);
7475 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7476 op0 = memory_address (mode, op0);
7479 temp = gen_rtx_MEM (mode, op0);
7481 set_mem_attributes (temp, exp, 0);
7483 /* Resolve the misalignment now, so that we don't have to remember
7484 to resolve it later. Of course, this only works for reads. */
7485 /* ??? When we get around to supporting writes, we'll have to handle
7486 this in store_expr directly. The vectorizer isn't generating
7487 those yet, however. */
7488 if (code == MISALIGNED_INDIRECT_REF)
7490 int icode;
7491 rtx reg, insn;
7493 gcc_assert (modifier == EXPAND_NORMAL
7494 || modifier == EXPAND_STACK_PARM);
7496 /* The vectorizer should have already checked the mode. */
7497 icode = optab_handler (movmisalign_optab, mode)->insn_code;
7498 gcc_assert (icode != CODE_FOR_nothing);
7500 /* We've already validated the memory, and we're creating a
7501 new pseudo destination. The predicates really can't fail. */
7502 reg = gen_reg_rtx (mode);
7504 /* Nor can the insn generator. */
7505 insn = GEN_FCN (icode) (reg, temp);
7506 emit_insn (insn);
7508 return reg;
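/* Illustrative note: MISALIGNED_INDIRECT_REF is generated by the
   vectorizer for loads it cannot prove aligned; on targets providing
   movmisalign<mode> this typically becomes a single unaligned vector
   load (e.g. an unaligned SSE move on x86-like machines).  */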
7511 return temp;
7514 case TARGET_MEM_REF:
7516 struct mem_address addr;
7518 get_address_description (exp, &addr);
7519 op0 = addr_for_mem_ref (&addr, true);
7520 op0 = memory_address (mode, op0);
7521 temp = gen_rtx_MEM (mode, op0);
7522 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7524 return temp;
7526 case ARRAY_REF:
7529 tree array = TREE_OPERAND (exp, 0);
7530 tree index = TREE_OPERAND (exp, 1);
7532 /* Fold an expression like: "foo"[2].
7533 This is not done in fold so it won't happen inside &.
7534 Don't fold if this is for wide characters since it's too
7535 difficult to do correctly and this is a very rare case. */
7537 if (modifier != EXPAND_CONST_ADDRESS
7538 && modifier != EXPAND_INITIALIZER
7539 && modifier != EXPAND_MEMORY)
7541 tree t = fold_read_from_constant_string (exp);
7543 if (t)
7544 return expand_expr (t, target, tmode, modifier);
7547 /* If this is a constant index into a constant array,
7548 just get the value from the array. Handle both cases: when we
7549 have an explicit constructor and when our operand is a variable
7550 that was declared const. */
7552 if (modifier != EXPAND_CONST_ADDRESS
7553 && modifier != EXPAND_INITIALIZER
7554 && modifier != EXPAND_MEMORY
7555 && TREE_CODE (array) == CONSTRUCTOR
7556 && ! TREE_SIDE_EFFECTS (array)
7557 && TREE_CODE (index) == INTEGER_CST)
7559 unsigned HOST_WIDE_INT ix;
7560 tree field, value;
7562 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7563 field, value)
7564 if (tree_int_cst_equal (field, index))
7566 if (!TREE_SIDE_EFFECTS (value))
7567 return expand_expr (fold (value), target, tmode, modifier);
7568 break;
7572 else if (optimize >= 1
7573 && modifier != EXPAND_CONST_ADDRESS
7574 && modifier != EXPAND_INITIALIZER
7575 && modifier != EXPAND_MEMORY
7576 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7577 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7578 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7579 && targetm.binds_local_p (array))
7581 if (TREE_CODE (index) == INTEGER_CST)
7583 tree init = DECL_INITIAL (array);
7585 if (TREE_CODE (init) == CONSTRUCTOR)
7587 unsigned HOST_WIDE_INT ix;
7588 tree field, value;
7590 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7591 field, value)
7592 if (tree_int_cst_equal (field, index))
7594 if (TREE_SIDE_EFFECTS (value))
7595 break;
7597 if (TREE_CODE (value) == CONSTRUCTOR)
7599 /* If VALUE is a CONSTRUCTOR, this
7600 optimization is only useful if
7601 this doesn't store the CONSTRUCTOR
7602 into memory. If it does, it is more
7603 efficient to just load the data from
7604 the array directly. */
7605 rtx ret = expand_constructor (value, target,
7606 modifier, true);
7607 if (ret == NULL_RTX)
7608 break;
7611 return expand_expr (fold (value), target, tmode,
7612 modifier);
7615 else if (TREE_CODE (init) == STRING_CST)
7617 tree index1 = index;
7618 tree low_bound = array_ref_low_bound (exp);
7619 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7621 /* Optimize the special-case of a zero lower bound.
7623 We convert the low_bound to sizetype to avoid some problems
7624 with constant folding. (E.g. suppose the lower bound is 1,
7625 and its mode is QI. Without the conversion, (ARRAY
7626 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7627 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7629 if (! integer_zerop (low_bound))
7630 index1 = size_diffop (index1, fold_convert (sizetype,
7631 low_bound));
7633 if (0 > compare_tree_int (index1,
7634 TREE_STRING_LENGTH (init)))
7636 tree type = TREE_TYPE (TREE_TYPE (init));
7637 enum machine_mode mode = TYPE_MODE (type);
7639 if (GET_MODE_CLASS (mode) == MODE_INT
7640 && GET_MODE_SIZE (mode) == 1)
7641 return gen_int_mode (TREE_STRING_POINTER (init)
7642 [TREE_INT_CST_LOW (index1)],
7643 mode);
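/* Illustrative example of the STRING_CST path just above: with

     static const char s[] = "abc";

   a use of s[1] (under suitable modifiers, at -O1 or above) is folded
   right here to the byte constant 'b' instead of emitting a load.  */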
7649 goto normal_inner_ref;
7651 case COMPONENT_REF:
7652 /* If the operand is a CONSTRUCTOR, we can just extract the
7653 appropriate field if it is present. */
7654 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7656 unsigned HOST_WIDE_INT idx;
7657 tree field, value;
7659 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7660 idx, field, value)
7661 if (field == TREE_OPERAND (exp, 1)
7662 /* We can normally use the value of the field in the
7663 CONSTRUCTOR. However, if this is a bitfield in
7664 an integral mode that we can fit in a HOST_WIDE_INT,
7665 we must mask only the number of bits in the bitfield,
7666 since this is done implicitly by the constructor. If
7667 the bitfield does not meet either of those conditions,
7668 we can't do this optimization. */
7669 && (! DECL_BIT_FIELD (field)
7670 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7671 && (GET_MODE_BITSIZE (DECL_MODE (field))
7672 <= HOST_BITS_PER_WIDE_INT))))
7674 if (DECL_BIT_FIELD (field)
7675 && modifier == EXPAND_STACK_PARM)
7676 target = 0;
7677 op0 = expand_expr (value, target, tmode, modifier);
7678 if (DECL_BIT_FIELD (field))
7680 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7681 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7683 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7685 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7686 op0 = expand_and (imode, op0, op1, target);
7688 else
7690 tree count
7691 = build_int_cst (NULL_TREE,
7692 GET_MODE_BITSIZE (imode) - bitsize);
7694 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7695 target, 0);
7696 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7697 target, 0);
7701 return op0;
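/* Illustrative example: reading field b from a CONSTRUCTOR such as
   (struct {int a; int b;}){1, 2} yields 2 directly; if b were instead
   an unsigned 3-bit bitfield, the AND above would mask the value to
   its low-order bits, and for a signed bitfield the shift pair would
   sign-extend them, matching what storing the CONSTRUCTOR would
   produce.  */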
7704 goto normal_inner_ref;
7706 case BIT_FIELD_REF:
7707 case ARRAY_RANGE_REF:
7708 normal_inner_ref:
7710 enum machine_mode mode1;
7711 HOST_WIDE_INT bitsize, bitpos;
7712 tree offset;
7713 int volatilep = 0;
7714 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7715 &mode1, &unsignedp, &volatilep, true);
7716 rtx orig_op0;
7718 /* If we got back the original object, something is wrong. Perhaps
7719 we are evaluating an expression too early. In any event, don't
7720 infinitely recurse. */
7721 gcc_assert (tem != exp);
7723 /* If TEM's type is a union of variable size, pass TARGET to the inner
7724 computation, since it will need a temporary and TARGET is known
7725 to suffice. This occurs in unchecked conversion in Ada. */
7727 orig_op0 = op0
7728 = expand_expr (tem,
7729 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7730 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7731 != INTEGER_CST)
7732 && modifier != EXPAND_STACK_PARM
7733 ? target : NULL_RTX),
7734 VOIDmode,
7735 (modifier == EXPAND_INITIALIZER
7736 || modifier == EXPAND_CONST_ADDRESS
7737 || modifier == EXPAND_STACK_PARM)
7738 ? modifier : EXPAND_NORMAL);
7740 /* If this is a constant, put it into a register if it is a legitimate
7741 constant, OFFSET is 0, and we won't try to extract outside the
7742 register (in case we were passed a partially uninitialized object
7743 or a view_conversion to a larger size). Force the constant to
7744 memory otherwise. */
7745 if (CONSTANT_P (op0))
7747 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7748 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7749 && offset == 0
7750 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7751 op0 = force_reg (mode, op0);
7752 else
7753 op0 = validize_mem (force_const_mem (mode, op0));
7756 /* Otherwise, if this object is not in memory and we either have an
7757 offset, a BLKmode result, or a reference outside the object, put it
7758 there. Such cases can occur in Ada if we have unchecked conversion
7759 of an expression from a scalar type to an array or record type or
7760 for an ARRAY_RANGE_REF whose type is BLKmode. */
7761 else if (!MEM_P (op0)
7762 && (offset != 0
7763 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7764 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7766 tree nt = build_qualified_type (TREE_TYPE (tem),
7767 (TYPE_QUALS (TREE_TYPE (tem))
7768 | TYPE_QUAL_CONST));
7769 rtx memloc = assign_temp (nt, 1, 1, 1);
7771 emit_move_insn (memloc, op0);
7772 op0 = memloc;
7775 if (offset != 0)
7777 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7778 EXPAND_SUM);
7780 gcc_assert (MEM_P (op0));
7782 #ifdef POINTERS_EXTEND_UNSIGNED
7783 if (GET_MODE (offset_rtx) != Pmode)
7784 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7785 #else
7786 if (GET_MODE (offset_rtx) != ptr_mode)
7787 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7788 #endif
7790 if (GET_MODE (op0) == BLKmode
7791 /* A constant address in OP0 can have VOIDmode, we must
7792 not try to call force_reg in that case. */
7793 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7794 && bitsize != 0
7795 && (bitpos % bitsize) == 0
7796 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7797 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7799 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7800 bitpos = 0;
7803 op0 = offset_address (op0, offset_rtx,
7804 highest_pow2_factor (offset));
7807 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7808 record its alignment as BIGGEST_ALIGNMENT. */
7809 if (MEM_P (op0) && bitpos == 0 && offset != 0
7810 && is_aligning_offset (offset, tem))
7811 set_mem_align (op0, BIGGEST_ALIGNMENT);
7813 /* Don't forget about volatility even if this is a bitfield. */
7814 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7816 if (op0 == orig_op0)
7817 op0 = copy_rtx (op0);
7819 MEM_VOLATILE_P (op0) = 1;
7822 /* The following code doesn't handle CONCAT.
7823 Assume only bitpos == 0 can be used for CONCAT, due to
7824 one-element arrays having the same mode as their element. */
7825 if (GET_CODE (op0) == CONCAT)
7827 gcc_assert (bitpos == 0
7828 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7829 return op0;
7832 /* In cases where an aligned union has an unaligned object
7833 as a field, we might be extracting a BLKmode value from
7834 an integer-mode (e.g., SImode) object. Handle this case
7835 by doing the extract into an object as wide as the field
7836 (which we know to be the width of a basic mode), then
7837 storing into memory, and changing the mode to BLKmode. */
7838 if (mode1 == VOIDmode
7839 || REG_P (op0) || GET_CODE (op0) == SUBREG
7840 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7841 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7842 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7843 && modifier != EXPAND_CONST_ADDRESS
7844 && modifier != EXPAND_INITIALIZER)
7845 /* If the field isn't aligned enough to fetch as a memref,
7846 fetch it as a bit field. */
7847 || (mode1 != BLKmode
7848 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7849 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7850 || (MEM_P (op0)
7851 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7852 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7853 && ((modifier == EXPAND_CONST_ADDRESS
7854 || modifier == EXPAND_INITIALIZER)
7855 ? STRICT_ALIGNMENT
7856 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7857 || (bitpos % BITS_PER_UNIT != 0)))
7858 /* If the type and the field are a constant size and the
7859 size of the type isn't the same size as the bitfield,
7860 we must use bitfield operations. */
7861 || (bitsize >= 0
7862 && TYPE_SIZE (TREE_TYPE (exp))
7863 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7864 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7865 bitsize)))
7867 enum machine_mode ext_mode = mode;
7869 if (ext_mode == BLKmode
7870 && ! (target != 0 && MEM_P (op0)
7871 && MEM_P (target)
7872 && bitpos % BITS_PER_UNIT == 0))
7873 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7875 if (ext_mode == BLKmode)
7877 if (target == 0)
7878 target = assign_temp (type, 0, 1, 1);
7880 if (bitsize == 0)
7881 return target;
7883 /* In this case, BITPOS must start at a byte boundary and
7884 TARGET, if specified, must be a MEM. */
7885 gcc_assert (MEM_P (op0)
7886 && (!target || MEM_P (target))
7887 && !(bitpos % BITS_PER_UNIT));
7889 emit_block_move (target,
7890 adjust_address (op0, VOIDmode,
7891 bitpos / BITS_PER_UNIT),
7892 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7893 / BITS_PER_UNIT),
7894 (modifier == EXPAND_STACK_PARM
7895 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7897 return target;
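/* Illustrative note: this block-move path covers references for which
   no integer mode of the right width exists -- e.g. extracting a
   BLKmode struct member -- where the bytes are simply copied into a
   BLKmode temporary rather than going through a register.  */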
7900 op0 = validize_mem (op0);
7902 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7903 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7905 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7906 (modifier == EXPAND_STACK_PARM
7907 ? NULL_RTX : target),
7908 ext_mode, ext_mode);
7910 /* If the result is a record type and BITSIZE is narrower than
7911 the mode of OP0, an integral mode, and this is a big endian
7912 machine, we must put the field into the high-order bits. */
7913 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7914 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7915 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7916 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7917 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7918 - bitsize),
7919 op0, 1);
7921 /* If the result type is BLKmode, store the data into a temporary
7922 of the appropriate type, but with the mode corresponding to the
7923 mode for the data we have (op0's mode). It's tempting to make
7924 this a constant type, since we know it's only being stored once,
7925 but that can cause problems if we are taking the address of this
7926 COMPONENT_REF because the MEM of any reference via that address
7927 will have flags corresponding to the type, which will not
7928 necessarily be constant. */
7929 if (mode == BLKmode)
7931 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
7932 rtx new;
7934 /* If the reference doesn't use the alias set of its type,
7935 we cannot create the temporary using that type. */
7936 if (component_uses_parent_alias_set (exp))
7938 new = assign_stack_local (ext_mode, size, 0);
7939 set_mem_alias_set (new, get_alias_set (exp));
7941 else
7942 new = assign_stack_temp_for_type (ext_mode, size, 0, type);
7944 emit_move_insn (new, op0);
7945 op0 = copy_rtx (new);
7946 PUT_MODE (op0, BLKmode);
7947 set_mem_attributes (op0, exp, 1);
7950 return op0;
7953 /* If the result is BLKmode, use that to access the object
7954 now as well. */
7955 if (mode == BLKmode)
7956 mode1 = BLKmode;
7958 /* Get a reference to just this component. */
7959 if (modifier == EXPAND_CONST_ADDRESS
7960 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7961 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7962 else
7963 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7965 if (op0 == orig_op0)
7966 op0 = copy_rtx (op0);
7968 set_mem_attributes (op0, exp, 0);
7969 if (REG_P (XEXP (op0, 0)))
7970 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7972 MEM_VOLATILE_P (op0) |= volatilep;
7973 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7974 || modifier == EXPAND_CONST_ADDRESS
7975 || modifier == EXPAND_INITIALIZER)
7976 return op0;
7977 else if (target == 0)
7978 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7980 convert_move (target, op0, unsignedp);
7981 return target;
7984 case OBJ_TYPE_REF:
7985 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7987 case CALL_EXPR:
7988 /* All valid uses of __builtin_va_arg_pack () are removed during
7989 inlining. */
7990 if (CALL_EXPR_VA_ARG_PACK (exp))
7991 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7993 tree fndecl = get_callee_fndecl (exp), attr;
7995 if (fndecl
7996 && (attr = lookup_attribute ("error",
7997 DECL_ATTRIBUTES (fndecl))) != NULL)
7998 error ("%Kcall to %qs declared with attribute error: %s",
7999 exp, lang_hooks.decl_printable_name (fndecl, 1),
8000 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
8001 if (fndecl
8002 && (attr = lookup_attribute ("warning",
8003 DECL_ATTRIBUTES (fndecl))) != NULL)
8004 warning (0, "%Kcall to %qs declared with attribute warning: %s",
8005 exp, lang_hooks.decl_printable_name (fndecl, 1),
8006 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
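/* Illustrative example: a declaration such as

     extern void fail (void) __attribute__ ((error ("do not call")));

   is diagnosed here, at expansion time, if a call to it survives
   earlier folding and inlining; the "warning" attribute is handled
   the same way but is non-fatal.  */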
8008 /* Check for a built-in function. */
8009 if (fndecl && DECL_BUILT_IN (fndecl))
8011 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_FRONTEND)
8012 return lang_hooks.expand_expr (exp, original_target,
8013 tmode, modifier, alt_rtl);
8014 else
8015 return expand_builtin (exp, target, subtarget, tmode, ignore);
8018 return expand_call (exp, target, ignore);
8020 case PAREN_EXPR:
8021 case NOP_EXPR:
8022 case CONVERT_EXPR:
8023 if (TREE_OPERAND (exp, 0) == error_mark_node)
8024 return const0_rtx;
8026 if (TREE_CODE (type) == UNION_TYPE)
8028 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
8030 /* If both input and output are BLKmode, this conversion isn't doing
8031 anything except possibly changing memory attributes. */
8032 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8034 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
8035 modifier);
8037 result = copy_rtx (result);
8038 set_mem_attributes (result, exp, 0);
8039 return result;
8042 if (target == 0)
8044 if (TYPE_MODE (type) != BLKmode)
8045 target = gen_reg_rtx (TYPE_MODE (type));
8046 else
8047 target = assign_temp (type, 0, 1, 1);
8050 if (MEM_P (target))
8051 /* Store data into beginning of memory target. */
8052 store_expr (TREE_OPERAND (exp, 0),
8053 adjust_address (target, TYPE_MODE (valtype), 0),
8054 modifier == EXPAND_STACK_PARM,
8055 false);
8057 else
8059 gcc_assert (REG_P (target));
8061 /* Store this field into a union of the proper type. */
8062 store_field (target,
8063 MIN ((int_size_in_bytes (TREE_TYPE
8064 (TREE_OPERAND (exp, 0)))
8065 * BITS_PER_UNIT),
8066 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8067 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
8068 type, 0, false);
8071 /* Return the entire union. */
8072 return target;
8075 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8077 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
8078 modifier);
8080 /* If the signedness of the conversion differs and OP0 is
8081 a promoted SUBREG, clear that indication since we now
8082 have to do the proper extension. */
8083 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
8084 && GET_CODE (op0) == SUBREG)
8085 SUBREG_PROMOTED_VAR_P (op0) = 0;
8087 return REDUCE_BIT_FIELD (op0);
8090 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
8091 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8092 if (GET_MODE (op0) == mode)
8095 /* If OP0 is a constant, just convert it into the proper mode. */
8096 else if (CONSTANT_P (op0))
8098 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8099 enum machine_mode inner_mode = TYPE_MODE (inner_type);
8101 if (modifier == EXPAND_INITIALIZER)
8102 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8103 subreg_lowpart_offset (mode,
8104 inner_mode));
8105 else
8106 op0 = convert_modes (mode, inner_mode, op0,
8107 TYPE_UNSIGNED (inner_type));
8110 else if (modifier == EXPAND_INITIALIZER)
8111 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8113 else if (target == 0)
8114 op0 = convert_to_mode (mode, op0,
8115 TYPE_UNSIGNED (TREE_TYPE
8116 (TREE_OPERAND (exp, 0))));
8117 else
8119 convert_move (target, op0,
8120 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8121 op0 = target;
8124 return REDUCE_BIT_FIELD (op0);
8126 case VIEW_CONVERT_EXPR:
8127 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
8129 /* If the input and output modes are both the same, we are done. */
8130 if (TYPE_MODE (type) == GET_MODE (op0))
8132 /* If neither mode is BLKmode, and both modes are the same size
8133 then we can use gen_lowpart. */
8134 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
8135 && GET_MODE_SIZE (TYPE_MODE (type))
8136 == GET_MODE_SIZE (GET_MODE (op0)))
8138 if (GET_CODE (op0) == SUBREG)
8139 op0 = force_reg (GET_MODE (op0), op0);
8140 op0 = gen_lowpart (TYPE_MODE (type), op0);
8142 /* If both modes are integral, then we can convert from one to the
8143 other. */
8144 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
8145 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
8146 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
8147 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8148 /* As a last resort, spill op0 to memory, and reload it in a
8149 different mode. */
8150 else if (!MEM_P (op0))
8152 /* If the operand is not a MEM, force it into memory. Since we
8153 are going to be changing the mode of the MEM, don't call
8154 force_const_mem for constants because we don't allow pool
8155 constants to change mode. */
8156 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8158 gcc_assert (!TREE_ADDRESSABLE (exp));
8160 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
8161 target
8162 = assign_stack_temp_for_type
8163 (TYPE_MODE (inner_type),
8164 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
8166 emit_move_insn (target, op0);
8167 op0 = target;
8170 /* At this point, OP0 is in the correct mode. If the output type is such
8171 that the operand is known to be aligned, indicate that it is.
8172 Otherwise, we need only be concerned about alignment for non-BLKmode
8173 results. */
8174 if (MEM_P (op0))
8176 op0 = copy_rtx (op0);
8178 if (TYPE_ALIGN_OK (type))
8179 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8180 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8181 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8183 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8184 HOST_WIDE_INT temp_size
8185 = MAX (int_size_in_bytes (inner_type),
8186 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8187 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8188 temp_size, 0, type);
8189 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8191 gcc_assert (!TREE_ADDRESSABLE (exp));
8193 if (GET_MODE (op0) == BLKmode)
8194 emit_block_move (new_with_op0_mode, op0,
8195 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8196 (modifier == EXPAND_STACK_PARM
8197 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8198 else
8199 emit_move_insn (new_with_op0_mode, op0);
8201 op0 = new;
8204 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8207 return op0;
8209 case POINTER_PLUS_EXPR:
8210 /* Even though the sizetype mode and the pointer's mode can be different
8211 expand is able to handle this correctly and get the correct result out
8212 of the PLUS_EXPR code. */
8213 case PLUS_EXPR:
8215 /* Check if this is a case for multiplication and addition. */
8216 if ((TREE_CODE (type) == INTEGER_TYPE
8217 || TREE_CODE (type) == FIXED_POINT_TYPE)
8218 && TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR)
8220 tree subsubexp0, subsubexp1;
8221 enum tree_code code0, code1, this_code;
8223 subexp0 = TREE_OPERAND (exp, 0);
8224 subsubexp0 = TREE_OPERAND (subexp0, 0);
8225 subsubexp1 = TREE_OPERAND (subexp0, 1);
8226 code0 = TREE_CODE (subsubexp0);
8227 code1 = TREE_CODE (subsubexp1);
8228 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8229 : FIXED_CONVERT_EXPR;
8230 if (code0 == this_code && code1 == this_code
8231 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8232 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8233 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8234 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8235 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8236 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8238 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8239 enum machine_mode innermode = TYPE_MODE (op0type);
8240 bool zextend_p = TYPE_UNSIGNED (op0type);
8241 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
8242 if (sat_p == 0)
8243 this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
8244 else
8245 this_optab = zextend_p ? usmadd_widen_optab
8246 : ssmadd_widen_optab;
8247 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8248 && (optab_handler (this_optab, mode)->insn_code
8249 != CODE_FOR_nothing))
8251 expand_operands (TREE_OPERAND (subsubexp0, 0),
8252 TREE_OPERAND (subsubexp1, 0),
8253 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8254 op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget,
8255 VOIDmode, EXPAND_NORMAL);
8256 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8257 target, unsignedp);
8258 gcc_assert (temp);
8259 return REDUCE_BIT_FIELD (temp);
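/* Illustrative example: with 32-bit int and 64-bit long long, a tree
   of the form

     (long long) a * (long long) b + c

   (a and b of type int, c long long) matches the shape tested above,
   so on targets providing smadd_widen_optab it expands to a single
   widening multiply-add instead of a full 64x64 multiply plus add.  */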
8264 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8265 something else, make sure we add the register to the constant and
8266 then to the other thing. This case can occur during strength
8267 reduction and doing it this way will produce better code if the
8268 frame pointer or argument pointer is eliminated.
8270 fold-const.c will ensure that the constant is always in the inner
8271 PLUS_EXPR, so the only case we need to do anything about is if
8272 sp, ap, or fp is our second argument, in which case we must swap
8273 the innermost first argument and our second argument. */
8275 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8276 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8277 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
8278 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8279 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8280 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8282 tree t = TREE_OPERAND (exp, 1);
8284 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8285 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8288 /* If the result is to be ptr_mode and we are adding an integer to
8289 something, we might be forming a constant. So try to use
8290 plus_constant. If it produces a sum and we can't accept it,
8291 use force_operand. This allows P = &ARR[const] to generate
8292 efficient code on machines where a SYMBOL_REF is not a valid
8293 address.
8295 If this is an EXPAND_SUM call, always return the sum. */
8296 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8297 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8299 if (modifier == EXPAND_STACK_PARM)
8300 target = 0;
8301 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8302 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8303 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8305 rtx constant_part;
8307 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8308 EXPAND_SUM);
8309 /* Use immed_double_const to ensure that the constant is
8310 truncated according to the mode of OP1, then sign extended
8311 to a HOST_WIDE_INT. Using the constant directly can result
8312 in non-canonical RTL in a 64x32 cross compile. */
8313 constant_part
8314 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8315 (HOST_WIDE_INT) 0,
8316 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8317 op1 = plus_constant (op1, INTVAL (constant_part));
8318 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8319 op1 = force_operand (op1, target);
8320 return REDUCE_BIT_FIELD (op1);
8323 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8324 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8325 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8327 rtx constant_part;
8329 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8330 (modifier == EXPAND_INITIALIZER
8331 ? EXPAND_INITIALIZER : EXPAND_SUM));
8332 if (! CONSTANT_P (op0))
8334 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8335 VOIDmode, modifier);
8336 /* Return a PLUS if modifier says it's OK. */
8337 if (modifier == EXPAND_SUM
8338 || modifier == EXPAND_INITIALIZER)
8339 return simplify_gen_binary (PLUS, mode, op0, op1);
8340 goto binop2;
8342 /* Use immed_double_const to ensure that the constant is
8343 truncated according to the mode of OP1, then sign extended
8344 to a HOST_WIDE_INT. Using the constant directly can result
8345 in non-canonical RTL in a 64x32 cross compile. */
8346 constant_part
8347 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8348 (HOST_WIDE_INT) 0,
8349 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8350 op0 = plus_constant (op0, INTVAL (constant_part));
8351 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8352 op0 = force_operand (op0, target);
8353 return REDUCE_BIT_FIELD (op0);
8357 /* No sense saving up arithmetic to be done
8358 if it's all in the wrong mode to form part of an address.
8359 And force_operand won't know whether to sign-extend or
8360 zero-extend. */
8361 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8362 || mode != ptr_mode)
8364 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8365 subtarget, &op0, &op1, 0);
8366 if (op0 == const0_rtx)
8367 return op1;
8368 if (op1 == const0_rtx)
8369 return op0;
8370 goto binop2;
8373 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8374 subtarget, &op0, &op1, modifier);
8375 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8377 case MINUS_EXPR:
8378 /* Check if this is a case for multiplication and subtraction. */
8379 if ((TREE_CODE (type) == INTEGER_TYPE
8380 || TREE_CODE (type) == FIXED_POINT_TYPE)
8381 && TREE_CODE (TREE_OPERAND (exp, 1)) == MULT_EXPR)
8383 tree subsubexp0, subsubexp1;
8384 enum tree_code code0, code1, this_code;
8386 subexp1 = TREE_OPERAND (exp, 1);
8387 subsubexp0 = TREE_OPERAND (subexp1, 0);
8388 subsubexp1 = TREE_OPERAND (subexp1, 1);
8389 code0 = TREE_CODE (subsubexp0);
8390 code1 = TREE_CODE (subsubexp1);
8391 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8392 : FIXED_CONVERT_EXPR;
8393 if (code0 == this_code && code1 == this_code
8394 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8395 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8396 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8397 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8398 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8399 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8401 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8402 enum machine_mode innermode = TYPE_MODE (op0type);
8403 bool zextend_p = TYPE_UNSIGNED (op0type);
8404 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
8405 if (sat_p == 0)
8406 this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
8407 else
8408 this_optab = zextend_p ? usmsub_widen_optab
8409 : ssmsub_widen_optab;
8410 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8411 && (optab_handler (this_optab, mode)->insn_code
8412 != CODE_FOR_nothing))
8414 expand_operands (TREE_OPERAND (subsubexp0, 0),
8415 TREE_OPERAND (subsubexp1, 0),
8416 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8417 op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8418 VOIDmode, EXPAND_NORMAL);
8419 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8420 target, unsignedp);
8421 gcc_assert (temp);
8422 return REDUCE_BIT_FIELD (temp);
8427 /* For initializers, we are allowed to return a MINUS of two
8428 symbolic constants. Here we handle all cases when both operands
8429 are constant. */
8432 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8433 && really_constant_p (TREE_OPERAND (exp, 0))
8434 && really_constant_p (TREE_OPERAND (exp, 1)))
8436 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8437 NULL_RTX, &op0, &op1, modifier);
8439 /* If the last operand is a CONST_INT, use plus_constant of
8440 the negated constant. Else make the MINUS. */
8441 if (GET_CODE (op1) == CONST_INT)
8442 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8443 else
8444 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8447 /* No sense saving up arithmetic to be done
8448 if it's all in the wrong mode to form part of an address.
8449 And force_operand won't know whether to sign-extend or
8450 zero-extend. */
8451 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8452 || mode != ptr_mode)
8453 goto binop;
8455 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8456 subtarget, &op0, &op1, modifier);
8458 /* Convert A - const to A + (-const). */
8459 if (GET_CODE (op1) == CONST_INT)
8461 op1 = negate_rtx (mode, op1);
8462 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8465 goto binop2;
8467 case MULT_EXPR:
8468 /* If this is a fixed-point operation, then we cannot use the code
8469 below because "expand_mult" doesn't support sat/no-sat fixed-point
8470 multiplications. */
8471 if (ALL_FIXED_POINT_MODE_P (mode))
8472 goto binop;
8474 /* If first operand is constant, swap them.
8475 Thus the following special case checks need only
8476 check the second operand. */
8477 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8479 tree t1 = TREE_OPERAND (exp, 0);
8480 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8481 TREE_OPERAND (exp, 1) = t1;
8484 /* Attempt to return something suitable for generating an
8485 indexed address, for machines that support that. */
8487 if (modifier == EXPAND_SUM && mode == ptr_mode
8488 && host_integerp (TREE_OPERAND (exp, 1), 0))
8490 tree exp1 = TREE_OPERAND (exp, 1);
8492 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8493 EXPAND_SUM);
8495 if (!REG_P (op0))
8496 op0 = force_operand (op0, NULL_RTX);
8497 if (!REG_P (op0))
8498 op0 = copy_to_mode_reg (mode, op0);
8500 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8501 gen_int_mode (tree_low_cst (exp1, 0),
8502 TYPE_MODE (TREE_TYPE (exp1)))));
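/* Illustrative note: under EXPAND_SUM this deliberately returns a bare
   (mult (reg) (const_int N)) -- e.g. N = 4 when indexing an int array
   -- so the caller can fold it into a scaled-index address on machines
   that support one.  */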
8505 if (modifier == EXPAND_STACK_PARM)
8506 target = 0;
8508 /* Check for multiplying things that have been extended
8509 from a narrower type. If this machine supports multiplying
8510 in that narrower type with a result in the desired type,
8511 do it that way, and avoid the explicit type-conversion. */
8513 subexp0 = TREE_OPERAND (exp, 0);
8514 subexp1 = TREE_OPERAND (exp, 1);
8515 /* First, check if we have a multiplication of one signed and one
8516 unsigned operand. */
8517 if (TREE_CODE (subexp0) == NOP_EXPR
8518 && TREE_CODE (subexp1) == NOP_EXPR
8519 && TREE_CODE (type) == INTEGER_TYPE
8520 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8521 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8522 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8523 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8524 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8525 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8527 enum machine_mode innermode
8528 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8529 this_optab = usmul_widen_optab;
8530 if (mode == GET_MODE_WIDER_MODE (innermode))
8532 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8534 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8535 expand_operands (TREE_OPERAND (subexp0, 0),
8536 TREE_OPERAND (subexp1, 0),
8537 NULL_RTX, &op0, &op1, 0);
8538 else
8539 expand_operands (TREE_OPERAND (subexp0, 0),
8540 TREE_OPERAND (subexp1, 0),
8541 NULL_RTX, &op1, &op0, 0);
8543 goto binop3;
8547 /* Check for a multiplication with matching signedness. */
8548 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8549 && TREE_CODE (type) == INTEGER_TYPE
8550 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8551 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8552 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8553 && int_fits_type_p (TREE_OPERAND (exp, 1),
8554 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8555 /* Don't use a widening multiply if a shift will do. */
8556 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8557 > HOST_BITS_PER_WIDE_INT)
8558 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8560 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8561 && (TYPE_PRECISION (TREE_TYPE
8562 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8563 == TYPE_PRECISION (TREE_TYPE
8564 (TREE_OPERAND
8565 (TREE_OPERAND (exp, 0), 0))))
8566 /* If both operands are extended, they must either both
8567 be zero-extended or both be sign-extended. */
8568 && (TYPE_UNSIGNED (TREE_TYPE
8569 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8570 == TYPE_UNSIGNED (TREE_TYPE
8571 (TREE_OPERAND
8572 (TREE_OPERAND (exp, 0), 0)))))))
8574 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8575 enum machine_mode innermode = TYPE_MODE (op0type);
8576 bool zextend_p = TYPE_UNSIGNED (op0type);
8577 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8578 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8580 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8582 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8584 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8585 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8586 TREE_OPERAND (exp, 1),
8587 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8588 else
8589 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8590 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8591 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8592 goto binop3;
8594 else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
8595 && innermode == word_mode)
8597 rtx htem, hipart;
8598 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8599 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8600 op1 = convert_modes (innermode, mode,
8601 expand_normal (TREE_OPERAND (exp, 1)),
8602 unsignedp);
8603 else
8604 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8605 temp = expand_binop (mode, other_optab, op0, op1, target,
8606 unsignedp, OPTAB_LIB_WIDEN);
8607 hipart = gen_highpart (innermode, temp);
8608 htem = expand_mult_highpart_adjust (innermode, hipart,
8609 op0, op1, hipart,
8610 zextend_p);
8611 if (htem != hipart)
8612 emit_move_insn (hipart, htem);
8613 return REDUCE_BIT_FIELD (temp);
8617 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8618 subtarget, &op0, &op1, 0);
8619 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8621 case TRUNC_DIV_EXPR:
8622 case FLOOR_DIV_EXPR:
8623 case CEIL_DIV_EXPR:
8624 case ROUND_DIV_EXPR:
8625 case EXACT_DIV_EXPR:
8626 /* If this is a fixed-point operation, then we cannot use the code
8627 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8628 divisions. */
8629 if (ALL_FIXED_POINT_MODE_P (mode))
8630 goto binop;
8632 if (modifier == EXPAND_STACK_PARM)
8633 target = 0;
8634 /* Possible optimization: compute the dividend with EXPAND_SUM;
8635 then, if the divisor is constant, we can optimize the case
8636 where some terms of the dividend have coefficients divisible by it. */
8637 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8638 subtarget, &op0, &op1, 0);
8639 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8641 case RDIV_EXPR:
8642 goto binop;
8644 case TRUNC_MOD_EXPR:
8645 case FLOOR_MOD_EXPR:
8646 case CEIL_MOD_EXPR:
8647 case ROUND_MOD_EXPR:
8648 if (modifier == EXPAND_STACK_PARM)
8649 target = 0;
8650 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8651 subtarget, &op0, &op1, 0);
8652 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
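/* Illustrative note: the constant-divisor strength reductions live in
   expand_divmod rather than here -- division by a power of two becomes
   a shift, and division by other constants generally becomes a
   multiply-by-magic-constant sequence.  */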
8654 case FIXED_CONVERT_EXPR:
8655 op0 = expand_normal (TREE_OPERAND (exp, 0));
8656 if (target == 0 || modifier == EXPAND_STACK_PARM)
8657 target = gen_reg_rtx (mode);
8659 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == INTEGER_TYPE
8660 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
8661 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8662 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8663 else
8664 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8665 return target;
8667 case FIX_TRUNC_EXPR:
8668 op0 = expand_normal (TREE_OPERAND (exp, 0));
8669 if (target == 0 || modifier == EXPAND_STACK_PARM)
8670 target = gen_reg_rtx (mode);
8671 expand_fix (target, op0, unsignedp);
8672 return target;
8674 case FLOAT_EXPR:
8675 op0 = expand_normal (TREE_OPERAND (exp, 0));
8676 if (target == 0 || modifier == EXPAND_STACK_PARM)
8677 target = gen_reg_rtx (mode);
8678 /* expand_float can't figure out what to do if FROM has VOIDmode.
8679 So give it the correct mode. With -O, cse will optimize this. */
8680 if (GET_MODE (op0) == VOIDmode)
8681 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8682 op0);
8683 expand_float (target, op0,
8684 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8685 return target;
8687 case NEGATE_EXPR:
8688 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8689 VOIDmode, EXPAND_NORMAL);
8690 if (modifier == EXPAND_STACK_PARM)
8691 target = 0;
8692 temp = expand_unop (mode,
8693 optab_for_tree_code (NEGATE_EXPR, type),
8694 op0, target, 0);
8695 gcc_assert (temp);
8696 return REDUCE_BIT_FIELD (temp);
8698 case ABS_EXPR:
8699 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8700 VOIDmode, EXPAND_NORMAL);
8701 if (modifier == EXPAND_STACK_PARM)
8702 target = 0;
8704 /* ABS_EXPR is not valid for complex arguments. */
8705 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8706 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8708 /* Unsigned abs is simply the operand. Testing here means we don't
8709 risk generating incorrect code below. */
8710 if (TYPE_UNSIGNED (type))
8711 return op0;
8713 return expand_abs (mode, op0, target, unsignedp,
8714 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8716 case MAX_EXPR:
8717 case MIN_EXPR:
8718 target = original_target;
8719 if (target == 0
8720 || modifier == EXPAND_STACK_PARM
8721 || (MEM_P (target) && MEM_VOLATILE_P (target))
8722 || GET_MODE (target) != mode
8723 || (REG_P (target)
8724 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8725 target = gen_reg_rtx (mode);
8726 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8727 target, &op0, &op1, 0);
8729 /* First try to do it with a special MIN or MAX instruction.
8730 If that does not win, use a conditional jump to select the proper
8731 value. */
8732 this_optab = optab_for_tree_code (code, type);
8733 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8734 OPTAB_WIDEN);
8735 if (temp != 0)
8736 return temp;
8738 /* At this point, a MEM target is no longer useful; we will get better
8739 code without it. */
8741 if (! REG_P (target))
8742 target = gen_reg_rtx (mode);
8744 /* If op1 was placed in target, swap op0 and op1. */
8745 if (target != op0 && target == op1)
8747 temp = op0;
8748 op0 = op1;
8749 op1 = temp;
8752 /* We generate better code and avoid problems with op1 mentioning
8753 target by forcing op1 into a pseudo if it isn't a constant. */
8754 if (! CONSTANT_P (op1))
8755 op1 = force_reg (mode, op1);
8758 enum rtx_code comparison_code;
8759 rtx cmpop1 = op1;
8761 if (code == MAX_EXPR)
8762 comparison_code = unsignedp ? GEU : GE;
8763 else
8764 comparison_code = unsignedp ? LEU : LE;
8766 /* Canonicalize to comparisons against 0. */
8767 if (op1 == const1_rtx)
8769 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8770 or (a != 0 ? a : 1) for unsigned.
8771 For MIN we are safe converting (a <= 1 ? a : 1)
8772 into (a <= 0 ? a : 1) */
8773 cmpop1 = const0_rtx;
8774 if (code == MAX_EXPR)
8775 comparison_code = unsignedp ? NE : GT;
8777 if (op1 == constm1_rtx && !unsignedp)
8779 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8780 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8781 cmpop1 = const0_rtx;
8782 if (code == MIN_EXPR)
8783 comparison_code = LT;
8785 #ifdef HAVE_conditional_move
8786 /* Use a conditional move if possible. */
8787 if (can_conditionally_move_p (mode))
8789 rtx insn;
8791 /* ??? Same problem as in expmed.c: emit_conditional_move
8792 forces a stack adjustment via compare_from_rtx, and we
8793 lose the stack adjustment if the sequence we are about
8794 to create is discarded. */
8795 do_pending_stack_adjust ();
8797 start_sequence ();
8799 /* Try to emit the conditional move. */
8800 insn = emit_conditional_move (target, comparison_code,
8801 op0, cmpop1, mode,
8802 op0, op1, mode,
8803 unsignedp);
8805 /* If we could do the conditional move, emit the sequence,
8806 and return. */
8807 if (insn)
8809 rtx seq = get_insns ();
8810 end_sequence ();
8811 emit_insn (seq);
8812 return target;
8815 /* Otherwise discard the sequence and fall back to code with
8816 branches. */
8817 end_sequence ();
8819 #endif
8820 if (target != op0)
8821 emit_move_insn (target, op0);
8823 temp = gen_label_rtx ();
8824 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8825 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8827 emit_move_insn (target, op1);
8828 emit_label (temp);
8829 return target;
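/* Illustrative sketch of the fallback just above: MAX_EXPR of two
   signed SImode values on a machine with neither an smax pattern nor
   conditional moves comes out roughly as

     target = op0;
     if (target >= op1) goto done;
     target = op1;
   done:
 */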
8831 case BIT_NOT_EXPR:
8832 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8833 VOIDmode, EXPAND_NORMAL);
8834 if (modifier == EXPAND_STACK_PARM)
8835 target = 0;
8836 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8837 gcc_assert (temp);
8838 return temp;
8840 /* ??? Can optimize bitwise operations with one arg constant.
8841 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8842 and (a bitwise1 b) bitwise2 b (etc)
8843 but that is probably not worthwhile. */
8845 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8846 boolean values when we want in all cases to compute both of them. In
8847 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8848 as actual zero-or-1 values and then bitwise anding. In cases where
8849 there cannot be any side effects, better code would be made by
8850 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8851 how to recognize those cases. */
8853 case TRUTH_AND_EXPR:
8854 code = BIT_AND_EXPR;
8855 case BIT_AND_EXPR:
8856 goto binop;
8858 case TRUTH_OR_EXPR:
8859 code = BIT_IOR_EXPR;
8860 case BIT_IOR_EXPR:
8861 goto binop;
8863 case TRUTH_XOR_EXPR:
8864 code = BIT_XOR_EXPR;
8865 case BIT_XOR_EXPR:
8866 goto binop;
8868 case LROTATE_EXPR:
8869 case RROTATE_EXPR:
8870 /* The expansion code only handles expansion of mode precision
8871 rotates. */
8872 gcc_assert (GET_MODE_PRECISION (TYPE_MODE (type))
8873 == TYPE_PRECISION (type));
8875 /* Fall through. */
8876 case LSHIFT_EXPR:
8877 case RSHIFT_EXPR:
8878 /* If this is a fixed-point operation, then we cannot use the code
8879 below because "expand_shift" doesn't support sat/no-sat fixed-point
8880 shifts. */
8881 if (ALL_FIXED_POINT_MODE_P (mode))
8882 goto binop;
8884 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8885 subtarget = 0;
8886 if (modifier == EXPAND_STACK_PARM)
8887 target = 0;
8888 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8889 VOIDmode, EXPAND_NORMAL);
8890 temp = expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8891 unsignedp);
8892 if (code == LSHIFT_EXPR)
8893 temp = REDUCE_BIT_FIELD (temp);
8894 return temp;
8896 /* Could determine the answer when only additive constants differ. Also,
8897 the addition of one can be handled by changing the condition. */
8898 case LT_EXPR:
8899 case LE_EXPR:
8900 case GT_EXPR:
8901 case GE_EXPR:
8902 case EQ_EXPR:
8903 case NE_EXPR:
8904 case UNORDERED_EXPR:
8905 case ORDERED_EXPR:
8906 case UNLT_EXPR:
8907 case UNLE_EXPR:
8908 case UNGT_EXPR:
8909 case UNGE_EXPR:
8910 case UNEQ_EXPR:
8911 case LTGT_EXPR:
8912 temp = do_store_flag (exp,
8913 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8914 tmode != VOIDmode ? tmode : mode, 0);
8915 if (temp != 0)
8916 return temp;
8918 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8919 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8920 && original_target
8921 && REG_P (original_target)
8922 && (GET_MODE (original_target)
8923 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8925 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8926 VOIDmode, EXPAND_NORMAL);
8928 /* If temp is constant, we can just compute the result. */
8929 if (GET_CODE (temp) == CONST_INT)
8931 if (INTVAL (temp) != 0)
8932 emit_move_insn (target, const1_rtx);
8933 else
8934 emit_move_insn (target, const0_rtx);
8936 return target;
8939 if (temp != original_target)
8941 enum machine_mode mode1 = GET_MODE (temp);
8942 if (mode1 == VOIDmode)
8943 mode1 = tmode != VOIDmode ? tmode : mode;
8945 temp = copy_to_mode_reg (mode1, temp);
8948 op1 = gen_label_rtx ();
8949 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8950 GET_MODE (temp), unsignedp, op1);
8951 emit_move_insn (temp, const1_rtx);
8952 emit_label (op1);
8953 return temp;
8956 /* If no set-flag instruction, must generate a conditional store
8957 into a temporary variable. Drop through and handle this
8958 like && and ||. */
8960 if (! ignore
8961 && (target == 0
8962 || modifier == EXPAND_STACK_PARM
8963 || ! safe_from_p (target, exp, 1)
8964 /* Make sure we don't have a hard reg (such as function's return
8965 value) live across basic blocks, if not optimizing. */
8966 || (!optimize && REG_P (target)
8967 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8968 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8970 if (target)
8971 emit_move_insn (target, const0_rtx);
8973 op1 = gen_label_rtx ();
8974 jumpifnot (exp, op1);
8976 if (target)
8977 emit_move_insn (target, const1_rtx);
8979 emit_label (op1);
8980 return ignore ? const0_rtx : target;
8982 case TRUTH_NOT_EXPR:
8983 if (modifier == EXPAND_STACK_PARM)
8984 target = 0;
8985 op0 = expand_expr (TREE_OPERAND (exp, 0), target,
8986 VOIDmode, EXPAND_NORMAL);
8987 /* The parser is careful to generate TRUTH_NOT_EXPR
8988 only with operands that are always zero or one. */
8989 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8990 target, 1, OPTAB_LIB_WIDEN);
8991 gcc_assert (temp);
8992 return temp;
8994 case STATEMENT_LIST:
8996 tree_stmt_iterator iter;
8998 gcc_assert (ignore);
9000 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9001 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9003 return const0_rtx;
9005 case COND_EXPR:
9006 /* A COND_EXPR with its type being VOID_TYPE represents a
9007 conditional jump and is handled in
9008 expand_gimple_cond_expr. */
9009 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
9011 /* Note that COND_EXPRs whose type is a structure or union
9012 are required to be constructed to contain assignments of
9013 a temporary variable, so that we can evaluate them here
9014 for side effect only. If type is void, we must do likewise. */
9016 gcc_assert (!TREE_ADDRESSABLE (type)
9017 && !ignore
9018 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
9019 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
9021 /* If we are not to produce a result, we have no target. Otherwise,
9022 if a target was specified use it; it will not be used as an
9023 intermediate target unless it is safe. If no target, use a
9024 temporary. */
9026 if (modifier != EXPAND_STACK_PARM
9027 && original_target
9028 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
9029 && GET_MODE (original_target) == mode
9030 #ifdef HAVE_conditional_move
9031 && (! can_conditionally_move_p (mode)
9032 || REG_P (original_target))
9033 #endif
9034 && !MEM_P (original_target))
9035 temp = original_target;
9036 else
9037 temp = assign_temp (type, 0, 0, 1);
9039 do_pending_stack_adjust ();
9040 NO_DEFER_POP;
9041 op0 = gen_label_rtx ();
9042 op1 = gen_label_rtx ();
9043 jumpifnot (TREE_OPERAND (exp, 0), op0);
9044 store_expr (TREE_OPERAND (exp, 1), temp,
9045 modifier == EXPAND_STACK_PARM,
9046 false);
9048 emit_jump_insn (gen_jump (op1));
9049 emit_barrier ();
9050 emit_label (op0);
9051 store_expr (TREE_OPERAND (exp, 2), temp,
9052 modifier == EXPAND_STACK_PARM,
9053 false);
9055 emit_label (op1);
9056 OK_DEFER_POP;
9057 return temp;
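/* Illustrative sketch: for x = p ? a : b this emits roughly

     if (!p) goto L0;
     temp = a; goto L1;
     L0: temp = b;
     L1: ...

   and TEMP then stands for the value of the whole COND_EXPR.  */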
9059 case VEC_COND_EXPR:
9060 target = expand_vec_cond_expr (exp, target);
9061 return target;
9063 case MODIFY_EXPR:
9065 tree lhs = TREE_OPERAND (exp, 0);
9066 tree rhs = TREE_OPERAND (exp, 1);
9067 gcc_assert (ignore);
9068 expand_assignment (lhs, rhs, false);
9069 return const0_rtx;
9072 case GIMPLE_MODIFY_STMT:
9074 tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
9075 tree rhs = GIMPLE_STMT_OPERAND (exp, 1);
9077 gcc_assert (ignore);
9079 /* Check for |= or &= of a bitfield of size one into another bitfield
9080 of size 1. In this case, (unless we need the result of the
9081 assignment) we can do this more efficiently with a
9082 test followed by an assignment, if necessary.
9084 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9085 things change so we do, this code should be enhanced to
9086 support it. */
9087 if (TREE_CODE (lhs) == COMPONENT_REF
9088 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9089 || TREE_CODE (rhs) == BIT_AND_EXPR)
9090 && TREE_OPERAND (rhs, 0) == lhs
9091 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9092 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9093 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9095 rtx label = gen_label_rtx ();
9096 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9097 do_jump (TREE_OPERAND (rhs, 1),
9098 value ? label : 0,
9099 value ? 0 : label);
9100 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9101 MOVE_NONTEMPORAL (exp));
9102 do_pending_stack_adjust ();
9103 emit_label (label);
9104 return const0_rtx;
9107 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9108 return const0_rtx;
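/* For illustration (a rough sketch of the one-bit bitfield special
   case above): given a hypothetical struct

       struct s { unsigned a : 1, b : 1; } x;

   the statement `x.a |= x.b;' expands to a test plus a conditional
   store instead of a load/or/store sequence:

       if (x.b) x.a = 1;

   and `x.a &= x.b;' correspondingly becomes `if (!x.b) x.a = 0;'.  */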
9111 case RETURN_EXPR:
9112 if (!TREE_OPERAND (exp, 0))
9113 expand_null_return ();
9114 else
9115 expand_return (TREE_OPERAND (exp, 0));
9116 return const0_rtx;
9118 case ADDR_EXPR:
9119 return expand_expr_addr_expr (exp, target, tmode, modifier);
9121 case COMPLEX_EXPR:
9122 /* Get the rtx code of the operands. */
9123 op0 = expand_normal (TREE_OPERAND (exp, 0));
9124 op1 = expand_normal (TREE_OPERAND (exp, 1));
9126 if (!target)
9127 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9129 /* Move the real (op0) and imaginary (op1) parts to their location. */
9130 write_complex_part (target, op0, false);
9131 write_complex_part (target, op1, true);
9133 return target;
9135 case REALPART_EXPR:
9136 op0 = expand_normal (TREE_OPERAND (exp, 0));
9137 return read_complex_part (op0, false);
9139 case IMAGPART_EXPR:
9140 op0 = expand_normal (TREE_OPERAND (exp, 0));
9141 return read_complex_part (op0, true);
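/* For illustration (a rough sketch): the three complex cases above
   reduce to accesses of the scalar halves.  In C99 terms, for
   hypothetical doubles re and im,

       double _Complex z = re + im * I;   // COMPLEX_EXPR
       double r = __real__ z;             // REALPART_EXPR
       double i = __imag__ z;             // IMAGPART_EXPR

   write_complex_part stores one half and read_complex_part reads one
   half, the boolean argument selecting the imaginary part.  */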
9143 case RESX_EXPR:
9144 expand_resx_expr (exp);
9145 return const0_rtx;
9147 case TRY_CATCH_EXPR:
9148 case CATCH_EXPR:
9149 case EH_FILTER_EXPR:
9150 case TRY_FINALLY_EXPR:
9151 /* Lowered by tree-eh.c. */
9152 gcc_unreachable ();
9154 case WITH_CLEANUP_EXPR:
9155 case CLEANUP_POINT_EXPR:
9156 case TARGET_EXPR:
9157 case CASE_LABEL_EXPR:
9158 case VA_ARG_EXPR:
9159 case BIND_EXPR:
9160 case INIT_EXPR:
9161 case CONJ_EXPR:
9162 case COMPOUND_EXPR:
9163 case PREINCREMENT_EXPR:
9164 case PREDECREMENT_EXPR:
9165 case POSTINCREMENT_EXPR:
9166 case POSTDECREMENT_EXPR:
9167 case LOOP_EXPR:
9168 case EXIT_EXPR:
9169 case TRUTH_ANDIF_EXPR:
9170 case TRUTH_ORIF_EXPR:
9171 /* Lowered by gimplify.c. */
9172 gcc_unreachable ();
9174 case CHANGE_DYNAMIC_TYPE_EXPR:
9175 /* This is ignored at the RTL level. The tree level sets
9176 DECL_POINTER_ALIAS_SET of any variable to 0, which is
9177 overkill for the RTL layer but is all that we can
9178 represent. */
9179 return const0_rtx;
9181 case EXC_PTR_EXPR:
9182 return get_exception_pointer ();
9184 case FILTER_EXPR:
9185 return get_exception_filter ();
9187 case FDESC_EXPR:
9188 /* Function descriptors are not valid except as initialization
9189 constants, and should not be expanded. */
9190 gcc_unreachable ();
9192 case SWITCH_EXPR:
9193 expand_case (exp);
9194 return const0_rtx;
9196 case LABEL_EXPR:
9197 expand_label (TREE_OPERAND (exp, 0));
9198 return const0_rtx;
9200 case ASM_EXPR:
9201 expand_asm_expr (exp);
9202 return const0_rtx;
9204 case WITH_SIZE_EXPR:
9205 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9206 have pulled out the size to use in whatever context it needed. */
9207 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
9208 modifier, alt_rtl);
9210 case REALIGN_LOAD_EXPR:
9212 tree oprnd0 = TREE_OPERAND (exp, 0);
9213 tree oprnd1 = TREE_OPERAND (exp, 1);
9214 tree oprnd2 = TREE_OPERAND (exp, 2);
9215 rtx op2;
9217 this_optab = optab_for_tree_code (code, type);
9218 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9219 op2 = expand_normal (oprnd2);
9220 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9221 target, unsignedp);
9222 gcc_assert (temp);
9223 return temp;
9226 case DOT_PROD_EXPR:
9228 tree oprnd0 = TREE_OPERAND (exp, 0);
9229 tree oprnd1 = TREE_OPERAND (exp, 1);
9230 tree oprnd2 = TREE_OPERAND (exp, 2);
9231 rtx op2;
9233 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9234 op2 = expand_normal (oprnd2);
9235 target = expand_widen_pattern_expr (exp, op0, op1, op2,
9236 target, unsignedp);
9237 return target;
9240 case WIDEN_SUM_EXPR:
9242 tree oprnd0 = TREE_OPERAND (exp, 0);
9243 tree oprnd1 = TREE_OPERAND (exp, 1);
9245 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9246 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
9247 target, unsignedp);
9248 return target;
9251 case REDUC_MAX_EXPR:
9252 case REDUC_MIN_EXPR:
9253 case REDUC_PLUS_EXPR:
9255 op0 = expand_normal (TREE_OPERAND (exp, 0));
9256 this_optab = optab_for_tree_code (code, type);
9257 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9258 gcc_assert (temp);
9259 return temp;
9262 case VEC_EXTRACT_EVEN_EXPR:
9263 case VEC_EXTRACT_ODD_EXPR:
9265 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9266 NULL_RTX, &op0, &op1, 0);
9267 this_optab = optab_for_tree_code (code, type);
9268 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9269 OPTAB_WIDEN);
9270 gcc_assert (temp);
9271 return temp;
9274 case VEC_INTERLEAVE_HIGH_EXPR:
9275 case VEC_INTERLEAVE_LOW_EXPR:
9277 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9278 NULL_RTX, &op0, &op1, 0);
9279 this_optab = optab_for_tree_code (code, type);
9280 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9281 OPTAB_WIDEN);
9282 gcc_assert (temp);
9283 return temp;
9286 case VEC_LSHIFT_EXPR:
9287 case VEC_RSHIFT_EXPR:
9289 target = expand_vec_shift_expr (exp, target);
9290 return target;
9293 case VEC_UNPACK_HI_EXPR:
9294 case VEC_UNPACK_LO_EXPR:
9296 op0 = expand_normal (TREE_OPERAND (exp, 0));
9297 this_optab = optab_for_tree_code (code, type);
9298 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
9299 target, unsignedp);
9300 gcc_assert (temp);
9301 return temp;
9304 case VEC_UNPACK_FLOAT_HI_EXPR:
9305 case VEC_UNPACK_FLOAT_LO_EXPR:
9307 op0 = expand_normal (TREE_OPERAND (exp, 0));
9308 /* The signedness is determined from the input operand. */
9309 this_optab = optab_for_tree_code (code,
9310 TREE_TYPE (TREE_OPERAND (exp, 0)));
9311 temp = expand_widen_pattern_expr
9312 (exp, op0, NULL_RTX, NULL_RTX,
9313 target, TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
9315 gcc_assert (temp);
9316 return temp;
9319 case VEC_WIDEN_MULT_HI_EXPR:
9320 case VEC_WIDEN_MULT_LO_EXPR:
9322 tree oprnd0 = TREE_OPERAND (exp, 0);
9323 tree oprnd1 = TREE_OPERAND (exp, 1);
9325 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9326 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
9327 target, unsignedp);
9328 gcc_assert (target);
9329 return target;
9332 case VEC_PACK_TRUNC_EXPR:
9333 case VEC_PACK_SAT_EXPR:
9334 case VEC_PACK_FIX_TRUNC_EXPR:
9336 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9337 goto binop;
9340 case OMP_ATOMIC_LOAD:
9341 case OMP_ATOMIC_STORE:
9342 /* OMP expansion is not run when there were errors, so these codes
9343 can get here. */
9344 gcc_assert (errorcount != 0);
9345 return NULL_RTX;
9347 default:
9348 return lang_hooks.expand_expr (exp, original_target, tmode,
9349 modifier, alt_rtl);
9352 /* Here to do an ordinary binary operator. */
9353 binop:
9354 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9355 subtarget, &op0, &op1, 0);
9356 binop2:
9357 this_optab = optab_for_tree_code (code, type);
9358 binop3:
9359 if (modifier == EXPAND_STACK_PARM)
9360 target = 0;
9361 temp = expand_binop (mode, this_optab, op0, op1, target,
9362 unsignedp, OPTAB_LIB_WIDEN);
9363 gcc_assert (temp);
9364 return REDUCE_BIT_FIELD (temp);
9366 #undef REDUCE_BIT_FIELD
9368 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9369 signedness of TYPE), possibly returning the result in TARGET. */
9370 static rtx
9371 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9373 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9374 if (target && GET_MODE (target) != GET_MODE (exp))
9375 target = 0;
9376 /* For constant values, reduce using build_int_cst_type. */
9377 if (GET_CODE (exp) == CONST_INT)
9379 HOST_WIDE_INT value = INTVAL (exp);
9380 tree t = build_int_cst_type (type, value);
9381 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9383 else if (TYPE_UNSIGNED (type))
9385 rtx mask;
9386 if (prec < HOST_BITS_PER_WIDE_INT)
9387 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
9388 GET_MODE (exp));
9389 else
9390 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
9391 ((unsigned HOST_WIDE_INT) 1
9392 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
9393 GET_MODE (exp));
9394 return expand_and (GET_MODE (exp), exp, mask, target);
9396 else
9398 tree count = build_int_cst (NULL_TREE,
9399 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9400 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9401 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
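/* For illustration (a rough sketch, assuming a hypothetical 3-bit
   field held in a 32-bit word): the unsigned path masks,

       x & 0x7

   while the signed path sign-extends with a shift pair,

       (int) (x << 29) >> 29

   so x = 5 (binary 101) reduces to 5 unsigned but to -3 signed,
   because bit 2 is replicated into the upper bits.  */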
9405 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9406 when applied to the address of EXP produces an address known to be
9407 aligned to more than BIGGEST_ALIGNMENT. */
9409 static int
9410 is_aligning_offset (const_tree offset, const_tree exp)
9412 /* Strip off any conversions. */
9413 while (TREE_CODE (offset) == NOP_EXPR
9414 || TREE_CODE (offset) == CONVERT_EXPR)
9415 offset = TREE_OPERAND (offset, 0);
9417 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9418 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9419 if (TREE_CODE (offset) != BIT_AND_EXPR
9420 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9421 || compare_tree_int (TREE_OPERAND (offset, 1),
9422 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9423 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9424 return 0;
9426 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9427 It must be NEGATE_EXPR. Then strip any more conversions. */
9428 offset = TREE_OPERAND (offset, 0);
9429 while (TREE_CODE (offset) == NOP_EXPR
9430 || TREE_CODE (offset) == CONVERT_EXPR)
9431 offset = TREE_OPERAND (offset, 0);
9433 if (TREE_CODE (offset) != NEGATE_EXPR)
9434 return 0;
9436 offset = TREE_OPERAND (offset, 0);
9437 while (TREE_CODE (offset) == NOP_EXPR
9438 || TREE_CODE (offset) == CONVERT_EXPR)
9439 offset = TREE_OPERAND (offset, 0);
9441 /* This must now be the address of EXP. */
9442 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
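/* For illustration (a rough sketch of the shape recognized above):
   code that aligns a buffer to a hypothetical boundary ALIGN produces
   exactly this pattern,

       char *p = buf + ((- (uintptr_t) buf) & (ALIGN - 1));

   i.e. a BIT_AND_EXPR of a NEGATE_EXPR of the object's address with a
   mask one less than a power of two, possibly with conversions wrapped
   around each level.  */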
9445 /* Return the tree node if ARG corresponds to a string constant, or zero
9446 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9447 in bytes within the string that ARG is accessing. The type of the
9448 offset will be `sizetype'. */
9450 tree
9451 string_constant (tree arg, tree *ptr_offset)
9453 tree array, offset, lower_bound;
9454 STRIP_NOPS (arg);
9456 if (TREE_CODE (arg) == ADDR_EXPR)
9458 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9460 *ptr_offset = size_zero_node;
9461 return TREE_OPERAND (arg, 0);
9463 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9465 array = TREE_OPERAND (arg, 0);
9466 offset = size_zero_node;
9468 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9470 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9471 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9472 if (TREE_CODE (array) != STRING_CST
9473 && TREE_CODE (array) != VAR_DECL)
9474 return 0;
9476 /* Check if the array has a nonzero lower bound. */
9477 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9478 if (!integer_zerop (lower_bound))
9480 /* If the offset and base aren't both constants, return 0. */
9481 if (TREE_CODE (lower_bound) != INTEGER_CST)
9482 return 0;
9483 if (TREE_CODE (offset) != INTEGER_CST)
9484 return 0;
9485 /* Adjust offset by the lower bound. */
9486 offset = size_diffop (fold_convert (sizetype, offset),
9487 fold_convert (sizetype, lower_bound));
9490 else
9491 return 0;
9493 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9495 tree arg0 = TREE_OPERAND (arg, 0);
9496 tree arg1 = TREE_OPERAND (arg, 1);
9498 STRIP_NOPS (arg0);
9499 STRIP_NOPS (arg1);
9501 if (TREE_CODE (arg0) == ADDR_EXPR
9502 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9503 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9505 array = TREE_OPERAND (arg0, 0);
9506 offset = arg1;
9508 else if (TREE_CODE (arg1) == ADDR_EXPR
9509 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9510 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9512 array = TREE_OPERAND (arg1, 0);
9513 offset = arg0;
9515 else
9516 return 0;
9518 else
9519 return 0;
9521 if (TREE_CODE (array) == STRING_CST)
9523 *ptr_offset = fold_convert (sizetype, offset);
9524 return array;
9526 else if (TREE_CODE (array) == VAR_DECL)
9528 int length;
9530 /* Variables initialized to string literals can be handled too. */
9531 if (DECL_INITIAL (array) == NULL_TREE
9532 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9533 return 0;
9535 /* The array must be read-only, non-volatile, and bind locally. */
9536 if (! TREE_READONLY (array)
9537 || TREE_SIDE_EFFECTS (array)
9538 || ! targetm.binds_local_p (array))
9539 return 0;
9541 /* Avoid const char foo[4] = "abcde"; */
9542 if (DECL_SIZE_UNIT (array) == NULL_TREE
9543 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9544 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9545 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9546 return 0;
9548 /* If the variable is bigger than the string literal, OFFSET must be
9549 constant and within the bounds of the string literal. */
9550 offset = fold_convert (sizetype, offset);
9551 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9552 && (! host_integerp (offset, 1)
9553 || compare_tree_int (offset, length) >= 0))
9554 return 0;
9556 *ptr_offset = offset;
9557 return DECL_INITIAL (array);
9560 return 0;
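/* For illustration (a rough sketch of the forms recognized above;
   names hypothetical): given

       static const char arr[] = "abc";

   the tree for `"hello" + 2' yields the STRING_CST "hello" with
   *PTR_OFFSET = 2, and the tree for `&arr[1]' yields the initializer
   "abc" with *PTR_OFFSET = 1; both offsets come back in sizetype.  */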
9563 /* Generate code to calculate EXP using a store-flag instruction
9564 and return an rtx for the result. EXP is either a comparison
9565 or a TRUTH_NOT_EXPR whose operand is a comparison.
9567 If TARGET is nonzero, store the result there if convenient.
9569 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9570 cheap.
9572 Return zero if there is no suitable set-flag instruction
9573 available on this machine.
9575 Once expand_expr has been called on the arguments of the comparison,
9576 we are committed to doing the store flag, since it is not safe to
9577 re-evaluate the expression. We emit the store-flag insn by calling
9578 emit_store_flag, but only expand the arguments if we have a reason
9579 to believe that emit_store_flag will be successful. If we think that
9580 it will, but it isn't, we have to simulate the store-flag with a
9581 set/jump/set sequence. */
9583 static rtx
9584 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9586 enum rtx_code code;
9587 tree arg0, arg1, type;
9588 tree tem;
9589 enum machine_mode operand_mode;
9590 int invert = 0;
9591 int unsignedp;
9592 rtx op0, op1;
9593 enum insn_code icode;
9594 rtx subtarget = target;
9595 rtx result, label;
9597 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9598 result at the end. We can't simply invert the test since it would
9599 have already been inverted if it were valid. This case occurs for
9600 some floating-point comparisons. */
9602 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9603 invert = 1, exp = TREE_OPERAND (exp, 0);
9605 arg0 = TREE_OPERAND (exp, 0);
9606 arg1 = TREE_OPERAND (exp, 1);
9608 /* Don't crash if the comparison was erroneous. */
9609 if (arg0 == error_mark_node || arg1 == error_mark_node)
9610 return const0_rtx;
9612 type = TREE_TYPE (arg0);
9613 operand_mode = TYPE_MODE (type);
9614 unsignedp = TYPE_UNSIGNED (type);
9616 /* We won't bother with BLKmode store-flag operations because it would mean
9617 passing a lot of information to emit_store_flag. */
9618 if (operand_mode == BLKmode)
9619 return 0;
9621 /* We won't bother with store-flag operations involving function pointers
9622 when function pointers must be canonicalized before comparisons. */
9623 #ifdef HAVE_canonicalize_funcptr_for_compare
9624 if (HAVE_canonicalize_funcptr_for_compare
9625 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9626 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9627 == FUNCTION_TYPE))
9628 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9629 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9630 == FUNCTION_TYPE))))
9631 return 0;
9632 #endif
9634 STRIP_NOPS (arg0);
9635 STRIP_NOPS (arg1);
9637 /* Get the rtx comparison code to use. We know that EXP is a comparison
9638 operation of some type. Some comparisons against 1 and -1 can be
9639 converted to comparisons with zero. Do so here so that the tests
9640 below will be aware that we have a comparison with zero. These
9641 tests will not catch constants in the first operand, but constants
9642 are rarely passed as the first operand. */
9644 switch (TREE_CODE (exp))
9646 case EQ_EXPR:
9647 code = EQ;
9648 break;
9649 case NE_EXPR:
9650 code = NE;
9651 break;
9652 case LT_EXPR:
9653 if (integer_onep (arg1))
9654 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9655 else
9656 code = unsignedp ? LTU : LT;
9657 break;
9658 case LE_EXPR:
9659 if (! unsignedp && integer_all_onesp (arg1))
9660 arg1 = integer_zero_node, code = LT;
9661 else
9662 code = unsignedp ? LEU : LE;
9663 break;
9664 case GT_EXPR:
9665 if (! unsignedp && integer_all_onesp (arg1))
9666 arg1 = integer_zero_node, code = GE;
9667 else
9668 code = unsignedp ? GTU : GT;
9669 break;
9670 case GE_EXPR:
9671 if (integer_onep (arg1))
9672 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9673 else
9674 code = unsignedp ? GEU : GE;
9675 break;
9677 case UNORDERED_EXPR:
9678 code = UNORDERED;
9679 break;
9680 case ORDERED_EXPR:
9681 code = ORDERED;
9682 break;
9683 case UNLT_EXPR:
9684 code = UNLT;
9685 break;
9686 case UNLE_EXPR:
9687 code = UNLE;
9688 break;
9689 case UNGT_EXPR:
9690 code = UNGT;
9691 break;
9692 case UNGE_EXPR:
9693 code = UNGE;
9694 break;
9695 case UNEQ_EXPR:
9696 code = UNEQ;
9697 break;
9698 case LTGT_EXPR:
9699 code = LTGT;
9700 break;
9702 default:
9703 gcc_unreachable ();
9706 /* Put a constant second. */
9707 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
9708 || TREE_CODE (arg0) == FIXED_CST)
9710 tem = arg0; arg0 = arg1; arg1 = tem;
9711 code = swap_condition (code);
9714 /* If this is an equality or inequality test of a single bit, we can
9715 do this by shifting the bit being tested to the low-order bit and
9716 masking the result with the constant 1. If the condition was EQ,
9717 we xor it with 1. This does not require an scc insn and is faster
9718 than an scc insn even if we have it.
9720 The code to make this transformation was moved into fold_single_bit_test,
9721 so we just call into the folder and expand its result. */
9723 if ((code == NE || code == EQ)
9724 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9725 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9727 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9728 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9729 arg0, arg1, type),
9730 target, VOIDmode, EXPAND_NORMAL);
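/* For illustration (a rough sketch of the fold_single_bit_test result
   used above): a hypothetical test of bit 3,

       (x & 8) != 0

   becomes the shift-and-mask `(x >> 3) & 1', and the EQ form gains a
   final XOR: `((x >> 3) & 1) ^ 1'.  Neither form needs an scc
   instruction.  */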
9733 /* Now see if we are likely to be able to do this. Return if not. */
9734 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9735 return 0;
9737 icode = setcc_gen_code[(int) code];
9739 if (icode == CODE_FOR_nothing)
9741 enum machine_mode wmode;
9743 for (wmode = operand_mode;
9744 icode == CODE_FOR_nothing && wmode != VOIDmode;
9745 wmode = GET_MODE_WIDER_MODE (wmode))
9746 icode = optab_handler (cstore_optab, wmode)->insn_code;
9749 if (icode == CODE_FOR_nothing
9750 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9752 /* We can only do this if it is one of the special cases that
9753 can be handled without an scc insn. */
9754 if ((code == LT && integer_zerop (arg1))
9755 || (! only_cheap && code == GE && integer_zerop (arg1)))
9757 else if (! only_cheap && (code == NE || code == EQ)
9758 && TREE_CODE (type) != REAL_TYPE
9759 && ((optab_handler (abs_optab, operand_mode)->insn_code
9760 != CODE_FOR_nothing)
9761 || (optab_handler (ffs_optab, operand_mode)->insn_code
9762 != CODE_FOR_nothing)))
9764 else
9765 return 0;
9768 if (! get_subtarget (target)
9769 || GET_MODE (subtarget) != operand_mode)
9770 subtarget = 0;
9772 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9774 if (target == 0)
9775 target = gen_reg_rtx (mode);
9777 result = emit_store_flag (target, code, op0, op1,
9778 operand_mode, unsignedp, 1);
9780 if (result)
9782 if (invert)
9783 result = expand_binop (mode, xor_optab, result, const1_rtx,
9784 result, 0, OPTAB_LIB_WIDEN);
9785 return result;
9788 /* If this failed, we have to do this with set/compare/jump/set code. */
9789 if (!REG_P (target)
9790 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9791 target = gen_reg_rtx (GET_MODE (target));
9793 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9794 label = gen_label_rtx ();
9795 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9796 NULL_RTX, label);
9798 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9799 emit_label (label);
9801 return target;
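/* For illustration (a rough sketch of the set/jump/set fallback
   above, for a hypothetical comparison `a < b' with INVERT clear):

       target = 1;
       if (a < b) goto label;
       target = 0;
     label:

   With INVERT set, the two constants are simply swapped.  */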
9805 /* Stubs in case we haven't got a casesi insn. */
9806 #ifndef HAVE_casesi
9807 # define HAVE_casesi 0
9808 # define gen_casesi(a, b, c, d, e) (0)
9809 # define CODE_FOR_casesi CODE_FOR_nothing
9810 #endif
9812 /* If the machine does not have a case insn that compares the bounds,
9813 this means extra overhead for dispatch tables, which raises the
9814 threshold for using them. */
9815 #ifndef CASE_VALUES_THRESHOLD
9816 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9817 #endif /* CASE_VALUES_THRESHOLD */
9819 unsigned int
9820 case_values_threshold (void)
9822 return CASE_VALUES_THRESHOLD;
9825 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9826 0 otherwise (i.e. if there is no casesi instruction). */
9827 int
9828 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9829 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
9830 rtx fallback_label ATTRIBUTE_UNUSED)
9832 enum machine_mode index_mode = SImode;
9833 int index_bits = GET_MODE_BITSIZE (index_mode);
9834 rtx op1, op2, index;
9835 enum machine_mode op_mode;
9837 if (! HAVE_casesi)
9838 return 0;
9840 /* Convert the index to SImode. */
9841 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9843 enum machine_mode omode = TYPE_MODE (index_type);
9844 rtx rangertx = expand_normal (range);
9846 /* We must handle the endpoints in the original mode. */
9847 index_expr = build2 (MINUS_EXPR, index_type,
9848 index_expr, minval);
9849 minval = integer_zero_node;
9850 index = expand_normal (index_expr);
9851 if (default_label)
9852 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9853 omode, 1, default_label);
9854 /* Now we can safely truncate. */
9855 index = convert_to_mode (index_mode, index, 0);
9857 else
9859 if (TYPE_MODE (index_type) != index_mode)
9861 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9862 index_expr = fold_convert (index_type, index_expr);
9865 index = expand_normal (index_expr);
9868 do_pending_stack_adjust ();
9870 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9871 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9872 (index, op_mode))
9873 index = copy_to_mode_reg (op_mode, index);
9875 op1 = expand_normal (minval);
9877 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9878 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9879 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9880 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9881 (op1, op_mode))
9882 op1 = copy_to_mode_reg (op_mode, op1);
9884 op2 = expand_normal (range);
9886 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9887 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9888 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9889 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9890 (op2, op_mode))
9891 op2 = copy_to_mode_reg (op_mode, op2);
9893 emit_jump_insn (gen_casesi (index, op1, op2,
9894 table_label, !default_label
9895 ? fallback_label : default_label));
9896 return 1;
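/* For illustration (a rough sketch of the wide-index path above): for
   a hypothetical `switch' whose index type is wider than SImode, the
   bounds check is done in the original mode before truncating:

       idx = x - minval;         // MINUS_EXPR in the wide mode
       if (range < idx)          // unsigned comparison
         goto default_case;
       // now (SImode) idx is safe to dispatch on

   The single unsigned comparison rejects both directions at once,
   since an index below minval wraps to a huge unsigned value.  */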
9899 /* Attempt to generate a tablejump instruction; same concept. */
9900 #ifndef HAVE_tablejump
9901 #define HAVE_tablejump 0
9902 #define gen_tablejump(x, y) (0)
9903 #endif
9905 /* Subroutine of the next function.
9907 INDEX is the value being switched on, with the lowest value
9908 in the table already subtracted.
9909 MODE is its expected mode (needed if INDEX is constant).
9910 RANGE is the length of the jump table.
9911 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9913 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9914 index value is out of range. */
9916 static void
9917 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9918 rtx default_label)
9920 rtx temp, vector;
9922 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
9923 cfun->cfg->max_jumptable_ents = INTVAL (range);
9925 /* Do an unsigned comparison (in the proper mode) between the index
9926 expression and the value which represents the length of the range.
9927 Since we just finished subtracting the lower bound of the range
9928 from the index expression, this comparison allows us to simultaneously
9929 check that the original index expression value is both greater than
9930 or equal to the minimum value of the range and less than or equal to
9931 the maximum value of the range. */
9933 if (default_label)
9934 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9935 default_label);
9937 /* If index is in range, it must fit in Pmode.
9938 Convert to Pmode so we can index with it. */
9939 if (mode != Pmode)
9940 index = convert_to_mode (Pmode, index, 1);
9942 /* Don't let a MEM slip through, because then the INDEX that comes
9943 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9944 and break_out_memory_refs will go to work on it and mess it up. */
9945 #ifdef PIC_CASE_VECTOR_ADDRESS
9946 if (flag_pic && !REG_P (index))
9947 index = copy_to_mode_reg (Pmode, index);
9948 #endif
9950 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9951 GET_MODE_SIZE, because this indicates how large insns are. The other
9952 uses should all be Pmode, because they are addresses. This code
9953 could fail if addresses and insns are not the same size. */
9954 index = gen_rtx_PLUS (Pmode,
9955 gen_rtx_MULT (Pmode, index,
9956 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9957 gen_rtx_LABEL_REF (Pmode, table_label));
9958 #ifdef PIC_CASE_VECTOR_ADDRESS
9959 if (flag_pic)
9960 index = PIC_CASE_VECTOR_ADDRESS (index);
9961 else
9962 #endif
9963 index = memory_address (CASE_VECTOR_MODE, index);
9964 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9965 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9966 convert_move (temp, vector, 0);
9968 emit_jump_insn (gen_tablejump (temp, table_label));
9970 /* If we are generating PIC code or if the table is PC-relative, the
9971 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9972 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9973 emit_barrier ();
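/* For illustration (a rough sketch of the dispatch built above,
   written with GNU C's computed-goto extension; labels hypothetical):

       static const void *table[] = { &&L0, &&L1, &&L2 };
       if (idx > 2)              // unsigned, bounds both ends
         goto dflt;
       goto *table[idx];

   The real code forms the entry address as table_label plus idx times
   GET_MODE_SIZE (CASE_VECTOR_MODE), loads the entry, and jumps
   through it.  */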
9976 int
9977 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9978 rtx table_label, rtx default_label)
9980 rtx index;
9982 if (! HAVE_tablejump)
9983 return 0;
9985 index_expr = fold_build2 (MINUS_EXPR, index_type,
9986 fold_convert (index_type, index_expr),
9987 fold_convert (index_type, minval));
9988 index = expand_normal (index_expr);
9989 do_pending_stack_adjust ();
9991 do_tablejump (index, TYPE_MODE (index_type),
9992 convert_modes (TYPE_MODE (index_type),
9993 TYPE_MODE (TREE_TYPE (range)),
9994 expand_normal (range),
9995 TYPE_UNSIGNED (TREE_TYPE (range))),
9996 table_label, default_label);
9997 return 1;
10000 /* Nonzero if the mode is a valid vector mode for this architecture.
10001 This returns nonzero even if there is no hardware support for the
10002 vector mode, but we can emulate with narrower modes. */
10004 int
10005 vector_mode_valid_p (enum machine_mode mode)
10007 enum mode_class class = GET_MODE_CLASS (mode);
10008 enum machine_mode innermode;
10010 /* Doh! What's going on? */
10011 if (class != MODE_VECTOR_INT
10012 && class != MODE_VECTOR_FLOAT
10013 && class != MODE_VECTOR_FRACT
10014 && class != MODE_VECTOR_UFRACT
10015 && class != MODE_VECTOR_ACCUM
10016 && class != MODE_VECTOR_UACCUM)
10017 return 0;
10019 /* Hardware support. Woo hoo! */
10020 if (targetm.vector_mode_supported_p (mode))
10021 return 1;
10023 innermode = GET_MODE_INNER (mode);
10025 /* We should probably return 1 if requesting V4DI and we have no DI
10026 but do have V2DI, but that case is probably very unlikely. */
10028 /* If we have support for the inner mode, we can safely emulate it.
10029 We may not have V2DI, but we can emulate it with a pair of DIs. */
10030 return targetm.scalar_mode_supported_p (innermode);
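/* For example (a rough sketch; fields hypothetical): a V2DI addition
   on a target with DImode support but no vector unit can be carried
   out piecewise, which is why scalar support of the inner mode
   suffices here:

       res.lo = a.lo + b.lo;     // two DImode adds stand in
       res.hi = a.hi + b.hi;     // for one V2DI add
*/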
10033 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10034 static rtx
10035 const_vector_from_tree (tree exp)
10037 rtvec v;
10038 int units, i;
10039 tree link, elt;
10040 enum machine_mode inner, mode;
10042 mode = TYPE_MODE (TREE_TYPE (exp));
10044 if (initializer_zerop (exp))
10045 return CONST0_RTX (mode);
10047 units = GET_MODE_NUNITS (mode);
10048 inner = GET_MODE_INNER (mode);
10050 v = rtvec_alloc (units);
10052 link = TREE_VECTOR_CST_ELTS (exp);
10053 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10055 elt = TREE_VALUE (link);
10057 if (TREE_CODE (elt) == REAL_CST)
10058 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10059 inner);
10060 else if (TREE_CODE (elt) == FIXED_CST)
10061 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10062 inner);
10063 else
10064 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10065 TREE_INT_CST_HIGH (elt),
10066 inner);
10069 /* Initialize remaining elements to 0. */
10070 for (; i < units; ++i)
10071 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10073 return gen_rtx_CONST_VECTOR (mode, v);
10075 #include "gt-expr.h"