/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"
#include "df.h"
#include "diagnostic.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
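
/* A hedged usage sketch (not part of the original file): this is how a
   caller such as emit_block_move_hints consults one of these predicates
   before committing to a by-pieces strategy; X, Y, SIZE and ALIGN are
   hypothetical locals.

     if (GET_CODE (size) == CONST_INT
         && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);

   Each predicate compares the estimated insn count from
   move_by_pieces_ninsns against the target's break-even ratio
   (MOVE_RATIO, CLEAR_RATIO or SET_RATIO).  */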
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            SET_REGNO (reg, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));


  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
                   != GET_MODE_PRECISION (to_mode))
                  || (DECIMAL_FLOAT_MODE_P (from_mode)
                      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
        /* Conversion between decimal float and binary float, same size.  */
        tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */                     /* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
                      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
                  != CODE_FOR_nothing);

      if (to_mode == full_mode)
        {
          emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
                          to, from, UNKNOWN);
          return;
        }

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
                      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
              ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
         we won't saturate the result.
         Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
          && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
        expand_fixed_convert (to, from, 0, 0);
      else
        expand_fixed_convert (to, from, 0, 1);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          rtx word_to = gen_reg_rtx (word_mode);
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
            }
          convert_move (word_to, from, unsignedp);
          emit_unop_insn (code, to, word_to, equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_cst (NULL_TREE,
                                        GET_MODE_BITSIZE (to_mode)
                                        - GET_MODE_BITSIZE (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
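
/* A hedged usage sketch (not part of the original file): widening a
   QImode pseudo into an SImode pseudo with zero-extension.  QREG and
   SREG are hypothetical locals.

     rtx qreg = gen_reg_rtx (QImode);
     rtx sreg = gen_reg_rtx (SImode);
     convert_move (sreg, qreg, 1);

   The final argument 1 requests zero-extension.  On most targets this
   emits a single zero_extend insn; otherwise convert_move falls back to
   an intermediate mode or to the shift-pair path above.  */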
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
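
/* A hedged worked example (not part of the original file): narrowing the
   constant 300 from HImode to QImode.

     rtx q = convert_modes (QImode, HImode, GEN_INT (300), 0);

   This takes the gen_lowpart path above, which truncates the constant to
   the narrower mode: 300 mod 256 = 44, so Q is (const_int 44).  */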
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
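
/* For example (a hedged illustration, not part of the original file): on
   a host with a 64-bit HOST_WIDE_INT and a target whose MOVE_MAX_PIECES
   is 8, STORE_MAX_PIECES is MIN (8, 16) = 8, so store_by_pieces may emit
   immediate stores of up to 8 bytes at a time.  */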
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
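
/* A hedged usage sketch (not part of the original file): attempting the
   by-pieces copy only when profitable; DST, SRC, LEN and ALIGN are
   hypothetical locals.

     if (can_move_by_pieces (len, align))
       move_by_pieces (dst, src, len, align, 0);
     else
       emit_block_move (dst, src, GEN_INT (len), BLOCK_OP_NORMAL);  */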
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
   mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
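
/* A hedged worked example (not part of the original file): with 32-bit
   words, full alignment, and MOVE_MAX_PIECES of 4, a 7-byte copy costs

     7 / 4 = 1 SImode move, remainder 3
     3 / 2 = 1 HImode move, remainder 1
     1 / 1 = 1 QImode move, remainder 0

   so move_by_pieces_ninsns returns 3, which MOVE_BY_PIECES_P then
   compares against MOVE_RATIO.  */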
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          gcc_unreachable ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
                       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
                                       expected_align, expected_size))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
                                          method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
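
/* A hedged usage sketch (not part of the original file): copying a
   32-byte BLKmode block.  DST_ADDR and SRC_ADDR are hypothetical pseudos
   holding the two addresses.

     rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
     rtx src = gen_rtx_MEM (BLKmode, src_addr);
     emit_block_move (dst, src, GEN_INT (32), BLOCK_OP_NORMAL);

   Depending on size, alignment and target support, this expands to a
   by-pieces sequence, a movmem pattern, a memcpy libcall, or a byte
   loop.  */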
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  if (OUTGOING_REG_PARM_STACK_SPACE)
    {
      tree fn;
      fn = emit_block_move_libcall_fn (false);
      if (REG_PARM_STACK_SPACE (fn) != 0)
        return false;
    }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
        if (!tmp || !REG_P (tmp))
          return false;
        if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
          return false;
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
                            unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op2;
          rtx last = get_last_insn ();
          rtx pat;

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          if (insn_data[(int) code].n_operands == 4)
            pat = GEN_FCN ((int) code) (x, y, op2, opalign);
          else
            pat = GEN_FCN ((int) code) (x, y, op2, opalign,
                                        GEN_INT (expected_align),
                                        GEN_INT (expected_size));
          if (pat)
            {
              emit_insn (pat);
              volatile_ok = save_volatile_ok;
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
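
/* In C terms (a hedged paraphrase, not part of the original file), the
   call built above behaves like

     retval = memcpy (dst, src, size);

   using the normal call conventions rather than the libcall conventions,
   so the pointer return value remains usable by the caller.  */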
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}
static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);
}
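
/* The RTL emitted above corresponds to this C loop (a hedged paraphrase,
   not part of the original file), copying one byte per iteration:

     for (iter = 0; iter < size; iter++)
       x[iter] = y[iter];  */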
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (COMPLEX_MODE_P (mode)
               && GET_MODE (src) == mode
               && bytelen == GET_MODE_SIZE (mode))
        /* Let emit_move_complex do the bulk of the work.  */
        tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode);
            }
          else
            {
              rtx mem;

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen, 0);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen, 0);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
      else if (CONSTANT_P (src))
        {
          HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

          if (len == ssize)
            tmps[i] = src;
          else
            {
              rtx first, second;

              gcc_assert (2 * len == ssize);
              split_double (src, &first, &second);
              if (i)
                tmps[i] = second;
              else
                tmps[i] = first;
            }
        }
      else if (REG_P (src) && GET_MODE (src) == mode)
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode);

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
        continue;
      emit_move_insn (d, tmps[i]);
    }
}
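
/* A hedged illustration (not part of the original file) of the PARALLEL
   shape these routines consume: a 16-byte block passed or returned in
   two DImode hard registers r0 and r1 might be described as

     (parallel [(expr_list (reg:DI r0) (const_int 0))
                (expr_list (reg:DI r1) (const_int 8))])

   where each const_int gives the byte offset of that register's piece
   within the block.  */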
1835 /* Similar, but load SRC into new pseudos in a format that looks like
1836 PARALLEL. This can later be fed to emit_group_move to get things
1837 in the right place. */
1840 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1842 rtvec vec;
1843 int i;
1845 vec = rtvec_alloc (XVECLEN (parallel, 0));
1846 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1848 /* Convert the vector to look just like the original PARALLEL, except
1849 with the computed values. */
1850 for (i = 0; i < XVECLEN (parallel, 0); i++)
1852 rtx e = XVECEXP (parallel, 0, i);
1853 rtx d = XEXP (e, 0);
1855 if (d)
1857 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1858 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1860 RTVEC_ELT (vec, i) = e;
1863 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
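/* Illustrative sketch, for exposition only: callers such as the
   argument-loading code in calls.c use this in two phases, so that
   expanding later arguments cannot clobber hard registers that have
   already been loaded:

     temps = emit_group_load_into_temps (reg, value, type, size);
     ... expand the remaining arguments ...
     emit_group_move (reg, temps);  */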
1866 /* Emit code to move a block SRC to block DST, where SRC and DST are
1867 non-consecutive groups of registers, each represented by a PARALLEL. */
1869 void
1870 emit_group_move (rtx dst, rtx src)
1872 int i;
1874 gcc_assert (GET_CODE (src) == PARALLEL
1875 && GET_CODE (dst) == PARALLEL
1876 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1878 /* Skip first entry if NULL. */
1879 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1880 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1881 XEXP (XVECEXP (src, 0, i), 0));
1884 /* Move a group of registers represented by a PARALLEL into pseudos. */
1886 rtx
1887 emit_group_move_into_temps (rtx src)
1889 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1890 int i;
1892 for (i = 0; i < XVECLEN (src, 0); i++)
1894 rtx e = XVECEXP (src, 0, i);
1895 rtx d = XEXP (e, 0);
1897 if (d)
1898 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1899 RTVEC_ELT (vec, i) = e;
1902 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1905 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1906 where SRC is non-consecutive registers represented by a PARALLEL.
1907 SSIZE represents the total size of block ORIG_DST, or -1 if not
1908 known. */
1910 void
1911 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1913 rtx *tmps, dst;
1914 int start, finish, i;
1915 enum machine_mode m = GET_MODE (orig_dst);
1917 gcc_assert (GET_CODE (src) == PARALLEL);
1919 if (!SCALAR_INT_MODE_P (m)
1920 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1922 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1923 if (imode == BLKmode)
1924 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1925 else
1926 dst = gen_reg_rtx (imode);
1927 emit_group_store (dst, src, type, ssize);
1928 if (imode != BLKmode)
1929 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1930 emit_move_insn (orig_dst, dst);
1931 return;
1934 /* Check for a NULL entry, used to indicate that the parameter goes
1935 both on the stack and in registers. */
1936 if (XEXP (XVECEXP (src, 0, 0), 0))
1937 start = 0;
1938 else
1939 start = 1;
1940 finish = XVECLEN (src, 0);
1942 tmps = alloca (sizeof (rtx) * finish);
1944 /* Copy the (probable) hard regs into pseudos. */
1945 for (i = start; i < finish; i++)
1947 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1948 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1950 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1951 emit_move_insn (tmps[i], reg);
1953 else
1954 tmps[i] = reg;
1957 /* If we won't be storing directly into memory, protect the real destination
1958 from strange tricks we might play. */
1959 dst = orig_dst;
1960 if (GET_CODE (dst) == PARALLEL)
1962 rtx temp;
1964 /* We can get a PARALLEL dst if there is a conditional expression in
1965 a return statement. In that case, the dst and src are the same,
1966 so no action is necessary. */
1967 if (rtx_equal_p (dst, src))
1968 return;
1970 /* It is unclear if we can ever reach here, but we may as well handle
1971 it. Allocate a temporary, and split this into a store/load to/from
1972 the temporary. */
1974 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1975 emit_group_store (temp, src, type, ssize);
1976 emit_group_load (dst, temp, type, ssize);
1977 return;
1979 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1981 enum machine_mode outer = GET_MODE (dst);
1982 enum machine_mode inner;
1983 HOST_WIDE_INT bytepos;
1984 bool done = false;
1985 rtx temp;
1987 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1988 dst = gen_reg_rtx (outer);
1990 /* Make life a bit easier for combine. */
1991 /* If the first element of the vector is the low part
1992 of the destination mode, use a paradoxical subreg to
1993 initialize the destination. */
1994 if (start < finish)
1996 inner = GET_MODE (tmps[start]);
1997 bytepos = subreg_lowpart_offset (inner, outer);
1998 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
2000 temp = simplify_gen_subreg (outer, tmps[start],
2001 inner, 0);
2002 if (temp)
2004 emit_move_insn (dst, temp);
2005 done = true;
2006 start++;
2011 /* If the first element wasn't the low part, try the last. */
2012 if (!done
2013 && start < finish - 1)
2015 inner = GET_MODE (tmps[finish - 1]);
2016 bytepos = subreg_lowpart_offset (inner, outer);
2017 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2019 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2020 inner, 0);
2021 if (temp)
2023 emit_move_insn (dst, temp);
2024 done = true;
2025 finish--;
2030 /* Otherwise, simply initialize the result to zero. */
2031 if (!done)
2032 emit_move_insn (dst, CONST0_RTX (outer));
2035 /* Process the pieces. */
2036 for (i = start; i < finish; i++)
2038 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2039 enum machine_mode mode = GET_MODE (tmps[i]);
2040 unsigned int bytelen = GET_MODE_SIZE (mode);
2041 rtx dest = dst;
2043 /* Handle trailing fragments that run over the size of the struct. */
2044 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2046 /* store_bit_field always takes its value from the lsb.
2047 Move the fragment to the lsb if it's not already there. */
2048 if (
2049 #ifdef BLOCK_REG_PADDING
2050 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2051 == (BYTES_BIG_ENDIAN ? upward : downward)
2052 #else
2053 BYTES_BIG_ENDIAN
2054 #endif
2057 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2058 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2059 build_int_cst (NULL_TREE, shift),
2060 tmps[i], 0);
2062 bytelen = ssize - bytepos;
2065 if (GET_CODE (dst) == CONCAT)
2067 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2068 dest = XEXP (dst, 0);
2069 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2071 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2072 dest = XEXP (dst, 1);
2074 else
2076 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2077 dest = assign_stack_temp (GET_MODE (dest),
2078 GET_MODE_SIZE (GET_MODE (dest)), 0);
2079 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2080 tmps[i]);
2081 dst = dest;
2082 break;
2086 /* Optimize the access just a bit. */
2087 if (MEM_P (dest)
2088 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2089 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2090 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2091 && bytelen == GET_MODE_SIZE (mode))
2092 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2093 else
2094 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2095 mode, tmps[i]);
2098 /* Copy from the pseudo into the (probable) hard reg. */
2099 if (orig_dst != dst)
2100 emit_move_insn (orig_dst, dst);
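/* Worked example, for exposition only: with SSIZE == 6 and two SImode
   pieces at byte positions 0 and 4, the second piece overruns the
   structure by 2 bytes.  On a big-endian (padding upward) target the
   loop above shifts it right by (4 - (6 - 4)) * BITS_PER_UNIT == 16
   bits, moving the 2 meaningful bytes down to the lsb for
   store_bit_field, and trims BYTELEN to 2.  */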
2103 /* Generate code to copy a BLKmode object of TYPE out of a
2104 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2105 is null, a stack temporary is created. TGTBLK is returned.
2107 The purpose of this routine is to handle functions that return
2108 BLKmode structures in registers. Some machines (the PA for example)
2109 want to return all small structures in registers regardless of the
2110 structure's alignment. */
2112 rtx
2113 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2115 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2116 rtx src = NULL, dst = NULL;
2117 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2118 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2119 enum machine_mode copy_mode;
2121 if (tgtblk == 0)
2123 tgtblk = assign_temp (build_qualified_type (type,
2124 (TYPE_QUALS (type)
2125 | TYPE_QUAL_CONST)),
2126 0, 1, 1);
2127 preserve_temp_slots (tgtblk);
2130 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2131 into a new pseudo which is a full word. */
2133 if (GET_MODE (srcreg) != BLKmode
2134 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2135 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2137 /* If the structure doesn't take up a whole number of words, see whether
2138 SRCREG is padded on the left or on the right. If it's on the left,
2139 set PADDING_CORRECTION to the number of bits to skip.
2141 In most ABIs, the structure will be returned at the least significant
2142 end of the register, which translates to right padding on little-endian
2143 targets and left padding on big-endian targets. The opposite
2144 holds if the structure is returned at the most significant
2145 end of the register. */
2146 if (bytes % UNITS_PER_WORD != 0
2147 && (targetm.calls.return_in_msb (type)
2148 ? !BYTES_BIG_ENDIAN
2149 : BYTES_BIG_ENDIAN))
2150 padding_correction
2151 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2153 /* Copy the structure BITSIZE bits at a time. If the target lives in
2154 memory, take care of not reading/writing past its end by selecting
2155 a copy mode suited to BITSIZE. This should always be possible given
2156 how it is computed.
2158 We could probably emit more efficient code for machines which do not use
2159 strict alignment, but it doesn't seem worth the effort at the current
2160 time. */
2162 copy_mode = word_mode;
2163 if (MEM_P (tgtblk))
2165 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2166 if (mem_mode != BLKmode)
2167 copy_mode = mem_mode;
2170 for (bitpos = 0, xbitpos = padding_correction;
2171 bitpos < bytes * BITS_PER_UNIT;
2172 bitpos += bitsize, xbitpos += bitsize)
2174 /* We need a new source operand each time xbitpos is on a
2175 word boundary and when xbitpos == padding_correction
2176 (the first time through). */
2177 if (xbitpos % BITS_PER_WORD == 0
2178 || xbitpos == padding_correction)
2179 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2180 GET_MODE (srcreg));
2182 /* We need a new destination operand each time bitpos is on
2183 a word boundary. */
2184 if (bitpos % BITS_PER_WORD == 0)
2185 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2187 /* Use xbitpos for the source extraction (right justified) and
2188 bitpos for the destination store (left justified). */
2189 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2190 extract_bit_field (src, bitsize,
2191 xbitpos % BITS_PER_WORD, 1,
2192 NULL_RTX, copy_mode, copy_mode));
2195 return tgtblk;
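/* Worked example, for exposition only: for a 6-byte structure on a
   32-bit big-endian target whose ABI returns small aggregates at the
   least significant end of the register, bytes % UNITS_PER_WORD == 2,
   so PADDING_CORRECTION == 32 - 2 * 8 == 16; the copy loop starts
   XBITPOS at 16 and thereby skips the 16 padding bits at the start of
   the first source word.  */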
2198 /* Add a USE expression for REG to the (possibly empty) list pointed
2199 to by CALL_FUSAGE. REG must denote a hard register. */
2201 void
2202 use_reg (rtx *call_fusage, rtx reg)
2204 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2206 *call_fusage
2207 = gen_rtx_EXPR_LIST (VOIDmode,
2208 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2211 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2212 starting at REGNO. All of these registers must be hard registers. */
2214 void
2215 use_regs (rtx *call_fusage, int regno, int nregs)
2217 int i;
2219 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2221 for (i = 0; i < nregs; i++)
2222 use_reg (call_fusage, regno_reg_rtx[regno + i]);
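/* Illustrative example, for exposition only: use_regs (&call_fusage, 0, 2)
   records USEs of hard registers 0 and 1 in CALL_FUSAGE, so that the
   call insn they are later attached to is known to read them; by
   itself this emits nothing into the insn stream.  */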
2225 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2226 PARALLEL REGS. This is for calls that pass values in multiple
2227 non-contiguous locations. The Irix 6 ABI has examples of this. */
2229 void
2230 use_group_regs (rtx *call_fusage, rtx regs)
2232 int i;
2234 for (i = 0; i < XVECLEN (regs, 0); i++)
2236 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2238 /* A NULL entry means the parameter goes both on the stack and in
2239 registers. This can also be a MEM for targets that pass values
2240 partially on the stack and partially in registers. */
2241 if (reg != 0 && REG_P (reg))
2242 use_reg (call_fusage, reg);
2247 /* Determine whether the LEN bytes generated by CONSTFUN can be
2248 stored to memory using several move instructions. CONSTFUNDATA is
2249 a pointer which will be passed as argument in every CONSTFUN call.
2250 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2251 a memset operation and false if it's a copy of a constant string.
2252 Return nonzero if a call to store_by_pieces should succeed. */
2254 int
2255 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2256 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2257 void *constfundata, unsigned int align, bool memsetp)
2259 unsigned HOST_WIDE_INT l;
2260 unsigned int max_size;
2261 HOST_WIDE_INT offset = 0;
2262 enum machine_mode mode, tmode;
2263 enum insn_code icode;
2264 int reverse;
2265 rtx cst;
2267 if (len == 0)
2268 return 1;
2270 if (! (memsetp
2271 ? SET_BY_PIECES_P (len, align)
2272 : STORE_BY_PIECES_P (len, align)))
2273 return 0;
2275 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2276 if (align >= GET_MODE_ALIGNMENT (tmode))
2277 align = GET_MODE_ALIGNMENT (tmode);
2278 else
2280 enum machine_mode xmode;
2282 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2283 tmode != VOIDmode;
2284 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2285 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2286 || SLOW_UNALIGNED_ACCESS (tmode, align))
2287 break;
2289 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2292 /* We would first store what we can in the largest integer mode, then go to
2293 successively smaller modes. */
2295 for (reverse = 0;
2296 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2297 reverse++)
2299 l = len;
2300 mode = VOIDmode;
2301 max_size = STORE_MAX_PIECES + 1;
2302 while (max_size > 1)
2304 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2305 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2306 if (GET_MODE_SIZE (tmode) < max_size)
2307 mode = tmode;
2309 if (mode == VOIDmode)
2310 break;
2312 icode = optab_handler (mov_optab, mode)->insn_code;
2313 if (icode != CODE_FOR_nothing
2314 && align >= GET_MODE_ALIGNMENT (mode))
2316 unsigned int size = GET_MODE_SIZE (mode);
2318 while (l >= size)
2320 if (reverse)
2321 offset -= size;
2323 cst = (*constfun) (constfundata, offset, mode);
2324 if (!LEGITIMATE_CONSTANT_P (cst))
2325 return 0;
2327 if (!reverse)
2328 offset += size;
2330 l -= size;
2334 max_size = GET_MODE_SIZE (mode);
2337 /* The code above should have handled everything. */
2338 gcc_assert (!l);
2341 return 1;
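/* Illustrative sketch, for exposition only: a typical CONSTFUN for
   copying a constant string reads a MODE-sized chunk at OFFSET, much
   like builtin_memcpy_read_str in builtins.c:

     static rtx
     example_read_str (void *data, HOST_WIDE_INT offset,
                       enum machine_mode mode)
     {
       const char *str = (const char *) data;
       return c_readstr (str + offset, mode);
     }

   (example_read_str is a hypothetical name.)  can_store_by_pieces
   checks that every constant such a callback produces is legitimate
   before store_by_pieces commits to emitting the moves.  */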
2344 /* Generate several move instructions to store LEN bytes generated by
2345 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2346 pointer which will be passed as argument in every CONSTFUN call.
2347 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2348 a memset operation and false if it's a copy of a constant string.
2349 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2350 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2351 stpcpy. */
2353 rtx
2354 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2355 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2356 void *constfundata, unsigned int align, bool memsetp, int endp)
2358 struct store_by_pieces data;
2360 if (len == 0)
2362 gcc_assert (endp != 2);
2363 return to;
2366 gcc_assert (memsetp
2367 ? SET_BY_PIECES_P (len, align)
2368 : STORE_BY_PIECES_P (len, align));
2369 data.constfun = constfun;
2370 data.constfundata = constfundata;
2371 data.len = len;
2372 data.to = to;
2373 store_by_pieces_1 (&data, align);
2374 if (endp)
2376 rtx to1;
2378 gcc_assert (!data.reverse);
2379 if (data.autinc_to)
2381 if (endp == 2)
2383 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2384 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2385 else
2386 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2387 -1));
2389 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2390 data.offset);
2392 else
2394 if (endp == 2)
2395 --data.offset;
2396 to1 = adjust_address (data.to, QImode, data.offset);
2398 return to1;
2400 else
2401 return data.to;
2404 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2405 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2407 static void
2408 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2410 struct store_by_pieces data;
2412 if (len == 0)
2413 return;
2415 data.constfun = clear_by_pieces_1;
2416 data.constfundata = NULL;
2417 data.len = len;
2418 data.to = to;
2419 store_by_pieces_1 (&data, align);
2422 /* Callback routine for clear_by_pieces.
2423 Return const0_rtx unconditionally. */
2425 static rtx
2426 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2427 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2428 enum machine_mode mode ATTRIBUTE_UNUSED)
2430 return const0_rtx;
2433 /* Subroutine of clear_by_pieces and store_by_pieces.
2434 Generate several move instructions to store LEN bytes of block TO. (A MEM
2435 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2437 static void
2438 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2439 unsigned int align ATTRIBUTE_UNUSED)
2441 rtx to_addr = XEXP (data->to, 0);
2442 unsigned int max_size = STORE_MAX_PIECES + 1;
2443 enum machine_mode mode = VOIDmode, tmode;
2444 enum insn_code icode;
2446 data->offset = 0;
2447 data->to_addr = to_addr;
2448 data->autinc_to
2449 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2450 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2452 data->explicit_inc_to = 0;
2453 data->reverse
2454 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2455 if (data->reverse)
2456 data->offset = data->len;
2458 /* If storing requires more than two move insns,
2459 copy addresses to registers (to make displacements shorter)
2460 and use post-increment if available. */
2461 if (!data->autinc_to
2462 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2464 /* Determine the main mode we'll be using. */
2465 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2466 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2467 if (GET_MODE_SIZE (tmode) < max_size)
2468 mode = tmode;
2470 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2472 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2473 data->autinc_to = 1;
2474 data->explicit_inc_to = -1;
2477 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2478 && ! data->autinc_to)
2480 data->to_addr = copy_addr_to_reg (to_addr);
2481 data->autinc_to = 1;
2482 data->explicit_inc_to = 1;
2485 if (!data->autinc_to && CONSTANT_P (to_addr))
2486 data->to_addr = copy_addr_to_reg (to_addr);
2489 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2490 if (align >= GET_MODE_ALIGNMENT (tmode))
2491 align = GET_MODE_ALIGNMENT (tmode);
2492 else
2494 enum machine_mode xmode;
2496 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2497 tmode != VOIDmode;
2498 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2499 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2500 || SLOW_UNALIGNED_ACCESS (tmode, align))
2501 break;
2503 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2506 /* First store what we can in the largest integer mode, then go to
2507 successively smaller modes. */
2509 while (max_size > 1)
2511 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2512 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2513 if (GET_MODE_SIZE (tmode) < max_size)
2514 mode = tmode;
2516 if (mode == VOIDmode)
2517 break;
2519 icode = optab_handler (mov_optab, mode)->insn_code;
2520 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2521 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2523 max_size = GET_MODE_SIZE (mode);
2526 /* The code above should have handled everything. */
2527 gcc_assert (!data->len);
2530 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2531 with move instructions for mode MODE. GENFUN is the gen_... function
2532 to make a move insn for that mode. DATA has all the other info. */
2534 static void
2535 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2536 struct store_by_pieces *data)
2538 unsigned int size = GET_MODE_SIZE (mode);
2539 rtx to1, cst;
2541 while (data->len >= size)
2543 if (data->reverse)
2544 data->offset -= size;
2546 if (data->autinc_to)
2547 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2548 data->offset);
2549 else
2550 to1 = adjust_address (data->to, mode, data->offset);
2552 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2553 emit_insn (gen_add2_insn (data->to_addr,
2554 GEN_INT (-(HOST_WIDE_INT) size)));
2556 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2557 emit_insn ((*genfun) (to1, cst));
2559 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2560 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2562 if (! data->reverse)
2563 data->offset += size;
2565 data->len -= size;
2569 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2570 its length in bytes. */
2572 rtx
2573 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2574 unsigned int expected_align, HOST_WIDE_INT expected_size)
2576 enum machine_mode mode = GET_MODE (object);
2577 unsigned int align;
2579 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2581 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2582 just move a zero. Otherwise, do this a piece at a time. */
2583 if (mode != BLKmode
2584 && GET_CODE (size) == CONST_INT
2585 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2587 rtx zero = CONST0_RTX (mode);
2588 if (zero != NULL)
2590 emit_move_insn (object, zero);
2591 return NULL;
2594 if (COMPLEX_MODE_P (mode))
2596 zero = CONST0_RTX (GET_MODE_INNER (mode));
2597 if (zero != NULL)
2599 write_complex_part (object, zero, 0);
2600 write_complex_part (object, zero, 1);
2601 return NULL;
2606 if (size == const0_rtx)
2607 return NULL;
2609 align = MEM_ALIGN (object);
2611 if (GET_CODE (size) == CONST_INT
2612 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2613 clear_by_pieces (object, INTVAL (size), align);
2614 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2615 expected_align, expected_size))
2617 else
2618 return set_storage_via_libcall (object, size, const0_rtx,
2619 method == BLOCK_OP_TAILCALL);
2621 return NULL;
2624 rtx
2625 clear_storage (rtx object, rtx size, enum block_op_methods method)
2627 return clear_storage_hints (object, size, method, 0, -1);
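/* Illustrative example, for exposition only (DEST is hypothetical):

     clear_storage (dest, GEN_INT (32), BLOCK_OP_NORMAL);

   zeros 32 bytes of the BLKmode MEM DEST, choosing among
   clear_by_pieces, a setmem pattern, or a memset libcall as the logic
   in clear_storage_hints decides.  */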
2631 /* A subroutine of clear_storage. Expand a call to memset.
2632 Return the return value of memset, 0 otherwise. */
2634 rtx
2635 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2637 tree call_expr, fn, object_tree, size_tree, val_tree;
2638 enum machine_mode size_mode;
2639 rtx retval;
2641 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2642 place those pseudos into a VAR_DECL and use them later. */
2644 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2646 size_mode = TYPE_MODE (sizetype);
2647 size = convert_to_mode (size_mode, size, 1);
2648 size = copy_to_mode_reg (size_mode, size);
2650 /* It is incorrect to use the libcall calling conventions to call
2651 memset in this context. This could be a user call to memset and
2652 the user may wish to examine the return value from memset. For
2653 targets where libcalls and normal calls have different conventions
2654 for returning pointers, we could end up generating incorrect code. */
2656 object_tree = make_tree (ptr_type_node, object);
2657 if (GET_CODE (val) != CONST_INT)
2658 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2659 size_tree = make_tree (sizetype, size);
2660 val_tree = make_tree (integer_type_node, val);
2662 fn = clear_storage_libcall_fn (true);
2663 call_expr = build_call_expr (fn, 3,
2664 object_tree, val_tree, size_tree);
2665 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2667 retval = expand_normal (call_expr);
2669 return retval;
2672 /* A subroutine of set_storage_via_libcall. Create the tree node
2673 for the function we use for block clears. The first time FOR_CALL
2674 is true, we call assemble_external. */
2676 static GTY(()) tree block_clear_fn;
2678 void
2679 init_block_clear_fn (const char *asmspec)
2681 if (!block_clear_fn)
2683 tree fn, args;
2685 fn = get_identifier ("memset");
2686 args = build_function_type_list (ptr_type_node, ptr_type_node,
2687 integer_type_node, sizetype,
2688 NULL_TREE);
2690 fn = build_decl (FUNCTION_DECL, fn, args);
2691 DECL_EXTERNAL (fn) = 1;
2692 TREE_PUBLIC (fn) = 1;
2693 DECL_ARTIFICIAL (fn) = 1;
2694 TREE_NOTHROW (fn) = 1;
2695 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2696 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2698 block_clear_fn = fn;
2701 if (asmspec)
2702 set_user_assembler_name (block_clear_fn, asmspec);
2705 static tree
2706 clear_storage_libcall_fn (int for_call)
2708 static bool emitted_extern;
2710 if (!block_clear_fn)
2711 init_block_clear_fn (NULL);
2713 if (for_call && !emitted_extern)
2715 emitted_extern = true;
2716 make_decl_rtl (block_clear_fn);
2717 assemble_external (block_clear_fn);
2720 return block_clear_fn;
2723 /* Expand a setmem pattern; return true if successful. */
2725 bool
2726 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2727 unsigned int expected_align, HOST_WIDE_INT expected_size)
2729 /* Try the most limited insn first, because there's no point
2730 including more than one in the machine description unless
2731 the more limited one has some advantage. */
2733 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2734 enum machine_mode mode;
2736 if (expected_align < align)
2737 expected_align = align;
2739 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2740 mode = GET_MODE_WIDER_MODE (mode))
2742 enum insn_code code = setmem_optab[(int) mode];
2743 insn_operand_predicate_fn pred;
2745 if (code != CODE_FOR_nothing
2746 /* We don't need MODE to be narrower than
2747 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2748 the mode mask, as it is returned by the macro, it will
2749 definitely be less than the actual mode mask. */
2750 && ((GET_CODE (size) == CONST_INT
2751 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2752 <= (GET_MODE_MASK (mode) >> 1)))
2753 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2754 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2755 || (*pred) (object, BLKmode))
2756 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2757 || (*pred) (opalign, VOIDmode)))
2759 rtx opsize, opchar;
2760 enum machine_mode char_mode;
2761 rtx last = get_last_insn ();
2762 rtx pat;
2764 opsize = convert_to_mode (mode, size, 1);
2765 pred = insn_data[(int) code].operand[1].predicate;
2766 if (pred != 0 && ! (*pred) (opsize, mode))
2767 opsize = copy_to_mode_reg (mode, opsize);
2769 opchar = val;
2770 char_mode = insn_data[(int) code].operand[2].mode;
2771 if (char_mode != VOIDmode)
2773 opchar = convert_to_mode (char_mode, opchar, 1);
2774 pred = insn_data[(int) code].operand[2].predicate;
2775 if (pred != 0 && ! (*pred) (opchar, char_mode))
2776 opchar = copy_to_mode_reg (char_mode, opchar);
2779 if (insn_data[(int) code].n_operands == 4)
2780 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2781 else
2782 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2783 GEN_INT (expected_align),
2784 GEN_INT (expected_size));
2785 if (pat)
2787 emit_insn (pat);
2788 return true;
2790 else
2791 delete_insns_since (last);
2795 return false;
2799 /* Write to one of the components of the complex value CPLX. Write VAL to
2800 the real part if IMAG_P is false, and the imaginary part if it's true. */
2802 static void
2803 write_complex_part (rtx cplx, rtx val, bool imag_p)
2805 enum machine_mode cmode;
2806 enum machine_mode imode;
2807 unsigned ibitsize;
2809 if (GET_CODE (cplx) == CONCAT)
2811 emit_move_insn (XEXP (cplx, imag_p), val);
2812 return;
2815 cmode = GET_MODE (cplx);
2816 imode = GET_MODE_INNER (cmode);
2817 ibitsize = GET_MODE_BITSIZE (imode);
2819 /* For MEMs simplify_gen_subreg may generate an invalid new address
2820 because, e.g., the original address is considered mode-dependent
2821 by the target, which restricts simplify_subreg from invoking
2822 adjust_address_nv. Instead of preparing fallback support for an
2823 invalid address, we call adjust_address_nv directly. */
2824 if (MEM_P (cplx))
2826 emit_move_insn (adjust_address_nv (cplx, imode,
2827 imag_p ? GET_MODE_SIZE (imode) : 0),
2828 val);
2829 return;
2832 /* If the sub-object is at least word sized, then we know that subregging
2833 will work. This special case is important, since store_bit_field
2834 wants to operate on integer modes, and there's rarely an OImode to
2835 correspond to TCmode. */
2836 if (ibitsize >= BITS_PER_WORD
2837 /* For hard regs we have exact predicates. Assume we can split
2838 the original object if it spans an even number of hard regs.
2839 This special case is important for SCmode on 64-bit platforms
2840 where the natural size of floating-point regs is 32-bit. */
2841 || (REG_P (cplx)
2842 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2843 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2845 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2846 imag_p ? GET_MODE_SIZE (imode) : 0);
2847 if (part)
2849 emit_move_insn (part, val);
2850 return;
2852 else
2853 /* simplify_gen_subreg may fail for sub-word MEMs. */
2854 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2857 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
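/* Note, for exposition only: complex pseudos are normally created by
   gen_reg_rtx as a CONCAT of two pseudos, e.g. (concat:SC (reg:SF x)
   (reg:SF y)) with hypothetical registers, so the common case above
   degenerates to a plain emit_move_insn into XEXP (cplx, 1) for the
   imaginary part; the bit-field path is only a fallback.  */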
2860 /* Extract one of the components of the complex value CPLX. Extract the
2861 real part if IMAG_P is false, and the imaginary part if it's true. */
2863 static rtx
2864 read_complex_part (rtx cplx, bool imag_p)
2866 enum machine_mode cmode, imode;
2867 unsigned ibitsize;
2869 if (GET_CODE (cplx) == CONCAT)
2870 return XEXP (cplx, imag_p);
2872 cmode = GET_MODE (cplx);
2873 imode = GET_MODE_INNER (cmode);
2874 ibitsize = GET_MODE_BITSIZE (imode);
2876 /* Special case reads from complex constants that got spilled to memory. */
2877 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2879 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2880 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2882 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2883 if (CONSTANT_CLASS_P (part))
2884 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2888 /* For MEMs simplify_gen_subreg may generate an invalid new address
2889 because, e.g., the original address is considered mode-dependent
2890 by the target, which restricts simplify_subreg from invoking
2891 adjust_address_nv. Instead of preparing fallback support for an
2892 invalid address, we call adjust_address_nv directly. */
2893 if (MEM_P (cplx))
2894 return adjust_address_nv (cplx, imode,
2895 imag_p ? GET_MODE_SIZE (imode) : 0);
2897 /* If the sub-object is at least word sized, then we know that subregging
2898 will work. This special case is important, since extract_bit_field
2899 wants to operate on integer modes, and there's rarely an OImode to
2900 correspond to TCmode. */
2901 if (ibitsize >= BITS_PER_WORD
2902 /* For hard regs we have exact predicates. Assume we can split
2903 the original object if it spans an even number of hard regs.
2904 This special case is important for SCmode on 64-bit platforms
2905 where the natural size of floating-point regs is 32-bit. */
2906 || (REG_P (cplx)
2907 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2908 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2910 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2911 imag_p ? GET_MODE_SIZE (imode) : 0);
2912 if (ret)
2913 return ret;
2914 else
2915 /* simplify_gen_subreg may fail for sub-word MEMs. */
2916 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2919 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2920 true, NULL_RTX, imode, imode);
2923 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2924 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2925 represented in NEW_MODE. If FORCE is true, this will never happen, as
2926 we'll force-create a SUBREG if needed. */
2928 static rtx
2929 emit_move_change_mode (enum machine_mode new_mode,
2930 enum machine_mode old_mode, rtx x, bool force)
2932 rtx ret;
2934 if (push_operand (x, GET_MODE (x)))
2936 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2937 MEM_COPY_ATTRIBUTES (ret, x);
2939 else if (MEM_P (x))
2941 /* We don't have to worry about changing the address since the
2942 size in bytes is supposed to be the same. */
2943 if (reload_in_progress)
2945 /* Copy the MEM to change the mode and move any
2946 substitutions from the old MEM to the new one. */
2947 ret = adjust_address_nv (x, new_mode, 0);
2948 copy_replacements (x, ret);
2950 else
2951 ret = adjust_address (x, new_mode, 0);
2953 else
2955 /* Note that we do want simplify_subreg's behavior of validating
2956 that the new mode is ok for a hard register. If we were to use
2957 simplify_gen_subreg, we would create the subreg, but would
2958 probably run into the target not being able to implement it. */
2959 /* Except, of course, when FORCE is true, when this is exactly what
2960 we want. Which is needed for CCmodes on some targets. */
2961 if (force)
2962 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2963 else
2964 ret = simplify_subreg (new_mode, x, old_mode, 0);
2967 return ret;
2970 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2971 an integer mode of the same size as MODE. Returns the instruction
2972 emitted, or NULL if such a move could not be generated. */
2974 static rtx
2975 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2977 enum machine_mode imode;
2978 enum insn_code code;
2980 /* There must exist a mode of the exact size we require. */
2981 imode = int_mode_for_mode (mode);
2982 if (imode == BLKmode)
2983 return NULL_RTX;
2985 /* The target must support moves in this mode. */
2986 code = optab_handler (mov_optab, imode)->insn_code;
2987 if (code == CODE_FOR_nothing)
2988 return NULL_RTX;
2990 x = emit_move_change_mode (imode, mode, x, force);
2991 if (x == NULL_RTX)
2992 return NULL_RTX;
2993 y = emit_move_change_mode (imode, mode, y, force);
2994 if (y == NULL_RTX)
2995 return NULL_RTX;
2996 return emit_insn (GEN_FCN (code) (x, y));
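/* Illustrative example, for exposition only: on a target that has a
   movsi pattern but no movsf pattern, an SFmode move reaches here and
   is performed as an SImode move of the same four bytes, after both
   operands have been re-expressed in SImode by emit_move_change_mode.  */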
2999 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3000 Return an equivalent MEM that does not use an auto-increment. */
3002 static rtx
3003 emit_move_resolve_push (enum machine_mode mode, rtx x)
3005 enum rtx_code code = GET_CODE (XEXP (x, 0));
3006 HOST_WIDE_INT adjust;
3007 rtx temp;
3009 adjust = GET_MODE_SIZE (mode);
3010 #ifdef PUSH_ROUNDING
3011 adjust = PUSH_ROUNDING (adjust);
3012 #endif
3013 if (code == PRE_DEC || code == POST_DEC)
3014 adjust = -adjust;
3015 else if (code == PRE_MODIFY || code == POST_MODIFY)
3017 rtx expr = XEXP (XEXP (x, 0), 1);
3018 HOST_WIDE_INT val;
3020 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3021 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
3022 val = INTVAL (XEXP (expr, 1));
3023 if (GET_CODE (expr) == MINUS)
3024 val = -val;
3025 gcc_assert (adjust == val || adjust == -val);
3026 adjust = val;
3029 /* Do not use anti_adjust_stack, since we don't want to update
3030 stack_pointer_delta. */
3031 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3032 GEN_INT (adjust), stack_pointer_rtx,
3033 0, OPTAB_LIB_WIDEN);
3034 if (temp != stack_pointer_rtx)
3035 emit_move_insn (stack_pointer_rtx, temp);
3037 switch (code)
3039 case PRE_INC:
3040 case PRE_DEC:
3041 case PRE_MODIFY:
3042 temp = stack_pointer_rtx;
3043 break;
3044 case POST_INC:
3045 case POST_DEC:
3046 case POST_MODIFY:
3047 temp = plus_constant (stack_pointer_rtx, -adjust);
3048 break;
3049 default:
3050 gcc_unreachable ();
3053 return replace_equiv_address (x, temp);
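/* Worked example, for exposition only: for a DFmode push expressed as
   (mem:DF (pre_dec (reg sp))) on a 32-bit target where PUSH_ROUNDING
   leaves 8 unchanged, ADJUST becomes -8, so the code above emits
   sp = sp - 8 and returns (mem:DF (reg sp)), an equivalent MEM with
   no auto-increment side effect.  */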
3056 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3057 X is known to satisfy push_operand, and MODE is known to be complex.
3058 Returns the last instruction emitted. */
3060 rtx
3061 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3063 enum machine_mode submode = GET_MODE_INNER (mode);
3064 bool imag_first;
3066 #ifdef PUSH_ROUNDING
3067 unsigned int submodesize = GET_MODE_SIZE (submode);
3069 /* In case we output to the stack, but the size is smaller than the
3070 machine can push exactly, we need to use move instructions. */
3071 if (PUSH_ROUNDING (submodesize) != submodesize)
3073 x = emit_move_resolve_push (mode, x);
3074 return emit_move_insn (x, y);
3076 #endif
3078 /* Note that the real part always precedes the imag part in memory
3079 regardless of the machine's endianness. */
3080 switch (GET_CODE (XEXP (x, 0)))
3082 case PRE_DEC:
3083 case POST_DEC:
3084 imag_first = true;
3085 break;
3086 case PRE_INC:
3087 case POST_INC:
3088 imag_first = false;
3089 break;
3090 default:
3091 gcc_unreachable ();
3094 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3095 read_complex_part (y, imag_first));
3096 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3097 read_complex_part (y, !imag_first));
3100 /* A subroutine of emit_move_complex. Perform the move from Y to X
3101 via two moves of the parts. Returns the last instruction emitted. */
3103 rtx
3104 emit_move_complex_parts (rtx x, rtx y)
3106 /* Show the output dies here. This is necessary for SUBREGs
3107 of pseudos since we cannot track their lifetimes correctly;
3108 hard regs shouldn't appear here except as return values. */
3109 if (!reload_completed && !reload_in_progress
3110 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3111 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3113 write_complex_part (x, read_complex_part (y, false), false);
3114 write_complex_part (x, read_complex_part (y, true), true);
3116 return get_last_insn ();
3119 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3120 MODE is known to be complex. Returns the last instruction emitted. */
3122 static rtx
3123 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3125 bool try_int;
3127 /* Need to take special care for pushes, to maintain proper ordering
3128 of the data, and possibly extra padding. */
3129 if (push_operand (x, mode))
3130 return emit_move_complex_push (mode, x, y);
3132 /* See if we can coerce the target into moving both values at once. */
3134 /* Move floating point as parts. */
3135 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3136 && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
3137 try_int = false;
3138 /* Not possible if the values are inherently not adjacent. */
3139 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3140 try_int = false;
3141 /* Is possible if both are registers (or subregs of registers). */
3142 else if (register_operand (x, mode) && register_operand (y, mode))
3143 try_int = true;
3144 /* If one of the operands is a memory, and alignment constraints
3145 are friendly enough, we may be able to do combined memory operations.
3146 We do not attempt this if Y is a constant because that combination is
3147 usually better with the by-parts thing below. */
3148 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3149 && (!STRICT_ALIGNMENT
3150 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3151 try_int = true;
3152 else
3153 try_int = false;
3155 if (try_int)
3157 rtx ret;
3159 /* For memory to memory moves, optimal behavior can be had with the
3160 existing block move logic. */
3161 if (MEM_P (x) && MEM_P (y))
3163 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3164 BLOCK_OP_NO_LIBCALL);
3165 return get_last_insn ();
3168 ret = emit_move_via_integer (mode, x, y, true);
3169 if (ret)
3170 return ret;
3173 return emit_move_complex_parts (x, y);
3176 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3177 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3179 static rtx
3180 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3182 rtx ret;
3184 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3185 if (mode != CCmode)
3187 enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3188 if (code != CODE_FOR_nothing)
3190 x = emit_move_change_mode (CCmode, mode, x, true);
3191 y = emit_move_change_mode (CCmode, mode, y, true);
3192 return emit_insn (GEN_FCN (code) (x, y));
3196 /* Otherwise, find the MODE_INT mode of the same width. */
3197 ret = emit_move_via_integer (mode, x, y, false);
3198 gcc_assert (ret != NULL);
3199 return ret;
3202 /* Return true if word I of OP lies entirely in the
3203 undefined bits of a paradoxical subreg. */
3205 static bool
3206 undefined_operand_subword_p (const_rtx op, int i)
3208 enum machine_mode innermode, innermostmode;
3209 int offset;
3210 if (GET_CODE (op) != SUBREG)
3211 return false;
3212 innermode = GET_MODE (op);
3213 innermostmode = GET_MODE (SUBREG_REG (op));
3214 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3215 /* The SUBREG_BYTE represents offset, as if the value were stored in
3216 memory, except for a paradoxical subreg where we define
3217 SUBREG_BYTE to be 0; undo this exception as in
3218 simplify_subreg. */
3219 if (SUBREG_BYTE (op) == 0
3220 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3222 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3223 if (WORDS_BIG_ENDIAN)
3224 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3225 if (BYTES_BIG_ENDIAN)
3226 offset += difference % UNITS_PER_WORD;
3228 if (offset >= GET_MODE_SIZE (innermostmode)
3229 || offset <= -GET_MODE_SIZE (word_mode))
3230 return true;
3231 return false;
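/* Worked example, for exposition only: for the paradoxical subreg
   (subreg:TI (reg:DI d) 0) on a 64-bit little-endian target, word 1
   lies entirely outside the DImode source (offset 8 >= 8), so this
   returns true for I == 1 and emit_move_multi_word below can skip
   emitting a move for that word.  */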
3234 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3235 MODE is any multi-word or full-word mode that lacks a move_insn
3236 pattern. Note that you will get better code if you define such
3237 patterns, even if they must turn into multiple assembler instructions. */
3239 static rtx
3240 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3242 rtx last_insn = 0;
3243 rtx seq, inner;
3244 bool need_clobber;
3245 int i;
3247 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3249 /* If X is a push on the stack, do the push now and replace
3250 X with a reference to the stack pointer. */
3251 if (push_operand (x, mode))
3252 x = emit_move_resolve_push (mode, x);
3254 /* If we are in reload, see if either operand is a MEM whose address
3255 is scheduled for replacement. */
3256 if (reload_in_progress && MEM_P (x)
3257 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3258 x = replace_equiv_address_nv (x, inner);
3259 if (reload_in_progress && MEM_P (y)
3260 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3261 y = replace_equiv_address_nv (y, inner);
3263 start_sequence ();
3265 need_clobber = false;
3266 for (i = 0;
3267 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3268 i++)
3270 rtx xpart = operand_subword (x, i, 1, mode);
3271 rtx ypart;
3273 /* Do not generate code for a move if it would come entirely
3274 from the undefined bits of a paradoxical subreg. */
3275 if (undefined_operand_subword_p (y, i))
3276 continue;
3278 ypart = operand_subword (y, i, 1, mode);
3280 /* If we can't get a part of Y, put Y into memory if it is a
3281 constant. Otherwise, force it into a register. Then we must
3282 be able to get a part of Y. */
3283 if (ypart == 0 && CONSTANT_P (y))
3285 y = use_anchored_address (force_const_mem (mode, y));
3286 ypart = operand_subword (y, i, 1, mode);
3288 else if (ypart == 0)
3289 ypart = operand_subword_force (y, i, mode);
3291 gcc_assert (xpart && ypart);
3293 need_clobber |= (GET_CODE (xpart) == SUBREG);
3295 last_insn = emit_move_insn (xpart, ypart);
3298 seq = get_insns ();
3299 end_sequence ();
3301 /* Show the output dies here. This is necessary for SUBREGs
3302 of pseudos since we cannot track their lifetimes correctly;
3303 hard regs shouldn't appear here except as return values.
3304 We never want to emit such a clobber after reload. */
3305 if (x != y
3306 && ! (reload_in_progress || reload_completed)
3307 && need_clobber != 0)
3308 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3310 emit_insn (seq);
3312 return last_insn;
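/* Illustrative example, for exposition only: on a 32-bit target with
   no movdi pattern, a DImode move lands here and is split into two
   SImode moves via operand_subword, preceded by a CLOBBER of the
   destination when some word of it is a SUBREG of a pseudo.  */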
3315 /* Low level part of emit_move_insn.
3316 Called just like emit_move_insn, but assumes X and Y
3317 are basically valid. */
3319 rtx
3320 emit_move_insn_1 (rtx x, rtx y)
3322 enum machine_mode mode = GET_MODE (x);
3323 enum insn_code code;
3325 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3327 code = optab_handler (mov_optab, mode)->insn_code;
3328 if (code != CODE_FOR_nothing)
3329 return emit_insn (GEN_FCN (code) (x, y));
3331 /* Expand complex moves by moving real part and imag part. */
3332 if (COMPLEX_MODE_P (mode))
3333 return emit_move_complex (mode, x, y);
3335 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3336 || ALL_FIXED_POINT_MODE_P (mode))
3338 rtx result = emit_move_via_integer (mode, x, y, true);
3340 /* If we can't find an integer mode, use multi words. */
3341 if (result)
3342 return result;
3343 else
3344 return emit_move_multi_word (mode, x, y);
3347 if (GET_MODE_CLASS (mode) == MODE_CC)
3348 return emit_move_ccmode (mode, x, y);
3350 /* Try using a move pattern for the corresponding integer mode. This is
3351 only safe when simplify_subreg can convert MODE constants into integer
3352 constants. At present, it can only do this reliably if the value
3353 fits within a HOST_WIDE_INT. */
3354 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3356 rtx ret = emit_move_via_integer (mode, x, y, false);
3357 if (ret)
3358 return ret;
3361 return emit_move_multi_word (mode, x, y);
3364 /* Generate code to copy Y into X.
3365 Both Y and X must have the same mode, except that
3366 Y can be a constant with VOIDmode.
3367 This mode cannot be BLKmode; use emit_block_move for that.
3369 Return the last instruction emitted. */
3371 rtx
3372 emit_move_insn (rtx x, rtx y)
3374 enum machine_mode mode = GET_MODE (x);
3375 rtx y_cst = NULL_RTX;
3376 rtx last_insn, set;
3378 gcc_assert (mode != BLKmode
3379 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3381 if (CONSTANT_P (y))
3383 if (optimize
3384 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3385 && (last_insn = compress_float_constant (x, y)))
3386 return last_insn;
3388 y_cst = y;
3390 if (!LEGITIMATE_CONSTANT_P (y))
3392 y = force_const_mem (mode, y);
3394 /* If the target's cannot_force_const_mem prevented the spill,
3395 assume that the target's move expanders will also take care
3396 of the non-legitimate constant. */
3397 if (!y)
3398 y = y_cst;
3399 else
3400 y = use_anchored_address (y);
3404 /* If X or Y are memory references, verify that their addresses are valid
3405 for the machine. */
3406 if (MEM_P (x)
3407 && (! memory_address_p (GET_MODE (x), XEXP (x, 0))
3408 && ! push_operand (x, GET_MODE (x))))
3409 x = validize_mem (x);
3411 if (MEM_P (y)
3412 && ! memory_address_p (GET_MODE (y), XEXP (y, 0)))
3413 y = validize_mem (y);
3415 gcc_assert (mode != BLKmode);
3417 last_insn = emit_move_insn_1 (x, y);
3419 if (y_cst && REG_P (x)
3420 && (set = single_set (last_insn)) != NULL_RTX
3421 && SET_DEST (set) == x
3422 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3423 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3425 return last_insn;
3428 /* If Y is representable exactly in a narrower mode, and the target can
3429 perform the extension directly from constant or memory, then emit the
3430 move as an extension. */
3432 static rtx
3433 compress_float_constant (rtx x, rtx y)
3435 enum machine_mode dstmode = GET_MODE (x);
3436 enum machine_mode orig_srcmode = GET_MODE (y);
3437 enum machine_mode srcmode;
3438 REAL_VALUE_TYPE r;
3439 int oldcost, newcost;
3441 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3443 if (LEGITIMATE_CONSTANT_P (y))
3444 oldcost = rtx_cost (y, SET);
3445 else
3446 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3448 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3449 srcmode != orig_srcmode;
3450 srcmode = GET_MODE_WIDER_MODE (srcmode))
3452 enum insn_code ic;
3453 rtx trunc_y, last_insn;
3455 /* Skip if the target can't extend this way. */
3456 ic = can_extend_p (dstmode, srcmode, 0);
3457 if (ic == CODE_FOR_nothing)
3458 continue;
3460 /* Skip if the narrowed value isn't exact. */
3461 if (! exact_real_truncate (srcmode, &r))
3462 continue;
3464 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3466 if (LEGITIMATE_CONSTANT_P (trunc_y))
3468 /* Skip if the target needs extra instructions to perform
3469 the extension. */
3470 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3471 continue;
3472 /* This is valid, but may not be cheaper than the original. */
3473 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3474 if (oldcost < newcost)
3475 continue;
3477 else if (float_extend_from_mem[dstmode][srcmode])
3479 trunc_y = force_const_mem (srcmode, trunc_y);
3480 /* This is valid, but may not be cheaper than the original. */
3481 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3482 if (oldcost < newcost)
3483 continue;
3484 trunc_y = validize_mem (trunc_y);
3486 else
3487 continue;
3489 /* For CSE's benefit, force the compressed constant pool entry
3490 into a new pseudo. This constant may be used in different modes,
3491 and if not, combine will put things back together for us. */
3492 trunc_y = force_reg (srcmode, trunc_y);
3493 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3494 last_insn = get_last_insn ();
3496 if (REG_P (x))
3497 set_unique_reg_note (last_insn, REG_EQUAL, y);
3499 return last_insn;
3502 return NULL_RTX;
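/* Worked example, for exposition only: moving the DFmode constant 1.0
   can be compressed because 1.0 truncates exactly to SFmode; if the
   target provides extendsfdf2 and the narrower constant passes the
   cost comparison above, the move is emitted as a float extension of
   the SFmode constant instead of a DFmode constant-pool load.  */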
3505 /* Pushing data onto the stack. */
3507 /* Push a block of length SIZE (perhaps variable)
3508 and return an rtx to address the beginning of the block.
3509 The value may be virtual_outgoing_args_rtx.
3511 EXTRA is the number of bytes of padding to push in addition to SIZE.
3512 BELOW nonzero means this padding comes at low addresses;
3513 otherwise, the padding comes at high addresses. */
3515 rtx
3516 push_block (rtx size, int extra, int below)
3518 rtx temp;
3520 size = convert_modes (Pmode, ptr_mode, size, 1);
3521 if (CONSTANT_P (size))
3522 anti_adjust_stack (plus_constant (size, extra));
3523 else if (REG_P (size) && extra == 0)
3524 anti_adjust_stack (size);
3525 else
3527 temp = copy_to_mode_reg (Pmode, size);
3528 if (extra != 0)
3529 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3530 temp, 0, OPTAB_LIB_WIDEN);
3531 anti_adjust_stack (temp);
3534 #ifndef STACK_GROWS_DOWNWARD
3535 if (0)
3536 #else
3537 if (1)
3538 #endif
3540 temp = virtual_outgoing_args_rtx;
3541 if (extra != 0 && below)
3542 temp = plus_constant (temp, extra);
3544 else
3546 if (GET_CODE (size) == CONST_INT)
3547 temp = plus_constant (virtual_outgoing_args_rtx,
3548 -INTVAL (size) - (below ? 0 : extra));
3549 else if (extra != 0 && !below)
3550 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3551 negate_rtx (Pmode, plus_constant (size, extra)));
3552 else
3553 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3554 negate_rtx (Pmode, size));
3557 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3560 #ifdef PUSH_ROUNDING
3562 /* Emit single push insn. */
3564 static void
3565 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3567 rtx dest_addr;
3568 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3569 rtx dest;
3570 enum insn_code icode;
3571 insn_operand_predicate_fn pred;
3573 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3574 /* If there is a push pattern, use it. Otherwise try the old way of
3575 throwing a MEM representing the push operation to the move expander. */
3576 icode = optab_handler (push_optab, mode)->insn_code;
3577 if (icode != CODE_FOR_nothing)
3579 if (((pred = insn_data[(int) icode].operand[0].predicate)
3580 && !((*pred) (x, mode))))
3581 x = force_reg (mode, x);
3582 emit_insn (GEN_FCN (icode) (x));
3583 return;
3585 if (GET_MODE_SIZE (mode) == rounded_size)
3586 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3587 /* If we are to pad downward, adjust the stack pointer first and
3588 then store X into the stack location using an offset. This is
3589 because emit_move_insn does not know how to pad; it does not have
3590 access to type. */
3591 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3593 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3594 HOST_WIDE_INT offset;
3596 emit_move_insn (stack_pointer_rtx,
3597 expand_binop (Pmode,
3598 #ifdef STACK_GROWS_DOWNWARD
3599 sub_optab,
3600 #else
3601 add_optab,
3602 #endif
3603 stack_pointer_rtx,
3604 GEN_INT (rounded_size),
3605 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3607 offset = (HOST_WIDE_INT) padding_size;
3608 #ifdef STACK_GROWS_DOWNWARD
3609 if (STACK_PUSH_CODE == POST_DEC)
3610 /* We have already decremented the stack pointer, so get the
3611 previous value. */
3612 offset += (HOST_WIDE_INT) rounded_size;
3613 #else
3614 if (STACK_PUSH_CODE == POST_INC)
3615 /* We have already incremented the stack pointer, so get the
3616 previous value. */
3617 offset -= (HOST_WIDE_INT) rounded_size;
3618 #endif
3619 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3621 else
3623 #ifdef STACK_GROWS_DOWNWARD
3624 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3625 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3626 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3627 #else
3628 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3629 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3630 GEN_INT (rounded_size));
3631 #endif
3632 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3635 dest = gen_rtx_MEM (mode, dest_addr);
3637 if (type != 0)
3639 set_mem_attributes (dest, type, 1);
3641 if (flag_optimize_sibling_calls)
3642 /* Function incoming arguments may overlap with sibling call
3643 outgoing arguments and we cannot allow reordering of reads
3644 from function arguments with stores to outgoing arguments
3645 of sibling calls. */
3646 set_mem_alias_set (dest, 0);
3648 emit_move_insn (dest, x);
3650 #endif
3652 /* Generate code to push X onto the stack, assuming it has mode MODE and
3653 type TYPE.
3654 MODE is redundant except when X is a CONST_INT (since they don't
3655 carry mode info).
3656 SIZE is an rtx for the size of data to be copied (in bytes),
3657 needed only if X is BLKmode.
3659 ALIGN (in bits) is maximum alignment we can assume.
3661 If PARTIAL and REG are both nonzero, then copy that many of the first
3662 bytes of X into registers starting with REG, and push the rest of X.
3663 The amount of space pushed is decreased by PARTIAL bytes.
3664 REG must be a hard register in this case.
3665 If REG is zero but PARTIAL is not, take all other actions for an
3666 argument partially in registers, but do not actually load any
3667 registers.
3669 EXTRA is the amount in bytes of extra space to leave next to this arg.
3670 This is ignored if an argument block has already been allocated.
3672 On a machine that lacks real push insns, ARGS_ADDR is the address of
3673 the bottom of the argument block for this call. We use indexing off there
3674 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3675 argument block has not been preallocated.
3677 ARGS_SO_FAR is the size of args previously pushed for this call.
3679 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3680 for arguments passed in registers. If nonzero, it will be the number
3681 of bytes required. */
3683 void
3684 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3685 unsigned int align, int partial, rtx reg, int extra,
3686 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3687 rtx alignment_pad)
3689 rtx xinner;
3690 enum direction stack_direction
3691 #ifdef STACK_GROWS_DOWNWARD
3692 = downward;
3693 #else
3694 = upward;
3695 #endif
3697 /* Decide where to pad the argument: `downward' for below,
3698 `upward' for above, or `none' for don't pad it.
3699 Default is below for small data on big-endian machines; else above. */
3700 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3702 /* Invert direction if stack is post-decrement.
3703 FIXME: why? */
3704 if (STACK_PUSH_CODE == POST_DEC)
3705 if (where_pad != none)
3706 where_pad = (where_pad == downward ? upward : downward);
3708 xinner = x;
3710 if (mode == BLKmode
3711 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3713 /* Copy a block into the stack, entirely or partially. */
3715 rtx temp;
3716 int used;
3717 int offset;
3718 int skip;
3720 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3721 used = partial - offset;
3723 if (mode != BLKmode)
3725 /* A value is to be stored in an insufficiently aligned
3726 stack slot; copy via a suitably aligned slot if
3727 necessary. */
3728 size = GEN_INT (GET_MODE_SIZE (mode));
3729 if (!MEM_P (xinner))
3731 temp = assign_temp (type, 0, 1, 1);
3732 emit_move_insn (temp, xinner);
3733 xinner = temp;
3737 gcc_assert (size);
3739 /* USED is now the # of bytes we need not copy to the stack
3740 because registers will take care of them. */
3742 if (partial != 0)
3743 xinner = adjust_address (xinner, BLKmode, used);
3745 /* If the partial register-part of the arg counts in its stack size,
3746 skip the part of stack space corresponding to the registers.
3747 Otherwise, start copying to the beginning of the stack space,
3748 by setting SKIP to 0. */
3749 skip = (reg_parm_stack_space == 0) ? 0 : used;
3751 #ifdef PUSH_ROUNDING
3752 /* Do it with several push insns if that doesn't take lots of insns
3753 and if there is no difficulty with push insns that skip bytes
3754 on the stack for alignment purposes. */
3755 if (args_addr == 0
3756 && PUSH_ARGS
3757 && GET_CODE (size) == CONST_INT
3758 && skip == 0
3759 && MEM_ALIGN (xinner) >= align
3760 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3761 /* Here we avoid the case of a structure whose weak alignment
3762 forces many pushes of small amounts of data,
3763 since such small pushes do rounding that causes trouble.
3764 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3765 || align >= BIGGEST_ALIGNMENT
3766 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3767 == (align / BITS_PER_UNIT)))
3768 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3770 /* Push padding now if padding above and stack grows down,
3771 or if padding below and stack grows up.
3772 But if space already allocated, this has already been done. */
3773 if (extra && args_addr == 0
3774 && where_pad != none && where_pad != stack_direction)
3775 anti_adjust_stack (GEN_INT (extra));
3777 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3779 else
3780 #endif /* PUSH_ROUNDING */
3782 rtx target;
3784 /* Otherwise make space on the stack and copy the data
3785 to the address of that space. */
3787 /* Deduct words put into registers from the size we must copy. */
3788 if (partial != 0)
3790 if (GET_CODE (size) == CONST_INT)
3791 size = GEN_INT (INTVAL (size) - used);
3792 else
3793 size = expand_binop (GET_MODE (size), sub_optab, size,
3794 GEN_INT (used), NULL_RTX, 0,
3795 OPTAB_LIB_WIDEN);
3798 /* Get the address of the stack space.
3799 In this case, we do not deal with EXTRA separately.
3800 A single stack adjust will do. */
3801 if (! args_addr)
3803 temp = push_block (size, extra, where_pad == downward);
3804 extra = 0;
3806 else if (GET_CODE (args_so_far) == CONST_INT)
3807 temp = memory_address (BLKmode,
3808 plus_constant (args_addr,
3809 skip + INTVAL (args_so_far)));
3810 else
3811 temp = memory_address (BLKmode,
3812 plus_constant (gen_rtx_PLUS (Pmode,
3813 args_addr,
3814 args_so_far),
3815 skip));
3817 if (!ACCUMULATE_OUTGOING_ARGS)
3819 /* If the source is referenced relative to the stack pointer,
3820 copy it to another register to stabilize it. We do not need
3821 to do this if we know that we won't be changing sp. */
3823 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3824 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3825 temp = copy_to_reg (temp);
3828 target = gen_rtx_MEM (BLKmode, temp);
3830 /* We do *not* set_mem_attributes here, because incoming arguments
3831 may overlap with sibling call outgoing arguments and we cannot
3832 allow reordering of reads from function arguments with stores
3833 to outgoing arguments of sibling calls. We do, however, want
3834 to record the alignment of the stack slot. */
3835 /* ALIGN may well be better aligned than TYPE, e.g. due to
3836 PARM_BOUNDARY. Assume the caller isn't lying. */
3837 set_mem_align (target, align);
3839 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3842 else if (partial > 0)
3844 /* Scalar partly in registers. */
3846 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3847 int i;
3848 int not_stack;
3849 /* # bytes of start of argument
3850 that we must make space for but need not store. */
3851 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3852 int args_offset = INTVAL (args_so_far);
3853 int skip;
3855 /* Push padding now if padding above and stack grows down,
3856 or if padding below and stack grows up.
3857 But if space already allocated, this has already been done. */
3858 if (extra && args_addr == 0
3859 && where_pad != none && where_pad != stack_direction)
3860 anti_adjust_stack (GEN_INT (extra));
3862 /* If we make space by pushing it, we might as well push
3863 the real data. Otherwise, we can leave OFFSET nonzero
3864 and leave the space uninitialized. */
3865 if (args_addr == 0)
3866 offset = 0;
3868 /* Now NOT_STACK gets the number of words that we don't need to
3869 allocate on the stack. Convert OFFSET to words too. */
3870 not_stack = (partial - offset) / UNITS_PER_WORD;
3871 offset /= UNITS_PER_WORD;
3873 /* If the partial register-part of the arg counts in its stack size,
3874 skip the part of stack space corresponding to the registers.
3875 Otherwise, start copying to the beginning of the stack space,
3876 by setting SKIP to 0. */
3877 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3879 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3880 x = validize_mem (force_const_mem (mode, x));
3882 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3883 SUBREGs of such registers are not allowed. */
3884 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3885 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3886 x = copy_to_reg (x);
3888 /* Loop over all the words allocated on the stack for this arg. */
3889 /* We can do it by words, because any scalar bigger than a word
3890 has a size that is a multiple of a word. */
3891 #ifndef PUSH_ARGS_REVERSED
3892 for (i = not_stack; i < size; i++)
3893 #else
3894 for (i = size - 1; i >= not_stack; i--)
3895 #endif
3896 if (i >= not_stack + offset)
3897 emit_push_insn (operand_subword_force (x, i, mode),
3898 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3899 0, args_addr,
3900 GEN_INT (args_offset + ((i - not_stack + skip)
3901 * UNITS_PER_WORD)),
3902 reg_parm_stack_space, alignment_pad);
3904 else
3906 rtx addr;
3907 rtx dest;
3909 /* Push padding now if padding above and stack grows down,
3910 or if padding below and stack grows up.
3911 But if space already allocated, this has already been done. */
3912 if (extra && args_addr == 0
3913 && where_pad != none && where_pad != stack_direction)
3914 anti_adjust_stack (GEN_INT (extra));
3916 #ifdef PUSH_ROUNDING
3917 if (args_addr == 0 && PUSH_ARGS)
3918 emit_single_push_insn (mode, x, type);
3919 else
3920 #endif
3922 if (GET_CODE (args_so_far) == CONST_INT)
3923 addr
3924 = memory_address (mode,
3925 plus_constant (args_addr,
3926 INTVAL (args_so_far)));
3927 else
3928 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3929 args_so_far));
3930 dest = gen_rtx_MEM (mode, addr);
3932 /* We do *not* set_mem_attributes here, because incoming arguments
3933 may overlap with sibling call outgoing arguments and we cannot
3934 allow reordering of reads from function arguments with stores
3935 to outgoing arguments of sibling calls. We do, however, want
3936 to record the alignment of the stack slot. */
3937 /* ALIGN may well be better aligned than TYPE, e.g. due to
3938 PARM_BOUNDARY. Assume the caller isn't lying. */
3939 set_mem_align (dest, align);
3941 emit_move_insn (dest, x);
3945 /* If part should go in registers, copy that part
3946 into the appropriate registers. Do this now, at the end,
3947 since mem-to-mem copies above may do function calls. */
3948 if (partial > 0 && reg != 0)
3950 /* Handle calls that pass values in multiple non-contiguous locations.
3951 The Irix 6 ABI has examples of this. */
3952 if (GET_CODE (reg) == PARALLEL)
3953 emit_group_load (reg, x, type, -1);
3954 else
3956 gcc_assert (partial % UNITS_PER_WORD == 0);
3957 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3961 if (extra && args_addr == 0 && where_pad == stack_direction)
3962 anti_adjust_stack (GEN_INT (extra));
3964 if (alignment_pad && args_addr == 0)
3965 anti_adjust_stack (alignment_pad);
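/* Worked example of the "scalar partly in registers" case, assuming
   a 32-bit target (UNITS_PER_WORD == 4, PARM_BOUNDARY == 32): for a
   DImode argument with PARTIAL == 4 and REG a hard register, OFFSET
   is 0 and NOT_STACK is 1, so the word loop pushes only word 1 of X;
   word 0 is copied into REG by move_block_to_reg at the end, after
   any mem-to-mem copies are done.  */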
3968 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3969 operations. */
3971 static rtx
3972 get_subtarget (rtx x)
3974 return (optimize
3975 || x == 0
3976 /* Only registers can be subtargets. */
3977 || !REG_P (x)
3978 /* Don't use hard regs to avoid extending their life. */
3979 || REGNO (x) < FIRST_PSEUDO_REGISTER
3980 ? 0 : x);
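/* For instance, when not optimizing, get_subtarget of a pseudo
   register is that pseudo, so intermediate arithmetic can be computed
   straight into it; with optimization on, or for a hard register, a
   MEM or a null rtx, the result is 0 and the caller picks a fresh
   target.  */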
3983 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3984 FIELD is a bitfield. Returns true if the optimization was successful,
3985 and there's nothing else to do. */
3987 static bool
3988 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3989 unsigned HOST_WIDE_INT bitpos,
3990 enum machine_mode mode1, rtx str_rtx,
3991 tree to, tree src)
3993 enum machine_mode str_mode = GET_MODE (str_rtx);
3994 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3995 tree op0, op1;
3996 rtx value, result;
3997 optab binop;
3999 if (mode1 != VOIDmode
4000 || bitsize >= BITS_PER_WORD
4001 || str_bitsize > BITS_PER_WORD
4002 || TREE_SIDE_EFFECTS (to)
4003 || TREE_THIS_VOLATILE (to))
4004 return false;
4006 STRIP_NOPS (src);
4007 if (!BINARY_CLASS_P (src)
4008 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4009 return false;
4011 op0 = TREE_OPERAND (src, 0);
4012 op1 = TREE_OPERAND (src, 1);
4013 STRIP_NOPS (op0);
4015 if (!operand_equal_p (to, op0, 0))
4016 return false;
4018 if (MEM_P (str_rtx))
4020 unsigned HOST_WIDE_INT offset1;
4022 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4023 str_mode = word_mode;
4024 str_mode = get_best_mode (bitsize, bitpos,
4025 MEM_ALIGN (str_rtx), str_mode, 0);
4026 if (str_mode == VOIDmode)
4027 return false;
4028 str_bitsize = GET_MODE_BITSIZE (str_mode);
4030 offset1 = bitpos;
4031 bitpos %= str_bitsize;
4032 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4033 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4035 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4036 return false;
4038 /* If the bit field covers the whole REG/MEM, store_field
4039 will likely generate better code. */
4040 if (bitsize >= str_bitsize)
4041 return false;
4043 /* We can't handle fields split across multiple entities. */
4044 if (bitpos + bitsize > str_bitsize)
4045 return false;
4047 if (BYTES_BIG_ENDIAN)
4048 bitpos = str_bitsize - bitpos - bitsize;
4050 switch (TREE_CODE (src))
4052 case PLUS_EXPR:
4053 case MINUS_EXPR:
4054 /* For now, just optimize the case of the topmost bitfield,
4055 where we don't need to do any masking, and the case of
4056 1-bit bitfields, where XOR can be used.
4057 We might win by one instruction for the other bitfields
4058 too if insv/extv instructions aren't used, so that
4059 can be added later. */
4060 if (bitpos + bitsize != str_bitsize
4061 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4062 break;
4064 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4065 value = convert_modes (str_mode,
4066 TYPE_MODE (TREE_TYPE (op1)), value,
4067 TYPE_UNSIGNED (TREE_TYPE (op1)));
4069 /* We may be accessing data outside the field, which means
4070 we can alias adjacent data. */
4071 if (MEM_P (str_rtx))
4073 str_rtx = shallow_copy_rtx (str_rtx);
4074 set_mem_alias_set (str_rtx, 0);
4075 set_mem_expr (str_rtx, 0);
4078 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4079 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4081 value = expand_and (str_mode, value, const1_rtx, NULL);
4082 binop = xor_optab;
4084 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4085 build_int_cst (NULL_TREE, bitpos),
4086 NULL_RTX, 1);
4087 result = expand_binop (str_mode, binop, str_rtx,
4088 value, str_rtx, 1, OPTAB_WIDEN);
4089 if (result != str_rtx)
4090 emit_move_insn (str_rtx, result);
4091 return true;
4093 case BIT_IOR_EXPR:
4094 case BIT_XOR_EXPR:
4095 if (TREE_CODE (op1) != INTEGER_CST)
4096 break;
4097 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4098 value = convert_modes (GET_MODE (str_rtx),
4099 TYPE_MODE (TREE_TYPE (op1)), value,
4100 TYPE_UNSIGNED (TREE_TYPE (op1)));
4102 /* We may be accessing data outside the field, which means
4103 we can alias adjacent data. */
4104 if (MEM_P (str_rtx))
4106 str_rtx = shallow_copy_rtx (str_rtx);
4107 set_mem_alias_set (str_rtx, 0);
4108 set_mem_expr (str_rtx, 0);
4111 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4112 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4114 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4115 - 1);
4116 value = expand_and (GET_MODE (str_rtx), value, mask,
4117 NULL_RTX);
4119 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4120 build_int_cst (NULL_TREE, bitpos),
4121 NULL_RTX, 1);
4122 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4123 value, str_rtx, 1, OPTAB_WIDEN);
4124 if (result != str_rtx)
4125 emit_move_insn (str_rtx, result);
4126 return true;
4128 default:
4129 break;
4132 return false;
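/* Worked example of the PLUS_EXPR case, assuming a little-endian
   32-bit target:

     struct S { unsigned lo : 28; unsigned hi : 4; } s;
     s.hi += 1;

   HI occupies the topmost bits of the word (bitpos + bitsize
   == str_bitsize), so no masking is needed: the expansion is simply
   word += 1 << 28, and any carry out of HI falls off the top of the
   word.  A 1-bit field not at the top of the word is handled by
   masking the addend to its low bit and using XOR, since adding an
   odd value to a 1-bit field just flips it.  */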
4136 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4137 is true, try generating a nontemporal store. */
4139 void
4140 expand_assignment (tree to, tree from, bool nontemporal)
4142 rtx to_rtx = 0;
4143 rtx result;
4145 /* Don't crash if the lhs of the assignment was erroneous. */
4146 if (TREE_CODE (to) == ERROR_MARK)
4148 result = expand_normal (from);
4149 return;
4152 /* Optimize away no-op moves without side-effects. */
4153 if (operand_equal_p (to, from, 0))
4154 return;
4156 /* Assignment of a structure component needs special treatment
4157 if the structure component's rtx is not simply a MEM.
4158 Assignment of an array element at a constant index, and assignment of
4159 an array element in an unaligned packed structure field, has the same
4160 problem. */
4161 if (handled_component_p (to)
4162 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4164 enum machine_mode mode1;
4165 HOST_WIDE_INT bitsize, bitpos;
4166 tree offset;
4167 int unsignedp;
4168 int volatilep = 0;
4169 tree tem;
4171 push_temp_slots ();
4172 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4173 &unsignedp, &volatilep, true);
4175 /* If we are going to use store_bit_field and extract_bit_field,
4176 make sure to_rtx will be safe for multiple use. */
4178 to_rtx = expand_normal (tem);
4180 if (offset != 0)
4182 rtx offset_rtx;
4184 if (!MEM_P (to_rtx))
4186 /* We can get constant negative offsets into arrays with broken
4187 user code. Translate this to a trap instead of ICEing. */
4188 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4189 expand_builtin_trap ();
4190 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4193 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4194 #ifdef POINTERS_EXTEND_UNSIGNED
4195 if (GET_MODE (offset_rtx) != Pmode)
4196 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4197 #else
4198 if (GET_MODE (offset_rtx) != ptr_mode)
4199 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4200 #endif
4202 /* A constant address in TO_RTX can have VOIDmode; we must not try
4203 to call force_reg in that case, so avoid it. */
4204 if (MEM_P (to_rtx)
4205 && GET_MODE (to_rtx) == BLKmode
4206 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4207 && bitsize > 0
4208 && (bitpos % bitsize) == 0
4209 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4210 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4212 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4213 bitpos = 0;
4216 to_rtx = offset_address (to_rtx, offset_rtx,
4217 highest_pow2_factor_for_target (to,
4218 offset));
4221 /* Handle expand_expr of a complex value returning a CONCAT. */
4222 if (GET_CODE (to_rtx) == CONCAT)
4224 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4226 gcc_assert (bitpos == 0);
4227 result = store_expr (from, to_rtx, false, nontemporal);
4229 else
4231 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4232 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4233 nontemporal);
4236 else
4238 if (MEM_P (to_rtx))
4240 /* If the field is at offset zero, we could have been given the
4241 DECL_RTX of the parent struct. Don't munge it. */
4242 to_rtx = shallow_copy_rtx (to_rtx);
4244 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4246 /* Deal with volatile and readonly fields. The former is only
4247 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4248 if (volatilep)
4249 MEM_VOLATILE_P (to_rtx) = 1;
4250 if (component_uses_parent_alias_set (to))
4251 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4254 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4255 to_rtx, to, from))
4256 result = NULL;
4257 else
4258 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4259 TREE_TYPE (tem), get_alias_set (to),
4260 nontemporal);
4263 if (result)
4264 preserve_temp_slots (result);
4265 free_temp_slots ();
4266 pop_temp_slots ();
4267 return;
4270 /* If the rhs is a function call and its value is not an aggregate,
4271 call the function before we start to compute the lhs.
4272 This is needed for correct code for cases such as
4273 val = setjmp (buf) on machines where reference to val
4274 requires loading up part of an address in a separate insn.
4276 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4277 since it might be a promoted variable where the zero- or sign- extension
4278 needs to be done. Handling this in the normal way is safe because no
4279 computation is done before the call. */
4280 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4281 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4282 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4283 && REG_P (DECL_RTL (to))))
4285 rtx value;
4287 push_temp_slots ();
4288 value = expand_normal (from);
4289 if (to_rtx == 0)
4290 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4292 /* Handle calls that return values in multiple non-contiguous locations.
4293 The Irix 6 ABI has examples of this. */
4294 if (GET_CODE (to_rtx) == PARALLEL)
4295 emit_group_load (to_rtx, value, TREE_TYPE (from),
4296 int_size_in_bytes (TREE_TYPE (from)));
4297 else if (GET_MODE (to_rtx) == BLKmode)
4298 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4299 else
4301 if (POINTER_TYPE_P (TREE_TYPE (to)))
4302 value = convert_memory_address (GET_MODE (to_rtx), value);
4303 emit_move_insn (to_rtx, value);
4305 preserve_temp_slots (to_rtx);
4306 free_temp_slots ();
4307 pop_temp_slots ();
4308 return;
4311 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4312 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4314 if (to_rtx == 0)
4315 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4317 /* Don't move directly into a return register. */
4318 if (TREE_CODE (to) == RESULT_DECL
4319 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4321 rtx temp;
4323 push_temp_slots ();
4324 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4326 if (GET_CODE (to_rtx) == PARALLEL)
4327 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4328 int_size_in_bytes (TREE_TYPE (from)));
4329 else
4330 emit_move_insn (to_rtx, temp);
4332 preserve_temp_slots (to_rtx);
4333 free_temp_slots ();
4334 pop_temp_slots ();
4335 return;
4338 /* In case we are returning the contents of an object which overlaps
4339 the place the value is being stored, use a safe function when copying
4340 a value through a pointer into a structure value return block. */
4341 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4342 && current_function_returns_struct
4343 && !current_function_returns_pcc_struct)
4345 rtx from_rtx, size;
4347 push_temp_slots ();
4348 size = expr_size (from);
4349 from_rtx = expand_normal (from);
4351 emit_library_call (memmove_libfunc, LCT_NORMAL,
4352 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4353 XEXP (from_rtx, 0), Pmode,
4354 convert_to_mode (TYPE_MODE (sizetype),
4355 size, TYPE_UNSIGNED (sizetype)),
4356 TYPE_MODE (sizetype));
4358 preserve_temp_slots (to_rtx);
4359 free_temp_slots ();
4360 pop_temp_slots ();
4361 return;
4364 /* Compute FROM and store the value in the rtx we got. */
4366 push_temp_slots ();
4367 result = store_expr (from, to_rtx, 0, nontemporal);
4368 preserve_temp_slots (result);
4369 free_temp_slots ();
4370 pop_temp_slots ();
4371 return;
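/* A small example of the CONCAT path above: given

     __complex__ double c;
     __real__ c = 1.0;

   TO_RTX is a CONCAT of two DFmode halves and BITPOS is 0, so the
   store goes directly to XEXP (to_rtx, 0); an assignment to
   __imag__ c lands in XEXP (to_rtx, 1) the same way.  */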
4374 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4375 succeeded, false otherwise. */
4377 static bool
4378 emit_storent_insn (rtx to, rtx from)
4380 enum machine_mode mode = GET_MODE (to), imode;
4381 enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4382 rtx pattern;
4384 if (code == CODE_FOR_nothing)
4385 return false;
4387 imode = insn_data[code].operand[0].mode;
4388 if (!insn_data[code].operand[0].predicate (to, imode))
4389 return false;
4391 imode = insn_data[code].operand[1].mode;
4392 if (!insn_data[code].operand[1].predicate (from, imode))
4394 from = copy_to_mode_reg (imode, from);
4395 if (!insn_data[code].operand[1].predicate (from, imode))
4396 return false;
4399 pattern = GEN_FCN (code) (to, from);
4400 if (pattern == NULL_RTX)
4401 return false;
4403 emit_insn (pattern);
4404 return true;
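/* On targets that define a storent pattern for MODE this emits a
   nontemporal (cache-bypassing) store; e.g. on x86 with SSE the
   V4SFmode pattern corresponds to the MOVNTPS instruction.
   Elsewhere optab_handler yields CODE_FOR_nothing and the caller
   falls back to an ordinary store.  */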
4407 /* Generate code for computing expression EXP,
4408 and storing the value into TARGET.
4410 If the mode is BLKmode then we may return TARGET itself.
4411 It turns out that in BLKmode it doesn't cause a problem,
4412 because C has no operators that could combine two different
4413 assignments into the same BLKmode object with different values
4414 with no sequence point. Will other languages need this to
4415 be more thorough?
4417 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4418 stack, and block moves may need to be treated specially.
4420 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4422 rtx
4423 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4425 rtx temp;
4426 rtx alt_rtl = NULL_RTX;
4427 int dont_return_target = 0;
4429 if (VOID_TYPE_P (TREE_TYPE (exp)))
4431 /* C++ can generate ?: expressions with a throw expression in one
4432 branch and an rvalue in the other. Here, we resolve attempts to
4433 store the throw expression's nonexistent result. */
4434 gcc_assert (!call_param_p);
4435 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4436 return NULL_RTX;
4438 if (TREE_CODE (exp) == COMPOUND_EXPR)
4440 /* Perform first part of compound expression, then assign from second
4441 part. */
4442 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4443 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4444 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4445 nontemporal);
4447 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4449 /* For conditional expression, get safe form of the target. Then
4450 test the condition, doing the appropriate assignment on either
4451 side. This avoids the creation of unnecessary temporaries.
4452 For non-BLKmode, it is more efficient not to do this. */
4454 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4456 do_pending_stack_adjust ();
4457 NO_DEFER_POP;
4458 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4459 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4460 nontemporal);
4461 emit_jump_insn (gen_jump (lab2));
4462 emit_barrier ();
4463 emit_label (lab1);
4464 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4465 nontemporal);
4466 emit_label (lab2);
4467 OK_DEFER_POP;
4469 return NULL_RTX;
4471 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4472 /* If this is a scalar in a register that is stored in a wider mode
4473 than the declared mode, compute the result into its declared mode
4474 and then convert to the wider mode. Our value is the computed
4475 expression. */
4477 rtx inner_target = 0;
4479 /* We can do the conversion inside EXP, which will often result
4480 in some optimizations. Do the conversion in two steps: first
4481 change the signedness, if needed, then the extend. But don't
4482 do this if the type of EXP is a subtype of something else
4483 since then the conversion might involve more than just
4484 converting modes. */
4485 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4486 && TREE_TYPE (TREE_TYPE (exp)) == 0
4487 && (!lang_hooks.reduce_bit_field_operations
4488 || (GET_MODE_PRECISION (GET_MODE (target))
4489 == TYPE_PRECISION (TREE_TYPE (exp)))))
4491 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4492 != SUBREG_PROMOTED_UNSIGNED_P (target))
4494 /* Some types, e.g. Fortran's logical*4, won't have a signed
4495 version, so use the mode instead. */
4496 tree ntype
4497 = (signed_or_unsigned_type_for
4498 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4499 if (ntype == NULL)
4500 ntype = lang_hooks.types.type_for_mode
4501 (TYPE_MODE (TREE_TYPE (exp)),
4502 SUBREG_PROMOTED_UNSIGNED_P (target));
4504 exp = fold_convert (ntype, exp);
4507 exp = fold_convert (lang_hooks.types.type_for_mode
4508 (GET_MODE (SUBREG_REG (target)),
4509 SUBREG_PROMOTED_UNSIGNED_P (target)),
4510 exp);
4512 inner_target = SUBREG_REG (target);
4515 temp = expand_expr (exp, inner_target, VOIDmode,
4516 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4518 /* If TEMP is a VOIDmode constant, use convert_modes to make
4519 sure that we properly convert it. */
4520 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4522 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4523 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4524 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4525 GET_MODE (target), temp,
4526 SUBREG_PROMOTED_UNSIGNED_P (target));
4529 convert_move (SUBREG_REG (target), temp,
4530 SUBREG_PROMOTED_UNSIGNED_P (target));
4532 return NULL_RTX;
4534 else if (TREE_CODE (exp) == STRING_CST
4535 && !nontemporal && !call_param_p
4536 && TREE_STRING_LENGTH (exp) > 0
4537 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4539 /* Optimize initialization of an array with a STRING_CST. */
4540 HOST_WIDE_INT exp_len, str_copy_len;
4541 rtx dest_mem;
4543 exp_len = int_expr_size (exp);
4544 if (exp_len <= 0)
4545 goto normal_expr;
4547 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4548 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4549 goto normal_expr;
4551 str_copy_len = TREE_STRING_LENGTH (exp);
4552 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4554 str_copy_len += STORE_MAX_PIECES - 1;
4555 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4557 str_copy_len = MIN (str_copy_len, exp_len);
4558 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4559 (void *) TREE_STRING_POINTER (exp),
4560 MEM_ALIGN (target), false))
4561 goto normal_expr;
4563 dest_mem = target;
4565 dest_mem = store_by_pieces (dest_mem,
4566 str_copy_len, builtin_strncpy_read_str,
4567 (void *) TREE_STRING_POINTER (exp),
4568 MEM_ALIGN (target), false,
4569 exp_len > str_copy_len ? 1 : 0);
4570 if (exp_len > str_copy_len)
4571 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4572 GEN_INT (exp_len - str_copy_len),
4573 BLOCK_OP_NORMAL);
4574 return NULL_RTX;
4576 else
4578 rtx tmp_target;
4580 normal_expr:
4581 /* If we want to use a nontemporal store, force the value into a
4582 register first. */
4583 tmp_target = nontemporal ? NULL_RTX : target;
4584 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4585 (call_param_p
4586 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4587 &alt_rtl);
4588 /* Return TARGET if it's a specified hardware register.
4589 If TARGET is a volatile mem ref, either return TARGET
4590 or return a reg copied *from* TARGET; ANSI requires this.
4592 Otherwise, if TEMP is not TARGET, return TEMP
4593 if it is constant (for efficiency),
4594 or if we really want the correct value. */
4595 if (!(target && REG_P (target)
4596 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4597 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4598 && ! rtx_equal_p (temp, target)
4599 && CONSTANT_P (temp))
4600 dont_return_target = 1;
4603 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4604 the same as that of TARGET, adjust the constant. This is needed, for
4605 example, in case it is a CONST_DOUBLE and we want only a word-sized
4606 value. */
4607 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4608 && TREE_CODE (exp) != ERROR_MARK
4609 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4610 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4611 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4613 /* If value was not generated in the target, store it there.
4614 Convert the value to TARGET's type first if necessary and emit the
4615 pending incrementations that have been queued when expanding EXP.
4616 Note that we cannot emit the whole queue blindly because this will
4617 effectively disable the POST_INC optimization later.
4619 If TEMP and TARGET compare equal according to rtx_equal_p, but
4620 one or both of them are volatile memory refs, we have to distinguish
4621 two cases:
4622 - expand_expr has used TARGET. In this case, we must not generate
4623 another copy. This can be detected by TARGET being equal according
4624 to == .
4625 - expand_expr has not used TARGET - that means that the source just
4626 happens to have the same RTX form. Since temp will have been created
4627 by expand_expr, it will compare unequal according to == .
4628 We must generate a copy in this case, to reach the correct number
4629 of volatile memory references. */
4631 if ((! rtx_equal_p (temp, target)
4632 || (temp != target && (side_effects_p (temp)
4633 || side_effects_p (target))))
4634 && TREE_CODE (exp) != ERROR_MARK
4635 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4636 but TARGET is not valid memory reference, TEMP will differ
4637 from TARGET although it is really the same location. */
4638 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4639 /* If there's nothing to copy, don't bother. Don't call
4640 expr_size unless necessary, because some front ends' (C++)
4641 expr_size hook must not be given objects that are not
4642 supposed to be bit-copied or bit-initialized. */
4643 && expr_size (exp) != const0_rtx)
4645 if (GET_MODE (temp) != GET_MODE (target)
4646 && GET_MODE (temp) != VOIDmode)
4648 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4649 if (dont_return_target)
4651 /* In this case, we will return TEMP,
4652 so make sure it has the proper mode.
4653 But don't forget to store the value into TARGET. */
4654 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4655 emit_move_insn (target, temp);
4657 else if (GET_MODE (target) == BLKmode
4658 || GET_MODE (temp) == BLKmode)
4659 emit_block_move (target, temp, expr_size (exp),
4660 (call_param_p
4661 ? BLOCK_OP_CALL_PARM
4662 : BLOCK_OP_NORMAL));
4663 else
4664 convert_move (target, temp, unsignedp);
4667 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4669 /* Handle copying a string constant into an array. The string
4670 constant may be shorter than the array. So copy just the string's
4671 actual length, and clear the rest. First get the size of the data
4672 type of the string, which is actually the size of the target. */
4673 rtx size = expr_size (exp);
4675 if (GET_CODE (size) == CONST_INT
4676 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4677 emit_block_move (target, temp, size,
4678 (call_param_p
4679 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4680 else
4682 /* Compute the size of the data to copy from the string. */
4683 tree copy_size
4684 = size_binop (MIN_EXPR,
4685 make_tree (sizetype, size),
4686 size_int (TREE_STRING_LENGTH (exp)));
4687 rtx copy_size_rtx
4688 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4689 (call_param_p
4690 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4691 rtx label = 0;
4693 /* Copy that much. */
4694 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4695 TYPE_UNSIGNED (sizetype));
4696 emit_block_move (target, temp, copy_size_rtx,
4697 (call_param_p
4698 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4700 /* Figure out how much is left in TARGET that we have to clear.
4701 Do all calculations in ptr_mode. */
4702 if (GET_CODE (copy_size_rtx) == CONST_INT)
4704 size = plus_constant (size, -INTVAL (copy_size_rtx));
4705 target = adjust_address (target, BLKmode,
4706 INTVAL (copy_size_rtx));
4708 else
4710 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4711 copy_size_rtx, NULL_RTX, 0,
4712 OPTAB_LIB_WIDEN);
4714 #ifdef POINTERS_EXTEND_UNSIGNED
4715 if (GET_MODE (copy_size_rtx) != Pmode)
4716 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4717 TYPE_UNSIGNED (sizetype));
4718 #endif
4720 target = offset_address (target, copy_size_rtx,
4721 highest_pow2_factor (copy_size));
4722 label = gen_label_rtx ();
4723 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4724 GET_MODE (size), 0, label);
4727 if (size != const0_rtx)
4728 clear_storage (target, size, BLOCK_OP_NORMAL);
4730 if (label)
4731 emit_label (label);
4734 /* Handle calls that return values in multiple non-contiguous locations.
4735 The Irix 6 ABI has examples of this. */
4736 else if (GET_CODE (target) == PARALLEL)
4737 emit_group_load (target, temp, TREE_TYPE (exp),
4738 int_size_in_bytes (TREE_TYPE (exp)));
4739 else if (GET_MODE (temp) == BLKmode)
4740 emit_block_move (target, temp, expr_size (exp),
4741 (call_param_p
4742 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4743 else if (nontemporal
4744 && emit_storent_insn (target, temp))
4745 /* If we managed to emit a nontemporal store, there is nothing else to
4746 do. */
4748 else
4750 temp = force_operand (temp, target);
4751 if (temp != target)
4752 emit_move_insn (target, temp);
4756 return NULL_RTX;
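/* Worked example of the STRING_CST fast path above (instruction
   selection is target dependent): for

     char buf[16] = "hi";

   EXP_LEN is 16 and STR_COPY_LEN starts as 3 (the string plus its
   NUL), is rounded up to a STORE_MAX_PIECES boundary and capped at
   EXP_LEN; store_by_pieces then writes the leading bytes with
   immediate stores and clear_storage zeroes the rest of the
   array.  */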
4759 /* Helper for categorize_ctor_elements. Identical interface. */
4761 static bool
4762 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4763 HOST_WIDE_INT *p_elt_count,
4764 bool *p_must_clear)
4766 unsigned HOST_WIDE_INT idx;
4767 HOST_WIDE_INT nz_elts, elt_count;
4768 tree value, purpose;
4770 /* Whether CTOR is a valid constant initializer, in accordance with what
4771 initializer_constant_valid_p does. If inferred from the constructor
4772 elements, true until proven otherwise. */
4773 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4774 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4776 nz_elts = 0;
4777 elt_count = 0;
4779 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4781 HOST_WIDE_INT mult;
4783 mult = 1;
4784 if (TREE_CODE (purpose) == RANGE_EXPR)
4786 tree lo_index = TREE_OPERAND (purpose, 0);
4787 tree hi_index = TREE_OPERAND (purpose, 1);
4789 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4790 mult = (tree_low_cst (hi_index, 1)
4791 - tree_low_cst (lo_index, 1) + 1);
4794 switch (TREE_CODE (value))
4796 case CONSTRUCTOR:
4798 HOST_WIDE_INT nz = 0, ic = 0;
4800 bool const_elt_p
4801 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4803 nz_elts += mult * nz;
4804 elt_count += mult * ic;
4806 if (const_from_elts_p && const_p)
4807 const_p = const_elt_p;
4809 break;
4811 case INTEGER_CST:
4812 case REAL_CST:
4813 case FIXED_CST:
4814 if (!initializer_zerop (value))
4815 nz_elts += mult;
4816 elt_count += mult;
4817 break;
4819 case STRING_CST:
4820 nz_elts += mult * TREE_STRING_LENGTH (value);
4821 elt_count += mult * TREE_STRING_LENGTH (value);
4822 break;
4824 case COMPLEX_CST:
4825 if (!initializer_zerop (TREE_REALPART (value)))
4826 nz_elts += mult;
4827 if (!initializer_zerop (TREE_IMAGPART (value)))
4828 nz_elts += mult;
4829 elt_count += mult;
4830 break;
4832 case VECTOR_CST:
4834 tree v;
4835 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4837 if (!initializer_zerop (TREE_VALUE (v)))
4838 nz_elts += mult;
4839 elt_count += mult;
4842 break;
4844 default:
4845 nz_elts += mult;
4846 elt_count += mult;
4848 if (const_from_elts_p && const_p)
4849 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4850 != NULL_TREE;
4851 break;
4855 if (!*p_must_clear
4856 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4857 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4859 tree init_sub_type;
4860 bool clear_this = true;
4862 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4864 /* We don't expect more than one element of the union to be
4865 initialized. Not sure what we should do otherwise... */
4866 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4867 == 1);
4869 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4870 CONSTRUCTOR_ELTS (ctor),
4871 0)->value);
4873 /* ??? We could look at each element of the union, and find the
4874 largest element. Which would avoid comparing the size of the
4875 initialized element against any tail padding in the union.
4876 Doesn't seem worth the effort... */
4877 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4878 TYPE_SIZE (init_sub_type)) == 1)
4880 /* And now we have to find out if the element itself is fully
4881 constructed. E.g. for union { struct { int a, b; } s; } u
4882 = { .s = { .a = 1 } }. */
4883 if (elt_count == count_type_elements (init_sub_type, false))
4884 clear_this = false;
4888 *p_must_clear = clear_this;
4891 *p_nz_elts += nz_elts;
4892 *p_elt_count += elt_count;
4894 return const_p;
4897 /* Examine CTOR to discover:
4898 * how many scalar fields are set to nonzero values,
4899 and place it in *P_NZ_ELTS;
4900 * how many scalar fields in total are in CTOR,
4901 and place it in *P_ELT_COUNT.
4902 * if a type is a union, and the initializer from the constructor
4903 is not the largest element in the union, then set *p_must_clear.
4905 Return whether or not CTOR is a valid static constant initializer, the same
4906 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4908 bool
4909 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4910 HOST_WIDE_INT *p_elt_count,
4911 bool *p_must_clear)
4913 *p_nz_elts = 0;
4914 *p_elt_count = 0;
4915 *p_must_clear = false;
4917 return
4918 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
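/* For example, for

     struct { int a, b, c; } x = { 1, 0, 2 };

   this sets *P_NZ_ELTS to 2 and *P_ELT_COUNT to 3 (the zero element
   is counted but is not nonzero), leaves *P_MUST_CLEAR false since
   the type is not a union, and returns true because every element is
   a valid constant initializer.  */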
4921 /* Count the number of scalars in TYPE. Return -1 on overflow or
4922 if TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count
4923 a flexible array member at the end of the structure. */
4925 HOST_WIDE_INT
4926 count_type_elements (const_tree type, bool allow_flexarr)
4928 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4929 switch (TREE_CODE (type))
4931 case ARRAY_TYPE:
4933 tree telts = array_type_nelts (type);
4934 if (telts && host_integerp (telts, 1))
4936 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4937 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4938 if (n == 0)
4939 return 0;
4940 else if (max / n > m)
4941 return n * m;
4943 return -1;
4946 case RECORD_TYPE:
4948 HOST_WIDE_INT n = 0, t;
4949 tree f;
4951 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4952 if (TREE_CODE (f) == FIELD_DECL)
4954 t = count_type_elements (TREE_TYPE (f), false);
4955 if (t < 0)
4957 /* Check for structures with flexible array member. */
4958 tree tf = TREE_TYPE (f);
4959 if (allow_flexarr
4960 && TREE_CHAIN (f) == NULL
4961 && TREE_CODE (tf) == ARRAY_TYPE
4962 && TYPE_DOMAIN (tf)
4963 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4964 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4965 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4966 && int_size_in_bytes (type) >= 0)
4967 break;
4969 return -1;
4971 n += t;
4974 return n;
4977 case UNION_TYPE:
4978 case QUAL_UNION_TYPE:
4979 return -1;
4981 case COMPLEX_TYPE:
4982 return 2;
4984 case VECTOR_TYPE:
4985 return TYPE_VECTOR_SUBPARTS (type);
4987 case INTEGER_TYPE:
4988 case REAL_TYPE:
4989 case FIXED_POINT_TYPE:
4990 case ENUMERAL_TYPE:
4991 case BOOLEAN_TYPE:
4992 case POINTER_TYPE:
4993 case OFFSET_TYPE:
4994 case REFERENCE_TYPE:
4995 return 1;
4997 case VOID_TYPE:
4998 case METHOD_TYPE:
4999 case FUNCTION_TYPE:
5000 case LANG_TYPE:
5001 default:
5002 gcc_unreachable ();
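/* Some concrete results: an int[8] array counts 8 elements, a
   struct { int a; double b; } counts 2, a _Complex double counts 2,
   and any union counts -1 since which member is live cannot be known
   statically.  */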
5006 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
5008 static int
5009 mostly_zeros_p (const_tree exp)
5011 if (TREE_CODE (exp) == CONSTRUCTOR)
5014 HOST_WIDE_INT nz_elts, count, elts;
5015 bool must_clear;
5017 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5018 if (must_clear)
5019 return 1;
5021 elts = count_type_elements (TREE_TYPE (exp), false);
5023 return nz_elts < elts / 4;
5026 return initializer_zerop (exp);
5029 /* Return 1 if EXP contains all zeros. */
5031 static int
5032 all_zeros_p (const_tree exp)
5034 if (TREE_CODE (exp) == CONSTRUCTOR)
5037 HOST_WIDE_INT nz_elts, count;
5038 bool must_clear;
5040 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5041 return nz_elts == 0;
5044 return initializer_zerop (exp);
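/* Examples: for int v[8] = { 1 } the constructor has one nonzero
   element against 8 type elements, so mostly_zeros_p returns 1
   (1 < 8/4), while all_zeros_p returns 1 only for initializers such
   as { 0, 0 } whose every counted element is zero.  */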
5047 /* Helper function for store_constructor.
5048 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5049 TYPE is the type of the CONSTRUCTOR, not the element type.
5050 CLEARED is as for store_constructor.
5051 ALIAS_SET is the alias set to use for any stores.
5053 This provides a recursive shortcut back to store_constructor when it isn't
5054 necessary to go through store_field. This is so that we can pass through
5055 the cleared field to let store_constructor know that we may not have to
5056 clear a substructure if the outer structure has already been cleared. */
5058 static void
5059 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5060 HOST_WIDE_INT bitpos, enum machine_mode mode,
5061 tree exp, tree type, int cleared,
5062 alias_set_type alias_set)
5064 if (TREE_CODE (exp) == CONSTRUCTOR
5065 /* We can only call store_constructor recursively if the size and
5066 bit position are on a byte boundary. */
5067 && bitpos % BITS_PER_UNIT == 0
5068 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5069 /* If we have a nonzero bitpos for a register target, then we just
5070 let store_field do the bitfield handling. This is unlikely to
5071 generate unnecessary clear instructions anyway. */
5072 && (bitpos == 0 || MEM_P (target)))
5074 if (MEM_P (target))
5075 target
5076 = adjust_address (target,
5077 GET_MODE (target) == BLKmode
5078 || 0 != (bitpos
5079 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5080 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5083 /* Update the alias set, if required. */
5084 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5085 && MEM_ALIAS_SET (target) != 0)
5087 target = copy_rtx (target);
5088 set_mem_alias_set (target, alias_set);
5091 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5093 else
5094 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
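/* E.g. a nested initializer such as

     struct P { struct Q { int x, y; } q; } p = { { 1, 2 } };

   reaches this function with EXP being the inner CONSTRUCTOR at a
   byte-aligned BITPOS, so the store recurses into store_constructor
   on the adjusted address instead of going through store_field.  */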
5097 /* Store the value of constructor EXP into the rtx TARGET.
5098 TARGET is either a REG or a MEM; we know it cannot conflict, since
5099 safe_from_p has been called.
5100 CLEARED is true if TARGET is known to have been zero'd.
5101 SIZE is the number of bytes of TARGET we are allowed to modify: this
5102 may not be the same as the size of EXP if we are assigning to a field
5103 which has been packed to exclude padding bits. */
5105 static void
5106 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5108 tree type = TREE_TYPE (exp);
5109 #ifdef WORD_REGISTER_OPERATIONS
5110 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5111 #endif
5113 switch (TREE_CODE (type))
5115 case RECORD_TYPE:
5116 case UNION_TYPE:
5117 case QUAL_UNION_TYPE:
5119 unsigned HOST_WIDE_INT idx;
5120 tree field, value;
5122 /* If size is zero or the target is already cleared, do nothing. */
5123 if (size == 0 || cleared)
5124 cleared = 1;
5125 /* We either clear the aggregate or indicate the value is dead. */
5126 else if ((TREE_CODE (type) == UNION_TYPE
5127 || TREE_CODE (type) == QUAL_UNION_TYPE)
5128 && ! CONSTRUCTOR_ELTS (exp))
5129 /* If the constructor is empty, clear the union. */
5131 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5132 cleared = 1;
5135 /* If we are building a static constructor into a register,
5136 set the initial value as zero so we can fold the value into
5137 a constant. But if more than one register is involved,
5138 this probably loses. */
5139 else if (REG_P (target) && TREE_STATIC (exp)
5140 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5142 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5143 cleared = 1;
5146 /* If the constructor has fewer fields than the structure or
5147 if we are initializing the structure to mostly zeros, clear
5148 the whole structure first. Don't do this if TARGET is a
5149 register whose mode size isn't equal to SIZE since
5150 clear_storage can't handle this case. */
5151 else if (size > 0
5152 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5153 != fields_length (type))
5154 || mostly_zeros_p (exp))
5155 && (!REG_P (target)
5156 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5157 == size)))
5159 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5160 cleared = 1;
5163 if (REG_P (target) && !cleared)
5164 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5166 /* Store each element of the constructor into the
5167 corresponding field of TARGET. */
5168 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5170 enum machine_mode mode;
5171 HOST_WIDE_INT bitsize;
5172 HOST_WIDE_INT bitpos = 0;
5173 tree offset;
5174 rtx to_rtx = target;
5176 /* Just ignore missing fields. We cleared the whole
5177 structure, above, if any fields are missing. */
5178 if (field == 0)
5179 continue;
5181 if (cleared && initializer_zerop (value))
5182 continue;
5184 if (host_integerp (DECL_SIZE (field), 1))
5185 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5186 else
5187 bitsize = -1;
5189 mode = DECL_MODE (field);
5190 if (DECL_BIT_FIELD (field))
5191 mode = VOIDmode;
5193 offset = DECL_FIELD_OFFSET (field);
5194 if (host_integerp (offset, 0)
5195 && host_integerp (bit_position (field), 0))
5197 bitpos = int_bit_position (field);
5198 offset = 0;
5200 else
5201 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5203 if (offset)
5205 rtx offset_rtx;
5207 offset
5208 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5209 make_tree (TREE_TYPE (exp),
5210 target));
5212 offset_rtx = expand_normal (offset);
5213 gcc_assert (MEM_P (to_rtx));
5215 #ifdef POINTERS_EXTEND_UNSIGNED
5216 if (GET_MODE (offset_rtx) != Pmode)
5217 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5218 #else
5219 if (GET_MODE (offset_rtx) != ptr_mode)
5220 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5221 #endif
5223 to_rtx = offset_address (to_rtx, offset_rtx,
5224 highest_pow2_factor (offset));
5227 #ifdef WORD_REGISTER_OPERATIONS
5228 /* If this initializes a field that is smaller than a
5229 word, at the start of a word, try to widen it to a full
5230 word. This special case allows us to output C++ member
5231 function initializations in a form that the optimizers
5232 can understand. */
5233 if (REG_P (target)
5234 && bitsize < BITS_PER_WORD
5235 && bitpos % BITS_PER_WORD == 0
5236 && GET_MODE_CLASS (mode) == MODE_INT
5237 && TREE_CODE (value) == INTEGER_CST
5238 && exp_size >= 0
5239 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5241 tree type = TREE_TYPE (value);
5243 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5245 type = lang_hooks.types.type_for_size
5246 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5247 value = fold_convert (type, value);
5250 if (BYTES_BIG_ENDIAN)
5251 value
5252 = fold_build2 (LSHIFT_EXPR, type, value,
5253 build_int_cst (type,
5254 BITS_PER_WORD - bitsize));
5255 bitsize = BITS_PER_WORD;
5256 mode = word_mode;
5258 #endif
5260 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5261 && DECL_NONADDRESSABLE_P (field))
5263 to_rtx = copy_rtx (to_rtx);
5264 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5267 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5268 value, type, cleared,
5269 get_alias_set (TREE_TYPE (field)));
5271 break;
5273 case ARRAY_TYPE:
5275 tree value, index;
5276 unsigned HOST_WIDE_INT i;
5277 int need_to_clear;
5278 tree domain;
5279 tree elttype = TREE_TYPE (type);
5280 int const_bounds_p;
5281 HOST_WIDE_INT minelt = 0;
5282 HOST_WIDE_INT maxelt = 0;
5284 domain = TYPE_DOMAIN (type);
5285 const_bounds_p = (TYPE_MIN_VALUE (domain)
5286 && TYPE_MAX_VALUE (domain)
5287 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5288 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5290 /* If we have constant bounds for the range of the type, get them. */
5291 if (const_bounds_p)
5293 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5294 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5297 /* If the constructor has fewer elements than the array, clear
5298 the whole array first. Similarly if this is a static
5299 constructor of a non-BLKmode object. */
5300 if (cleared)
5301 need_to_clear = 0;
5302 else if (REG_P (target) && TREE_STATIC (exp))
5303 need_to_clear = 1;
5304 else
5306 unsigned HOST_WIDE_INT idx;
5307 tree index, value;
5308 HOST_WIDE_INT count = 0, zero_count = 0;
5309 need_to_clear = ! const_bounds_p;
5311 /* This loop is a more accurate version of the loop in
5312 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5313 is also needed to check for missing elements. */
5314 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5316 HOST_WIDE_INT this_node_count;
5318 if (need_to_clear)
5319 break;
5321 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5323 tree lo_index = TREE_OPERAND (index, 0);
5324 tree hi_index = TREE_OPERAND (index, 1);
5326 if (! host_integerp (lo_index, 1)
5327 || ! host_integerp (hi_index, 1))
5329 need_to_clear = 1;
5330 break;
5333 this_node_count = (tree_low_cst (hi_index, 1)
5334 - tree_low_cst (lo_index, 1) + 1);
5336 else
5337 this_node_count = 1;
5339 count += this_node_count;
5340 if (mostly_zeros_p (value))
5341 zero_count += this_node_count;
5344 /* Clear the entire array first if there are any missing
5345 elements, or if the incidence of zero elements is >=
5346 75%. */
5347 if (! need_to_clear
5348 && (count < maxelt - minelt + 1
5349 || 4 * zero_count >= 3 * count))
5350 need_to_clear = 1;
5353 if (need_to_clear && size > 0)
5355 if (REG_P (target))
5356 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5357 else
5358 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5359 cleared = 1;
5362 if (!cleared && REG_P (target))
5363 /* Inform later passes that the old value is dead. */
5364 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5366 /* Store each element of the constructor into the
5367 corresponding element of TARGET, determined by counting the
5368 elements. */
5369 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5371 enum machine_mode mode;
5372 HOST_WIDE_INT bitsize;
5373 HOST_WIDE_INT bitpos;
5374 int unsignedp;
5375 rtx xtarget = target;
5377 if (cleared && initializer_zerop (value))
5378 continue;
5380 unsignedp = TYPE_UNSIGNED (elttype);
5381 mode = TYPE_MODE (elttype);
5382 if (mode == BLKmode)
5383 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5384 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5385 : -1);
5386 else
5387 bitsize = GET_MODE_BITSIZE (mode);
5389 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5391 tree lo_index = TREE_OPERAND (index, 0);
5392 tree hi_index = TREE_OPERAND (index, 1);
5393 rtx index_r, pos_rtx;
5394 HOST_WIDE_INT lo, hi, count;
5395 tree position;
5397 /* If the range is constant and "small", unroll the loop. */
5398 if (const_bounds_p
5399 && host_integerp (lo_index, 0)
5400 && host_integerp (hi_index, 0)
5401 && (lo = tree_low_cst (lo_index, 0),
5402 hi = tree_low_cst (hi_index, 0),
5403 count = hi - lo + 1,
5404 (!MEM_P (target)
5405 || count <= 2
5406 || (host_integerp (TYPE_SIZE (elttype), 1)
5407 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5408 <= 40 * 8)))))
5410 lo -= minelt; hi -= minelt;
5411 for (; lo <= hi; lo++)
5413 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5415 if (MEM_P (target)
5416 && !MEM_KEEP_ALIAS_SET_P (target)
5417 && TREE_CODE (type) == ARRAY_TYPE
5418 && TYPE_NONALIASED_COMPONENT (type))
5420 target = copy_rtx (target);
5421 MEM_KEEP_ALIAS_SET_P (target) = 1;
5424 store_constructor_field
5425 (target, bitsize, bitpos, mode, value, type, cleared,
5426 get_alias_set (elttype));
5429 else
5431 rtx loop_start = gen_label_rtx ();
5432 rtx loop_end = gen_label_rtx ();
5433 tree exit_cond;
5435 expand_normal (hi_index);
5436 unsignedp = TYPE_UNSIGNED (domain);
5438 index = build_decl (VAR_DECL, NULL_TREE, domain);
5440 index_r
5441 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5442 &unsignedp, 0));
5443 SET_DECL_RTL (index, index_r);
5444 store_expr (lo_index, index_r, 0, false);
5446 /* Build the head of the loop. */
5447 do_pending_stack_adjust ();
5448 emit_label (loop_start);
5450 /* Assign value to element index. */
5451 position =
5452 fold_convert (ssizetype,
5453 fold_build2 (MINUS_EXPR,
5454 TREE_TYPE (index),
5455 index,
5456 TYPE_MIN_VALUE (domain)));
5458 position =
5459 size_binop (MULT_EXPR, position,
5460 fold_convert (ssizetype,
5461 TYPE_SIZE_UNIT (elttype)));
5463 pos_rtx = expand_normal (position);
5464 xtarget = offset_address (target, pos_rtx,
5465 highest_pow2_factor (position));
5466 xtarget = adjust_address (xtarget, mode, 0);
5467 if (TREE_CODE (value) == CONSTRUCTOR)
5468 store_constructor (value, xtarget, cleared,
5469 bitsize / BITS_PER_UNIT);
5470 else
5471 store_expr (value, xtarget, 0, false);
5473 /* Generate a conditional jump to exit the loop. */
5474 exit_cond = build2 (LT_EXPR, integer_type_node,
5475 index, hi_index);
5476 jumpif (exit_cond, loop_end);
5478 /* Update the loop counter, and jump to the head of
5479 the loop. */
5480 expand_assignment (index,
5481 build2 (PLUS_EXPR, TREE_TYPE (index),
5482 index, integer_one_node),
5483 false);
5485 emit_jump (loop_start);
5487 /* Build the end of the loop. */
5488 emit_label (loop_end);
5491 else if ((index != 0 && ! host_integerp (index, 0))
5492 || ! host_integerp (TYPE_SIZE (elttype), 1))
5494 tree position;
5496 if (index == 0)
5497 index = ssize_int (1);
5499 if (minelt)
5500 index = fold_convert (ssizetype,
5501 fold_build2 (MINUS_EXPR,
5502 TREE_TYPE (index),
5503 index,
5504 TYPE_MIN_VALUE (domain)));
5506 position =
5507 size_binop (MULT_EXPR, index,
5508 fold_convert (ssizetype,
5509 TYPE_SIZE_UNIT (elttype)));
5510 xtarget = offset_address (target,
5511 expand_normal (position),
5512 highest_pow2_factor (position));
5513 xtarget = adjust_address (xtarget, mode, 0);
5514 store_expr (value, xtarget, 0, false);
5516 else
5518 if (index != 0)
5519 bitpos = ((tree_low_cst (index, 0) - minelt)
5520 * tree_low_cst (TYPE_SIZE (elttype), 1));
5521 else
5522 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5524 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5525 && TREE_CODE (type) == ARRAY_TYPE
5526 && TYPE_NONALIASED_COMPONENT (type))
5528 target = copy_rtx (target);
5529 MEM_KEEP_ALIAS_SET_P (target) = 1;
5531 store_constructor_field (target, bitsize, bitpos, mode, value,
5532 type, cleared, get_alias_set (elttype));
5535 break;
5538 case VECTOR_TYPE:
5540 unsigned HOST_WIDE_INT idx;
5541 constructor_elt *ce;
5542 int i;
5543 int need_to_clear;
5544 int icode = 0;
5545 tree elttype = TREE_TYPE (type);
5546 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5547 enum machine_mode eltmode = TYPE_MODE (elttype);
5548 HOST_WIDE_INT bitsize;
5549 HOST_WIDE_INT bitpos;
5550 rtvec vector = NULL;
5551 unsigned n_elts;
5553 gcc_assert (eltmode != BLKmode);
5555 n_elts = TYPE_VECTOR_SUBPARTS (type);
5556 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5558 enum machine_mode mode = GET_MODE (target);
5560 icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5561 if (icode != CODE_FOR_nothing)
5563 unsigned int i;
5565 vector = rtvec_alloc (n_elts);
5566 for (i = 0; i < n_elts; i++)
5567 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5571 /* If the constructor has fewer elements than the vector,
5572 clear the whole array first. Similarly if this is a static
5573 constructor of a non-BLKmode object. */
5574 if (cleared)
5575 need_to_clear = 0;
5576 else if (REG_P (target) && TREE_STATIC (exp))
5577 need_to_clear = 1;
5578 else
5580 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5581 tree value;
5583 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5585 int n_elts_here = tree_low_cst
5586 (int_const_binop (TRUNC_DIV_EXPR,
5587 TYPE_SIZE (TREE_TYPE (value)),
5588 TYPE_SIZE (elttype), 0), 1);
5590 count += n_elts_here;
5591 if (mostly_zeros_p (value))
5592 zero_count += n_elts_here;
5595 /* Clear the entire vector first if there are any missing elements,
5596 or if the incidence of zero elements is >= 75%. */
5597 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5600 if (need_to_clear && size > 0 && !vector)
5602 if (REG_P (target))
5603 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5604 else
5605 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5606 cleared = 1;
5609 /* Inform later passes that the old value is dead. */
5610 if (!cleared && !vector && REG_P (target))
5611 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5613 /* Store each element of the constructor into the corresponding
5614 element of TARGET, determined by counting the elements. */
5615 for (idx = 0, i = 0;
5616 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5617 idx++, i += bitsize / elt_size)
5619 HOST_WIDE_INT eltpos;
5620 tree value = ce->value;
5622 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5623 if (cleared && initializer_zerop (value))
5624 continue;
5626 if (ce->index)
5627 eltpos = tree_low_cst (ce->index, 1);
5628 else
5629 eltpos = i;
5631 if (vector)
5633 /* Vector CONSTRUCTORs should only be built from smaller
5634 vectors in the case of BLKmode vectors. */
5635 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5636 RTVEC_ELT (vector, eltpos)
5637 = expand_normal (value);
5639 else
5641 enum machine_mode value_mode =
5642 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5643 ? TYPE_MODE (TREE_TYPE (value))
5644 : eltmode;
5645 bitpos = eltpos * elt_size;
5646 store_constructor_field (target, bitsize, bitpos,
5647 value_mode, value, type,
5648 cleared, get_alias_set (elttype));
5652 if (vector)
5653 emit_insn (GEN_FCN (icode)
5654 (target,
5655 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5656 break;
5659 default:
5660 gcc_unreachable ();
5664 /* Store the value of EXP (an expression tree)
5665 into a subfield of TARGET which has mode MODE and occupies
5666 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5667 If MODE is VOIDmode, it means that we are storing into a bit-field.
5669 Always return const0_rtx unless we have something particular to
5670 return.
5672 TYPE is the type of the underlying object,
5674 ALIAS_SET is the alias set for the destination. This value will
5675 (in general) be different from that for TARGET, since TARGET is a
5676 reference to the containing structure.
5678 If NONTEMPORAL is true, try generating a nontemporal store. */
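/* Illustrative call (a sketch, not from this file): for the
bit-field assignment

struct s { unsigned int a : 3; unsigned int b : 9; } *p;
p->b = v;

expand_assignment ends up calling store_field with BITSIZE == 9,
BITPOS == 3 and MODE == VOIDmode (a bit-field store), and the
store is performed by store_bit_field below. */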
5680 static rtx
5681 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5682 enum machine_mode mode, tree exp, tree type,
5683 alias_set_type alias_set, bool nontemporal)
5685 HOST_WIDE_INT width_mask = 0;
5687 if (TREE_CODE (exp) == ERROR_MARK)
5688 return const0_rtx;
5690 /* If we have nothing to store, do nothing unless the expression has
5691 side-effects. */
5692 if (bitsize == 0)
5693 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5694 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5695 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5697 /* If we are storing into an unaligned field of an aligned union that is
5698 in a register, we may have the mode of TARGET being an integer mode but
5699 MODE == BLKmode. In that case, get an aligned object whose size and
5700 alignment are the same as TARGET and store TARGET into it (we can avoid
5701 the store if the field being stored is the entire width of TARGET). Then
5702 call ourselves recursively to store the field into a BLKmode version of
5703 that object. Finally, load from the object into TARGET. This is not
5704 very efficient in general, but should only be slightly more expensive
5705 than the otherwise-required unaligned accesses. Perhaps this can be
5706 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5707 twice, once with emit_move_insn and once via store_field. */
5709 if (mode == BLKmode
5710 && (REG_P (target) || GET_CODE (target) == SUBREG))
5712 rtx object = assign_temp (type, 0, 1, 1);
5713 rtx blk_object = adjust_address (object, BLKmode, 0);
5715 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5716 emit_move_insn (object, target);
5718 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5719 nontemporal);
5721 emit_move_insn (target, object);
5723 /* We want to return the BLKmode version of the data. */
5724 return blk_object;
5727 if (GET_CODE (target) == CONCAT)
5729 /* We're storing into a struct containing a single __complex. */
5731 gcc_assert (!bitpos);
5732 return store_expr (exp, target, 0, nontemporal);
5735 /* If the structure is in a register or if the component
5736 is a bit field, we cannot use addressing to access it.
5737 Use bit-field techniques or SUBREG to store in it. */
5739 if (mode == VOIDmode
5740 || (mode != BLKmode && ! direct_store[(int) mode]
5741 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5742 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5743 || REG_P (target)
5744 || GET_CODE (target) == SUBREG
5745 /* If the field isn't aligned enough to store as an ordinary memref,
5746 store it as a bit field. */
5747 || (mode != BLKmode
5748 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5749 || bitpos % GET_MODE_ALIGNMENT (mode))
5750 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5751 || (bitpos % BITS_PER_UNIT != 0)))
5752 /* If the RHS and field are a constant size and the size of the
5753 RHS isn't the same size as the bitfield, we must use bitfield
5754 operations. */
5755 || (bitsize >= 0
5756 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5757 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5759 rtx temp;
5761 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5762 implies a mask operation. If the precision is the same size as
5763 the field we're storing into, that mask is redundant. This is
5764 particularly common with bit field assignments generated by the
5765 C front end. */
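/* Illustration: for an assignment to a 5-bit field,

struct s { unsigned int f : 5; } x;
x.f = v;

the front end wraps V in a NOP_EXPR to a 5-bit type. The
conversion only masks V down to 5 bits, and since exactly those 5
bits are stored anyway, the NOP_EXPR is redundant and V can be
stored directly. */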
5766 if (TREE_CODE (exp) == NOP_EXPR)
5768 tree type = TREE_TYPE (exp);
5769 if (INTEGRAL_TYPE_P (type)
5770 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5771 && bitsize == TYPE_PRECISION (type))
5773 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5774 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5775 exp = TREE_OPERAND (exp, 0);
5779 temp = expand_normal (exp);
5781 /* If BITSIZE is narrower than the size of the type of EXP
5782 we will be narrowing TEMP. Normally, what's wanted are the
5783 low-order bits. However, if EXP's type is a record and this is a
5784 big-endian machine, we want the upper BITSIZE bits. */
5785 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5786 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5787 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5788 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5789 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5790 - bitsize),
5791 NULL_RTX, 1);
5793 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5794 MODE. */
5795 if (mode != VOIDmode && mode != BLKmode
5796 && mode != TYPE_MODE (TREE_TYPE (exp)))
5797 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5799 /* If the modes of TARGET and TEMP are both BLKmode, both
5800 must be in memory and BITPOS must be aligned on a byte
5801 boundary. If so, we simply do a block copy. */
5802 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5804 gcc_assert (MEM_P (target) && MEM_P (temp)
5805 && !(bitpos % BITS_PER_UNIT));
5807 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5808 emit_block_move (target, temp,
5809 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5810 / BITS_PER_UNIT),
5811 BLOCK_OP_NORMAL);
5813 return const0_rtx;
5816 /* Store the value in the bitfield. */
5817 store_bit_field (target, bitsize, bitpos, mode, temp);
5819 return const0_rtx;
5821 else
5823 /* Now build a reference to just the desired component. */
5824 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5826 if (to_rtx == target)
5827 to_rtx = copy_rtx (to_rtx);
5829 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5830 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5831 set_mem_alias_set (to_rtx, alias_set);
5833 return store_expr (exp, to_rtx, 0, nontemporal);
5837 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5838 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5839 codes and find the ultimate containing object, which we return.
5841 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5842 bit position, and *PUNSIGNEDP to the signedness of the field.
5843 If the position of the field is variable, we store a tree
5844 giving the variable offset (in units) in *POFFSET.
5845 This offset is in addition to the bit position.
5846 If the position is not variable, we store 0 in *POFFSET.
5848 If any of the extraction expressions is volatile,
5849 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5851 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5852 is a mode that can be used to access the field. In that case, *PBITSIZE
5853 is redundant.
5855 If the field describes a variable-sized object, *PMODE is set to
5856 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5857 this case, but the address of the object can be found.
5859 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5860 look through nodes that serve as markers of a greater alignment than
5861 the one that can be deduced from the expression. These nodes make it
5862 possible for front-ends to prevent temporaries from being created by
5863 the middle-end on alignment considerations. For that purpose, the
5864 normal operating mode at high-level is to always pass FALSE so that
5865 the ultimate containing object is really returned; moreover, the
5866 associated predicate handled_component_p will always return TRUE
5867 on these nodes, thus indicating that they are essentially handled
5868 by get_inner_reference. TRUE should only be passed when the caller
5869 is scanning the expression in order to build another representation
5870 and specifically knows how to handle these nodes; as such, this is
5871 the normal operating mode in the RTL expanders. */
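/* Example (illustrative): for the reference X.B[I] with

struct s { int a; int b[10]; } x;

get_inner_reference returns the decl X as the ultimate containing
object, with *PBITSIZE == 32 and *PBITPOS == 32 (assuming 32-bit
int and the usual field layout), *POFFSET the variable byte offset
I * 4, *PMODE == SImode and *PUNSIGNEDP == 0. */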
5873 tree
5874 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5875 HOST_WIDE_INT *pbitpos, tree *poffset,
5876 enum machine_mode *pmode, int *punsignedp,
5877 int *pvolatilep, bool keep_aligning)
5879 tree size_tree = 0;
5880 enum machine_mode mode = VOIDmode;
5881 tree offset = size_zero_node;
5882 tree bit_offset = bitsize_zero_node;
5884 /* First get the mode, signedness, and size. We do this from just the
5885 outermost expression. */
5886 if (TREE_CODE (exp) == COMPONENT_REF)
5888 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5889 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5890 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5892 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5894 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5896 size_tree = TREE_OPERAND (exp, 1);
5897 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
5898 || TYPE_UNSIGNED (TREE_TYPE (exp)));
5900 /* For vector types, with the correct size of access, use the mode of
5901 the inner type. */
5902 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5903 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5904 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5905 mode = TYPE_MODE (TREE_TYPE (exp));
5907 else
5909 mode = TYPE_MODE (TREE_TYPE (exp));
5910 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5912 if (mode == BLKmode)
5913 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5914 else
5915 *pbitsize = GET_MODE_BITSIZE (mode);
5918 if (size_tree != 0)
5920 if (! host_integerp (size_tree, 1))
5921 mode = BLKmode, *pbitsize = -1;
5922 else
5923 *pbitsize = tree_low_cst (size_tree, 1);
5926 *pmode = mode;
5928 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5929 and find the ultimate containing object. */
5930 while (1)
5932 switch (TREE_CODE (exp))
5934 case BIT_FIELD_REF:
5935 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5936 TREE_OPERAND (exp, 2));
5937 break;
5939 case COMPONENT_REF:
5941 tree field = TREE_OPERAND (exp, 1);
5942 tree this_offset = component_ref_field_offset (exp);
5944 /* If this field hasn't been filled in yet, don't go past it.
5945 This should only happen when folding expressions made during
5946 type construction. */
5947 if (this_offset == 0)
5948 break;
5950 offset = size_binop (PLUS_EXPR, offset, this_offset);
5951 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5952 DECL_FIELD_BIT_OFFSET (field));
5954 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5956 break;
5958 case ARRAY_REF:
5959 case ARRAY_RANGE_REF:
5961 tree index = TREE_OPERAND (exp, 1);
5962 tree low_bound = array_ref_low_bound (exp);
5963 tree unit_size = array_ref_element_size (exp);
5965 /* We assume all arrays have sizes that are a multiple of a byte.
5966 First subtract the lower bound, if any, in the type of the
5967 index, then convert to sizetype and multiply by the size of
5968 the array element. */
5969 if (! integer_zerop (low_bound))
5970 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5971 index, low_bound);
5973 offset = size_binop (PLUS_EXPR, offset,
5974 size_binop (MULT_EXPR,
5975 fold_convert (sizetype, index),
5976 unit_size));
5978 break;
5980 case REALPART_EXPR:
5981 break;
5983 case IMAGPART_EXPR:
5984 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5985 bitsize_int (*pbitsize));
5986 break;
5988 case VIEW_CONVERT_EXPR:
5989 if (keep_aligning && STRICT_ALIGNMENT
5990 && (TYPE_ALIGN (TREE_TYPE (exp))
5991 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5992 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5993 < BIGGEST_ALIGNMENT)
5994 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5995 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5996 goto done;
5997 break;
5999 default:
6000 goto done;
6003 /* If any reference in the chain is volatile, the effect is volatile. */
6004 if (TREE_THIS_VOLATILE (exp))
6005 *pvolatilep = 1;
6007 exp = TREE_OPERAND (exp, 0);
6009 done:
6011 /* If OFFSET is constant, see if we can return the whole thing as a
6012 constant bit position. Make sure to handle overflow during
6013 this conversion. */
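/* E.g. (illustrative): a constant byte offset of 12 and a residual
bit offset of 5 combine to *PBITPOS == 12 * BITS_PER_UNIT + 5
== 101 on an 8-bit-byte target, provided the product and sum fit
in a signed HOST_WIDE_INT. */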
6014 if (host_integerp (offset, 0))
6016 double_int tem = double_int_mul (tree_to_double_int (offset),
6017 uhwi_to_double_int (BITS_PER_UNIT));
6018 tem = double_int_add (tem, tree_to_double_int (bit_offset));
6019 if (double_int_fits_in_shwi_p (tem))
6021 *pbitpos = double_int_to_shwi (tem);
6022 *poffset = NULL_TREE;
6023 return exp;
6027 /* Otherwise, split it up. */
6028 *pbitpos = tree_low_cst (bit_offset, 0);
6029 *poffset = offset;
6031 return exp;
6034 /* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
6035 look for whether EXP or any nested component-refs within EXP is marked
6036 as PACKED. */
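/* For instance (illustrative): given

struct __attribute__ ((packed)) p { char c; int i; };
struct s { struct p f; } x;

contains_packed_reference is true for the reference X.F.I, because
the walk reaches a COMPONENT_REF whose field belongs to a packed
type. */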
6038 bool
6039 contains_packed_reference (const_tree exp)
6041 bool packed_p = false;
6043 while (1)
6045 switch (TREE_CODE (exp))
6047 case COMPONENT_REF:
6049 tree field = TREE_OPERAND (exp, 1);
6050 packed_p = DECL_PACKED (field)
6051 || TYPE_PACKED (TREE_TYPE (field))
6052 || TYPE_PACKED (TREE_TYPE (exp));
6053 if (packed_p)
6054 goto done;
6056 break;
6058 case BIT_FIELD_REF:
6059 case ARRAY_REF:
6060 case ARRAY_RANGE_REF:
6061 case REALPART_EXPR:
6062 case IMAGPART_EXPR:
6063 case VIEW_CONVERT_EXPR:
6064 break;
6066 default:
6067 goto done;
6069 exp = TREE_OPERAND (exp, 0);
6071 done:
6072 return packed_p;
6075 /* Return a tree of sizetype representing the size, in bytes, of the element
6076 of EXP, an ARRAY_REF. */
6078 tree
6079 array_ref_element_size (tree exp)
6081 tree aligned_size = TREE_OPERAND (exp, 3);
6082 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6084 /* If a size was specified in the ARRAY_REF, it's the size measured
6085 in alignment units of the element type. So multiply by that value. */
6086 if (aligned_size)
6088 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6089 sizetype from another type of the same width and signedness. */
6090 if (TREE_TYPE (aligned_size) != sizetype)
6091 aligned_size = fold_convert (sizetype, aligned_size);
6092 return size_binop (MULT_EXPR, aligned_size,
6093 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6096 /* Otherwise, take the size from that of the element type. Substitute
6097 any PLACEHOLDER_EXPR that we have. */
6098 else
6099 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6102 /* Return a tree representing the lower bound of the array mentioned in
6103 EXP, an ARRAY_REF. */
6105 tree
6106 array_ref_low_bound (tree exp)
6108 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6110 /* If a lower bound is specified in EXP, use it. */
6111 if (TREE_OPERAND (exp, 2))
6112 return TREE_OPERAND (exp, 2);
6114 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6115 substituting for a PLACEHOLDER_EXPR as needed. */
6116 if (domain_type && TYPE_MIN_VALUE (domain_type))
6117 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6119 /* Otherwise, return a zero of the appropriate type. */
6120 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6123 /* Return a tree representing the upper bound of the array mentioned in
6124 EXP, an ARRAY_REF. */
6126 tree
6127 array_ref_up_bound (tree exp)
6129 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6131 /* If there is a domain type and it has an upper bound, use it, substituting
6132 for a PLACEHOLDER_EXPR as needed. */
6133 if (domain_type && TYPE_MAX_VALUE (domain_type))
6134 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6136 /* Otherwise fail. */
6137 return NULL_TREE;
6140 /* Return a tree representing the offset, in bytes, of the field referenced
6141 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
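/* Worked example (illustrative): if the COMPONENT_REF supplies an
aligned offset of 3 and DECL_OFFSET_ALIGN (FIELD) == 64, the field
lives at byte offset 3 * (64 / 8) == 24 from the start of the
record. */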
6143 tree
6144 component_ref_field_offset (tree exp)
6146 tree aligned_offset = TREE_OPERAND (exp, 2);
6147 tree field = TREE_OPERAND (exp, 1);
6149 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6150 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6151 value. */
6152 if (aligned_offset)
6154 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6155 sizetype from another type of the same width and signedness. */
6156 if (TREE_TYPE (aligned_offset) != sizetype)
6157 aligned_offset = fold_convert (sizetype, aligned_offset);
6158 return size_binop (MULT_EXPR, aligned_offset,
6159 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
6162 /* Otherwise, take the offset from that of the field. Substitute
6163 any PLACEHOLDER_EXPR that we have. */
6164 else
6165 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6168 /* Return 1 if T is an expression that get_inner_reference handles. */
6170 int
6171 handled_component_p (const_tree t)
6173 switch (TREE_CODE (t))
6175 case BIT_FIELD_REF:
6176 case COMPONENT_REF:
6177 case ARRAY_REF:
6178 case ARRAY_RANGE_REF:
6179 case VIEW_CONVERT_EXPR:
6180 case REALPART_EXPR:
6181 case IMAGPART_EXPR:
6182 return 1;
6184 default:
6185 return 0;
6189 /* Given an rtx VALUE that may contain additions and multiplications, return
6190 an equivalent value that just refers to a register, memory, or constant.
6191 This is done by generating instructions to perform the arithmetic and
6192 returning a pseudo-register containing the value.
6194 The returned value may be a REG, SUBREG, MEM or constant. */
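/* Illustrative use (a sketch): given VALUE == (plus:SI (mult:SI
(reg:SI 60) (const_int 4)) (reg:SI 61)), force_operand emits the
multiply (or shift) and the add, and returns a pseudo register
holding the sum; a VALUE that is already a REG, MEM or constant is
returned unchanged. */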
6196 rtx
6197 force_operand (rtx value, rtx target)
6199 rtx op1, op2;
6200 /* Use subtarget as the target for operand 0 of a binary operation. */
6201 rtx subtarget = get_subtarget (target);
6202 enum rtx_code code = GET_CODE (value);
6204 /* Check for subreg applied to an expression produced by loop optimizer. */
6205 if (code == SUBREG
6206 && !REG_P (SUBREG_REG (value))
6207 && !MEM_P (SUBREG_REG (value)))
6209 value
6210 = simplify_gen_subreg (GET_MODE (value),
6211 force_reg (GET_MODE (SUBREG_REG (value)),
6212 force_operand (SUBREG_REG (value),
6213 NULL_RTX)),
6214 GET_MODE (SUBREG_REG (value)),
6215 SUBREG_BYTE (value));
6216 code = GET_CODE (value);
6219 /* Check for a PIC address load. */
6220 if ((code == PLUS || code == MINUS)
6221 && XEXP (value, 0) == pic_offset_table_rtx
6222 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6223 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6224 || GET_CODE (XEXP (value, 1)) == CONST))
6226 if (!subtarget)
6227 subtarget = gen_reg_rtx (GET_MODE (value));
6228 emit_move_insn (subtarget, value);
6229 return subtarget;
6232 if (ARITHMETIC_P (value))
6234 op2 = XEXP (value, 1);
6235 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6236 subtarget = 0;
6237 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6239 code = PLUS;
6240 op2 = negate_rtx (GET_MODE (value), op2);
6243 /* Check for an addition with OP2 a constant integer and our first
6244 operand a PLUS of a virtual register and something else. In that
6245 case, we want to emit the sum of the virtual register and the
6246 constant first and then add the other value. This allows virtual
6247 register instantiation to simply modify the constant rather than
6248 creating another one around this addition. */
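/* I.e. (illustrative): (plus:SI (plus:SI (reg virtual-stack-vars)
(reg:SI 60)) (const_int 8)) is rearranged so that
virtual-stack-vars + 8 is computed first; instantiation can then
fold the 8 into the frame pointer offset instead of emitting a
separate add. */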
6249 if (code == PLUS && GET_CODE (op2) == CONST_INT
6250 && GET_CODE (XEXP (value, 0)) == PLUS
6251 && REG_P (XEXP (XEXP (value, 0), 0))
6252 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6253 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6255 rtx temp = expand_simple_binop (GET_MODE (value), code,
6256 XEXP (XEXP (value, 0), 0), op2,
6257 subtarget, 0, OPTAB_LIB_WIDEN);
6258 return expand_simple_binop (GET_MODE (value), code, temp,
6259 force_operand (XEXP (XEXP (value,
6260 0), 1), 0),
6261 target, 0, OPTAB_LIB_WIDEN);
6264 op1 = force_operand (XEXP (value, 0), subtarget);
6265 op2 = force_operand (op2, NULL_RTX);
6266 switch (code)
6268 case MULT:
6269 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6270 case DIV:
6271 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6272 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6273 target, 1, OPTAB_LIB_WIDEN);
6274 else
6275 return expand_divmod (0,
6276 FLOAT_MODE_P (GET_MODE (value))
6277 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6278 GET_MODE (value), op1, op2, target, 0);
6279 case MOD:
6280 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6281 target, 0);
6282 case UDIV:
6283 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6284 target, 1);
6285 case UMOD:
6286 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6287 target, 1);
6288 case ASHIFTRT:
6289 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6290 target, 0, OPTAB_LIB_WIDEN);
6291 default:
6292 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6293 target, 1, OPTAB_LIB_WIDEN);
6296 if (UNARY_P (value))
6298 if (!target)
6299 target = gen_reg_rtx (GET_MODE (value));
6300 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6301 switch (code)
6303 case ZERO_EXTEND:
6304 case SIGN_EXTEND:
6305 case TRUNCATE:
6306 case FLOAT_EXTEND:
6307 case FLOAT_TRUNCATE:
6308 convert_move (target, op1, code == ZERO_EXTEND);
6309 return target;
6311 case FIX:
6312 case UNSIGNED_FIX:
6313 expand_fix (target, op1, code == UNSIGNED_FIX);
6314 return target;
6316 case FLOAT:
6317 case UNSIGNED_FLOAT:
6318 expand_float (target, op1, code == UNSIGNED_FLOAT);
6319 return target;
6321 default:
6322 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6326 #ifdef INSN_SCHEDULING
6327 /* On machines that have insn scheduling, we want all memory references to be
6328 explicit, so we need to deal with such paradoxical SUBREGs. */
6329 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6330 && (GET_MODE_SIZE (GET_MODE (value))
6331 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6332 value
6333 = simplify_gen_subreg (GET_MODE (value),
6334 force_reg (GET_MODE (SUBREG_REG (value)),
6335 force_operand (SUBREG_REG (value),
6336 NULL_RTX)),
6337 GET_MODE (SUBREG_REG (value)),
6338 SUBREG_BYTE (value));
6339 #endif
6341 return value;
6344 /* Subroutine of expand_expr: return nonzero iff there is no way that
6345 EXP can reference X, which is being modified. TOP_P is nonzero if this
6346 call is going to be used to determine whether we need a temporary
6347 for EXP, as opposed to a recursive call to this function.
6349 It is always safe for this routine to return zero since it merely
6350 searches for optimization opportunities. */
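/* For instance (illustrative): if X is (reg:SI 65) and EXP is B + C
where neither operand lives in or mentions register 65,
safe_from_p returns 1 and X may be used as a target while EXP is
evaluated; if EXP does reference the variable held in register 65,
it returns 0 and the caller must evaluate EXP into a fresh
temporary. */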
6352 int
6353 safe_from_p (const_rtx x, tree exp, int top_p)
6355 rtx exp_rtl = 0;
6356 int i, nops;
6358 if (x == 0
6359 /* If EXP has varying size, we MUST use a target since we currently
6360 have no way of allocating temporaries of variable size
6361 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6362 So we assume here that something at a higher level has prevented a
6363 clash. This is somewhat bogus, but the best we can do. Only
6364 do this when X is BLKmode and when we are at the top level. */
6365 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6366 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6367 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6368 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6369 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6370 != INTEGER_CST)
6371 && GET_MODE (x) == BLKmode)
6372 /* If X is in the outgoing argument area, it is always safe. */
6373 || (MEM_P (x)
6374 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6375 || (GET_CODE (XEXP (x, 0)) == PLUS
6376 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6377 return 1;
6379 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6380 find the underlying pseudo. */
6381 if (GET_CODE (x) == SUBREG)
6383 x = SUBREG_REG (x);
6384 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6385 return 0;
6388 /* Now look at our tree code and possibly recurse. */
6389 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6391 case tcc_declaration:
6392 exp_rtl = DECL_RTL_IF_SET (exp);
6393 break;
6395 case tcc_constant:
6396 return 1;
6398 case tcc_exceptional:
6399 if (TREE_CODE (exp) == TREE_LIST)
6401 while (1)
6403 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6404 return 0;
6405 exp = TREE_CHAIN (exp);
6406 if (!exp)
6407 return 1;
6408 if (TREE_CODE (exp) != TREE_LIST)
6409 return safe_from_p (x, exp, 0);
6412 else if (TREE_CODE (exp) == CONSTRUCTOR)
6414 constructor_elt *ce;
6415 unsigned HOST_WIDE_INT idx;
6417 for (idx = 0;
6418 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6419 idx++)
6420 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6421 || !safe_from_p (x, ce->value, 0))
6422 return 0;
6423 return 1;
6425 else if (TREE_CODE (exp) == ERROR_MARK)
6426 return 1; /* An already-visited SAVE_EXPR? */
6427 else
6428 return 0;
6430 case tcc_statement:
6431 /* The only case we look at here is the DECL_INITIAL inside a
6432 DECL_EXPR. */
6433 return (TREE_CODE (exp) != DECL_EXPR
6434 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6435 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6436 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6438 case tcc_binary:
6439 case tcc_comparison:
6440 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6441 return 0;
6442 /* Fall through. */
6444 case tcc_unary:
6445 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6447 case tcc_expression:
6448 case tcc_reference:
6449 case tcc_vl_exp:
6450 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6451 the expression. If it is set, we conflict iff we are that rtx or
6452 both are in memory. Otherwise, we check all operands of the
6453 expression recursively. */
6455 switch (TREE_CODE (exp))
6457 case ADDR_EXPR:
6458 /* If the operand is static or we are static, we can't conflict.
6459 Likewise if we don't conflict with the operand at all. */
6460 if (staticp (TREE_OPERAND (exp, 0))
6461 || TREE_STATIC (exp)
6462 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6463 return 1;
6465 /* Otherwise, the only way this can conflict is if we are taking
6466 the address of a DECL whose address is part of X, which is
6467 very rare. */
6468 exp = TREE_OPERAND (exp, 0);
6469 if (DECL_P (exp))
6471 if (!DECL_RTL_SET_P (exp)
6472 || !MEM_P (DECL_RTL (exp)))
6473 return 0;
6474 else
6475 exp_rtl = XEXP (DECL_RTL (exp), 0);
6477 break;
6479 case MISALIGNED_INDIRECT_REF:
6480 case ALIGN_INDIRECT_REF:
6481 case INDIRECT_REF:
6482 if (MEM_P (x)
6483 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6484 get_alias_set (exp)))
6485 return 0;
6486 break;
6488 case CALL_EXPR:
6489 /* Assume that the call will clobber all hard registers and
6490 all of memory. */
6491 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6492 || MEM_P (x))
6493 return 0;
6494 break;
6496 case WITH_CLEANUP_EXPR:
6497 case CLEANUP_POINT_EXPR:
6498 /* Lowered by gimplify.c. */
6499 gcc_unreachable ();
6501 case SAVE_EXPR:
6502 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6504 default:
6505 break;
6508 /* If we have an rtx, we do not need to scan our operands. */
6509 if (exp_rtl)
6510 break;
6512 nops = TREE_OPERAND_LENGTH (exp);
6513 for (i = 0; i < nops; i++)
6514 if (TREE_OPERAND (exp, i) != 0
6515 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6516 return 0;
6518 break;
6520 case tcc_type:
6521 /* Should never get a type here. */
6522 gcc_unreachable ();
6524 case tcc_gimple_stmt:
6525 gcc_unreachable ();
6528 /* If we have an rtl, find any enclosed object. Then see if we conflict
6529 with it. */
6530 if (exp_rtl)
6532 if (GET_CODE (exp_rtl) == SUBREG)
6534 exp_rtl = SUBREG_REG (exp_rtl);
6535 if (REG_P (exp_rtl)
6536 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6537 return 0;
6540 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6541 are memory and they conflict. */
6542 return ! (rtx_equal_p (x, exp_rtl)
6543 || (MEM_P (x) && MEM_P (exp_rtl)
6544 && true_dependence (exp_rtl, VOIDmode, x,
6545 rtx_addr_varies_p)));
6548 /* If we reach here, it is safe. */
6549 return 1;
6553 /* Return the highest power of two that EXP is known to be a multiple of.
6554 This is used in updating alignment of MEMs in array references. */
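/* Worked example (illustrative): for EXP == I * 4 + 8 the result is
MIN (highest_pow2_factor (I * 4), highest_pow2_factor (8))
== MIN (1 * 4, 8) == 4, so a MEM addressed by EXP may be assumed
4-byte aligned. */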
6556 unsigned HOST_WIDE_INT
6557 highest_pow2_factor (const_tree exp)
6559 unsigned HOST_WIDE_INT c0, c1;
6561 switch (TREE_CODE (exp))
6563 case INTEGER_CST:
6564 /* We can find the lowest bit that's a one. If the low
6565 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6566 We need to handle this case since we can find it in a COND_EXPR,
6567 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6568 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6569 later ICE. */
6570 if (TREE_OVERFLOW (exp))
6571 return BIGGEST_ALIGNMENT;
6572 else
6574 /* Note: tree_low_cst is intentionally not used here;
6575 we don't care about the upper bits. */
6576 c0 = TREE_INT_CST_LOW (exp);
6577 c0 &= -c0;
6578 return c0 ? c0 : BIGGEST_ALIGNMENT;
6580 break;
6582 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6583 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6584 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6585 return MIN (c0, c1);
6587 case MULT_EXPR:
6588 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6589 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6590 return c0 * c1;
6592 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6593 case CEIL_DIV_EXPR:
6594 if (integer_pow2p (TREE_OPERAND (exp, 1))
6595 && host_integerp (TREE_OPERAND (exp, 1), 1))
6597 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6598 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6599 return MAX (1, c0 / c1);
6601 break;
6603 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6604 case SAVE_EXPR:
6605 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6607 case COMPOUND_EXPR:
6608 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6610 case COND_EXPR:
6611 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6612 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6613 return MIN (c0, c1);
6615 default:
6616 break;
6619 return 1;
6622 /* Similar, except that the alignment requirements of TARGET are
6623 taken into account. Assume it is at least as aligned as its
6624 type, unless it is a COMPONENT_REF in which case the layout of
6625 the structure gives the alignment. */
6627 static unsigned HOST_WIDE_INT
6628 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6630 unsigned HOST_WIDE_INT target_align, factor;
6632 factor = highest_pow2_factor (exp);
6633 if (TREE_CODE (target) == COMPONENT_REF)
6634 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6635 else
6636 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6637 return MAX (factor, target_align);
6640 /* Return &VAR expression for emulated thread local VAR. */
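/* That is (illustrative): for "__thread int var;" on a target
without native TLS support, &var is rewritten as

(int *) __emutls_get_address (&__emutls_v.var)

where __emutls_v.var is the control object created by emutls_decl
and the cast restores the pointer type of the original variable. */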
6642 static tree
6643 emutls_var_address (tree var)
6645 tree emuvar = emutls_decl (var);
6646 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6647 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6648 tree arglist = build_tree_list (NULL_TREE, arg);
6649 tree call = build_function_call_expr (fn, arglist);
6650 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
6653 /* Expands variable VAR. */
6655 void
6656 expand_var (tree var)
6658 if (DECL_EXTERNAL (var))
6659 return;
6661 if (TREE_STATIC (var))
6662 /* If this is an inlined copy of a static local variable,
6663 look up the original decl. */
6664 var = DECL_ORIGIN (var);
6666 if (TREE_STATIC (var)
6667 ? !TREE_ASM_WRITTEN (var)
6668 : !DECL_RTL_SET_P (var))
6670 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6671 /* Should be ignored. */;
6672 else if (lang_hooks.expand_decl (var))
6673 /* OK. */;
6674 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6675 expand_decl (var);
6676 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6677 rest_of_decl_compilation (var, 0, 0);
6678 else
6679 /* No expansion needed. */
6680 gcc_assert (TREE_CODE (var) == TYPE_DECL
6681 || TREE_CODE (var) == CONST_DECL
6682 || TREE_CODE (var) == FUNCTION_DECL
6683 || TREE_CODE (var) == LABEL_DECL);
6687 /* Subroutine of expand_expr. Expand the two operands of a binary
6688 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6689 The value may be stored in TARGET if TARGET is nonzero. The
6690 MODIFIER argument is as documented by expand_expr. */
6692 static void
6693 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6694 enum expand_modifier modifier)
6696 if (! safe_from_p (target, exp1, 1))
6697 target = 0;
6698 if (operand_equal_p (exp0, exp1, 0))
6700 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6701 *op1 = copy_rtx (*op0);
6703 else
6705 /* If we need to preserve evaluation order, copy exp0 into its own
6706 temporary variable so that it can't be clobbered by exp1. */
6707 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6708 exp0 = save_expr (exp0);
6709 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6710 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6715 /* Return a MEM that contains constant EXP. DEFER is as for
6716 output_constant_def and MODIFIER is as for expand_expr. */
6718 static rtx
6719 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6721 rtx mem;
6723 mem = output_constant_def (exp, defer);
6724 if (modifier != EXPAND_INITIALIZER)
6725 mem = use_anchored_address (mem);
6726 return mem;
6729 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6730 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6732 static rtx
6733 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6734 enum expand_modifier modifier)
6736 rtx result, subtarget;
6737 tree inner, offset;
6738 HOST_WIDE_INT bitsize, bitpos;
6739 int volatilep, unsignedp;
6740 enum machine_mode mode1;
6742 /* If we are taking the address of a constant and are at the top level,
6743 we have to use output_constant_def since we can't call force_const_mem
6744 at top level. */
6745 /* ??? This should be considered a front-end bug. We should not be
6746 generating ADDR_EXPR of something that isn't an LVALUE. The only
6747 exception here is STRING_CST. */
6748 if (CONSTANT_CLASS_P (exp))
6749 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6751 /* Everything must be something allowed by is_gimple_addressable. */
6752 switch (TREE_CODE (exp))
6754 case INDIRECT_REF:
6755 /* This case will happen via recursion for &a->b. */
6756 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6758 case CONST_DECL:
6759 /* Recurse and make the output_constant_def clause above handle this. */
6760 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6761 tmode, modifier);
6763 case REALPART_EXPR:
6764 /* The real part of the complex number is always first, therefore
6765 the address is the same as the address of the parent object. */
6766 offset = 0;
6767 bitpos = 0;
6768 inner = TREE_OPERAND (exp, 0);
6769 break;
6771 case IMAGPART_EXPR:
6772 /* The imaginary part of the complex number is always second.
6773 The expression is therefore always offset by the size of the
6774 scalar type. */
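/* E.g. (illustrative): for the imaginary part of a _Complex double
object the offset is GET_MODE_BITSIZE (DFmode) bits -- 64 on
targets with a 64-bit double. */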
6775 offset = 0;
6776 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6777 inner = TREE_OPERAND (exp, 0);
6778 break;
6780 case VAR_DECL:
6781 /* TLS emulation hook - replace __thread VAR's &VAR with
6782 __emutls_get_address (&_emutls.VAR). */
6783 if (! targetm.have_tls
6784 && TREE_CODE (exp) == VAR_DECL
6785 && DECL_THREAD_LOCAL_P (exp))
6787 exp = emutls_var_address (exp);
6788 return expand_expr (exp, target, tmode, modifier);
6790 /* Fall through. */
6792 default:
6793 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6794 expand_expr, as that can have various side effects; LABEL_DECLs for
6795 example, may not have their DECL_RTL set yet. Expand the rtl of
6796 CONSTRUCTORs too, which should yield a memory reference for the
6797 constructor's contents. Assume language specific tree nodes can
6798 be expanded in some interesting way. */
6799 if (DECL_P (exp)
6800 || TREE_CODE (exp) == CONSTRUCTOR
6801 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6803 result = expand_expr (exp, target, tmode,
6804 modifier == EXPAND_INITIALIZER
6805 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6807 /* If the DECL isn't in memory, then the DECL wasn't properly
6808 marked TREE_ADDRESSABLE, which will be either a front-end
6809 or a tree optimizer bug. */
6810 gcc_assert (MEM_P (result));
6811 result = XEXP (result, 0);
6813 /* ??? Is this needed anymore? */
6814 if (DECL_P (exp) && !TREE_USED (exp))
6816 assemble_external (exp);
6817 TREE_USED (exp) = 1;
6820 if (modifier != EXPAND_INITIALIZER
6821 && modifier != EXPAND_CONST_ADDRESS)
6822 result = force_operand (result, target);
6823 return result;
6826 /* Pass FALSE as the last argument to get_inner_reference although
6827 we are expanding to RTL. The rationale is that we know how to
6828 handle "aligning nodes" here: we can just bypass them because
6829 they won't change the final object whose address will be returned
6830 (they actually exist only for that purpose). */
6831 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6832 &mode1, &unsignedp, &volatilep, false);
6833 break;
6836 /* We must have made progress. */
6837 gcc_assert (inner != exp);
6839 subtarget = offset || bitpos ? NULL_RTX : target;
6840 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6842 if (offset)
6844 rtx tmp;
6846 if (modifier != EXPAND_NORMAL)
6847 result = force_operand (result, NULL);
6848 tmp = expand_expr (offset, NULL_RTX, tmode,
6849 modifier == EXPAND_INITIALIZER
6850 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6852 result = convert_memory_address (tmode, result);
6853 tmp = convert_memory_address (tmode, tmp);
6855 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6856 result = gen_rtx_PLUS (tmode, result, tmp);
6857 else
6859 subtarget = bitpos ? NULL_RTX : target;
6860 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6861 1, OPTAB_LIB_WIDEN);
6865 if (bitpos)
6867 /* Someone beforehand should have rejected taking the address
6868 of such an object. */
6869 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6871 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6872 if (modifier < EXPAND_SUM)
6873 result = force_operand (result, target);
6876 return result;
6879 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6880 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6882 static rtx
6883 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6884 enum expand_modifier modifier)
6886 enum machine_mode rmode;
6887 rtx result;
6889 /* Target mode of VOIDmode says "whatever's natural". */
6890 if (tmode == VOIDmode)
6891 tmode = TYPE_MODE (TREE_TYPE (exp));
6893 /* We can get called with some Weird Things if the user does silliness
6894 like "(short) &a". In that case, convert_memory_address won't do
6895 the right thing, so ignore the given target mode. */
6896 if (tmode != Pmode && tmode != ptr_mode)
6897 tmode = Pmode;
6899 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6900 tmode, modifier);
6902 /* Despite expand_expr's claims about ignoring TMODE when not
6903 strictly convenient, stuff breaks if we don't honor it. Note
6904 that combined with the above, we only do this for pointer modes. */
6905 rmode = GET_MODE (result);
6906 if (rmode == VOIDmode)
6907 rmode = tmode;
6908 if (rmode != tmode)
6909 result = convert_memory_address (tmode, result);
6911 return result;
6914 /* Generate code for computing CONSTRUCTOR EXP.
6915 An rtx for the computed value is returned. If AVOID_TEMP_MEM
6916 is TRUE, instead of creating a temporary variable in memory
6917 NULL is returned and the caller needs to handle it differently. */
6919 static rtx
6920 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
6921 bool avoid_temp_mem)
6923 tree type = TREE_TYPE (exp);
6924 enum machine_mode mode = TYPE_MODE (type);
6926 /* Try to avoid creating a temporary at all. This is possible
6927 if all of the initializer is zero.
6928 FIXME: try to handle all [0..255] initializers we can handle
6929 with memset. */
6930 if (TREE_STATIC (exp)
6931 && !TREE_ADDRESSABLE (exp)
6932 && target != 0 && mode == BLKmode
6933 && all_zeros_p (exp))
6935 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6936 return target;
6939 /* All elts simple constants => refer to a constant in memory. But
6940 if this is a non-BLKmode mode, let it store a field at a time
6941 since that should make a CONST_INT or CONST_DOUBLE when we
6942 fold. Likewise, if we have a target we can use, it is best to
6943 store directly into the target unless the type is large enough
6944 that memcpy will be used. If we are making an initializer and
6945 all operands are constant, put it in memory as well.
6947 FIXME: Avoid trying to fill vector constructors piece-meal.
6948 Output them with output_constant_def below unless we're sure
6949 they're zeros. This should go away when vector initializers
6950 are treated like VECTOR_CST instead of arrays. */
6951 if ((TREE_STATIC (exp)
6952 && ((mode == BLKmode
6953 && ! (target != 0 && safe_from_p (target, exp, 1)))
6954 || TREE_ADDRESSABLE (exp)
6955 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6956 && (! MOVE_BY_PIECES_P
6957 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6958 TYPE_ALIGN (type)))
6959 && ! mostly_zeros_p (exp))))
6960 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
6961 && TREE_CONSTANT (exp)))
6963 rtx constructor;
6965 if (avoid_temp_mem)
6966 return NULL_RTX;
6968 constructor = expand_expr_constant (exp, 1, modifier);
6970 if (modifier != EXPAND_CONST_ADDRESS
6971 && modifier != EXPAND_INITIALIZER
6972 && modifier != EXPAND_SUM)
6973 constructor = validize_mem (constructor);
6975 return constructor;
6978 /* Handle calls that pass values in multiple non-contiguous
6979 locations. The Irix 6 ABI has examples of this. */
6980 if (target == 0 || ! safe_from_p (target, exp, 1)
6981 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
6983 if (avoid_temp_mem)
6984 return NULL_RTX;
6986 target
6987 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
6988 | (TREE_READONLY (exp)
6989 * TYPE_QUAL_CONST))),
6990 0, TREE_ADDRESSABLE (exp), 1);
6993 store_constructor (exp, target, 0, int_expr_size (exp));
6994 return target;
6998 /* expand_expr: generate code for computing expression EXP.
6999 An rtx for the computed value is returned. The value is never null.
7000 In the case of a void EXP, const0_rtx is returned.
7002 The value may be stored in TARGET if TARGET is nonzero.
7003 TARGET is just a suggestion; callers must assume that
7004 the rtx returned may not be the same as TARGET.
7006 If TARGET is CONST0_RTX, it means that the value will be ignored.
7008 If TMODE is not VOIDmode, it suggests generating the
7009 result in mode TMODE. But this is done only when convenient.
7010 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7011 TMODE is just a suggestion; callers must assume that
7012 the rtx returned may not have mode TMODE.
7014 Note that TARGET may have neither TMODE nor MODE. In that case, it
7015 probably will not be used.
7017 If MODIFIER is EXPAND_SUM then when EXP is an addition
7018 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7019 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7020 products as above, or REG or MEM, or constant.
7021 Ordinarily in such cases we would output mul or add instructions
7022 and then return a pseudo reg containing the sum.
7024 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7025 it also marks a label as absolutely required (it can't be dead).
7026 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7027 This is used for outputting expressions used in initializers.
7029 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7030 with a constant address even if that address is not normally legitimate.
7031 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7033 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7034 a call parameter. Such targets require special care as we haven't yet
7035 marked TARGET so that it's safe from being trashed by libcalls. We
7036 don't want to use TARGET for anything but the final result;
7037 intermediate values must go elsewhere. Additionally, calls to
7038 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7040 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7041 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7042 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7043 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7044 recursively. */
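/* Example (illustrative): expanding A + B * 4 for an address with
MODIFIER == EXPAND_SUM may simply return

(plus:SI (mult:SI (reg:SI 61) (const_int 4)) (reg:SI 60))

instead of emitting the mul and add insns, letting the caller fold
the whole sum into a memory address. */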
7046 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
7047 enum expand_modifier, rtx *);
7049 rtx
7050 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7051 enum expand_modifier modifier, rtx *alt_rtl)
7053 int rn = -1;
7054 rtx ret, last = NULL;
7056 /* Handle ERROR_MARK before anybody tries to access its type. */
7057 if (TREE_CODE (exp) == ERROR_MARK
7058 || (!GIMPLE_TUPLE_P (exp) && TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7060 ret = CONST0_RTX (tmode);
7061 return ret ? ret : const0_rtx;
7064 if (flag_non_call_exceptions)
7066 rn = lookup_stmt_eh_region (exp);
7067 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
7068 if (rn >= 0)
7069 last = get_last_insn ();
7072 /* If this is an expression of some kind and it has an associated line
7073 number, then emit the line number before expanding the expression.
7075 We need to save and restore the file and line information so that
7076 errors discovered during expansion are emitted with the right
7077 information. It would be better if the diagnostic routines
7078 used the file/line information embedded in the tree nodes rather
7079 than globals. */
7080 if (cfun && EXPR_HAS_LOCATION (exp))
7082 location_t saved_location = input_location;
7083 input_location = EXPR_LOCATION (exp);
7084 set_curr_insn_source_location (input_location);
7086 /* Record where the insns produced belong. */
7087 set_curr_insn_block (TREE_BLOCK (exp));
7089 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7091 input_location = saved_location;
7093 else
7095 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7098 /* If using non-call exceptions, mark all insns that may trap.
7099 expand_call() will mark CALL_INSNs before we get to this code,
7100 but it doesn't handle libcalls, and these may trap. */
7101 if (rn >= 0)
7103 rtx insn;
7104 for (insn = next_real_insn (last); insn;
7105 insn = next_real_insn (insn))
7107 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
7108 /* If we want exceptions for non-call insns, any
7109 may_trap_p instruction may throw. */
7110 && GET_CODE (PATTERN (insn)) != CLOBBER
7111 && GET_CODE (PATTERN (insn)) != USE
7112 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
7114 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
7115 REG_NOTES (insn));
7120 return ret;
7123 static rtx
7124 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
7125 enum expand_modifier modifier, rtx *alt_rtl)
7127 rtx op0, op1, op2, temp, decl_rtl;
7128 tree type;
7129 int unsignedp;
7130 enum machine_mode mode;
7131 enum tree_code code = TREE_CODE (exp);
7132 optab this_optab;
7133 rtx subtarget, original_target;
7134 int ignore;
7135 tree context, subexp0, subexp1;
7136 bool reduce_bit_field = false;
7137 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
7138 ? reduce_to_bit_field_precision ((expr), \
7139 target, \
7140 type) \
7141 : (expr))
7143 if (GIMPLE_STMT_P (exp))
7145 type = void_type_node;
7146 mode = VOIDmode;
7147 unsignedp = 0;
7149 else
7151 type = TREE_TYPE (exp);
7152 mode = TYPE_MODE (type);
7153 unsignedp = TYPE_UNSIGNED (type);
7155 if (lang_hooks.reduce_bit_field_operations
7156 && TREE_CODE (type) == INTEGER_TYPE
7157 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
7159 /* An operation in what may be a bit-field type needs the
7160 result to be reduced to the precision of the bit-field type,
7161 which is narrower than that of the type's mode. */
7162 reduce_bit_field = true;
7163 if (modifier == EXPAND_STACK_PARM)
7164 target = 0;
7167 /* Use subtarget as the target for operand 0 of a binary operation. */
7168 subtarget = get_subtarget (target);
7169 original_target = target;
7170 ignore = (target == const0_rtx
7171 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
7172 || code == CONVERT_EXPR || code == COND_EXPR
7173 || code == VIEW_CONVERT_EXPR)
7174 && TREE_CODE (type) == VOID_TYPE));
7176 /* If we are going to ignore this result, we need only do something
7177 if there is a side-effect somewhere in the expression. If there
7178 is, short-circuit the most common cases here. Note that we must
7179 not call expand_expr with anything but const0_rtx in case this
7180 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
7182 if (ignore)
7184 if (! TREE_SIDE_EFFECTS (exp))
7185 return const0_rtx;
7187 /* Ensure we reference a volatile object even if value is ignored, but
7188 don't do this if all we are doing is taking its address. */
7189 if (TREE_THIS_VOLATILE (exp)
7190 && TREE_CODE (exp) != FUNCTION_DECL
7191 && mode != VOIDmode && mode != BLKmode
7192 && modifier != EXPAND_CONST_ADDRESS)
7194 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
7195 if (MEM_P (temp))
7196 temp = copy_to_reg (temp);
7197 return const0_rtx;
7200 if (TREE_CODE_CLASS (code) == tcc_unary
7201 || code == COMPONENT_REF || code == INDIRECT_REF)
7202 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7203 modifier);
7205 else if (TREE_CODE_CLASS (code) == tcc_binary
7206 || TREE_CODE_CLASS (code) == tcc_comparison
7207 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
7209 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7210 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7211 return const0_rtx;
7213 else if (code == BIT_FIELD_REF)
7215 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7216 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7217 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
7218 return const0_rtx;
7221 target = 0;
7225 switch (code)
7227 case LABEL_DECL:
7229 tree function = decl_function_context (exp);
7231 temp = label_rtx (exp);
7232 temp = gen_rtx_LABEL_REF (Pmode, temp);
7234 if (function != current_function_decl
7235 && function != 0)
7236 LABEL_REF_NONLOCAL_P (temp) = 1;
7238 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
7239 return temp;
7242 case SSA_NAME:
7243 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
7244 NULL);
7246 case PARM_DECL:
7247 case VAR_DECL:
7248 /* If a static var's type was incomplete when the decl was written,
7249 but the type is complete now, lay out the decl now. */
7250 if (DECL_SIZE (exp) == 0
7251 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
7252 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
7253 layout_decl (exp, 0);
7255 /* TLS emulation hook - replace __thread vars with
7256 *__emutls_get_address (&_emutls.var). */
7257 if (! targetm.have_tls
7258 && TREE_CODE (exp) == VAR_DECL
7259 && DECL_THREAD_LOCAL_P (exp))
7261 exp = build_fold_indirect_ref (emutls_var_address (exp));
7262 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
7265 /* ... fall through ... */
7267 case FUNCTION_DECL:
7268 case RESULT_DECL:
7269 decl_rtl = DECL_RTL (exp);
7270 gcc_assert (decl_rtl);
7271 decl_rtl = copy_rtx (decl_rtl);
7273 /* Ensure the variable is marked as used even if it doesn't go through
7274 a parser. If it hasn't been used yet, write out an external
7275 definition. */
7276 if (! TREE_USED (exp))
7278 assemble_external (exp);
7279 TREE_USED (exp) = 1;
7282 /* Show we haven't gotten RTL for this yet. */
7283 temp = 0;
7285 /* Variables inherited from containing functions should have
7286 been lowered by this point. */
7287 context = decl_function_context (exp);
7288 gcc_assert (!context
7289 || context == current_function_decl
7290 || TREE_STATIC (exp)
7291 /* ??? C++ creates functions that are not TREE_STATIC. */
7292 || TREE_CODE (exp) == FUNCTION_DECL);
7294 /* This is the case of an array whose size is to be determined
7295 from its initializer, while the initializer is still being parsed.
7296 See expand_decl. */
7298 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
7299 temp = validize_mem (decl_rtl);
7301 /* If DECL_RTL is memory, we are in the normal case and the
7302 address is not valid, get the address into a register. */
7304 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
7306 if (alt_rtl)
7307 *alt_rtl = decl_rtl;
7308 decl_rtl = use_anchored_address (decl_rtl);
7309 if (modifier != EXPAND_CONST_ADDRESS
7310 && modifier != EXPAND_SUM
7311 && !memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0)))
7312 temp = replace_equiv_address (decl_rtl,
7313 copy_rtx (XEXP (decl_rtl, 0)));
7316 /* If we got something, return it. But first, set the alignment
7317 if the address is a register. */
7318 if (temp != 0)
7320 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
7321 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
7323 return temp;
7326 /* If the mode of DECL_RTL does not match that of the decl, it
7327 must be a promoted value. We return a SUBREG of the wanted mode,
7328 but mark it so that we know that it was already extended. */
7330 if (REG_P (decl_rtl)
7331 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7333 enum machine_mode pmode;
7335 /* Get the signedness used for this variable. Ensure we get the
7336 same mode we got when the variable was declared. */
7337 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7338 (TREE_CODE (exp) == RESULT_DECL
7339 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7340 gcc_assert (GET_MODE (decl_rtl) == pmode);
7342 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7343 SUBREG_PROMOTED_VAR_P (temp) = 1;
7344 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7345 return temp;
7348 return decl_rtl;
7350 case INTEGER_CST:
7351 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7352 TREE_INT_CST_HIGH (exp), mode);
7354 return temp;
7356 case VECTOR_CST:
7358 tree tmp = NULL_TREE;
7359 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7360 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
7361 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
7362 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
7363 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
7364 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
7365 return const_vector_from_tree (exp);
7366 if (GET_MODE_CLASS (mode) == MODE_INT)
7368 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7369 if (type_for_mode)
7370 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7372 if (!tmp)
7373 tmp = build_constructor_from_list (type,
7374 TREE_VECTOR_CST_ELTS (exp));
7375 return expand_expr (tmp, ignore ? const0_rtx : target,
7376 tmode, modifier);
7379 case CONST_DECL:
7380 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7382 case REAL_CST:
7383 /* If optimized, generate immediate CONST_DOUBLE
7384 which will be turned into memory by reload if necessary.
7386 We used to force a register so that loop.c could see it. But
7387 this does not allow gen_* patterns to perform optimizations with
7388 the constants. It also produces two insns in cases like "x = 1.0;".
7389 On most machines, floating-point constants are not permitted in
7390 many insns, so we'd end up copying it to a register in any case.
7392 Now, we do the copying in expand_binop, if appropriate. */
7393 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7394 TYPE_MODE (TREE_TYPE (exp)));
7396 case FIXED_CST:
7397 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
7398 TYPE_MODE (TREE_TYPE (exp)));
7400 case COMPLEX_CST:
7401 /* Handle evaluating a complex constant in a CONCAT target. */
7402 if (original_target && GET_CODE (original_target) == CONCAT)
7404 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7405 rtx rtarg, itarg;
7407 rtarg = XEXP (original_target, 0);
7408 itarg = XEXP (original_target, 1);
7410 /* Move the real and imaginary parts separately. */
7411 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
7412 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
7414 if (op0 != rtarg)
7415 emit_move_insn (rtarg, op0);
7416 if (op1 != itarg)
7417 emit_move_insn (itarg, op1);
7419 return original_target;
7422 /* ... fall through ... */
7424 case STRING_CST:
7425 temp = expand_expr_constant (exp, 1, modifier);
7427 /* temp contains a constant address.
7428 On RISC machines where a constant address isn't valid,
7429 make some insns to get that address into a register. */
7430 if (modifier != EXPAND_CONST_ADDRESS
7431 && modifier != EXPAND_INITIALIZER
7432 && modifier != EXPAND_SUM
7433 && ! memory_address_p (mode, XEXP (temp, 0)))
7434 return replace_equiv_address (temp,
7435 copy_rtx (XEXP (temp, 0)));
7436 return temp;
7438 case SAVE_EXPR:
7440 tree val = TREE_OPERAND (exp, 0);
7441 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7443 if (!SAVE_EXPR_RESOLVED_P (exp))
7445 /* We can indeed still hit this case, typically via builtin
7446 expanders calling save_expr immediately before expanding
7447 something. Assume this means that we only have to deal
7448 with non-BLKmode values. */
7449 gcc_assert (GET_MODE (ret) != BLKmode);
7451 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7452 DECL_ARTIFICIAL (val) = 1;
7453 DECL_IGNORED_P (val) = 1;
7454 TREE_OPERAND (exp, 0) = val;
7455 SAVE_EXPR_RESOLVED_P (exp) = 1;
7457 if (!CONSTANT_P (ret))
7458 ret = copy_to_reg (ret);
7459 SET_DECL_RTL (val, ret);
7462 return ret;
7465 case GOTO_EXPR:
7466 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7467 expand_goto (TREE_OPERAND (exp, 0));
7468 else
7469 expand_computed_goto (TREE_OPERAND (exp, 0));
7470 return const0_rtx;
7472 case CONSTRUCTOR:
7473 /* If we don't need the result, just ensure we evaluate any
7474 subexpressions. */
7475 if (ignore)
7477 unsigned HOST_WIDE_INT idx;
7478 tree value;
7480 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7481 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
7483 return const0_rtx;
7486 return expand_constructor (exp, target, modifier, false);
7488 case MISALIGNED_INDIRECT_REF:
7489 case ALIGN_INDIRECT_REF:
7490 case INDIRECT_REF:
7492 tree exp1 = TREE_OPERAND (exp, 0);
7494 if (modifier != EXPAND_WRITE)
7496 tree t;
7498 t = fold_read_from_constant_string (exp);
7499 if (t)
7500 return expand_expr (t, target, tmode, modifier);
7503 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7504 op0 = memory_address (mode, op0);
7506 if (code == ALIGN_INDIRECT_REF)
7508 int align = TYPE_ALIGN_UNIT (type);
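/* Masking the address with -align rounds it down to an ALIGN-byte
   boundary; e.g. for a 16-byte alignment, (addr & -16) clears the
   low four address bits.  */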
7509 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7510 op0 = memory_address (mode, op0);
7513 temp = gen_rtx_MEM (mode, op0);
7515 set_mem_attributes (temp, exp, 0);
7517 /* Resolve the misalignment now, so that we don't have to remember
7518 to resolve it later. Of course, this only works for reads. */
7519 /* ??? When we get around to supporting writes, we'll have to handle
7520 this in store_expr directly. The vectorizer isn't generating
7521 those yet, however. */
7522 if (code == MISALIGNED_INDIRECT_REF)
7524 int icode;
7525 rtx reg, insn;
7527 gcc_assert (modifier == EXPAND_NORMAL
7528 || modifier == EXPAND_STACK_PARM);
7530 /* The vectorizer should have already checked the mode. */
7531 icode = optab_handler (movmisalign_optab, mode)->insn_code;
7532 gcc_assert (icode != CODE_FOR_nothing);
7534 /* We've already validated the memory, and we're creating a
7535 new pseudo destination. The predicates really can't fail. */
7536 reg = gen_reg_rtx (mode);
7538 /* Nor can the insn generator. */
7539 insn = GEN_FCN (icode) (reg, temp);
7540 emit_insn (insn);
7542 return reg;
7545 return temp;
7548 case TARGET_MEM_REF:
7550 struct mem_address addr;
7552 get_address_description (exp, &addr);
7553 op0 = addr_for_mem_ref (&addr, true);
7554 op0 = memory_address (mode, op0);
7555 temp = gen_rtx_MEM (mode, op0);
7556 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7558 return temp;
7560 case ARRAY_REF:
7563 tree array = TREE_OPERAND (exp, 0);
7564 tree index = TREE_OPERAND (exp, 1);
7566 /* Fold an expression like: "foo"[2].
7567 This is not done in fold so it won't happen inside &.
7568 Don't fold if this is for wide characters since it's too
7569 difficult to do correctly and this is a very rare case. */
7571 if (modifier != EXPAND_CONST_ADDRESS
7572 && modifier != EXPAND_INITIALIZER
7573 && modifier != EXPAND_MEMORY)
7575 tree t = fold_read_from_constant_string (exp);
7577 if (t)
7578 return expand_expr (t, target, tmode, modifier);
7581 /* If this is a constant index into a constant array,
7582 just get the value from the array. Handle both cases: when
7583 we have an explicit constructor and when our operand is a variable
7584 that was declared const. */
7586 if (modifier != EXPAND_CONST_ADDRESS
7587 && modifier != EXPAND_INITIALIZER
7588 && modifier != EXPAND_MEMORY
7589 && TREE_CODE (array) == CONSTRUCTOR
7590 && ! TREE_SIDE_EFFECTS (array)
7591 && TREE_CODE (index) == INTEGER_CST)
7593 unsigned HOST_WIDE_INT ix;
7594 tree field, value;
7596 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7597 field, value)
7598 if (tree_int_cst_equal (field, index))
7600 if (!TREE_SIDE_EFFECTS (value))
7601 return expand_expr (fold (value), target, tmode, modifier);
7602 break;
7606 else if (optimize >= 1
7607 && modifier != EXPAND_CONST_ADDRESS
7608 && modifier != EXPAND_INITIALIZER
7609 && modifier != EXPAND_MEMORY
7610 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7611 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7612 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7613 && targetm.binds_local_p (array))
7615 if (TREE_CODE (index) == INTEGER_CST)
7617 tree init = DECL_INITIAL (array);
7619 if (TREE_CODE (init) == CONSTRUCTOR)
7621 unsigned HOST_WIDE_INT ix;
7622 tree field, value;
7624 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7625 field, value)
7626 if (tree_int_cst_equal (field, index))
7628 if (TREE_SIDE_EFFECTS (value))
7629 break;
7631 if (TREE_CODE (value) == CONSTRUCTOR)
7633 /* If VALUE is a CONSTRUCTOR, this
7634 optimization is only useful if
7635 this doesn't store the CONSTRUCTOR
7636 into memory. If it does, it is more
7637 efficient to just load the data from
7638 the array directly. */
7639 rtx ret = expand_constructor (value, target,
7640 modifier, true);
7641 if (ret == NULL_RTX)
7642 break;
7645 return expand_expr (fold (value), target, tmode,
7646 modifier);
7649 else if (TREE_CODE (init) == STRING_CST)
7651 tree index1 = index;
7652 tree low_bound = array_ref_low_bound (exp);
7653 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7655 /* Optimize the special-case of a zero lower bound.
7657 We convert the low_bound to sizetype to avoid some problems
7658 with constant folding. (E.g. suppose the lower bound is 1,
7659 and its mode is QI. Without the conversion, (ARRAY
7660 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7661 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7663 if (! integer_zerop (low_bound))
7664 index1 = size_diffop (index1, fold_convert (sizetype,
7665 low_bound));
7667 if (0 > compare_tree_int (index1,
7668 TREE_STRING_LENGTH (init)))
7670 tree type = TREE_TYPE (TREE_TYPE (init));
7671 enum machine_mode mode = TYPE_MODE (type);
7673 if (GET_MODE_CLASS (mode) == MODE_INT
7674 && GET_MODE_SIZE (mode) == 1)
7675 return gen_int_mode (TREE_STRING_POINTER (init)
7676 [TREE_INT_CST_LOW (index1)],
7677 mode);
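/* For example, given

       static const char s[] = "abc";

   a read of s[1] with an in-range constant index is expanded
   directly as the character code of 'b' rather than as a load
   from memory.  */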
7683 goto normal_inner_ref;
7685 case COMPONENT_REF:
7686 /* If the operand is a CONSTRUCTOR, we can just extract the
7687 appropriate field if it is present. */
7688 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7690 unsigned HOST_WIDE_INT idx;
7691 tree field, value;
7693 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7694 idx, field, value)
7695 if (field == TREE_OPERAND (exp, 1)
7696 /* We can normally use the value of the field in the
7697 CONSTRUCTOR. However, if this is a bitfield in
7698 an integral mode that we can fit in a HOST_WIDE_INT,
7699 we must mask only the number of bits in the bitfield,
7700 since this is done implicitly by the constructor. If
7701 the bitfield does not meet either of those conditions,
7702 we can't do this optimization. */
7703 && (! DECL_BIT_FIELD (field)
7704 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7705 && (GET_MODE_BITSIZE (DECL_MODE (field))
7706 <= HOST_BITS_PER_WIDE_INT))))
7708 if (DECL_BIT_FIELD (field)
7709 && modifier == EXPAND_STACK_PARM)
7710 target = 0;
7711 op0 = expand_expr (value, target, tmode, modifier);
7712 if (DECL_BIT_FIELD (field))
7714 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7715 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7717 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7719 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7720 op0 = expand_and (imode, op0, op1, target);
7722 else
7724 tree count
7725 = build_int_cst (NULL_TREE,
7726 GET_MODE_BITSIZE (imode) - bitsize);
7728 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7729 target, 0);
7730 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7731 target, 0);
7735 return op0;
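/* For instance, for a 3-bit unsigned bitfield the mask applied
   above is VALUE & ((1 << 3) - 1), while for a signed bitfield
   the left/right shift pair sign-extends the low 3 bits into the
   full word, matching what storing and re-reading the field would
   have produced.  */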
7738 goto normal_inner_ref;
7740 case BIT_FIELD_REF:
7741 case ARRAY_RANGE_REF:
7742 normal_inner_ref:
7744 enum machine_mode mode1;
7745 HOST_WIDE_INT bitsize, bitpos;
7746 tree offset;
7747 int volatilep = 0;
7748 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7749 &mode1, &unsignedp, &volatilep, true);
7750 rtx orig_op0;
7752 /* If we got back the original object, something is wrong. Perhaps
7753 we are evaluating an expression too early. In any event, don't
7754 infinitely recurse. */
7755 gcc_assert (tem != exp);
7757 /* If TEM's type is a union of variable size, pass TARGET to the inner
7758 computation, since it will need a temporary and TARGET is known
7759 to be suitable. This occurs in unchecked conversion in Ada. */
7761 orig_op0 = op0
7762 = expand_expr (tem,
7763 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7764 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7765 != INTEGER_CST)
7766 && modifier != EXPAND_STACK_PARM
7767 ? target : NULL_RTX),
7768 VOIDmode,
7769 (modifier == EXPAND_INITIALIZER
7770 || modifier == EXPAND_CONST_ADDRESS
7771 || modifier == EXPAND_STACK_PARM)
7772 ? modifier : EXPAND_NORMAL);
7774 /* If this is a constant, put it into a register if it is a legitimate
7775 constant, OFFSET is 0, and we won't try to extract outside the
7776 register (in case we were passed a partially uninitialized object
7777 or a view_conversion to a larger size). Force the constant to
7778 memory otherwise. */
7779 if (CONSTANT_P (op0))
7781 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7782 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7783 && offset == 0
7784 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7785 op0 = force_reg (mode, op0);
7786 else
7787 op0 = validize_mem (force_const_mem (mode, op0));
7790 /* Otherwise, if this object is not in memory and we either have an
7791 offset, a BLKmode result, or a reference outside the object, put it
7792 there. Such cases can occur in Ada if we have unchecked conversion
7793 of an expression from a scalar type to an array or record type or
7794 for an ARRAY_RANGE_REF whose type is BLKmode. */
7795 else if (!MEM_P (op0)
7796 && (offset != 0
7797 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7798 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7800 tree nt = build_qualified_type (TREE_TYPE (tem),
7801 (TYPE_QUALS (TREE_TYPE (tem))
7802 | TYPE_QUAL_CONST));
7803 rtx memloc = assign_temp (nt, 1, 1, 1);
7805 emit_move_insn (memloc, op0);
7806 op0 = memloc;
7809 if (offset != 0)
7811 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7812 EXPAND_SUM);
7814 gcc_assert (MEM_P (op0));
7816 #ifdef POINTERS_EXTEND_UNSIGNED
7817 if (GET_MODE (offset_rtx) != Pmode)
7818 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7819 #else
7820 if (GET_MODE (offset_rtx) != ptr_mode)
7821 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7822 #endif
7824 if (GET_MODE (op0) == BLKmode
7825 /* A constant address in OP0 can have VOIDmode; we must
7826 not try to call force_reg in that case. */
7827 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7828 && bitsize != 0
7829 && (bitpos % bitsize) == 0
7830 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7831 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7833 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7834 bitpos = 0;
7837 op0 = offset_address (op0, offset_rtx,
7838 highest_pow2_factor (offset));
7841 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7842 record its alignment as BIGGEST_ALIGNMENT. */
7843 if (MEM_P (op0) && bitpos == 0 && offset != 0
7844 && is_aligning_offset (offset, tem))
7845 set_mem_align (op0, BIGGEST_ALIGNMENT);
7847 /* Don't forget about volatility even if this is a bitfield. */
7848 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7850 if (op0 == orig_op0)
7851 op0 = copy_rtx (op0);
7853 MEM_VOLATILE_P (op0) = 1;
7856 /* The following code doesn't handle CONCAT.
7857 Assume only bitpos == 0 can be used for CONCAT, due to
7858 one-element arrays having the same mode as their element. */
7859 if (GET_CODE (op0) == CONCAT)
7861 gcc_assert (bitpos == 0
7862 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7863 return op0;
7866 /* In cases where an aligned union has an unaligned object
7867 as a field, we might be extracting a BLKmode value from
7868 an integer-mode (e.g., SImode) object. Handle this case
7869 by doing the extract into an object as wide as the field
7870 (which we know to be the width of a basic mode), then
7871 storing into memory, and changing the mode to BLKmode. */
7872 if (mode1 == VOIDmode
7873 || REG_P (op0) || GET_CODE (op0) == SUBREG
7874 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7875 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7876 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7877 && modifier != EXPAND_CONST_ADDRESS
7878 && modifier != EXPAND_INITIALIZER)
7879 /* If the field isn't aligned enough to fetch as a memref,
7880 fetch it as a bit field. */
7881 || (mode1 != BLKmode
7882 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7883 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7884 || (MEM_P (op0)
7885 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7886 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7887 && ((modifier == EXPAND_CONST_ADDRESS
7888 || modifier == EXPAND_INITIALIZER)
7889 ? STRICT_ALIGNMENT
7890 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7891 || (bitpos % BITS_PER_UNIT != 0)))
7892 /* If the type and the field are a constant size and the
7893 size of the type isn't the same size as the bitfield,
7894 we must use bitfield operations. */
7895 || (bitsize >= 0
7896 && TYPE_SIZE (TREE_TYPE (exp))
7897 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7898 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7899 bitsize)))
7901 enum machine_mode ext_mode = mode;
7903 if (ext_mode == BLKmode
7904 && ! (target != 0 && MEM_P (op0)
7905 && MEM_P (target)
7906 && bitpos % BITS_PER_UNIT == 0))
7907 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7909 if (ext_mode == BLKmode)
7911 if (target == 0)
7912 target = assign_temp (type, 0, 1, 1);
7914 if (bitsize == 0)
7915 return target;
7917 /* In this case, BITPOS must start at a byte boundary and
7918 TARGET, if specified, must be a MEM. */
7919 gcc_assert (MEM_P (op0)
7920 && (!target || MEM_P (target))
7921 && !(bitpos % BITS_PER_UNIT));
7923 emit_block_move (target,
7924 adjust_address (op0, VOIDmode,
7925 bitpos / BITS_PER_UNIT),
7926 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7927 / BITS_PER_UNIT),
7928 (modifier == EXPAND_STACK_PARM
7929 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7931 return target;
7934 op0 = validize_mem (op0);
7936 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7937 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7939 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7940 (modifier == EXPAND_STACK_PARM
7941 ? NULL_RTX : target),
7942 ext_mode, ext_mode);
7944 /* If the result is a record type and BITSIZE is narrower than
7945 the mode of OP0, an integral mode, and this is a big endian
7946 machine, we must put the field into the high-order bits. */
7947 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7948 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7949 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7950 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7951 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7952 - bitsize),
7953 op0, 1);
7955 /* If the result type is BLKmode, store the data into a temporary
7956 of the appropriate type, but with the mode corresponding to the
7957 mode for the data we have (op0's mode). It's tempting to make
7958 this a constant type, since we know it's only being stored once,
7959 but that can cause problems if we are taking the address of this
7960 COMPONENT_REF because the MEM of any reference via that address
7961 will have flags corresponding to the type, which will not
7962 necessarily be constant. */
7963 if (mode == BLKmode)
7965 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
7966 rtx new;
7968 /* If the reference doesn't use the alias set of its type,
7969 we cannot create the temporary using that type. */
7970 if (component_uses_parent_alias_set (exp))
7972 new = assign_stack_local (ext_mode, size, 0);
7973 set_mem_alias_set (new, get_alias_set (exp));
7975 else
7976 new = assign_stack_temp_for_type (ext_mode, size, 0, type);
7978 emit_move_insn (new, op0);
7979 op0 = copy_rtx (new);
7980 PUT_MODE (op0, BLKmode);
7981 set_mem_attributes (op0, exp, 1);
7984 return op0;
7987 /* If the result is BLKmode, use that to access the object
7988 now as well. */
7989 if (mode == BLKmode)
7990 mode1 = BLKmode;
7992 /* Get a reference to just this component. */
7993 if (modifier == EXPAND_CONST_ADDRESS
7994 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7995 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7996 else
7997 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7999 if (op0 == orig_op0)
8000 op0 = copy_rtx (op0);
8002 set_mem_attributes (op0, exp, 0);
8003 if (REG_P (XEXP (op0, 0)))
8004 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
8006 MEM_VOLATILE_P (op0) |= volatilep;
8007 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
8008 || modifier == EXPAND_CONST_ADDRESS
8009 || modifier == EXPAND_INITIALIZER)
8010 return op0;
8011 else if (target == 0)
8012 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8014 convert_move (target, op0, unsignedp);
8015 return target;
8018 case OBJ_TYPE_REF:
8019 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
8021 case CALL_EXPR:
8022 /* All valid uses of __builtin_va_arg_pack () are removed during
8023 inlining. */
8024 if (CALL_EXPR_VA_ARG_PACK (exp))
8025 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
8027 tree fndecl = get_callee_fndecl (exp), attr;
8029 if (fndecl
8030 && (attr = lookup_attribute ("error",
8031 DECL_ATTRIBUTES (fndecl))) != NULL)
8032 error ("%Kcall to %qs declared with attribute error: %s",
8033 exp, lang_hooks.decl_printable_name (fndecl, 1),
8034 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
8035 if (fndecl
8036 && (attr = lookup_attribute ("warning",
8037 DECL_ATTRIBUTES (fndecl))) != NULL)
8038 warning (0, "%Kcall to %qs declared with attribute warning: %s",
8039 exp, lang_hooks.decl_printable_name (fndecl, 1),
8040 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
8042 /* Check for a built-in function. */
8043 if (fndecl && DECL_BUILT_IN (fndecl))
8045 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_FRONTEND)
8046 return lang_hooks.expand_expr (exp, original_target,
8047 tmode, modifier, alt_rtl);
8048 else
8049 return expand_builtin (exp, target, subtarget, tmode, ignore);
8052 return expand_call (exp, target, ignore);
8054 case PAREN_EXPR:
8055 case NON_LVALUE_EXPR:
8056 case NOP_EXPR:
8057 case CONVERT_EXPR:
8058 if (TREE_OPERAND (exp, 0) == error_mark_node)
8059 return const0_rtx;
8061 if (TREE_CODE (type) == UNION_TYPE)
8063 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
8065 /* If both input and output are BLKmode, this conversion isn't doing
8066 anything except possibly changing memory attributes. */
8067 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8069 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
8070 modifier);
8072 result = copy_rtx (result);
8073 set_mem_attributes (result, exp, 0);
8074 return result;
8077 if (target == 0)
8079 if (TYPE_MODE (type) != BLKmode)
8080 target = gen_reg_rtx (TYPE_MODE (type));
8081 else
8082 target = assign_temp (type, 0, 1, 1);
8085 if (MEM_P (target))
8086 /* Store data into beginning of memory target. */
8087 store_expr (TREE_OPERAND (exp, 0),
8088 adjust_address (target, TYPE_MODE (valtype), 0),
8089 modifier == EXPAND_STACK_PARM,
8090 false);
8092 else
8094 gcc_assert (REG_P (target));
8096 /* Store this field into a union of the proper type. */
8097 store_field (target,
8098 MIN ((int_size_in_bytes (TREE_TYPE
8099 (TREE_OPERAND (exp, 0)))
8100 * BITS_PER_UNIT),
8101 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8102 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
8103 type, 0, false);
8106 /* Return the entire union. */
8107 return target;
8110 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8112 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
8113 modifier);
8115 /* If the signedness of the conversion differs and OP0 is
8116 a promoted SUBREG, clear that indication since we now
8117 have to do the proper extension. */
8118 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
8119 && GET_CODE (op0) == SUBREG)
8120 SUBREG_PROMOTED_VAR_P (op0) = 0;
8122 return REDUCE_BIT_FIELD (op0);
8125 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
8126 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8127 if (GET_MODE (op0) == mode)
8128 ;
8130 /* If OP0 is a constant, just convert it into the proper mode. */
8131 else if (CONSTANT_P (op0))
8133 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8134 enum machine_mode inner_mode = TYPE_MODE (inner_type);
8136 if (modifier == EXPAND_INITIALIZER)
8137 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8138 subreg_lowpart_offset (mode,
8139 inner_mode));
8140 else
8141 op0 = convert_modes (mode, inner_mode, op0,
8142 TYPE_UNSIGNED (inner_type));
8145 else if (modifier == EXPAND_INITIALIZER)
8146 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8148 else if (target == 0)
8149 op0 = convert_to_mode (mode, op0,
8150 TYPE_UNSIGNED (TREE_TYPE
8151 (TREE_OPERAND (exp, 0))));
8152 else
8154 convert_move (target, op0,
8155 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8156 op0 = target;
8159 return REDUCE_BIT_FIELD (op0);
8161 case VIEW_CONVERT_EXPR:
8162 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
8164 /* If the input and output modes are both the same, we are done. */
8165 if (TYPE_MODE (type) == GET_MODE (op0))
8166 ;
8167 /* If neither mode is BLKmode, and both modes are the same size
8168 then we can use gen_lowpart. */
8169 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
8170 && GET_MODE_SIZE (TYPE_MODE (type))
8171 == GET_MODE_SIZE (GET_MODE (op0)))
8173 if (GET_CODE (op0) == SUBREG)
8174 op0 = force_reg (GET_MODE (op0), op0);
8175 op0 = gen_lowpart (TYPE_MODE (type), op0);
8177 /* If both modes are integral, then we can convert from one to the
8178 other. */
8179 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
8180 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
8181 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
8182 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8183 /* As a last resort, spill op0 to memory, and reload it in a
8184 different mode. */
8185 else if (!MEM_P (op0))
8187 /* If the operand is not a MEM, force it into memory. Since we
8188 are going to be changing the mode of the MEM, don't call
8189 force_const_mem for constants because we don't allow pool
8190 constants to change mode. */
8191 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8193 gcc_assert (!TREE_ADDRESSABLE (exp));
8195 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
8196 target
8197 = assign_stack_temp_for_type
8198 (TYPE_MODE (inner_type),
8199 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
8201 emit_move_insn (target, op0);
8202 op0 = target;
8205 /* At this point, OP0 is in the correct mode. If the output type is such
8206 that the operand is known to be aligned, indicate that it is.
8207 Otherwise, we need only be concerned about alignment for non-BLKmode
8208 results. */
8209 if (MEM_P (op0))
8211 op0 = copy_rtx (op0);
8213 if (TYPE_ALIGN_OK (type))
8214 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8215 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8216 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8218 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8219 HOST_WIDE_INT temp_size
8220 = MAX (int_size_in_bytes (inner_type),
8221 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8222 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8223 temp_size, 0, type);
8224 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8226 gcc_assert (!TREE_ADDRESSABLE (exp));
8228 if (GET_MODE (op0) == BLKmode)
8229 emit_block_move (new_with_op0_mode, op0,
8230 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8231 (modifier == EXPAND_STACK_PARM
8232 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8233 else
8234 emit_move_insn (new_with_op0_mode, op0);
8236 op0 = new;
8239 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8242 return op0;
8244 case POINTER_PLUS_EXPR:
8245 /* Even though the sizetype mode and the pointer's mode can be
8246 different, expand is able to handle this correctly and get the
8247 correct result out of the PLUS_EXPR code. */
8248 case PLUS_EXPR:
8250 /* Check if this is a case for multiplication and addition. */
8251 if ((TREE_CODE (type) == INTEGER_TYPE
8252 || TREE_CODE (type) == FIXED_POINT_TYPE)
8253 && TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR)
8255 tree subsubexp0, subsubexp1;
8256 enum tree_code code0, code1, this_code;
8258 subexp0 = TREE_OPERAND (exp, 0);
8259 subsubexp0 = TREE_OPERAND (subexp0, 0);
8260 subsubexp1 = TREE_OPERAND (subexp0, 1);
8261 code0 = TREE_CODE (subsubexp0);
8262 code1 = TREE_CODE (subsubexp1);
8263 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8264 : FIXED_CONVERT_EXPR;
8265 if (code0 == this_code && code1 == this_code
8266 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8267 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8268 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8269 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8270 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8271 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8273 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8274 enum machine_mode innermode = TYPE_MODE (op0type);
8275 bool zextend_p = TYPE_UNSIGNED (op0type);
8276 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
8277 if (sat_p == 0)
8278 this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
8279 else
8280 this_optab = zextend_p ? usmadd_widen_optab
8281 : ssmadd_widen_optab;
8282 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8283 && (optab_handler (this_optab, mode)->insn_code
8284 != CODE_FOR_nothing))
8286 expand_operands (TREE_OPERAND (subsubexp0, 0),
8287 TREE_OPERAND (subsubexp1, 0),
8288 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8289 op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget,
8290 VOIDmode, EXPAND_NORMAL);
8291 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8292 target, unsignedp);
8293 gcc_assert (temp);
8294 return REDUCE_BIT_FIELD (temp);
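/* Schematically, this turns

       wide = wide + (wide_t) narrow0 * (wide_t) narrow1

   into a single widening multiply-accumulate (e.g. a madd-style
   insn) when the target provides the matching optab, instead of
   two extensions, a multiply, and an add.  */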
8299 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8300 something else, make sure we add the register to the constant and
8301 then to the other thing. This case can occur during strength
8302 reduction and doing it this way will produce better code if the
8303 frame pointer or argument pointer is eliminated.
8305 fold-const.c will ensure that the constant is always in the inner
8306 PLUS_EXPR, so the only case we need to do anything about is if
8307 sp, ap, or fp is our second argument, in which case we must swap
8308 the innermost first argument and our second argument. */
8310 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8311 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8312 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
8313 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8314 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8315 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8317 tree t = TREE_OPERAND (exp, 1);
8319 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8320 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8323 /* If the result is to be ptr_mode and we are adding an integer to
8324 something, we might be forming a constant. So try to use
8325 plus_constant. If it produces a sum and we can't accept it,
8326 use force_operand. This allows P = &ARR[const] to generate
8327 efficient code on machines where a SYMBOL_REF is not a valid
8328 address.
8330 If this is an EXPAND_SUM call, always return the sum. */
8331 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8332 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8334 if (modifier == EXPAND_STACK_PARM)
8335 target = 0;
8336 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8337 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8338 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8340 rtx constant_part;
8342 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8343 EXPAND_SUM);
8344 /* Use immed_double_const to ensure that the constant is
8345 truncated according to the mode of OP1, then sign extended
8346 to a HOST_WIDE_INT. Using the constant directly can result
8347 in non-canonical RTL in a 64x32 cross compile. */
8348 constant_part
8349 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8350 (HOST_WIDE_INT) 0,
8351 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8352 op1 = plus_constant (op1, INTVAL (constant_part));
8353 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8354 op1 = force_operand (op1, target);
8355 return REDUCE_BIT_FIELD (op1);
8358 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8359 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8360 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8362 rtx constant_part;
8364 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8365 (modifier == EXPAND_INITIALIZER
8366 ? EXPAND_INITIALIZER : EXPAND_SUM));
8367 if (! CONSTANT_P (op0))
8369 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8370 VOIDmode, modifier);
8371 /* Return a PLUS if modifier says it's OK. */
8372 if (modifier == EXPAND_SUM
8373 || modifier == EXPAND_INITIALIZER)
8374 return simplify_gen_binary (PLUS, mode, op0, op1);
8375 goto binop2;
8377 /* Use immed_double_const to ensure that the constant is
8378 truncated according to the mode of OP1, then sign extended
8379 to a HOST_WIDE_INT. Using the constant directly can result
8380 in non-canonical RTL in a 64x32 cross compile. */
8381 constant_part
8382 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8383 (HOST_WIDE_INT) 0,
8384 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8385 op0 = plus_constant (op0, INTVAL (constant_part));
8386 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8387 op0 = force_operand (op0, target);
8388 return REDUCE_BIT_FIELD (op0);
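/* E.g. for P = &ARR[5], where fold has already turned the offset
   into the constant byte offset 5 * sizeof (*ARR), plus_constant
   wraps it around the SYMBOL_REF here, and force_operand is used
   only when the caller cannot accept the resulting sum as-is.  */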
8392 /* No sense saving up arithmetic to be done
8393 if it's all in the wrong mode to form part of an address.
8394 And force_operand won't know whether to sign-extend or
8395 zero-extend. */
8396 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8397 || mode != ptr_mode)
8399 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8400 subtarget, &op0, &op1, 0);
8401 if (op0 == const0_rtx)
8402 return op1;
8403 if (op1 == const0_rtx)
8404 return op0;
8405 goto binop2;
8408 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8409 subtarget, &op0, &op1, modifier);
8410 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8412 case MINUS_EXPR:
8413 /* Check if this is a case for multiplication and subtraction. */
8414 if ((TREE_CODE (type) == INTEGER_TYPE
8415 || TREE_CODE (type) == FIXED_POINT_TYPE)
8416 && TREE_CODE (TREE_OPERAND (exp, 1)) == MULT_EXPR)
8418 tree subsubexp0, subsubexp1;
8419 enum tree_code code0, code1, this_code;
8421 subexp1 = TREE_OPERAND (exp, 1);
8422 subsubexp0 = TREE_OPERAND (subexp1, 0);
8423 subsubexp1 = TREE_OPERAND (subexp1, 1);
8424 code0 = TREE_CODE (subsubexp0);
8425 code1 = TREE_CODE (subsubexp1);
8426 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8427 : FIXED_CONVERT_EXPR;
8428 if (code0 == this_code && code1 == this_code
8429 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8430 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8431 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8432 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8433 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8434 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8436 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8437 enum machine_mode innermode = TYPE_MODE (op0type);
8438 bool zextend_p = TYPE_UNSIGNED (op0type);
8439 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
8440 if (sat_p == 0)
8441 this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
8442 else
8443 this_optab = zextend_p ? usmsub_widen_optab
8444 : ssmsub_widen_optab;
8445 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8446 && (optab_handler (this_optab, mode)->insn_code
8447 != CODE_FOR_nothing))
8449 expand_operands (TREE_OPERAND (subsubexp0, 0),
8450 TREE_OPERAND (subsubexp1, 0),
8451 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8452 op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8453 VOIDmode, EXPAND_NORMAL);
8454 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8455 target, unsignedp);
8456 gcc_assert (temp);
8457 return REDUCE_BIT_FIELD (temp);
8462 /* For initializers, we are allowed to return a MINUS of two
8463 symbolic constants. Here we handle all cases when both
8464 operands are constant. */
8467 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8468 && really_constant_p (TREE_OPERAND (exp, 0))
8469 && really_constant_p (TREE_OPERAND (exp, 1)))
8471 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8472 NULL_RTX, &op0, &op1, modifier);
8474 /* If the last operand is a CONST_INT, use plus_constant of
8475 the negated constant. Else make the MINUS. */
8476 if (GET_CODE (op1) == CONST_INT)
8477 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8478 else
8479 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8482 /* No sense saving up arithmetic to be done
8483 if it's all in the wrong mode to form part of an address.
8484 And force_operand won't know whether to sign-extend or
8485 zero-extend. */
8486 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8487 || mode != ptr_mode)
8488 goto binop;
8490 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8491 subtarget, &op0, &op1, modifier);
8493 /* Convert A - const to A + (-const). */
8494 if (GET_CODE (op1) == CONST_INT)
8496 op1 = negate_rtx (mode, op1);
8497 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8500 goto binop2;
8502 case MULT_EXPR:
8503 /* If this is a fixed-point operation, then we cannot use the code
8504 below because "expand_mult" doesn't support sat/no-sat fixed-point
8505 multiplications. */
8506 if (ALL_FIXED_POINT_MODE_P (mode))
8507 goto binop;
8509 /* If the first operand is constant, swap them.
8510 Thus the following special case checks need only
8511 check the second operand. */
8512 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8514 tree t1 = TREE_OPERAND (exp, 0);
8515 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8516 TREE_OPERAND (exp, 1) = t1;
8519 /* Attempt to return something suitable for generating an
8520 indexed address, for machines that support that. */
8522 if (modifier == EXPAND_SUM && mode == ptr_mode
8523 && host_integerp (TREE_OPERAND (exp, 1), 0))
8525 tree exp1 = TREE_OPERAND (exp, 1);
8527 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8528 EXPAND_SUM);
8530 if (!REG_P (op0))
8531 op0 = force_operand (op0, NULL_RTX);
8532 if (!REG_P (op0))
8533 op0 = copy_to_mode_reg (mode, op0);
8535 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8536 gen_int_mode (tree_low_cst (exp1, 0),
8537 TYPE_MODE (TREE_TYPE (exp1)))));
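/* The MULT returned here lets an address such as &A[I] expand to
   (plus (mult (reg I) (const_int size)) (symbol_ref A)), which
   targets with scaled-index addressing modes can match as a
   single address.  */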
8540 if (modifier == EXPAND_STACK_PARM)
8541 target = 0;
8543 /* Check for multiplying things that have been extended
8544 from a narrower type. If this machine supports multiplying
8545 in that narrower type with a result in the desired type,
8546 do it that way, and avoid the explicit type-conversion. */
8548 subexp0 = TREE_OPERAND (exp, 0);
8549 subexp1 = TREE_OPERAND (exp, 1);
8550 /* First, check if we have a multiplication of one signed and one
8551 unsigned operand. */
8552 if (TREE_CODE (subexp0) == NOP_EXPR
8553 && TREE_CODE (subexp1) == NOP_EXPR
8554 && TREE_CODE (type) == INTEGER_TYPE
8555 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8556 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8557 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8558 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8559 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8560 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8562 enum machine_mode innermode
8563 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8564 this_optab = usmul_widen_optab;
8565 if (mode == GET_MODE_WIDER_MODE (innermode))
8567 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8569 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8570 expand_operands (TREE_OPERAND (subexp0, 0),
8571 TREE_OPERAND (subexp1, 0),
8572 NULL_RTX, &op0, &op1, 0);
8573 else
8574 expand_operands (TREE_OPERAND (subexp0, 0),
8575 TREE_OPERAND (subexp1, 0),
8576 NULL_RTX, &op1, &op0, 0);
8578 goto binop3;
8582 /* Check for a multiplication with matching signedness. */
8583 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8584 && TREE_CODE (type) == INTEGER_TYPE
8585 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8586 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8587 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8588 && int_fits_type_p (TREE_OPERAND (exp, 1),
8589 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8590 /* Don't use a widening multiply if a shift will do. */
8591 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8592 > HOST_BITS_PER_WIDE_INT)
8593 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8594 ||
8595 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8596 && (TYPE_PRECISION (TREE_TYPE
8597 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8598 == TYPE_PRECISION (TREE_TYPE
8599 (TREE_OPERAND
8600 (TREE_OPERAND (exp, 0), 0))))
8601 /* If both operands are extended, they must either both
8602 be zero-extended or both be sign-extended. */
8603 && (TYPE_UNSIGNED (TREE_TYPE
8604 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8605 == TYPE_UNSIGNED (TREE_TYPE
8606 (TREE_OPERAND
8607 (TREE_OPERAND (exp, 0), 0)))))))
8609 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8610 enum machine_mode innermode = TYPE_MODE (op0type);
8611 bool zextend_p = TYPE_UNSIGNED (op0type);
8612 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8613 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8615 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8617 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8619 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8620 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8621 TREE_OPERAND (exp, 1),
8622 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8623 else
8624 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8625 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8626 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8627 goto binop3;
8629 else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
8630 && innermode == word_mode)
8632 rtx htem, hipart;
8633 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8634 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8635 op1 = convert_modes (innermode, mode,
8636 expand_normal (TREE_OPERAND (exp, 1)),
8637 unsignedp);
8638 else
8639 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8640 temp = expand_binop (mode, other_optab, op0, op1, target,
8641 unsignedp, OPTAB_LIB_WIDEN);
8642 hipart = gen_highpart (innermode, temp);
8643 htem = expand_mult_highpart_adjust (innermode, hipart,
8644 op0, op1, hipart,
8645 zextend_p);
8646 if (htem != hipart)
8647 emit_move_insn (hipart, htem);
8648 return REDUCE_BIT_FIELD (temp);
8652 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8653 subtarget, &op0, &op1, 0);
8654 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8656 case TRUNC_DIV_EXPR:
8657 case FLOOR_DIV_EXPR:
8658 case CEIL_DIV_EXPR:
8659 case ROUND_DIV_EXPR:
8660 case EXACT_DIV_EXPR:
8661 /* If this is a fixed-point operation, then we cannot use the code
8662 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8663 divisions. */
8664 if (ALL_FIXED_POINT_MODE_P (mode))
8665 goto binop;
8667 if (modifier == EXPAND_STACK_PARM)
8668 target = 0;
8669 /* Possible optimization: compute the dividend with EXPAND_SUM;
8670 then, if the divisor is constant, we can optimize the case where
8671 some terms of the dividend have coefficients divisible by it. */
8672 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8673 subtarget, &op0, &op1, 0);
8674 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8676 case RDIV_EXPR:
8677 goto binop;
8679 case TRUNC_MOD_EXPR:
8680 case FLOOR_MOD_EXPR:
8681 case CEIL_MOD_EXPR:
8682 case ROUND_MOD_EXPR:
8683 if (modifier == EXPAND_STACK_PARM)
8684 target = 0;
8685 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8686 subtarget, &op0, &op1, 0);
8687 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8689 case FIXED_CONVERT_EXPR:
8690 op0 = expand_normal (TREE_OPERAND (exp, 0));
8691 if (target == 0 || modifier == EXPAND_STACK_PARM)
8692 target = gen_reg_rtx (mode);
8694 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == INTEGER_TYPE
8695 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
8696 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8697 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8698 else
8699 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8700 return target;
8702 case FIX_TRUNC_EXPR:
8703 op0 = expand_normal (TREE_OPERAND (exp, 0));
8704 if (target == 0 || modifier == EXPAND_STACK_PARM)
8705 target = gen_reg_rtx (mode);
8706 expand_fix (target, op0, unsignedp);
8707 return target;
8709 case FLOAT_EXPR:
8710 op0 = expand_normal (TREE_OPERAND (exp, 0));
8711 if (target == 0 || modifier == EXPAND_STACK_PARM)
8712 target = gen_reg_rtx (mode);
8713 /* expand_float can't figure out what to do if FROM has VOIDmode.
8714 So give it the correct mode. With -O, cse will optimize this. */
8715 if (GET_MODE (op0) == VOIDmode)
8716 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8717 op0);
8718 expand_float (target, op0,
8719 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8720 return target;
8722 case NEGATE_EXPR:
8723 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8724 VOIDmode, EXPAND_NORMAL);
8725 if (modifier == EXPAND_STACK_PARM)
8726 target = 0;
8727 temp = expand_unop (mode,
8728 optab_for_tree_code (NEGATE_EXPR, type),
8729 op0, target, 0);
8730 gcc_assert (temp);
8731 return REDUCE_BIT_FIELD (temp);
8733 case ABS_EXPR:
8734 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8735 VOIDmode, EXPAND_NORMAL);
8736 if (modifier == EXPAND_STACK_PARM)
8737 target = 0;
8739 /* ABS_EXPR is not valid for complex arguments. */
8740 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8741 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8743 /* Unsigned abs is simply the operand. Testing here means we don't
8744 risk generating incorrect code below. */
8745 if (TYPE_UNSIGNED (type))
8746 return op0;
8748 return expand_abs (mode, op0, target, unsignedp,
8749 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8751 case MAX_EXPR:
8752 case MIN_EXPR:
8753 target = original_target;
8754 if (target == 0
8755 || modifier == EXPAND_STACK_PARM
8756 || (MEM_P (target) && MEM_VOLATILE_P (target))
8757 || GET_MODE (target) != mode
8758 || (REG_P (target)
8759 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8760 target = gen_reg_rtx (mode);
8761 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8762 target, &op0, &op1, 0);
8764 /* First try to do it with a special MIN or MAX instruction.
8765 If that does not win, use a conditional jump to select the proper
8766 value. */
8767 this_optab = optab_for_tree_code (code, type);
8768 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8769 OPTAB_WIDEN);
8770 if (temp != 0)
8771 return temp;
8773 /* At this point, a MEM target is no longer useful; we will get better
8774 code without it. */
8776 if (! REG_P (target))
8777 target = gen_reg_rtx (mode);
8779 /* If op1 was placed in target, swap op0 and op1. */
8780 if (target != op0 && target == op1)
8782 temp = op0;
8783 op0 = op1;
8784 op1 = temp;
8787 /* We generate better code and avoid problems with op1 mentioning
8788 target by forcing op1 into a pseudo if it isn't a constant. */
8789 if (! CONSTANT_P (op1))
8790 op1 = force_reg (mode, op1);
8793 enum rtx_code comparison_code;
8794 rtx cmpop1 = op1;
8796 if (code == MAX_EXPR)
8797 comparison_code = unsignedp ? GEU : GE;
8798 else
8799 comparison_code = unsignedp ? LEU : LE;
8801 /* Canonicalize to comparisons against 0. */
8802 if (op1 == const1_rtx)
8804 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8805 or (a != 0 ? a : 1) for unsigned.
8806 For MIN we are safe converting (a <= 1 ? a : 1)
8807 into (a <= 0 ? a : 1) */
8808 cmpop1 = const0_rtx;
8809 if (code == MAX_EXPR)
8810 comparison_code = unsignedp ? NE : GT;
8812 if (op1 == constm1_rtx && !unsignedp)
8814 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8815 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8816 cmpop1 = const0_rtx;
8817 if (code == MIN_EXPR)
8818 comparison_code = LT;
8820 #ifdef HAVE_conditional_move
8821 /* Use a conditional move if possible. */
8822 if (can_conditionally_move_p (mode))
8824 rtx insn;
8826 /* ??? Same problem as in expmed.c: emit_conditional_move
8827 forces a stack adjustment via compare_from_rtx, and we
8828 lose the stack adjustment if the sequence we are about
8829 to create is discarded. */
8830 do_pending_stack_adjust ();
8832 start_sequence ();
8834 /* Try to emit the conditional move. */
8835 insn = emit_conditional_move (target, comparison_code,
8836 op0, cmpop1, mode,
8837 op0, op1, mode,
8838 unsignedp);
8840 /* If we could do the conditional move, emit the sequence,
8841 and return. */
8842 if (insn)
8844 rtx seq = get_insns ();
8845 end_sequence ();
8846 emit_insn (seq);
8847 return target;
8850 /* Otherwise discard the sequence and fall back to code with
8851 branches. */
8852 end_sequence ();
8854 #endif
8855 if (target != op0)
8856 emit_move_insn (target, op0);
8858 temp = gen_label_rtx ();
8859 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8860 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8862 emit_move_insn (target, op1);
8863 emit_label (temp);
8864 return target;
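/* The branchy fallback emitted above computes MAX (a, b) roughly as

       target = a;
       if (! (target >= b))
	 target = b;

   i.e. one comparison, one conditional jump, and at most two moves
   (with GEU/LEU used instead for unsigned comparisons).  */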
8866 case BIT_NOT_EXPR:
8867 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8868 VOIDmode, EXPAND_NORMAL);
8869 if (modifier == EXPAND_STACK_PARM)
8870 target = 0;
8871 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8872 gcc_assert (temp);
8873 return temp;
8875 /* ??? Can optimize bitwise operations with one arg constant.
8876 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8877 and (a bitwise1 b) bitwise2 b (etc)
8878 but that is probably not worthwhile. */
8880 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8881 boolean values when we want in all cases to compute both of them. In
8882 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8883 as actual zero-or-1 values and then bitwise anding. In cases where
8884 there cannot be any side effects, better code would be made by
8885 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8886 how to recognize those cases. */
8888 case TRUTH_AND_EXPR:
8889 code = BIT_AND_EXPR;
8890 case BIT_AND_EXPR:
8891 goto binop;
8893 case TRUTH_OR_EXPR:
8894 code = BIT_IOR_EXPR;
8895 case BIT_IOR_EXPR:
8896 goto binop;
8898 case TRUTH_XOR_EXPR:
8899 code = BIT_XOR_EXPR;
8900 case BIT_XOR_EXPR:
8901 goto binop;
8903 case LROTATE_EXPR:
8904 case RROTATE_EXPR:
8905 /* The expansion code only handles expansion of mode precision
8906 rotates. */
8907 gcc_assert (GET_MODE_PRECISION (TYPE_MODE (type))
8908 == TYPE_PRECISION (type));
8910 /* Fall through. */
8911 case LSHIFT_EXPR:
8912 case RSHIFT_EXPR:
8913 /* If this is a fixed-point operation, then we cannot use the code
8914 below because "expand_shift" doesn't support sat/no-sat fixed-point
8915 shifts. */
8916 if (ALL_FIXED_POINT_MODE_P (mode))
8917 goto binop;
8919 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8920 subtarget = 0;
8921 if (modifier == EXPAND_STACK_PARM)
8922 target = 0;
8923 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8924 VOIDmode, EXPAND_NORMAL);
8925 temp = expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8926 unsignedp);
8927 if (code == LSHIFT_EXPR)
8928 temp = REDUCE_BIT_FIELD (temp);
8929 return temp;
8931 /* Could determine the answer when only additive constants differ. Also,
8932 the addition of one can be handled by changing the condition. */
8933 case LT_EXPR:
8934 case LE_EXPR:
8935 case GT_EXPR:
8936 case GE_EXPR:
8937 case EQ_EXPR:
8938 case NE_EXPR:
8939 case UNORDERED_EXPR:
8940 case ORDERED_EXPR:
8941 case UNLT_EXPR:
8942 case UNLE_EXPR:
8943 case UNGT_EXPR:
8944 case UNGE_EXPR:
8945 case UNEQ_EXPR:
8946 case LTGT_EXPR:
8947 temp = do_store_flag (exp,
8948 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8949 tmode != VOIDmode ? tmode : mode, 0);
8950 if (temp != 0)
8951 return temp;
8953 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8954 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8955 && original_target
8956 && REG_P (original_target)
8957 && (GET_MODE (original_target)
8958 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8960 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8961 VOIDmode, EXPAND_NORMAL);
8963 /* If temp is constant, we can just compute the result. */
8964 if (GET_CODE (temp) == CONST_INT)
8966 if (INTVAL (temp) != 0)
8967 emit_move_insn (target, const1_rtx);
8968 else
8969 emit_move_insn (target, const0_rtx);
8971 return target;
8974 if (temp != original_target)
8976 enum machine_mode mode1 = GET_MODE (temp);
8977 if (mode1 == VOIDmode)
8978 mode1 = tmode != VOIDmode ? tmode : mode;
8980 temp = copy_to_mode_reg (mode1, temp);
8983 op1 = gen_label_rtx ();
8984 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8985 GET_MODE (temp), unsignedp, op1);
8986 emit_move_insn (temp, const1_rtx);
8987 emit_label (op1);
8988 return temp;
8991 /* If no set-flag instruction, must generate a conditional store
8992 into a temporary variable. Drop through and handle this
8993 like && and ||. */
8995 if (! ignore
8996 && (target == 0
8997 || modifier == EXPAND_STACK_PARM
8998 || ! safe_from_p (target, exp, 1)
8999 /* Make sure we don't have a hard reg (such as function's return
9000 value) live across basic blocks, if not optimizing. */
9001 || (!optimize && REG_P (target)
9002 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9003 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9005 if (target)
9006 emit_move_insn (target, const0_rtx);
9008 op1 = gen_label_rtx ();
9009 jumpifnot (exp, op1);
9011 if (target)
9012 emit_move_insn (target, const1_rtx);
9014 emit_label (op1);
9015 return ignore ? const0_rtx : target;
9017 case TRUTH_NOT_EXPR:
9018 if (modifier == EXPAND_STACK_PARM)
9019 target = 0;
9020 op0 = expand_expr (TREE_OPERAND (exp, 0), target,
9021 VOIDmode, EXPAND_NORMAL);
9022 /* The parser is careful to generate TRUTH_NOT_EXPR
9023 only with operands that are always zero or one. */
9024 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
9025 target, 1, OPTAB_LIB_WIDEN);
9026 gcc_assert (temp);
9027 return temp;
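/* Since the operand is guaranteed to be 0 or 1, logical negation
   reduces to an exclusive or with 1:  !x == x ^ 1  for x in {0, 1}.  */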
9029 case STATEMENT_LIST:
9031 tree_stmt_iterator iter;
9033 gcc_assert (ignore);
9035 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9036 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9038 return const0_rtx;
9040 case COND_EXPR:
9041 /* A COND_EXPR with its type being VOID_TYPE represents a
9042 conditional jump and is handled in
9043 expand_gimple_cond_expr. */
9044 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
9046 /* Note that COND_EXPRs whose type is a structure or union
9047 are required to be constructed to contain assignments of
9048 a temporary variable, so that we can evaluate them here
9049 for side effect only. If type is void, we must do likewise. */
9051 gcc_assert (!TREE_ADDRESSABLE (type)
9052 && !ignore
9053 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
9054 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
9056 /* If we are not to produce a result, we have no target. Otherwise,
9057 if a target was specified use it; it will not be used as an
9058 intermediate target unless it is safe. If no target, use a
9059 temporary. */
9061 if (modifier != EXPAND_STACK_PARM
9062 && original_target
9063 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
9064 && GET_MODE (original_target) == mode
9065 #ifdef HAVE_conditional_move
9066 && (! can_conditionally_move_p (mode)
9067 || REG_P (original_target))
9068 #endif
9069 && !MEM_P (original_target))
9070 temp = original_target;
9071 else
9072 temp = assign_temp (type, 0, 0, 1);
9074 do_pending_stack_adjust ();
9075 NO_DEFER_POP;
9076 op0 = gen_label_rtx ();
9077 op1 = gen_label_rtx ();
9078 jumpifnot (TREE_OPERAND (exp, 0), op0);
9079 store_expr (TREE_OPERAND (exp, 1), temp,
9080 modifier == EXPAND_STACK_PARM,
9081 false);
9083 emit_jump_insn (gen_jump (op1));
9084 emit_barrier ();
9085 emit_label (op0);
9086 store_expr (TREE_OPERAND (exp, 2), temp,
9087 modifier == EXPAND_STACK_PARM,
9088 false);
9090 emit_label (op1);
9091 OK_DEFER_POP;
9092 return temp;
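/* The sequence emitted above has the shape

       if (! cond) goto L0;
       temp = <value-if-true>;
       goto L1;
     L0:
       temp = <value-if-false>;
     L1:

   with pending stack adjustments flushed first so that both arms
   join with the same stack pointer.  */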
9094 case VEC_COND_EXPR:
9095 target = expand_vec_cond_expr (exp, target);
9096 return target;
9098 case MODIFY_EXPR:
9100 tree lhs = TREE_OPERAND (exp, 0);
9101 tree rhs = TREE_OPERAND (exp, 1);
9102 gcc_assert (ignore);
9103 expand_assignment (lhs, rhs, false);
9104 return const0_rtx;
9107 case GIMPLE_MODIFY_STMT:
9109 tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
9110 tree rhs = GIMPLE_STMT_OPERAND (exp, 1);
9112 gcc_assert (ignore);
9114 /* Check for |= or &= of a bitfield of size one into another bitfield
9115 of size one. In this case (unless we need the result of the
9116 assignment) we can do this more efficiently with a
9117 test followed by an assignment, if necessary.
9119 ??? We cannot get a BIT_FIELD_REF here at present. But if
9120 things change so that we do, this code should be enhanced to
9121 support it. */
9122 if (TREE_CODE (lhs) == COMPONENT_REF
9123 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9124 || TREE_CODE (rhs) == BIT_AND_EXPR)
9125 && TREE_OPERAND (rhs, 0) == lhs
9126 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9127 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9128 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9130 rtx label = gen_label_rtx ();
9131 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9132 do_jump (TREE_OPERAND (rhs, 1),
9133 value ? label : 0,
9134 value ? 0 : label);
9135 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9136 MOVE_NONTEMPORAL (exp));
9137 do_pending_stack_adjust ();
9138 emit_label (label);
9139 return const0_rtx;
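/* Editor's note: with one-bit bitfields b and c in a struct s, the
transformation above turns, e.g.,

s.b |= s.c; into if (s.c) s.b = 1;
s.b &= s.c; into if (!s.c) s.b = 0;

avoiding a read-modify-write of the destination bitfield. The names
s, b and c are hypothetical. */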
9142 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9143 return const0_rtx;
9146 case RETURN_EXPR:
9147 if (!TREE_OPERAND (exp, 0))
9148 expand_null_return ();
9149 else
9150 expand_return (TREE_OPERAND (exp, 0));
9151 return const0_rtx;
9153 case ADDR_EXPR:
9154 return expand_expr_addr_expr (exp, target, tmode, modifier);
9156 case COMPLEX_EXPR:
9157 /* Get the rtx for the operands. */
9158 op0 = expand_normal (TREE_OPERAND (exp, 0));
9159 op1 = expand_normal (TREE_OPERAND (exp, 1));
9161 if (!target)
9162 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9164 /* Move the real (op0) and imaginary (op1) parts to their location. */
9165 write_complex_part (target, op0, false);
9166 write_complex_part (target, op1, true);
9168 return target;
9170 case REALPART_EXPR:
9171 op0 = expand_normal (TREE_OPERAND (exp, 0));
9172 return read_complex_part (op0, false);
9174 case IMAGPART_EXPR:
9175 op0 = expand_normal (TREE_OPERAND (exp, 0));
9176 return read_complex_part (op0, true);
9178 case RESX_EXPR:
9179 expand_resx_expr (exp);
9180 return const0_rtx;
9182 case TRY_CATCH_EXPR:
9183 case CATCH_EXPR:
9184 case EH_FILTER_EXPR:
9185 case TRY_FINALLY_EXPR:
9186 /* Lowered by tree-eh.c. */
9187 gcc_unreachable ();
9189 case WITH_CLEANUP_EXPR:
9190 case CLEANUP_POINT_EXPR:
9191 case TARGET_EXPR:
9192 case CASE_LABEL_EXPR:
9193 case VA_ARG_EXPR:
9194 case BIND_EXPR:
9195 case INIT_EXPR:
9196 case CONJ_EXPR:
9197 case COMPOUND_EXPR:
9198 case PREINCREMENT_EXPR:
9199 case PREDECREMENT_EXPR:
9200 case POSTINCREMENT_EXPR:
9201 case POSTDECREMENT_EXPR:
9202 case LOOP_EXPR:
9203 case EXIT_EXPR:
9204 case TRUTH_ANDIF_EXPR:
9205 case TRUTH_ORIF_EXPR:
9206 /* Lowered by gimplify.c. */
9207 gcc_unreachable ();
9209 case CHANGE_DYNAMIC_TYPE_EXPR:
9210 /* This is ignored at the RTL level. The tree level sets
9211 DECL_POINTER_ALIAS_SET of any affected variable to 0, which is
9212 overkill for the RTL layer but is all that we can
9213 represent. */
9214 return const0_rtx;
9216 case EXC_PTR_EXPR:
9217 return get_exception_pointer (cfun);
9219 case FILTER_EXPR:
9220 return get_exception_filter (cfun);
9222 case FDESC_EXPR:
9223 /* Function descriptors are not valid except as
9224 initialization constants, and should not be expanded. */
9225 gcc_unreachable ();
9227 case SWITCH_EXPR:
9228 expand_case (exp);
9229 return const0_rtx;
9231 case LABEL_EXPR:
9232 expand_label (TREE_OPERAND (exp, 0));
9233 return const0_rtx;
9235 case ASM_EXPR:
9236 expand_asm_expr (exp);
9237 return const0_rtx;
9239 case WITH_SIZE_EXPR:
9240 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9241 have pulled out the size to use in whatever context it needed. */
9242 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
9243 modifier, alt_rtl);
9245 case REALIGN_LOAD_EXPR:
9247 tree oprnd0 = TREE_OPERAND (exp, 0);
9248 tree oprnd1 = TREE_OPERAND (exp, 1);
9249 tree oprnd2 = TREE_OPERAND (exp, 2);
9250 rtx op2;
9252 this_optab = optab_for_tree_code (code, type);
9253 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9254 op2 = expand_normal (oprnd2);
9255 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9256 target, unsignedp);
9257 gcc_assert (temp);
9258 return temp;
9261 case DOT_PROD_EXPR:
9263 tree oprnd0 = TREE_OPERAND (exp, 0);
9264 tree oprnd1 = TREE_OPERAND (exp, 1);
9265 tree oprnd2 = TREE_OPERAND (exp, 2);
9266 rtx op2;
9268 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9269 op2 = expand_normal (oprnd2);
9270 target = expand_widen_pattern_expr (exp, op0, op1, op2,
9271 target, unsignedp);
9272 return target;
9275 case WIDEN_SUM_EXPR:
9277 tree oprnd0 = TREE_OPERAND (exp, 0);
9278 tree oprnd1 = TREE_OPERAND (exp, 1);
9280 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9281 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
9282 target, unsignedp);
9283 return target;
9286 case REDUC_MAX_EXPR:
9287 case REDUC_MIN_EXPR:
9288 case REDUC_PLUS_EXPR:
9290 op0 = expand_normal (TREE_OPERAND (exp, 0));
9291 this_optab = optab_for_tree_code (code, type);
9292 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9293 gcc_assert (temp);
9294 return temp;
9297 case VEC_EXTRACT_EVEN_EXPR:
9298 case VEC_EXTRACT_ODD_EXPR:
9300 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9301 NULL_RTX, &op0, &op1, 0);
9302 this_optab = optab_for_tree_code (code, type);
9303 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9304 OPTAB_WIDEN);
9305 gcc_assert (temp);
9306 return temp;
9309 case VEC_INTERLEAVE_HIGH_EXPR:
9310 case VEC_INTERLEAVE_LOW_EXPR:
9312 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9313 NULL_RTX, &op0, &op1, 0);
9314 this_optab = optab_for_tree_code (code, type);
9315 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9316 OPTAB_WIDEN);
9317 gcc_assert (temp);
9318 return temp;
9321 case VEC_LSHIFT_EXPR:
9322 case VEC_RSHIFT_EXPR:
9324 target = expand_vec_shift_expr (exp, target);
9325 return target;
9328 case VEC_UNPACK_HI_EXPR:
9329 case VEC_UNPACK_LO_EXPR:
9331 op0 = expand_normal (TREE_OPERAND (exp, 0));
9332 this_optab = optab_for_tree_code (code, type);
9333 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
9334 target, unsignedp);
9335 gcc_assert (temp);
9336 return temp;
9339 case VEC_UNPACK_FLOAT_HI_EXPR:
9340 case VEC_UNPACK_FLOAT_LO_EXPR:
9342 op0 = expand_normal (TREE_OPERAND (exp, 0));
9343 /* The signedness is determined from the input operand. */
9344 this_optab = optab_for_tree_code (code,
9345 TREE_TYPE (TREE_OPERAND (exp, 0)));
9346 temp = expand_widen_pattern_expr
9347 (exp, op0, NULL_RTX, NULL_RTX,
9348 target, TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
9350 gcc_assert (temp);
9351 return temp;
9354 case VEC_WIDEN_MULT_HI_EXPR:
9355 case VEC_WIDEN_MULT_LO_EXPR:
9357 tree oprnd0 = TREE_OPERAND (exp, 0);
9358 tree oprnd1 = TREE_OPERAND (exp, 1);
9360 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9361 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
9362 target, unsignedp);
9363 gcc_assert (target);
9364 return target;
9367 case VEC_PACK_TRUNC_EXPR:
9368 case VEC_PACK_SAT_EXPR:
9369 case VEC_PACK_FIX_TRUNC_EXPR:
9371 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9372 goto binop;
9375 case OMP_ATOMIC_LOAD:
9376 case OMP_ATOMIC_STORE:
9377 /* OMP expansion is not run if there were errors, so these codes
9378 can get here. */
9379 gcc_assert (errorcount != 0);
9380 return NULL_RTX;
9382 default:
9383 return lang_hooks.expand_expr (exp, original_target, tmode,
9384 modifier, alt_rtl);
9387 /* Here to do an ordinary binary operator. */
9388 binop:
9389 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9390 subtarget, &op0, &op1, 0);
9391 binop2:
9392 this_optab = optab_for_tree_code (code, type);
9393 binop3:
9394 if (modifier == EXPAND_STACK_PARM)
9395 target = 0;
9396 temp = expand_binop (mode, this_optab, op0, op1, target,
9397 unsignedp, OPTAB_LIB_WIDEN);
9398 gcc_assert (temp);
9399 return REDUCE_BIT_FIELD (temp);
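/* Editor's note: the three labels above are entry points for cases
handled earlier in the switch: `binop' expands both operands first,
`binop2' is entered with op0/op1 already expanded, and `binop3' is
entered with this_optab already chosen as well. */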
9401 #undef REDUCE_BIT_FIELD
9403 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9404 signedness of TYPE), possibly returning the result in TARGET. */
9405 static rtx
9406 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9408 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9409 if (target && GET_MODE (target) != GET_MODE (exp))
9410 target = 0;
9411 /* For constant values, reduce using build_int_cst_type. */
9412 if (GET_CODE (exp) == CONST_INT)
9414 HOST_WIDE_INT value = INTVAL (exp);
9415 tree t = build_int_cst_type (type, value);
9416 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9418 else if (TYPE_UNSIGNED (type))
9420 rtx mask;
9421 if (prec < HOST_BITS_PER_WIDE_INT)
9422 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
9423 GET_MODE (exp));
9424 else
9425 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
9426 ((unsigned HOST_WIDE_INT) 1
9427 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
9428 GET_MODE (exp));
9429 return expand_and (GET_MODE (exp), exp, mask, target);
9431 else
9433 tree count = build_int_cst (NULL_TREE,
9434 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9435 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9436 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
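/* Editor's note (worked example): reducing an SImode value to a 5-bit
unsigned type masks with (1 << 5) - 1 == 0x1f; reducing to a 5-bit
signed type shifts left by 32 - 5 == 27 and then arithmetic-shifts
right by 27, replicating the sign bit. Thus 0x1f reduces to 31
unsigned but to -1 signed. */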
9440 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
9441 when applied to the address of EXP, produces an address known to be
9442 aligned to more than BIGGEST_ALIGNMENT. */
9444 static int
9445 is_aligning_offset (const_tree offset, const_tree exp)
9447 /* Strip off any conversions. */
9448 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9449 || TREE_CODE (offset) == NOP_EXPR
9450 || TREE_CODE (offset) == CONVERT_EXPR)
9451 offset = TREE_OPERAND (offset, 0);
9453 /* We must now have a BIT_AND_EXPR with a constant that is one less than a
9454 power of 2 and which is larger than BIGGEST_ALIGNMENT in bytes. */
9455 if (TREE_CODE (offset) != BIT_AND_EXPR
9456 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9457 || compare_tree_int (TREE_OPERAND (offset, 1),
9458 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9459 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9460 return 0;
9462 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9463 It must be NEGATE_EXPR. Then strip any more conversions. */
9464 offset = TREE_OPERAND (offset, 0);
9465 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9466 || TREE_CODE (offset) == NOP_EXPR
9467 || TREE_CODE (offset) == CONVERT_EXPR)
9468 offset = TREE_OPERAND (offset, 0);
9470 if (TREE_CODE (offset) != NEGATE_EXPR)
9471 return 0;
9473 offset = TREE_OPERAND (offset, 0);
9474 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9475 || TREE_CODE (offset) == NOP_EXPR
9476 || TREE_CODE (offset) == CONVERT_EXPR)
9477 offset = TREE_OPERAND (offset, 0);
9479 /* This must now be the address of EXP. */
9480 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
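/* Editor's note: the pattern recognized above corresponds roughly to
source-level alignment arithmetic such as

offset = (- (HOST_WIDE_INT) &exp) & (N - 1);

where N is a power of two larger than BIGGEST_ALIGNMENT in bytes, so
that the address of EXP plus OFFSET is known to be N-byte aligned. */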
9483 /* Return the tree node if ARG corresponds to a string constant, or zero
9484 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9485 in bytes within the string that ARG is accessing. The type of the
9486 offset will be `sizetype'. */
9488 tree
9489 string_constant (tree arg, tree *ptr_offset)
9491 tree array, offset, lower_bound;
9492 STRIP_NOPS (arg);
9494 if (TREE_CODE (arg) == ADDR_EXPR)
9496 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9498 *ptr_offset = size_zero_node;
9499 return TREE_OPERAND (arg, 0);
9501 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9503 array = TREE_OPERAND (arg, 0);
9504 offset = size_zero_node;
9506 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9508 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9509 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9510 if (TREE_CODE (array) != STRING_CST
9511 && TREE_CODE (array) != VAR_DECL)
9512 return 0;
9514 /* Check if the array has a nonzero lower bound. */
9515 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9516 if (!integer_zerop (lower_bound))
9518 /* If the lower bound and offset aren't both constants, return 0. */
9519 if (TREE_CODE (lower_bound) != INTEGER_CST)
9520 return 0;
9521 if (TREE_CODE (offset) != INTEGER_CST)
9522 return 0;
9523 /* Adjust offset by the lower bound. */
9524 offset = size_diffop (fold_convert (sizetype, offset),
9525 fold_convert (sizetype, lower_bound));
9528 else
9529 return 0;
9531 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9533 tree arg0 = TREE_OPERAND (arg, 0);
9534 tree arg1 = TREE_OPERAND (arg, 1);
9536 STRIP_NOPS (arg0);
9537 STRIP_NOPS (arg1);
9539 if (TREE_CODE (arg0) == ADDR_EXPR
9540 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9541 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9543 array = TREE_OPERAND (arg0, 0);
9544 offset = arg1;
9546 else if (TREE_CODE (arg1) == ADDR_EXPR
9547 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9548 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9550 array = TREE_OPERAND (arg1, 0);
9551 offset = arg0;
9553 else
9554 return 0;
9556 else
9557 return 0;
9559 if (TREE_CODE (array) == STRING_CST)
9561 *ptr_offset = fold_convert (sizetype, offset);
9562 return array;
9564 else if (TREE_CODE (array) == VAR_DECL)
9566 int length;
9568 /* Variables initialized to string literals can be handled too. */
9569 if (DECL_INITIAL (array) == NULL_TREE
9570 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9571 return 0;
9573 /* The variable must be read-only, non-volatile, and bind locally. */
9574 if (! TREE_READONLY (array)
9575 || TREE_SIDE_EFFECTS (array)
9576 || ! targetm.binds_local_p (array))
9577 return 0;
9579 /* Avoid const char foo[4] = "abcde"; */
9580 if (DECL_SIZE_UNIT (array) == NULL_TREE
9581 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9582 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9583 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9584 return 0;
9586 /* If the variable is bigger than the string literal, OFFSET must be
9587 constant and within the bounds of the string literal. */
9588 offset = fold_convert (sizetype, offset);
9589 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9590 && (! host_integerp (offset, 1)
9591 || compare_tree_int (offset, length) >= 0))
9592 return 0;
9594 *ptr_offset = offset;
9595 return DECL_INITIAL (array);
9598 return 0;
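/* Editor's note (usage sketch): given

static const char buf[] = "hello";

string_constant returns the STRING_CST "hello" with *PTR_OFFSET == 0
for `buf', and with *PTR_OFFSET == 2 for `&buf[2]' or `buf + 2'.
This is used, e.g., when expanding the string builtins. */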
9601 /* Generate code to calculate EXP using a store-flag instruction
9602 and return an rtx for the result. EXP is either a comparison
9603 or a TRUTH_NOT_EXPR whose operand is a comparison.
9605 If TARGET is nonzero, store the result there if convenient.
9607 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9608 cheap.
9610 Return zero if there is no suitable set-flag instruction
9611 available on this machine.
9613 Once expand_expr has been called on the arguments of the comparison,
9614 we are committed to doing the store flag, since it is not safe to
9615 re-evaluate the expression. We emit the store-flag insn by calling
9616 emit_store_flag, but only expand the arguments if we have a reason
9617 to believe that emit_store_flag will be successful. If we think that
9618 it will, but it isn't, we have to simulate the store-flag with a
9619 set/jump/set sequence. */
9621 static rtx
9622 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9624 enum rtx_code code;
9625 tree arg0, arg1, type;
9626 tree tem;
9627 enum machine_mode operand_mode;
9628 int invert = 0;
9629 int unsignedp;
9630 rtx op0, op1;
9631 enum insn_code icode;
9632 rtx subtarget = target;
9633 rtx result, label;
9635 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9636 result at the end. We can't simply invert the test since it would
9637 have already been inverted if it were valid. This case occurs for
9638 some floating-point comparisons. */
9640 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9641 invert = 1, exp = TREE_OPERAND (exp, 0);
9643 arg0 = TREE_OPERAND (exp, 0);
9644 arg1 = TREE_OPERAND (exp, 1);
9646 /* Don't crash if the comparison was erroneous. */
9647 if (arg0 == error_mark_node || arg1 == error_mark_node)
9648 return const0_rtx;
9650 type = TREE_TYPE (arg0);
9651 operand_mode = TYPE_MODE (type);
9652 unsignedp = TYPE_UNSIGNED (type);
9654 /* We won't bother with BLKmode store-flag operations because it would mean
9655 passing a lot of information to emit_store_flag. */
9656 if (operand_mode == BLKmode)
9657 return 0;
9659 /* We won't bother with store-flag operations involving function pointers
9660 when function pointers must be canonicalized before comparisons. */
9661 #ifdef HAVE_canonicalize_funcptr_for_compare
9662 if (HAVE_canonicalize_funcptr_for_compare
9663 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9664 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9665 == FUNCTION_TYPE))
9666 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9667 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9668 == FUNCTION_TYPE))))
9669 return 0;
9670 #endif
9672 STRIP_NOPS (arg0);
9673 STRIP_NOPS (arg1);
9675 /* Get the rtx comparison code to use. We know that EXP is a comparison
9676 operation of some type. Some comparisons against 1 and -1 can be
9677 converted to comparisons with zero. Do so here so that the tests
9678 below will be aware that we have a comparison with zero. These
9679 tests will not catch constants in the first operand, but constants
9680 are rarely passed as the first operand. */
9682 switch (TREE_CODE (exp))
9684 case EQ_EXPR:
9685 code = EQ;
9686 break;
9687 case NE_EXPR:
9688 code = NE;
9689 break;
9690 case LT_EXPR:
9691 if (integer_onep (arg1))
9692 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9693 else
9694 code = unsignedp ? LTU : LT;
9695 break;
9696 case LE_EXPR:
9697 if (! unsignedp && integer_all_onesp (arg1))
9698 arg1 = integer_zero_node, code = LT;
9699 else
9700 code = unsignedp ? LEU : LE;
9701 break;
9702 case GT_EXPR:
9703 if (! unsignedp && integer_all_onesp (arg1))
9704 arg1 = integer_zero_node, code = GE;
9705 else
9706 code = unsignedp ? GTU : GT;
9707 break;
9708 case GE_EXPR:
9709 if (integer_onep (arg1))
9710 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9711 else
9712 code = unsignedp ? GEU : GE;
9713 break;
9715 case UNORDERED_EXPR:
9716 code = UNORDERED;
9717 break;
9718 case ORDERED_EXPR:
9719 code = ORDERED;
9720 break;
9721 case UNLT_EXPR:
9722 code = UNLT;
9723 break;
9724 case UNLE_EXPR:
9725 code = UNLE;
9726 break;
9727 case UNGT_EXPR:
9728 code = UNGT;
9729 break;
9730 case UNGE_EXPR:
9731 code = UNGE;
9732 break;
9733 case UNEQ_EXPR:
9734 code = UNEQ;
9735 break;
9736 case LTGT_EXPR:
9737 code = LTGT;
9738 break;
9740 default:
9741 gcc_unreachable ();
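/* Editor's note: the conversions above rewrite `x < 1' as `x <= 0' and
`x >= 1' as `x > 0' (signed or unsigned), and, for signed operands,
`x <= -1' as `x < 0' and `x > -1' as `x >= 0', so the tests below only
need to recognize comparisons against zero. */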
9744 /* Put a constant second. */
9745 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
9746 || TREE_CODE (arg0) == FIXED_CST)
9748 tem = arg0; arg0 = arg1; arg1 = tem;
9749 code = swap_condition (code);
9752 /* If this is an equality or inequality test of a single bit, we can
9753 do this by shifting the bit being tested to the low-order bit and
9754 masking the result with the constant 1. If the condition was EQ,
9755 we xor it with 1. This does not require an scc insn and is faster
9756 than an scc insn even if we have it.
9758 The code to make this transformation was moved into fold_single_bit_test,
9759 so we just call into the folder and expand its result. */
9761 if ((code == NE || code == EQ)
9762 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9763 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9765 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9766 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9767 arg0, arg1, type),
9768 target, VOIDmode, EXPAND_NORMAL);
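/* Editor's note: for example, `(x & 8) != 0' is folded to the form
`(x >> 3) & 1' and `(x & 8) == 0' to `((x >> 3) & 1) ^ 1', which is
what gets expanded here. */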
9771 /* Now see if we are likely to be able to do this. Return if not. */
9772 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9773 return 0;
9775 icode = setcc_gen_code[(int) code];
9777 if (icode == CODE_FOR_nothing)
9779 enum machine_mode wmode;
9781 for (wmode = operand_mode;
9782 icode == CODE_FOR_nothing && wmode != VOIDmode;
9783 wmode = GET_MODE_WIDER_MODE (wmode))
9784 icode = optab_handler (cstore_optab, wmode)->insn_code;
9787 if (icode == CODE_FOR_nothing
9788 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9790 /* We can only do this if it is one of the special cases that
9791 can be handled without an scc insn. */
9792 if ((code == LT && integer_zerop (arg1))
9793 || (! only_cheap && code == GE && integer_zerop (arg1)))
9795 else if (! only_cheap && (code == NE || code == EQ)
9796 && TREE_CODE (type) != REAL_TYPE
9797 && ((optab_handler (abs_optab, operand_mode)->insn_code
9798 != CODE_FOR_nothing)
9799 || (optab_handler (ffs_optab, operand_mode)->insn_code
9800 != CODE_FOR_nothing)))
9802 else
9803 return 0;
9806 if (! get_subtarget (target)
9807 || GET_MODE (subtarget) != operand_mode)
9808 subtarget = 0;
9810 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9812 if (target == 0)
9813 target = gen_reg_rtx (mode);
9815 result = emit_store_flag (target, code, op0, op1,
9816 operand_mode, unsignedp, 1);
9818 if (result)
9820 if (invert)
9821 result = expand_binop (mode, xor_optab, result, const1_rtx,
9822 result, 0, OPTAB_LIB_WIDEN);
9823 return result;
9826 /* If this failed, we have to do this with set/compare/jump/set code. */
9827 if (!REG_P (target)
9828 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9829 target = gen_reg_rtx (GET_MODE (target));
9831 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9832 label = gen_label_rtx ();
9833 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9834 NULL_RTX, label);
9836 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9837 emit_label (label);
9839 return target;
9843 /* Stubs in case we haven't got a casesi insn. */
9844 #ifndef HAVE_casesi
9845 # define HAVE_casesi 0
9846 # define gen_casesi(a, b, c, d, e) (0)
9847 # define CODE_FOR_casesi CODE_FOR_nothing
9848 #endif
9850 /* If the machine does not have a case insn that compares the bounds,
9851 this means extra overhead for dispatch tables, which raises the
9852 threshold for using them. */
9853 #ifndef CASE_VALUES_THRESHOLD
9854 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9855 #endif /* CASE_VALUES_THRESHOLD */
9857 unsigned int
9858 case_values_threshold (void)
9860 return CASE_VALUES_THRESHOLD;
9863 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9864 0 otherwise (i.e. if there is no casesi instruction). */
9865 int
9866 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9867 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9869 enum machine_mode index_mode = SImode;
9870 int index_bits = GET_MODE_BITSIZE (index_mode);
9871 rtx op1, op2, index;
9872 enum machine_mode op_mode;
9874 if (! HAVE_casesi)
9875 return 0;
9877 /* Convert the index to SImode. */
9878 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9880 enum machine_mode omode = TYPE_MODE (index_type);
9881 rtx rangertx = expand_normal (range);
9883 /* We must handle the endpoints in the original mode. */
9884 index_expr = build2 (MINUS_EXPR, index_type,
9885 index_expr, minval);
9886 minval = integer_zero_node;
9887 index = expand_normal (index_expr);
9888 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9889 omode, 1, default_label);
9890 /* Now we can safely truncate. */
9891 index = convert_to_mode (index_mode, index, 0);
9893 else
9895 if (TYPE_MODE (index_type) != index_mode)
9897 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9898 index_expr = fold_convert (index_type, index_expr);
9901 index = expand_normal (index_expr);
9904 do_pending_stack_adjust ();
9906 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9907 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9908 (index, op_mode))
9909 index = copy_to_mode_reg (op_mode, index);
9911 op1 = expand_normal (minval);
9913 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9914 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9915 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9916 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9917 (op1, op_mode))
9918 op1 = copy_to_mode_reg (op_mode, op1);
9920 op2 = expand_normal (range);
9922 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9923 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9924 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9925 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9926 (op2, op_mode))
9927 op2 = copy_to_mode_reg (op_mode, op2);
9929 emit_jump_insn (gen_casesi (index, op1, op2,
9930 table_label, default_label));
9931 return 1;
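/* Editor's note: in the wide-index path above, the bounds check is done
in the index's original mode before truncating to SImode; every value
that would be out of range has already jumped to DEFAULT_LABEL, so the
truncation cannot change the dispatch. */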
9934 /* Attempt to generate a tablejump instruction; same concept. */
9935 #ifndef HAVE_tablejump
9936 #define HAVE_tablejump 0
9937 #define gen_tablejump(x, y) (0)
9938 #endif
9940 /* Subroutine of the next function.
9942 INDEX is the value being switched on, with the lowest value
9943 in the table already subtracted.
9944 MODE is its expected mode (needed if INDEX is constant).
9945 RANGE is the length of the jump table.
9946 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9948 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9949 index value is out of range. */
9951 static void
9952 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9953 rtx default_label)
9955 rtx temp, vector;
9957 if (INTVAL (range) > cfun->max_jumptable_ents)
9958 cfun->max_jumptable_ents = INTVAL (range);
9960 /* Do an unsigned comparison (in the proper mode) between the index
9961 expression and the value which represents the length of the range.
9962 Since we just finished subtracting the lower bound of the range
9963 from the index expression, this comparison allows us to simultaneously
9964 check that the original index expression value is both greater than
9965 or equal to the minimum value of the range and less than or equal to
9966 the maximum value of the range. */
9968 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9969 default_label);
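/* Editor's note (worked example): for case labels 5..10, INDEX here is
the original value minus 5 and RANGE is 5. The single unsigned
comparison `INDEX > 5' jumps to DEFAULT_LABEL both when the original
value was below 5 (the subtraction wrapped to a huge unsigned value)
and when it was above 10. */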
9971 /* If index is in range, it must fit in Pmode.
9972 Convert to Pmode so we can index with it. */
9973 if (mode != Pmode)
9974 index = convert_to_mode (Pmode, index, 1);
9976 /* Don't let a MEM slip through, because then INDEX that comes
9977 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9978 and break_out_memory_refs will go to work on it and mess it up. */
9979 #ifdef PIC_CASE_VECTOR_ADDRESS
9980 if (flag_pic && !REG_P (index))
9981 index = copy_to_mode_reg (Pmode, index);
9982 #endif
9984 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9985 GET_MODE_SIZE, because this indicates how large insns are. The other
9986 uses should all be Pmode, because they are addresses. This code
9987 could fail if addresses and insns are not the same size. */
9988 index = gen_rtx_PLUS (Pmode,
9989 gen_rtx_MULT (Pmode, index,
9990 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9991 gen_rtx_LABEL_REF (Pmode, table_label));
9992 #ifdef PIC_CASE_VECTOR_ADDRESS
9993 if (flag_pic)
9994 index = PIC_CASE_VECTOR_ADDRESS (index);
9995 else
9996 #endif
9997 index = memory_address (CASE_VECTOR_MODE, index);
9998 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9999 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10000 convert_move (temp, vector, 0);
10002 emit_jump_insn (gen_tablejump (temp, table_label));
10004 /* If we are generating PIC code or if the table is PC-relative, the
10005 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10006 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10007 emit_barrier ();
10010 int
10011 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10012 rtx table_label, rtx default_label)
10014 rtx index;
10016 if (! HAVE_tablejump)
10017 return 0;
10019 index_expr = fold_build2 (MINUS_EXPR, index_type,
10020 fold_convert (index_type, index_expr),
10021 fold_convert (index_type, minval));
10022 index = expand_normal (index_expr);
10023 do_pending_stack_adjust ();
10025 do_tablejump (index, TYPE_MODE (index_type),
10026 convert_modes (TYPE_MODE (index_type),
10027 TYPE_MODE (TREE_TYPE (range)),
10028 expand_normal (range),
10029 TYPE_UNSIGNED (TREE_TYPE (range))),
10030 table_label, default_label);
10031 return 1;
10034 /* Nonzero if the mode is a valid vector mode for this architecture.
10035 This returns nonzero even if there is no hardware support for the
10036 vector mode, but we can emulate with narrower modes. */
10038 int
10039 vector_mode_valid_p (enum machine_mode mode)
10041 enum mode_class class = GET_MODE_CLASS (mode);
10042 enum machine_mode innermode;
10044 /* Doh! What's going on? */
10045 if (class != MODE_VECTOR_INT
10046 && class != MODE_VECTOR_FLOAT
10047 && class != MODE_VECTOR_FRACT
10048 && class != MODE_VECTOR_UFRACT
10049 && class != MODE_VECTOR_ACCUM
10050 && class != MODE_VECTOR_UACCUM)
10051 return 0;
10053 /* Hardware support. Woo hoo! */
10054 if (targetm.vector_mode_supported_p (mode))
10055 return 1;
10057 innermode = GET_MODE_INNER (mode);
10059 /* We should probably return 1 if requesting V4DI and we have no DI,
10060 but we have V2DI; however, this is probably very unlikely. */
10062 /* If we have support for the inner mode, we can safely emulate it.
10063 We may not have V2DI, but we can emulate it with a pair of DIs. */
10064 return targetm.scalar_mode_supported_p (innermode);
10067 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10068 static rtx
10069 const_vector_from_tree (tree exp)
10071 rtvec v;
10072 int units, i;
10073 tree link, elt;
10074 enum machine_mode inner, mode;
10076 mode = TYPE_MODE (TREE_TYPE (exp));
10078 if (initializer_zerop (exp))
10079 return CONST0_RTX (mode);
10081 units = GET_MODE_NUNITS (mode);
10082 inner = GET_MODE_INNER (mode);
10084 v = rtvec_alloc (units);
10086 link = TREE_VECTOR_CST_ELTS (exp);
10087 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10089 elt = TREE_VALUE (link);
10091 if (TREE_CODE (elt) == REAL_CST)
10092 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10093 inner);
10094 else if (TREE_CODE (elt) == FIXED_CST)
10095 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10096 inner);
10097 else
10098 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10099 TREE_INT_CST_HIGH (elt),
10100 inner);
10103 /* Initialize remaining elements to 0. */
10104 for (; i < units; ++i)
10105 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10107 return gen_rtx_CONST_VECTOR (mode, v);
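/* Editor's note: for example, a V4SI VECTOR_CST listing only the
elements {1, 2} yields a CONST_VECTOR whose first two elements are the
CONST_INTs 1 and 2 and whose remaining two elements are zero, per the
loop above. */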
10109 #include "gt-expr.h"