gcc/expr.c (official-gcc.git)
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "reload.h"
42 #include "output.h"
43 #include "typeclass.h"
44 #include "toplev.h"
45 #include "langhooks.h"
46 #include "intl.h"
47 #include "tm_p.h"
48 #include "tree-iterator.h"
49 #include "tree-pass.h"
50 #include "tree-flow.h"
51 #include "target.h"
52 #include "timevar.h"
53 #include "df.h"
54 #include "diagnostic.h"
55 #include "ssaexpand.h"
57 /* Decide whether a function's arguments should be processed
58 from first to last or from last to first.
60 They should if the stack and args grow in opposite directions, but
61 only if we have push insns. */
63 #ifdef PUSH_ROUNDING
65 #ifndef PUSH_ARGS_REVERSED
66 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
67 #define PUSH_ARGS_REVERSED /* If it's last to first. */
68 #endif
69 #endif
71 #endif
73 #ifndef STACK_PUSH_CODE
74 #ifdef STACK_GROWS_DOWNWARD
75 #define STACK_PUSH_CODE PRE_DEC
76 #else
77 #define STACK_PUSH_CODE PRE_INC
78 #endif
79 #endif
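/* Editorial illustration (not from the original source): with
   STACK_GROWS_DOWNWARD defined, STACK_PUSH_CODE is PRE_DEC, so a
   single-word push on a hypothetical 32-bit target is emitted as RTL
   roughly of the form

     (set (mem:SI (pre_dec:SI (reg sp))) (reg:SI 0))

   i.e. the stack pointer is decremented before the store; on the rare
   stacks that grow upward, PRE_INC is used instead.  */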
82 /* If this is nonzero, we do not bother generating VOLATILE
83 around volatile memory references, and we are willing to
84 output indirect addresses. If cse is to follow, we reject
85 indirect addresses so a useful potential cse is generated;
86 if it is used only once, instruction combination will produce
87 the same indirect address eventually. */
88 int cse_not_expected;
90 /* This structure is used by move_by_pieces to describe the move to
91 be performed. */
92 struct move_by_pieces_d
94 rtx to;
95 rtx to_addr;
96 int autinc_to;
97 int explicit_inc_to;
98 rtx from;
99 rtx from_addr;
100 int autinc_from;
101 int explicit_inc_from;
102 unsigned HOST_WIDE_INT len;
103 HOST_WIDE_INT offset;
104 int reverse;
107 /* This structure is used by store_by_pieces to describe the clear to
108 be performed. */
110 struct store_by_pieces_d
112 rtx to;
113 rtx to_addr;
114 int autinc_to;
115 int explicit_inc_to;
116 unsigned HOST_WIDE_INT len;
117 HOST_WIDE_INT offset;
118 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
119 void *constfundata;
120 int reverse;
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
124 unsigned int,
125 unsigned int);
126 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces_d *);
128 static bool block_move_libcall_safe_for_call_parm (void);
129 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces_d *);
137 static tree clear_storage_libcall_fn (int);
138 static rtx compress_float_constant (rtx, rtx);
139 static rtx get_subtarget (rtx);
140 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
141 HOST_WIDE_INT, enum machine_mode,
142 tree, tree, int, alias_set_type);
143 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
144 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
145 tree, tree, alias_set_type, bool);
147 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
149 static int is_aligning_offset (const_tree, const_tree);
150 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
151 enum expand_modifier);
152 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
153 static rtx do_store_flag (sepops, rtx, enum machine_mode);
154 #ifdef PUSH_ROUNDING
155 static void emit_single_push_insn (enum machine_mode, rtx, tree);
156 #endif
157 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
158 static rtx const_vector_from_tree (tree);
159 static void write_complex_part (rtx, rtx, bool);
161 /* Record for each mode whether we can move a register directly to or
162 from an object of that mode in memory. If we can't, we won't try
163 to use that mode directly when accessing a field of that mode. */
165 static char direct_load[NUM_MACHINE_MODES];
166 static char direct_store[NUM_MACHINE_MODES];
168 /* Record for each mode whether we can float-extend from memory. */
170 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
172 /* This macro is used to determine whether move_by_pieces should be called
173 to perform a structure copy. */
174 #ifndef MOVE_BY_PIECES_P
175 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
176 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
177 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
178 #endif
180 /* This macro is used to determine whether clear_by_pieces should be
181 called to clear storage. */
182 #ifndef CLEAR_BY_PIECES_P
183 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
184 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
185 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
186 #endif
188 /* This macro is used to determine whether store_by_pieces should be
189 called to "memset" storage with byte values other than zero. */
190 #ifndef SET_BY_PIECES_P
191 #define SET_BY_PIECES_P(SIZE, ALIGN) \
192 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
193 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
194 #endif
196 /* This macro is used to determine whether store_by_pieces should be
197 called to "memcpy" storage when the source is a constant string. */
198 #ifndef STORE_BY_PIECES_P
199 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
200 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
201 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
202 #endif
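/* Editorial example of how the *_BY_PIECES_P macros above are meant to
   work, assuming a 32-bit target with MOVE_MAX_PIECES == 4 and a
   MOVE_RATIO of 5: for a 16-byte copy with 32-bit alignment,
   move_by_pieces_ninsns (16, 32, 5) counts four SImode moves, and since
   4 < 5, MOVE_BY_PIECES_P is true and the copy is expanded inline
   instead of going through a movmem pattern or a libcall.  */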
204 /* This array records the insn_code of insns to perform block moves. */
205 enum insn_code movmem_optab[NUM_MACHINE_MODES];
207 /* This array records the insn_code of insns to perform block sets. */
208 enum insn_code setmem_optab[NUM_MACHINE_MODES];
210 /* These arrays record the insn_code of three different kinds of insns
211 to perform block compares. */
212 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
213 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
214 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
216 /* Synchronization primitives. */
217 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
229 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
230 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
231 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
232 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
233 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
234 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
235 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
236 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
237 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
239 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
241 #ifndef SLOW_UNALIGNED_ACCESS
242 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
243 #endif
245 /* This is run to set up which modes can be used
246 directly in memory and to initialize the block move optab. It is run
247 at the beginning of compilation and when the target is reinitialized. */
249 void
250 init_expr_target (void)
252 rtx insn, pat;
253 enum machine_mode mode;
254 int num_clobbers;
255 rtx mem, mem1;
256 rtx reg;
258 /* Try indexing by frame ptr and try by stack ptr.
259 It is known that on the Convex the stack ptr isn't a valid index.
260 With luck, one or the other is valid on any machine. */
261 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
262 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
264 /* A scratch register we can modify in-place below to avoid
265 useless RTL allocations. */
266 reg = gen_rtx_REG (VOIDmode, -1);
268 insn = rtx_alloc (INSN);
269 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
270 PATTERN (insn) = pat;
272 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
273 mode = (enum machine_mode) ((int) mode + 1))
275 int regno;
277 direct_load[(int) mode] = direct_store[(int) mode] = 0;
278 PUT_MODE (mem, mode);
279 PUT_MODE (mem1, mode);
280 PUT_MODE (reg, mode);
282 /* See if there is some register that can be used in this mode and
283 directly loaded or stored from memory. */
285 if (mode != VOIDmode && mode != BLKmode)
286 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
287 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
288 regno++)
290 if (! HARD_REGNO_MODE_OK (regno, mode))
291 continue;
293 SET_REGNO (reg, regno);
295 SET_SRC (pat) = mem;
296 SET_DEST (pat) = reg;
297 if (recog (pat, insn, &num_clobbers) >= 0)
298 direct_load[(int) mode] = 1;
300 SET_SRC (pat) = mem1;
301 SET_DEST (pat) = reg;
302 if (recog (pat, insn, &num_clobbers) >= 0)
303 direct_load[(int) mode] = 1;
305 SET_SRC (pat) = reg;
306 SET_DEST (pat) = mem;
307 if (recog (pat, insn, &num_clobbers) >= 0)
308 direct_store[(int) mode] = 1;
310 SET_SRC (pat) = reg;
311 SET_DEST (pat) = mem1;
312 if (recog (pat, insn, &num_clobbers) >= 0)
313 direct_store[(int) mode] = 1;
317 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
319 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
320 mode = GET_MODE_WIDER_MODE (mode))
322 enum machine_mode srcmode;
323 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
324 srcmode = GET_MODE_WIDER_MODE (srcmode))
326 enum insn_code ic;
328 ic = can_extend_p (mode, srcmode, 0);
329 if (ic == CODE_FOR_nothing)
330 continue;
332 PUT_MODE (mem, srcmode);
334 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
335 float_extend_from_mem[mode][srcmode] = true;
340 /* This is run at the start of compiling a function. */
342 void
343 init_expr (void)
345 memset (&crtl->expr, 0, sizeof (crtl->expr));
348 /* Copy data from FROM to TO, where the machine modes are not the same.
349 Both modes may be integer, or both may be floating, or both may be
350 fixed-point.
351 UNSIGNEDP should be nonzero if FROM is an unsigned type.
352 This causes zero-extension instead of sign-extension. */
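/* Editorial usage sketch (not part of the original file): to widen a
   SImode value FROM into a fresh DImode register with sign extension,
   a caller would do something like

     rtx to = gen_reg_rtx (DImode);
     convert_move (to, from, 0);

   passing 0 for UNSIGNEDP to sign-extend, or 1 to zero-extend.  */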
354 void
355 convert_move (rtx to, rtx from, int unsignedp)
357 enum machine_mode to_mode = GET_MODE (to);
358 enum machine_mode from_mode = GET_MODE (from);
359 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
360 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
361 enum insn_code code;
362 rtx libcall;
364 /* rtx code for making an equivalent value. */
365 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
366 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
369 gcc_assert (to_real == from_real);
370 gcc_assert (to_mode != BLKmode);
371 gcc_assert (from_mode != BLKmode);
373 /* If the source and destination are already the same, then there's
374 nothing to do. */
375 if (to == from)
376 return;
378 /* If FROM is a SUBREG that indicates that we have already done at least
379 the required extension, strip it. We don't handle such SUBREGs as
380 TO here. */
382 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
383 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
384 >= GET_MODE_SIZE (to_mode))
385 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
386 from = gen_lowpart (to_mode, from), from_mode = to_mode;
388 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
390 if (to_mode == from_mode
391 || (from_mode == VOIDmode && CONSTANT_P (from)))
393 emit_move_insn (to, from);
394 return;
397 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
399 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
401 if (VECTOR_MODE_P (to_mode))
402 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
403 else
404 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
406 emit_move_insn (to, from);
407 return;
410 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
412 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
413 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
414 return;
417 if (to_real)
419 rtx value, insns;
420 convert_optab tab;
422 gcc_assert ((GET_MODE_PRECISION (from_mode)
423 != GET_MODE_PRECISION (to_mode))
424 || (DECIMAL_FLOAT_MODE_P (from_mode)
425 != DECIMAL_FLOAT_MODE_P (to_mode)));
427 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
428 /* Conversion between decimal float and binary float, same size. */
429 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
430 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
431 tab = sext_optab;
432 else
433 tab = trunc_optab;
435 /* Try converting directly if the insn is supported. */
437 code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
438 if (code != CODE_FOR_nothing)
440 emit_unop_insn (code, to, from,
441 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
442 return;
445 /* Otherwise use a libcall. */
446 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
448 /* Is this conversion implemented yet? */
449 gcc_assert (libcall);
451 start_sequence ();
452 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
453 1, from, from_mode);
454 insns = get_insns ();
455 end_sequence ();
456 emit_libcall_block (insns, to, value,
457 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
458 from)
459 : gen_rtx_FLOAT_EXTEND (to_mode, from));
460 return;
463 /* Handle pointer conversion. */ /* SPEE 900220. */
464 /* Targets are expected to provide conversion insns between PxImode and
465 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
466 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
468 enum machine_mode full_mode
469 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
471 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
472 != CODE_FOR_nothing);
474 if (full_mode != from_mode)
475 from = convert_to_mode (full_mode, from, unsignedp);
476 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
477 to, from, UNKNOWN);
478 return;
480 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
482 rtx new_from;
483 enum machine_mode full_mode
484 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
486 gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
487 != CODE_FOR_nothing);
489 if (to_mode == full_mode)
491 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
492 to, from, UNKNOWN);
493 return;
496 new_from = gen_reg_rtx (full_mode);
497 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
498 new_from, from, UNKNOWN);
500 /* else proceed to integer conversions below. */
501 from_mode = full_mode;
502 from = new_from;
505 /* Make sure both are fixed-point modes or both are not. */
506 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
507 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
508 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
510 /* If we widen from_mode to to_mode and they are in the same class,
511 we won't saturate the result.
512 Otherwise, always saturate the result to play safe. */
513 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
514 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
515 expand_fixed_convert (to, from, 0, 0);
516 else
517 expand_fixed_convert (to, from, 0, 1);
518 return;
521 /* Now both modes are integers. */
523 /* Handle expanding beyond a word. */
524 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
525 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
527 rtx insns;
528 rtx lowpart;
529 rtx fill_value;
530 rtx lowfrom;
531 int i;
532 enum machine_mode lowpart_mode;
533 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
535 /* Try converting directly if the insn is supported. */
536 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
537 != CODE_FOR_nothing)
539 /* If FROM is a SUBREG, put it into a register. Do this
540 so that we always generate the same set of insns for
541 better cse'ing; if an intermediate assignment occurred,
542 we won't be doing the operation directly on the SUBREG. */
543 if (optimize > 0 && GET_CODE (from) == SUBREG)
544 from = force_reg (from_mode, from);
545 emit_unop_insn (code, to, from, equiv_code);
546 return;
548 /* Next, try converting via full word. */
549 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
550 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
551 != CODE_FOR_nothing))
553 rtx word_to = gen_reg_rtx (word_mode);
554 if (REG_P (to))
556 if (reg_overlap_mentioned_p (to, from))
557 from = force_reg (from_mode, from);
558 emit_clobber (to);
560 convert_move (word_to, from, unsignedp);
561 emit_unop_insn (code, to, word_to, equiv_code);
562 return;
565 /* No special multiword conversion insn; do it by hand. */
566 start_sequence ();
568 /* Since we will turn this into a no conflict block, we must ensure
569 that the source does not overlap the target. */
571 if (reg_overlap_mentioned_p (to, from))
572 from = force_reg (from_mode, from);
574 /* Get a copy of FROM widened to a word, if necessary. */
575 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
576 lowpart_mode = word_mode;
577 else
578 lowpart_mode = from_mode;
580 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
582 lowpart = gen_lowpart (lowpart_mode, to);
583 emit_move_insn (lowpart, lowfrom);
585 /* Compute the value to put in each remaining word. */
586 if (unsignedp)
587 fill_value = const0_rtx;
588 else
589 fill_value = emit_store_flag (gen_reg_rtx (word_mode),
590 LT, lowfrom, const0_rtx,
591 VOIDmode, 0, -1);
593 /* Fill the remaining words. */
594 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
596 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
597 rtx subword = operand_subword (to, index, 1, to_mode);
599 gcc_assert (subword);
601 if (fill_value != subword)
602 emit_move_insn (subword, fill_value);
605 insns = get_insns ();
606 end_sequence ();
608 emit_insn (insns);
609 return;
612 /* Truncating multi-word to a word or less. */
613 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
614 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
616 if (!((MEM_P (from)
617 && ! MEM_VOLATILE_P (from)
618 && direct_load[(int) to_mode]
619 && ! mode_dependent_address_p (XEXP (from, 0)))
620 || REG_P (from)
621 || GET_CODE (from) == SUBREG))
622 from = force_reg (from_mode, from);
623 convert_move (to, gen_lowpart (word_mode, from), 0);
624 return;
627 /* Now follow all the conversions between integers
628 no more than a word long. */
630 /* For truncation, usually we can just refer to FROM in a narrower mode. */
631 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
632 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
633 GET_MODE_BITSIZE (from_mode)))
635 if (!((MEM_P (from)
636 && ! MEM_VOLATILE_P (from)
637 && direct_load[(int) to_mode]
638 && ! mode_dependent_address_p (XEXP (from, 0)))
639 || REG_P (from)
640 || GET_CODE (from) == SUBREG))
641 from = force_reg (from_mode, from);
642 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
643 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
644 from = copy_to_reg (from);
645 emit_move_insn (to, gen_lowpart (to_mode, from));
646 return;
649 /* Handle extension. */
650 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
652 /* Convert directly if that works. */
653 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
654 != CODE_FOR_nothing)
656 emit_unop_insn (code, to, from, equiv_code);
657 return;
659 else
661 enum machine_mode intermediate;
662 rtx tmp;
663 tree shift_amount;
665 /* Search for a mode to convert via. */
666 for (intermediate = from_mode; intermediate != VOIDmode;
667 intermediate = GET_MODE_WIDER_MODE (intermediate))
668 if (((can_extend_p (to_mode, intermediate, unsignedp)
669 != CODE_FOR_nothing)
670 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
671 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
672 GET_MODE_BITSIZE (intermediate))))
673 && (can_extend_p (intermediate, from_mode, unsignedp)
674 != CODE_FOR_nothing))
676 convert_move (to, convert_to_mode (intermediate, from,
677 unsignedp), unsignedp);
678 return;
681 /* No suitable intermediate mode.
682 Generate what we need with shifts. */
683 shift_amount = build_int_cst (NULL_TREE,
684 GET_MODE_BITSIZE (to_mode)
685 - GET_MODE_BITSIZE (from_mode));
686 from = gen_lowpart (to_mode, force_reg (from_mode, from));
687 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
688 to, unsignedp);
689 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
690 to, unsignedp);
691 if (tmp != to)
692 emit_move_insn (to, tmp);
693 return;
697 /* Support special truncate insns for certain modes. */
698 if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
700 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
701 to, from, UNKNOWN);
702 return;
705 /* Handle truncation of volatile memrefs, and so on;
706 the things that couldn't be truncated directly,
707 and for which there was no special instruction.
709 ??? Code above formerly short-circuited this, for most integer
710 mode pairs, with a force_reg in from_mode followed by a recursive
711 call to this routine. Appears always to have been wrong. */
712 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
714 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
715 emit_move_insn (to, temp);
716 return;
719 /* Mode combination is not recognized. */
720 gcc_unreachable ();
723 /* Return an rtx for a value that would result
724 from converting X to mode MODE.
725 Both X and MODE may be floating, or both integer.
726 UNSIGNEDP is nonzero if X is an unsigned value.
727 This can be done by referring to a part of X in place
728 or by copying to a new temporary with conversion. */
731 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
733 return convert_modes (mode, VOIDmode, x, unsignedp);
736 /* Return an rtx for a value that would result
737 from converting X from mode OLDMODE to mode MODE.
738 Both modes may be floating, or both integer.
739 UNSIGNEDP is nonzero if X is an unsigned value.
741 This can be done by referring to a part of X in place
742 or by copying to a new temporary with conversion.
744 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
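/* Editorial usage sketch: a typical call such as

     rtx op = convert_modes (SImode, QImode, x, 1);

   zero-extends a QImode X into an SImode rtx, and simply returns X
   unchanged when it already has SImode.  */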
747 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
749 rtx temp;
751 /* If FROM is a SUBREG that indicates that we have already done at least
752 the required extension, strip it. */
754 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
755 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
756 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
757 x = gen_lowpart (mode, x);
759 if (GET_MODE (x) != VOIDmode)
760 oldmode = GET_MODE (x);
762 if (mode == oldmode)
763 return x;
765 /* There is one case that we must handle specially: If we are converting
766 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
767 we are to interpret the constant as unsigned, gen_lowpart will do
768 the wrong thing if the constant appears negative. What we want to do is
769 make the high-order word of the constant zero, not all ones. */
771 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
772 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
773 && CONST_INT_P (x) && INTVAL (x) < 0)
775 double_int val = uhwi_to_double_int (INTVAL (x));
777 /* We need to zero extend VAL. */
778 if (oldmode != VOIDmode)
779 val = double_int_zext (val, GET_MODE_BITSIZE (oldmode));
781 return immed_double_int_const (val, mode);
784 /* We can do this with a gen_lowpart if both desired and current modes
785 are integer, and this is either a constant integer, a register, or a
786 non-volatile MEM. Except for the constant case where MODE is no
787 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
789 if ((CONST_INT_P (x)
790 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
791 || (GET_MODE_CLASS (mode) == MODE_INT
792 && GET_MODE_CLASS (oldmode) == MODE_INT
793 && (GET_CODE (x) == CONST_DOUBLE
794 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
795 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
796 && direct_load[(int) mode])
797 || (REG_P (x)
798 && (! HARD_REGISTER_P (x)
799 || HARD_REGNO_MODE_OK (REGNO (x), mode))
800 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
801 GET_MODE_BITSIZE (GET_MODE (x)))))))))
803 /* ?? If we don't know OLDMODE, we have to assume here that
804 X does not need sign- or zero-extension. This may not be
805 the case, but it's the best we can do. */
806 if (CONST_INT_P (x) && oldmode != VOIDmode
807 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
809 HOST_WIDE_INT val = INTVAL (x);
810 int width = GET_MODE_BITSIZE (oldmode);
812 /* We must sign or zero-extend in this case. Start by
813 zero-extending, then sign extend if we need to. */
814 val &= ((HOST_WIDE_INT) 1 << width) - 1;
815 if (! unsignedp
816 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
817 val |= (HOST_WIDE_INT) (-1) << width;
819 return gen_int_mode (val, mode);
822 return gen_lowpart (mode, x);
825 /* Converting an integer constant into a vector mode is always equivalent
826 to a subreg operation. */
827 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
829 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
830 return simplify_gen_subreg (mode, x, oldmode, 0);
833 temp = gen_reg_rtx (mode);
834 convert_move (temp, x, unsignedp);
835 return temp;
838 /* STORE_MAX_PIECES is the number of bytes at a time that we can
839 store efficiently. Due to internal GCC limitations, this is
840 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
841 for an immediate constant. */
843 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
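/* Editorial note: on a typical LP64 host sizeof (HOST_WIDE_INT) is 8, so
   with a MOVE_MAX_PIECES of 8 this works out to
   STORE_MAX_PIECES = MIN (8, 16) = 8 bytes per piece.  */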
845 /* Determine whether the LEN bytes can be moved by using several move
846 instructions. Return nonzero if a call to move_by_pieces should
847 succeed. */
850 can_move_by_pieces (unsigned HOST_WIDE_INT len,
851 unsigned int align ATTRIBUTE_UNUSED)
853 return MOVE_BY_PIECES_P (len, align);
856 /* Generate several move instructions to copy LEN bytes from block FROM to
857 block TO. (These are MEM rtx's with BLKmode).
859 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
860 used to push FROM to the stack.
862 ALIGN is maximum stack alignment we can assume.
864 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
865 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
866 stpcpy. */
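/* Editorial example of the ENDP convention above: after copying
   LEN == 4 bytes to a block whose first byte is at address A,
   ENDP == 0 returns TO itself, ENDP == 1 returns memory at A + 4
   (mempcpy style), and ENDP == 2 returns memory at A + 3, the last
   byte actually written (stpcpy style).  */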
869 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
870 unsigned int align, int endp)
872 struct move_by_pieces_d data;
873 enum machine_mode to_addr_mode, from_addr_mode
874 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
875 rtx to_addr, from_addr = XEXP (from, 0);
876 unsigned int max_size = MOVE_MAX_PIECES + 1;
877 enum machine_mode mode = VOIDmode, tmode;
878 enum insn_code icode;
880 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
882 data.offset = 0;
883 data.from_addr = from_addr;
884 if (to)
886 to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
887 to_addr = XEXP (to, 0);
888 data.to = to;
889 data.autinc_to
890 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
891 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
892 data.reverse
893 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
895 else
897 to_addr_mode = VOIDmode;
898 to_addr = NULL_RTX;
899 data.to = NULL_RTX;
900 data.autinc_to = 1;
901 #ifdef STACK_GROWS_DOWNWARD
902 data.reverse = 1;
903 #else
904 data.reverse = 0;
905 #endif
907 data.to_addr = to_addr;
908 data.from = from;
909 data.autinc_from
910 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
911 || GET_CODE (from_addr) == POST_INC
912 || GET_CODE (from_addr) == POST_DEC);
914 data.explicit_inc_from = 0;
915 data.explicit_inc_to = 0;
916 if (data.reverse) data.offset = len;
917 data.len = len;
919 /* If copying requires more than two move insns,
920 copy addresses to registers (to make displacements shorter)
921 and use post-increment if available. */
922 if (!(data.autinc_from && data.autinc_to)
923 && move_by_pieces_ninsns (len, align, max_size) > 2)
925 /* Find the mode of the largest move... */
926 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
927 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
928 if (GET_MODE_SIZE (tmode) < max_size)
929 mode = tmode;
931 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
933 data.from_addr = copy_to_mode_reg (from_addr_mode,
934 plus_constant (from_addr, len));
935 data.autinc_from = 1;
936 data.explicit_inc_from = -1;
938 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
940 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
941 data.autinc_from = 1;
942 data.explicit_inc_from = 1;
944 if (!data.autinc_from && CONSTANT_P (from_addr))
945 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
946 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
948 data.to_addr = copy_to_mode_reg (to_addr_mode,
949 plus_constant (to_addr, len));
950 data.autinc_to = 1;
951 data.explicit_inc_to = -1;
953 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
955 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
956 data.autinc_to = 1;
957 data.explicit_inc_to = 1;
959 if (!data.autinc_to && CONSTANT_P (to_addr))
960 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
963 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
964 if (align >= GET_MODE_ALIGNMENT (tmode))
965 align = GET_MODE_ALIGNMENT (tmode);
966 else
968 enum machine_mode xmode;
970 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
971 tmode != VOIDmode;
972 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
973 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
974 || SLOW_UNALIGNED_ACCESS (tmode, align))
975 break;
977 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
980 /* First move what we can in the largest integer mode, then go to
981 successively smaller modes. */
983 while (max_size > 1)
985 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
986 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
987 if (GET_MODE_SIZE (tmode) < max_size)
988 mode = tmode;
990 if (mode == VOIDmode)
991 break;
993 icode = optab_handler (mov_optab, mode)->insn_code;
994 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
995 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
997 max_size = GET_MODE_SIZE (mode);
1000 /* The code above should have handled everything. */
1001 gcc_assert (!data.len);
1003 if (endp)
1005 rtx to1;
1007 gcc_assert (!data.reverse);
1008 if (data.autinc_to)
1010 if (endp == 2)
1012 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1013 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1014 else
1015 data.to_addr = copy_to_mode_reg (to_addr_mode,
1016 plus_constant (data.to_addr,
1017 -1));
1019 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1020 data.offset);
1022 else
1024 if (endp == 2)
1025 --data.offset;
1026 to1 = adjust_address (data.to, QImode, data.offset);
1028 return to1;
1030 else
1031 return data.to;
1034 /* Return number of insns required to move L bytes by pieces.
1035 ALIGN (in bits) is maximum alignment we can assume. */
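/* Editorial worked example, assuming a 32-bit word, MOVE_MAX_PIECES == 4
   and 32-bit alignment: for L == 10 the loop below counts 10 / 4 = 2
   SImode moves with 10 % 4 = 2 bytes left over, then one HImode move,
   giving 3 insns in total.  */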
1037 static unsigned HOST_WIDE_INT
1038 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1039 unsigned int max_size)
1041 unsigned HOST_WIDE_INT n_insns = 0;
1042 enum machine_mode tmode;
1044 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1045 if (align >= GET_MODE_ALIGNMENT (tmode))
1046 align = GET_MODE_ALIGNMENT (tmode);
1047 else
1049 enum machine_mode tmode, xmode;
1051 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1052 tmode != VOIDmode;
1053 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1054 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1055 || SLOW_UNALIGNED_ACCESS (tmode, align))
1056 break;
1058 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1061 while (max_size > 1)
1063 enum machine_mode mode = VOIDmode;
1064 enum insn_code icode;
1066 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1067 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1068 if (GET_MODE_SIZE (tmode) < max_size)
1069 mode = tmode;
1071 if (mode == VOIDmode)
1072 break;
1074 icode = optab_handler (mov_optab, mode)->insn_code;
1075 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1076 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1078 max_size = GET_MODE_SIZE (mode);
1081 gcc_assert (!l);
1082 return n_insns;
1085 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1086 with move instructions for mode MODE. GENFUN is the gen_... function
1087 to make a move insn for that mode. DATA has all the other info. */
1089 static void
1090 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1091 struct move_by_pieces_d *data)
1093 unsigned int size = GET_MODE_SIZE (mode);
1094 rtx to1 = NULL_RTX, from1;
1096 while (data->len >= size)
1098 if (data->reverse)
1099 data->offset -= size;
1101 if (data->to)
1103 if (data->autinc_to)
1104 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1105 data->offset);
1106 else
1107 to1 = adjust_address (data->to, mode, data->offset);
1110 if (data->autinc_from)
1111 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1112 data->offset);
1113 else
1114 from1 = adjust_address (data->from, mode, data->offset);
1116 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1117 emit_insn (gen_add2_insn (data->to_addr,
1118 GEN_INT (-(HOST_WIDE_INT)size)));
1119 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1120 emit_insn (gen_add2_insn (data->from_addr,
1121 GEN_INT (-(HOST_WIDE_INT)size)));
1123 if (data->to)
1124 emit_insn ((*genfun) (to1, from1));
1125 else
1127 #ifdef PUSH_ROUNDING
1128 emit_single_push_insn (mode, from1, NULL);
1129 #else
1130 gcc_unreachable ();
1131 #endif
1134 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1135 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1136 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1137 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1139 if (! data->reverse)
1140 data->offset += size;
1142 data->len -= size;
1146 /* Emit code to move a block Y to a block X. This may be done with
1147 string-move instructions, with multiple scalar move instructions,
1148 or with a library call.
1150 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1151 SIZE is an rtx that says how long they are.
1152 ALIGN is the maximum alignment we can assume they have.
1153 METHOD describes what kind of copy this is, and what mechanisms may be used.
1155 Return the address of the new block, if memcpy is called and returns it,
1156 0 otherwise. */
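/* Editorial usage sketch: a plain copy of N constant bytes is typically
   requested as

     emit_block_move (dst, src, GEN_INT (n), BLOCK_OP_NORMAL);

   where DST and SRC are BLKmode MEMs; emit_block_move below simply
   forwards to emit_block_move_hints with no alignment or size hints.  */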
1159 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1160 unsigned int expected_align, HOST_WIDE_INT expected_size)
1162 bool may_use_call;
1163 rtx retval = 0;
1164 unsigned int align;
1166 switch (method)
1168 case BLOCK_OP_NORMAL:
1169 case BLOCK_OP_TAILCALL:
1170 may_use_call = true;
1171 break;
1173 case BLOCK_OP_CALL_PARM:
1174 may_use_call = block_move_libcall_safe_for_call_parm ();
1176 /* Make inhibit_defer_pop nonzero around the library call
1177 to force it to pop the arguments right away. */
1178 NO_DEFER_POP;
1179 break;
1181 case BLOCK_OP_NO_LIBCALL:
1182 may_use_call = false;
1183 break;
1185 default:
1186 gcc_unreachable ();
1189 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1190 gcc_assert (align >= BITS_PER_UNIT);
1192 gcc_assert (MEM_P (x));
1193 gcc_assert (MEM_P (y));
1194 gcc_assert (size);
1196 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1197 block copy is more efficient for other large modes, e.g. DCmode. */
1198 x = adjust_address (x, BLKmode, 0);
1199 y = adjust_address (y, BLKmode, 0);
1201 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1202 can be incorrect is coming from __builtin_memcpy. */
1203 if (CONST_INT_P (size))
1205 if (INTVAL (size) == 0)
1206 return 0;
1208 x = shallow_copy_rtx (x);
1209 y = shallow_copy_rtx (y);
1210 set_mem_size (x, size);
1211 set_mem_size (y, size);
1214 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1215 move_by_pieces (x, y, INTVAL (size), align, 0);
1216 else if (emit_block_move_via_movmem (x, y, size, align,
1217 expected_align, expected_size))
1219 else if (may_use_call
1220 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1221 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1222 retval = emit_block_move_via_libcall (x, y, size,
1223 method == BLOCK_OP_TAILCALL);
1224 else
1225 emit_block_move_via_loop (x, y, size, align);
1227 if (method == BLOCK_OP_CALL_PARM)
1228 OK_DEFER_POP;
1230 return retval;
1234 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1236 return emit_block_move_hints (x, y, size, method, 0, -1);
1239 /* A subroutine of emit_block_move. Returns true if calling the
1240 block move libcall will not clobber any parameters which may have
1241 already been placed on the stack. */
1243 static bool
1244 block_move_libcall_safe_for_call_parm (void)
1246 #if defined (REG_PARM_STACK_SPACE)
1247 tree fn;
1248 #endif
1250 /* If arguments are pushed on the stack, then they're safe. */
1251 if (PUSH_ARGS)
1252 return true;
1254 /* If registers go on the stack anyway, any argument is sure to clobber
1255 an outgoing argument. */
1256 #if defined (REG_PARM_STACK_SPACE)
1257 fn = emit_block_move_libcall_fn (false);
1258 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1259 depend on its argument. */
1260 (void) fn;
1261 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1262 && REG_PARM_STACK_SPACE (fn) != 0)
1263 return false;
1264 #endif
1266 /* If any argument goes in memory, then it might clobber an outgoing
1267 argument. */
1269 CUMULATIVE_ARGS args_so_far;
1270 tree fn, arg;
1272 fn = emit_block_move_libcall_fn (false);
1273 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1275 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1276 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1278 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1279 rtx tmp = targetm.calls.function_arg (&args_so_far, mode,
1280 NULL_TREE, true);
1281 if (!tmp || !REG_P (tmp))
1282 return false;
1283 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1284 return false;
1285 targetm.calls.function_arg_advance (&args_so_far, mode,
1286 NULL_TREE, true);
1289 return true;
1292 /* A subroutine of emit_block_move. Expand a movmem pattern;
1293 return true if successful. */
1295 static bool
1296 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1297 unsigned int expected_align, HOST_WIDE_INT expected_size)
1299 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1300 int save_volatile_ok = volatile_ok;
1301 enum machine_mode mode;
1303 if (expected_align < align)
1304 expected_align = align;
1306 /* Since this is a move insn, we don't care about volatility. */
1307 volatile_ok = 1;
1309 /* Try the most limited insn first, because there's no point
1310 including more than one in the machine description unless
1311 the more limited one has some advantage. */
1313 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1314 mode = GET_MODE_WIDER_MODE (mode))
1316 enum insn_code code = movmem_optab[(int) mode];
1317 insn_operand_predicate_fn pred;
1319 if (code != CODE_FOR_nothing
1320 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1321 here because if SIZE is less than the mode mask, as it is
1322 returned by the macro, it will definitely be less than the
1323 actual mode mask. */
1324 && ((CONST_INT_P (size)
1325 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1326 <= (GET_MODE_MASK (mode) >> 1)))
1327 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1328 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1329 || (*pred) (x, BLKmode))
1330 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1331 || (*pred) (y, BLKmode))
1332 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1333 || (*pred) (opalign, VOIDmode)))
1335 rtx op2;
1336 rtx last = get_last_insn ();
1337 rtx pat;
1339 op2 = convert_to_mode (mode, size, 1);
1340 pred = insn_data[(int) code].operand[2].predicate;
1341 if (pred != 0 && ! (*pred) (op2, mode))
1342 op2 = copy_to_mode_reg (mode, op2);
1344 /* ??? When called via emit_block_move_for_call, it'd be
1345 nice if there were some way to inform the backend, so
1346 that it doesn't fail the expansion because it thinks
1347 emitting the libcall would be more efficient. */
1349 if (insn_data[(int) code].n_operands == 4)
1350 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1351 else
1352 pat = GEN_FCN ((int) code) (x, y, op2, opalign,
1353 GEN_INT (expected_align
1354 / BITS_PER_UNIT),
1355 GEN_INT (expected_size));
1356 if (pat)
1358 emit_insn (pat);
1359 volatile_ok = save_volatile_ok;
1360 return true;
1362 else
1363 delete_insns_since (last);
1367 volatile_ok = save_volatile_ok;
1368 return false;
1371 /* A subroutine of emit_block_move. Expand a call to memcpy.
1372 Return the return value from memcpy, 0 otherwise. */
1375 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1377 rtx dst_addr, src_addr;
1378 tree call_expr, fn, src_tree, dst_tree, size_tree;
1379 enum machine_mode size_mode;
1380 rtx retval;
1382 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1383 pseudos. We can then place those new pseudos into a VAR_DECL and
1384 use them later. */
1386 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1387 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1389 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1390 src_addr = convert_memory_address (ptr_mode, src_addr);
1392 dst_tree = make_tree (ptr_type_node, dst_addr);
1393 src_tree = make_tree (ptr_type_node, src_addr);
1395 size_mode = TYPE_MODE (sizetype);
1397 size = convert_to_mode (size_mode, size, 1);
1398 size = copy_to_mode_reg (size_mode, size);
1400 /* It is incorrect to use the libcall calling conventions to call
1401 memcpy in this context. This could be a user call to memcpy and
1402 the user may wish to examine the return value from memcpy. For
1403 targets where libcalls and normal calls have different conventions
1404 for returning pointers, we could end up generating incorrect code. */
1406 size_tree = make_tree (sizetype, size);
1408 fn = emit_block_move_libcall_fn (true);
1409 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1410 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1412 retval = expand_normal (call_expr);
1414 return retval;
1417 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1418 for the function we use for block copies. The first time FOR_CALL
1419 is true, we call assemble_external. */
1421 static GTY(()) tree block_move_fn;
1423 void
1424 init_block_move_fn (const char *asmspec)
1426 if (!block_move_fn)
1428 tree args, fn;
1430 fn = get_identifier ("memcpy");
1431 args = build_function_type_list (ptr_type_node, ptr_type_node,
1432 const_ptr_type_node, sizetype,
1433 NULL_TREE);
1435 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1436 DECL_EXTERNAL (fn) = 1;
1437 TREE_PUBLIC (fn) = 1;
1438 DECL_ARTIFICIAL (fn) = 1;
1439 TREE_NOTHROW (fn) = 1;
1440 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1441 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1443 block_move_fn = fn;
1446 if (asmspec)
1447 set_user_assembler_name (block_move_fn, asmspec);
1450 static tree
1451 emit_block_move_libcall_fn (int for_call)
1453 static bool emitted_extern;
1455 if (!block_move_fn)
1456 init_block_move_fn (NULL);
1458 if (for_call && !emitted_extern)
1460 emitted_extern = true;
1461 make_decl_rtl (block_move_fn);
1462 assemble_external (block_move_fn);
1465 return block_move_fn;
1468 /* A subroutine of emit_block_move. Copy the data via an explicit
1469 loop. This is used only when libcalls are forbidden. */
1470 /* ??? It'd be nice to copy in hunks larger than QImode. */
1472 static void
1473 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1474 unsigned int align ATTRIBUTE_UNUSED)
1476 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1477 enum machine_mode x_addr_mode
1478 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
1479 enum machine_mode y_addr_mode
1480 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
1481 enum machine_mode iter_mode;
1483 iter_mode = GET_MODE (size);
1484 if (iter_mode == VOIDmode)
1485 iter_mode = word_mode;
1487 top_label = gen_label_rtx ();
1488 cmp_label = gen_label_rtx ();
1489 iter = gen_reg_rtx (iter_mode);
1491 emit_move_insn (iter, const0_rtx);
1493 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1494 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1495 do_pending_stack_adjust ();
1497 emit_jump (cmp_label);
1498 emit_label (top_label);
1500 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1501 x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);
1503 if (x_addr_mode != y_addr_mode)
1504 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1505 y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);
1507 x = change_address (x, QImode, x_addr);
1508 y = change_address (y, QImode, y_addr);
1510 emit_move_insn (x, y);
1512 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1513 true, OPTAB_LIB_WIDEN);
1514 if (tmp != iter)
1515 emit_move_insn (iter, tmp);
1517 emit_label (cmp_label);
1519 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1520 true, top_label);
1523 /* Copy all or part of a value X into registers starting at REGNO.
1524 The number of registers to be filled is NREGS. */
1526 void
1527 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1529 int i;
1530 #ifdef HAVE_load_multiple
1531 rtx pat;
1532 rtx last;
1533 #endif
1535 if (nregs == 0)
1536 return;
1538 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1539 x = validize_mem (force_const_mem (mode, x));
1541 /* See if the machine can do this with a load multiple insn. */
1542 #ifdef HAVE_load_multiple
1543 if (HAVE_load_multiple)
1545 last = get_last_insn ();
1546 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1547 GEN_INT (nregs));
1548 if (pat)
1550 emit_insn (pat);
1551 return;
1553 else
1554 delete_insns_since (last);
1556 #endif
1558 for (i = 0; i < nregs; i++)
1559 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1560 operand_subword_force (x, i, mode));
1563 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1564 The number of registers to be filled is NREGS. */
1566 void
1567 move_block_from_reg (int regno, rtx x, int nregs)
1569 int i;
1571 if (nregs == 0)
1572 return;
1574 /* See if the machine can do this with a store multiple insn. */
1575 #ifdef HAVE_store_multiple
1576 if (HAVE_store_multiple)
1578 rtx last = get_last_insn ();
1579 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1580 GEN_INT (nregs));
1581 if (pat)
1583 emit_insn (pat);
1584 return;
1586 else
1587 delete_insns_since (last);
1589 #endif
1591 for (i = 0; i < nregs; i++)
1593 rtx tem = operand_subword (x, i, 1, BLKmode);
1595 gcc_assert (tem);
1597 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1601 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1602 ORIG, where ORIG is a non-consecutive group of registers represented by
1603 a PARALLEL. The clone is identical to the original except in that the
1604 original set of registers is replaced by a new set of pseudo registers.
1605 The new set has the same modes as the original set. */
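/* Editorial illustration of the PARALLEL form handled here and below:
   a value returned in two DImode registers might be described as

     (parallel [(expr_list (reg:DI 100) (const_int 0))
                (expr_list (reg:DI 101) (const_int 8))])

   where each EXPR_LIST pairs a register with its byte offset into the
   block, and the first slot may instead hold NULL when part of the
   value also lives on the stack.  */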
1608 gen_group_rtx (rtx orig)
1610 int i, length;
1611 rtx *tmps;
1613 gcc_assert (GET_CODE (orig) == PARALLEL);
1615 length = XVECLEN (orig, 0);
1616 tmps = XALLOCAVEC (rtx, length);
1618 /* Skip a NULL entry in first slot. */
1619 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1621 if (i)
1622 tmps[0] = 0;
1624 for (; i < length; i++)
1626 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1627 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1629 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1632 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1635 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1636 except that values are placed in TMPS[i], and must later be moved
1637 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1639 static void
1640 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1642 rtx src;
1643 int start, i;
1644 enum machine_mode m = GET_MODE (orig_src);
1646 gcc_assert (GET_CODE (dst) == PARALLEL);
1648 if (m != VOIDmode
1649 && !SCALAR_INT_MODE_P (m)
1650 && !MEM_P (orig_src)
1651 && GET_CODE (orig_src) != CONCAT)
1653 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1654 if (imode == BLKmode)
1655 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1656 else
1657 src = gen_reg_rtx (imode);
1658 if (imode != BLKmode)
1659 src = gen_lowpart (GET_MODE (orig_src), src);
1660 emit_move_insn (src, orig_src);
1661 /* ...and back again. */
1662 if (imode != BLKmode)
1663 src = gen_lowpart (imode, src);
1664 emit_group_load_1 (tmps, dst, src, type, ssize);
1665 return;
1668 /* Check for a NULL entry, used to indicate that the parameter goes
1669 both on the stack and in registers. */
1670 if (XEXP (XVECEXP (dst, 0, 0), 0))
1671 start = 0;
1672 else
1673 start = 1;
1675 /* Process the pieces. */
1676 for (i = start; i < XVECLEN (dst, 0); i++)
1678 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1679 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1680 unsigned int bytelen = GET_MODE_SIZE (mode);
1681 int shift = 0;
1683 /* Handle trailing fragments that run over the size of the struct. */
1684 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1686 /* Arrange to shift the fragment to where it belongs.
1687 extract_bit_field loads to the lsb of the reg. */
1688 if (
1689 #ifdef BLOCK_REG_PADDING
1690 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1691 == (BYTES_BIG_ENDIAN ? upward : downward)
1692 #else
1693 BYTES_BIG_ENDIAN
1694 #endif
1696 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1697 bytelen = ssize - bytepos;
1698 gcc_assert (bytelen > 0);
1701 /* If we won't be loading directly from memory, protect the real source
1702 from strange tricks we might play; but make sure that the source can
1703 be loaded directly into the destination. */
1704 src = orig_src;
1705 if (!MEM_P (orig_src)
1706 && (!CONSTANT_P (orig_src)
1707 || (GET_MODE (orig_src) != mode
1708 && GET_MODE (orig_src) != VOIDmode)))
1710 if (GET_MODE (orig_src) == VOIDmode)
1711 src = gen_reg_rtx (mode);
1712 else
1713 src = gen_reg_rtx (GET_MODE (orig_src));
1715 emit_move_insn (src, orig_src);
1718 /* Optimize the access just a bit. */
1719 if (MEM_P (src)
1720 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1721 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1722 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1723 && bytelen == GET_MODE_SIZE (mode))
1725 tmps[i] = gen_reg_rtx (mode);
1726 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1728 else if (COMPLEX_MODE_P (mode)
1729 && GET_MODE (src) == mode
1730 && bytelen == GET_MODE_SIZE (mode))
1731 /* Let emit_move_complex do the bulk of the work. */
1732 tmps[i] = src;
1733 else if (GET_CODE (src) == CONCAT)
1735 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1736 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1738 if ((bytepos == 0 && bytelen == slen0)
1739 || (bytepos != 0 && bytepos + bytelen <= slen))
1741 /* The following assumes that the concatenated objects all
1742 have the same size. In this case, a simple calculation
1743 can be used to determine the object and the bit field
1744 to be extracted. */
1745 tmps[i] = XEXP (src, bytepos / slen0);
1746 if (! CONSTANT_P (tmps[i])
1747 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1748 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1749 (bytepos % slen0) * BITS_PER_UNIT,
1750 1, NULL_RTX, mode, mode);
1752 else
1754 rtx mem;
1756 gcc_assert (!bytepos);
1757 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1758 emit_move_insn (mem, src);
1759 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1760 0, 1, NULL_RTX, mode, mode);
1763 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1764 SIMD register, which is currently broken. Until we get GCC
1765 to emit proper RTL for these cases, let's dump to memory. */
1766 else if (VECTOR_MODE_P (GET_MODE (dst))
1767 && REG_P (src))
1769 int slen = GET_MODE_SIZE (GET_MODE (src));
1770 rtx mem;
1772 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1773 emit_move_insn (mem, src);
1774 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1776 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1777 && XVECLEN (dst, 0) > 1)
1778 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1779 else if (CONSTANT_P (src))
1781 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1783 if (len == ssize)
1784 tmps[i] = src;
1785 else
1787 rtx first, second;
1789 gcc_assert (2 * len == ssize);
1790 split_double (src, &first, &second);
1791 if (i)
1792 tmps[i] = second;
1793 else
1794 tmps[i] = first;
1797 else if (REG_P (src) && GET_MODE (src) == mode)
1798 tmps[i] = src;
1799 else
1800 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1801 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1802 mode, mode);
1804 if (shift)
1805 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1806 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1810 /* Emit code to move a block SRC of type TYPE to a block DST,
1811 where DST is non-consecutive registers represented by a PARALLEL.
1812 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1813 if not known. */
1815 void
1816 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1818 rtx *tmps;
1819 int i;
1821 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1822 emit_group_load_1 (tmps, dst, src, type, ssize);
1824 /* Copy the extracted pieces into the proper (probable) hard regs. */
1825 for (i = 0; i < XVECLEN (dst, 0); i++)
1827 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1828 if (d == NULL)
1829 continue;
1830 emit_move_insn (d, tmps[i]);
1834 /* Similar, but load SRC into new pseudos in a format that looks like
1835 PARALLEL. This can later be fed to emit_group_move to get things
1836 in the right place. */
1839 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1841 rtvec vec;
1842 int i;
1844 vec = rtvec_alloc (XVECLEN (parallel, 0));
1845 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1847 /* Convert the vector to look just like the original PARALLEL, except
1848 with the computed values. */
1849 for (i = 0; i < XVECLEN (parallel, 0); i++)
1851 rtx e = XVECEXP (parallel, 0, i);
1852 rtx d = XEXP (e, 0);
1854 if (d)
1856 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1857 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1859 RTVEC_ELT (vec, i) = e;
1862 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1865 /* Emit code to move a block SRC to block DST, where SRC and DST are
1866 non-consecutive groups of registers, each represented by a PARALLEL. */
1868 void
1869 emit_group_move (rtx dst, rtx src)
1871 int i;
1873 gcc_assert (GET_CODE (src) == PARALLEL
1874 && GET_CODE (dst) == PARALLEL
1875 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1877 /* Skip first entry if NULL. */
1878 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1879 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1880 XEXP (XVECEXP (src, 0, i), 0));
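/* Illustrative sketch (hypothetical helper, not compiled): the two-step
   pattern suggested by the comments above -- first load SRC into fresh
   pseudos shaped like the destination PARALLEL, then move those pseudos
   into the hard registers that the PARALLEL describes.  */
#if 0
static void
example_two_step_group_move (rtx dst_parallel, rtx src_mem, tree type)
{
  rtx temps = emit_group_load_into_temps (dst_parallel, src_mem, type,
					  int_size_in_bytes (type));
  emit_group_move (dst_parallel, temps);
}
#endif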
1883 /* Move a group of registers represented by a PARALLEL into pseudos. */
1886 emit_group_move_into_temps (rtx src)
1888 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1889 int i;
1891 for (i = 0; i < XVECLEN (src, 0); i++)
1893 rtx e = XVECEXP (src, 0, i);
1894 rtx d = XEXP (e, 0);
1896 if (d)
1897 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1898 RTVEC_ELT (vec, i) = e;
1901 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1904 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1905 where SRC is non-consecutive registers represented by a PARALLEL.
1906 SSIZE represents the total size of block ORIG_DST, or -1 if not
1907 known. */
1909 void
1910 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1912 rtx *tmps, dst;
1913 int start, finish, i;
1914 enum machine_mode m = GET_MODE (orig_dst);
1916 gcc_assert (GET_CODE (src) == PARALLEL);
1918 if (!SCALAR_INT_MODE_P (m)
1919 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1921 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1922 if (imode == BLKmode)
1923 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1924 else
1925 dst = gen_reg_rtx (imode);
1926 emit_group_store (dst, src, type, ssize);
1927 if (imode != BLKmode)
1928 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1929 emit_move_insn (orig_dst, dst);
1930 return;
1933 /* Check for a NULL entry, used to indicate that the parameter goes
1934 both on the stack and in registers. */
1935 if (XEXP (XVECEXP (src, 0, 0), 0))
1936 start = 0;
1937 else
1938 start = 1;
1939 finish = XVECLEN (src, 0);
1941 tmps = XALLOCAVEC (rtx, finish);
1943 /* Copy the (probable) hard regs into pseudos. */
1944 for (i = start; i < finish; i++)
1946 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1947 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1949 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1950 emit_move_insn (tmps[i], reg);
1952 else
1953 tmps[i] = reg;
1956 /* If we won't be storing directly into memory, protect the real destination
1957 from strange tricks we might play. */
1958 dst = orig_dst;
1959 if (GET_CODE (dst) == PARALLEL)
1961 rtx temp;
1963 /* We can get a PARALLEL dst if there is a conditional expression in
1964 a return statement. In that case, the dst and src are the same,
1965 so no action is necessary. */
1966 if (rtx_equal_p (dst, src))
1967 return;
1969 /* It is unclear if we can ever reach here, but we may as well handle
1970 it. Allocate a temporary, and split this into a store/load to/from
1971 the temporary. */
1973 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1974 emit_group_store (temp, src, type, ssize);
1975 emit_group_load (dst, temp, type, ssize);
1976 return;
1978 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1980 enum machine_mode outer = GET_MODE (dst);
1981 enum machine_mode inner;
1982 HOST_WIDE_INT bytepos;
1983 bool done = false;
1984 rtx temp;
1986 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1987 dst = gen_reg_rtx (outer);
1989 /* Make life a bit easier for combine. */
1990 /* If the first element of the vector is the low part
1991 of the destination mode, use a paradoxical subreg to
1992 initialize the destination. */
1993 if (start < finish)
1995 inner = GET_MODE (tmps[start]);
1996 bytepos = subreg_lowpart_offset (inner, outer);
1997 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1999 temp = simplify_gen_subreg (outer, tmps[start],
2000 inner, 0);
2001 if (temp)
2003 emit_move_insn (dst, temp);
2004 done = true;
2005 start++;
2010 /* If the first element wasn't the low part, try the last. */
2011 if (!done
2012 && start < finish - 1)
2014 inner = GET_MODE (tmps[finish - 1]);
2015 bytepos = subreg_lowpart_offset (inner, outer);
2016 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2018 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2019 inner, 0);
2020 if (temp)
2022 emit_move_insn (dst, temp);
2023 done = true;
2024 finish--;
2029 /* Otherwise, simply initialize the result to zero. */
2030 if (!done)
2031 emit_move_insn (dst, CONST0_RTX (outer));
2034 /* Process the pieces. */
2035 for (i = start; i < finish; i++)
2037 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2038 enum machine_mode mode = GET_MODE (tmps[i]);
2039 unsigned int bytelen = GET_MODE_SIZE (mode);
2040 unsigned int adj_bytelen = bytelen;
2041 rtx dest = dst;
2043 /* Handle trailing fragments that run over the size of the struct. */
2044 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2045 adj_bytelen = ssize - bytepos;
2047 if (GET_CODE (dst) == CONCAT)
2049 if (bytepos + adj_bytelen
2050 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2051 dest = XEXP (dst, 0);
2052 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2054 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2055 dest = XEXP (dst, 1);
2057 else
2059 enum machine_mode dest_mode = GET_MODE (dest);
2060 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2062 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2064 if (GET_MODE_ALIGNMENT (dest_mode)
2065 >= GET_MODE_ALIGNMENT (tmp_mode))
2067 dest = assign_stack_temp (dest_mode,
2068 GET_MODE_SIZE (dest_mode),
2070 emit_move_insn (adjust_address (dest,
2071 tmp_mode,
2072 bytepos),
2073 tmps[i]);
2074 dst = dest;
2076 else
2078 dest = assign_stack_temp (tmp_mode,
2079 GET_MODE_SIZE (tmp_mode),
2081 emit_move_insn (dest, tmps[i]);
2082 dst = adjust_address (dest, dest_mode, bytepos);
2084 break;
2088 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2090 /* store_bit_field always takes its value from the lsb.
2091 Move the fragment to the lsb if it's not already there. */
2092 if (
2093 #ifdef BLOCK_REG_PADDING
2094 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2095 == (BYTES_BIG_ENDIAN ? upward : downward)
2096 #else
2097 BYTES_BIG_ENDIAN
2098 #endif
2101 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2102 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2103 build_int_cst (NULL_TREE, shift),
2104 tmps[i], 0);
2106 bytelen = adj_bytelen;
2109 /* Optimize the access just a bit. */
2110 if (MEM_P (dest)
2111 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2112 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2113 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2114 && bytelen == GET_MODE_SIZE (mode))
2115 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2116 else
2117 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2118 mode, tmps[i]);
2121 /* Copy from the pseudo into the (probable) hard reg. */
2122 if (orig_dst != dst)
2123 emit_move_insn (orig_dst, dst);
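/* Illustrative sketch (hypothetical helper, not compiled): spilling a
   value described by a PARALLEL into a freshly allocated BLKmode stack
   temporary via emit_group_store.  An active RTL expansion context is
   assumed.  */
#if 0
static rtx
example_spill_group_to_stack (rtx src_parallel, tree type)
{
  HOST_WIDE_INT size = int_size_in_bytes (type);
  rtx mem = assign_stack_temp (BLKmode, size, 0);

  emit_group_store (mem, src_parallel, type, size);
  return mem;
}
#endif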
2126 /* Generate code to copy a BLKmode object of TYPE out of a
2127 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2128 is null, a stack temporary is created. TGTBLK is returned.
2130 The purpose of this routine is to handle functions that return
2131 BLKmode structures in registers. Some machines (the PA for example)
2132 want to return all small structures in registers regardless of the
2133 structure's alignment. */
2136 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2138 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2139 rtx src = NULL, dst = NULL;
2140 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2141 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2142 enum machine_mode copy_mode;
2144 if (tgtblk == 0)
2146 tgtblk = assign_temp (build_qualified_type (type,
2147 (TYPE_QUALS (type)
2148 | TYPE_QUAL_CONST)),
2149 0, 1, 1);
2150 preserve_temp_slots (tgtblk);
2153 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2154 into a new pseudo which is a full word. */
2156 if (GET_MODE (srcreg) != BLKmode
2157 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2158 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2160 /* If the structure doesn't take up a whole number of words, see whether
2161 SRCREG is padded on the left or on the right. If it's on the left,
2162 set PADDING_CORRECTION to the number of bits to skip.
2164 In most ABIs, the structure will be returned at the least significant end of
2165 the register, which translates to right padding on little-endian
2166 targets and left padding on big-endian targets. The opposite
2167 holds if the structure is returned at the most significant
2168 end of the register. */
2169 if (bytes % UNITS_PER_WORD != 0
2170 && (targetm.calls.return_in_msb (type)
2171 ? !BYTES_BIG_ENDIAN
2172 : BYTES_BIG_ENDIAN))
2173 padding_correction
2174 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2176 /* Copy the structure BITSIZE bits at a time. If the target lives in
2177 memory, take care of not reading/writing past its end by selecting
2178 a copy mode suited to BITSIZE. This should always be possible given
2179 how it is computed.
2181 We could probably emit more efficient code for machines which do not use
2182 strict alignment, but it doesn't seem worth the effort at the current
2183 time. */
2185 copy_mode = word_mode;
2186 if (MEM_P (tgtblk))
2188 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2189 if (mem_mode != BLKmode)
2190 copy_mode = mem_mode;
2193 for (bitpos = 0, xbitpos = padding_correction;
2194 bitpos < bytes * BITS_PER_UNIT;
2195 bitpos += bitsize, xbitpos += bitsize)
2197 /* We need a new source operand each time xbitpos is on a
2198 word boundary and when xbitpos == padding_correction
2199 (the first time through). */
2200 if (xbitpos % BITS_PER_WORD == 0
2201 || xbitpos == padding_correction)
2202 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2203 GET_MODE (srcreg));
2205 /* We need a new destination operand each time bitpos is on
2206 a word boundary. */
2207 if (bitpos % BITS_PER_WORD == 0)
2208 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2210 /* Use xbitpos for the source extraction (right justified) and
2211 bitpos for the destination store (left justified). */
2212 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2213 extract_bit_field (src, bitsize,
2214 xbitpos % BITS_PER_WORD, 1,
2215 NULL_RTX, copy_mode, copy_mode));
2218 return tgtblk;
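/* Illustrative sketch (hypothetical helper, not compiled): a caller that
   has just expanded a call whose BLKmode result arrives in a register can
   unpack it with copy_blkmode_from_reg; passing NULL_RTX for TGTBLK lets
   the routine allocate the stack temporary itself.  */
#if 0
static rtx
example_copy_blkmode_result (rtx result_reg, tree result_type)
{
  return copy_blkmode_from_reg (NULL_RTX, result_reg, result_type);
}
#endif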
2221 /* Add a USE expression for REG to the (possibly empty) list pointed
2222 to by CALL_FUSAGE. REG must denote a hard register. */
2224 void
2225 use_reg (rtx *call_fusage, rtx reg)
2227 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2229 *call_fusage
2230 = gen_rtx_EXPR_LIST (VOIDmode,
2231 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2234 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2235 starting at REGNO. All of these registers must be hard registers. */
2237 void
2238 use_regs (rtx *call_fusage, int regno, int nregs)
2240 int i;
2242 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2244 for (i = 0; i < nregs; i++)
2245 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2248 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2249 PARALLEL REGS. This is for calls that pass values in multiple
2250 non-contiguous locations. The Irix 6 ABI has examples of this. */
2252 void
2253 use_group_regs (rtx *call_fusage, rtx regs)
2255 int i;
2257 for (i = 0; i < XVECLEN (regs, 0); i++)
2259 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2261 /* A NULL entry means the parameter goes both on the stack and in
2262 registers. This can also be a MEM for targets that pass values
2263 partially on the stack and partially in registers. */
2264 if (reg != 0 && REG_P (reg))
2265 use_reg (call_fusage, reg);
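/* Illustrative sketch (hypothetical helper, not compiled): how a call
   expander might record argument registers in an initially empty
   CALL_FUSAGE list with the helpers above.  ARG_REG is assumed to be a
   single hard register and ARG_PARALLEL a PARALLEL of hard registers.  */
#if 0
static rtx
example_build_call_fusage (rtx arg_reg, rtx arg_parallel)
{
  rtx call_fusage = NULL_RTX;

  use_reg (&call_fusage, arg_reg);
  use_group_regs (&call_fusage, arg_parallel);
  return call_fusage;
}
#endif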
2269 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2270 assignment and the code of the expression on the RHS is CODE. Return
2271 NULL otherwise. */
2273 static gimple
2274 get_def_for_expr (tree name, enum tree_code code)
2276 gimple def_stmt;
2278 if (TREE_CODE (name) != SSA_NAME)
2279 return NULL;
2281 def_stmt = get_gimple_for_ssa_name (name);
2282 if (!def_stmt
2283 || gimple_assign_rhs_code (def_stmt) != code)
2284 return NULL;
2286 return def_stmt;
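/* Illustrative sketch (hypothetical helper, not compiled): a typical use
   of get_def_for_expr is to peek at the statement defining an operand and
   ask whether its RHS has a particular code, e.g. a multiplication that
   might be fused into a larger pattern.  */
#if 0
static bool
example_operand_is_mult (tree op)
{
  gimple def_stmt = get_def_for_expr (op, MULT_EXPR);
  return def_stmt != NULL;
}
#endif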
2290 /* Determine whether the LEN bytes generated by CONSTFUN can be
2291 stored to memory using several move instructions. CONSTFUNDATA is
2292 a pointer which will be passed as argument in every CONSTFUN call.
2293 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2294 a memset operation and false if it's a copy of a constant string.
2295 Return nonzero if a call to store_by_pieces should succeed. */
2298 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2299 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2300 void *constfundata, unsigned int align, bool memsetp)
2302 unsigned HOST_WIDE_INT l;
2303 unsigned int max_size;
2304 HOST_WIDE_INT offset = 0;
2305 enum machine_mode mode, tmode;
2306 enum insn_code icode;
2307 int reverse;
2308 rtx cst;
2310 if (len == 0)
2311 return 1;
2313 if (! (memsetp
2314 ? SET_BY_PIECES_P (len, align)
2315 : STORE_BY_PIECES_P (len, align)))
2316 return 0;
2318 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2319 if (align >= GET_MODE_ALIGNMENT (tmode))
2320 align = GET_MODE_ALIGNMENT (tmode);
2321 else
2323 enum machine_mode xmode;
2325 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2326 tmode != VOIDmode;
2327 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2328 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2329 || SLOW_UNALIGNED_ACCESS (tmode, align))
2330 break;
2332 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2335 /* We would first store what we can in the largest integer mode, then go to
2336 successively smaller modes. */
2338 for (reverse = 0;
2339 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2340 reverse++)
2342 l = len;
2343 mode = VOIDmode;
2344 max_size = STORE_MAX_PIECES + 1;
2345 while (max_size > 1)
2347 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2348 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2349 if (GET_MODE_SIZE (tmode) < max_size)
2350 mode = tmode;
2352 if (mode == VOIDmode)
2353 break;
2355 icode = optab_handler (mov_optab, mode)->insn_code;
2356 if (icode != CODE_FOR_nothing
2357 && align >= GET_MODE_ALIGNMENT (mode))
2359 unsigned int size = GET_MODE_SIZE (mode);
2361 while (l >= size)
2363 if (reverse)
2364 offset -= size;
2366 cst = (*constfun) (constfundata, offset, mode);
2367 if (!LEGITIMATE_CONSTANT_P (cst))
2368 return 0;
2370 if (!reverse)
2371 offset += size;
2373 l -= size;
2377 max_size = GET_MODE_SIZE (mode);
2380 /* The code above should have handled everything. */
2381 gcc_assert (!l);
2384 return 1;
2387 /* Generate several move instructions to store LEN bytes generated by
2388 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2389 pointer which will be passed as argument in every CONSTFUN call.
2390 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2391 a memset operation and false if it's a copy of a constant string.
2392 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2393 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2394 stpcpy. */
2397 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2398 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2399 void *constfundata, unsigned int align, bool memsetp, int endp)
2401 enum machine_mode to_addr_mode
2402 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
2403 struct store_by_pieces_d data;
2405 if (len == 0)
2407 gcc_assert (endp != 2);
2408 return to;
2411 gcc_assert (memsetp
2412 ? SET_BY_PIECES_P (len, align)
2413 : STORE_BY_PIECES_P (len, align));
2414 data.constfun = constfun;
2415 data.constfundata = constfundata;
2416 data.len = len;
2417 data.to = to;
2418 store_by_pieces_1 (&data, align);
2419 if (endp)
2421 rtx to1;
2423 gcc_assert (!data.reverse);
2424 if (data.autinc_to)
2426 if (endp == 2)
2428 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2429 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2430 else
2431 data.to_addr = copy_to_mode_reg (to_addr_mode,
2432 plus_constant (data.to_addr,
2433 -1));
2435 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2436 data.offset);
2438 else
2440 if (endp == 2)
2441 --data.offset;
2442 to1 = adjust_address (data.to, QImode, data.offset);
2444 return to1;
2446 else
2447 return data.to;
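/* Illustrative sketch (hypothetical helpers, not compiled): a CONSTFUN
   that always yields zero of the requested mode (much like
   clear_by_pieces_1 below), together with the usual guard-then-store
   sequence around can_store_by_pieces / store_by_pieces.  */
#if 0
static rtx
example_constfun (void *data ATTRIBUTE_UNUSED,
		  HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		  enum machine_mode mode)
{
  return CONST0_RTX (mode);
}

static void
example_zero_fill (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  if (can_store_by_pieces (len, example_constfun, NULL, align, true))
    store_by_pieces (to, len, example_constfun, NULL, align, true, 0);
}
#endif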
2450 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2451 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2453 static void
2454 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2456 struct store_by_pieces_d data;
2458 if (len == 0)
2459 return;
2461 data.constfun = clear_by_pieces_1;
2462 data.constfundata = NULL;
2463 data.len = len;
2464 data.to = to;
2465 store_by_pieces_1 (&data, align);
2468 /* Callback routine for clear_by_pieces.
2469 Return const0_rtx unconditionally. */
2471 static rtx
2472 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2473 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2474 enum machine_mode mode ATTRIBUTE_UNUSED)
2476 return const0_rtx;
2479 /* Subroutine of clear_by_pieces and store_by_pieces.
2480 Generate several move instructions to store LEN bytes of block TO. (A MEM
2481 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2483 static void
2484 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2485 unsigned int align ATTRIBUTE_UNUSED)
2487 enum machine_mode to_addr_mode
2488 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
2489 rtx to_addr = XEXP (data->to, 0);
2490 unsigned int max_size = STORE_MAX_PIECES + 1;
2491 enum machine_mode mode = VOIDmode, tmode;
2492 enum insn_code icode;
2494 data->offset = 0;
2495 data->to_addr = to_addr;
2496 data->autinc_to
2497 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2498 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2500 data->explicit_inc_to = 0;
2501 data->reverse
2502 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2503 if (data->reverse)
2504 data->offset = data->len;
2506 /* If storing requires more than two move insns,
2507 copy addresses to registers (to make displacements shorter)
2508 and use post-increment if available. */
2509 if (!data->autinc_to
2510 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2512 /* Determine the main mode we'll be using. */
2513 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2514 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2515 if (GET_MODE_SIZE (tmode) < max_size)
2516 mode = tmode;
2518 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2520 data->to_addr = copy_to_mode_reg (to_addr_mode,
2521 plus_constant (to_addr, data->len));
2522 data->autinc_to = 1;
2523 data->explicit_inc_to = -1;
2526 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2527 && ! data->autinc_to)
2529 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2530 data->autinc_to = 1;
2531 data->explicit_inc_to = 1;
2534 if (!data->autinc_to && CONSTANT_P (to_addr))
2535 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2538 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2539 if (align >= GET_MODE_ALIGNMENT (tmode))
2540 align = GET_MODE_ALIGNMENT (tmode);
2541 else
2543 enum machine_mode xmode;
2545 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2546 tmode != VOIDmode;
2547 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2548 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2549 || SLOW_UNALIGNED_ACCESS (tmode, align))
2550 break;
2552 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2555 /* First store what we can in the largest integer mode, then go to
2556 successively smaller modes. */
2558 while (max_size > 1)
2560 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2561 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2562 if (GET_MODE_SIZE (tmode) < max_size)
2563 mode = tmode;
2565 if (mode == VOIDmode)
2566 break;
2568 icode = optab_handler (mov_optab, mode)->insn_code;
2569 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2570 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2572 max_size = GET_MODE_SIZE (mode);
2575 /* The code above should have handled everything. */
2576 gcc_assert (!data->len);
2579 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2580 with move instructions for mode MODE. GENFUN is the gen_... function
2581 to make a move insn for that mode. DATA has all the other info. */
2583 static void
2584 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2585 struct store_by_pieces_d *data)
2587 unsigned int size = GET_MODE_SIZE (mode);
2588 rtx to1, cst;
2590 while (data->len >= size)
2592 if (data->reverse)
2593 data->offset -= size;
2595 if (data->autinc_to)
2596 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2597 data->offset);
2598 else
2599 to1 = adjust_address (data->to, mode, data->offset);
2601 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2602 emit_insn (gen_add2_insn (data->to_addr,
2603 GEN_INT (-(HOST_WIDE_INT) size)));
2605 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2606 emit_insn ((*genfun) (to1, cst));
2608 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2609 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2611 if (! data->reverse)
2612 data->offset += size;
2614 data->len -= size;
2618 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2619 its length in bytes. */
2622 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2623 unsigned int expected_align, HOST_WIDE_INT expected_size)
2625 enum machine_mode mode = GET_MODE (object);
2626 unsigned int align;
2628 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2630 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2631 just move a zero. Otherwise, do this a piece at a time. */
2632 if (mode != BLKmode
2633 && CONST_INT_P (size)
2634 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2636 rtx zero = CONST0_RTX (mode);
2637 if (zero != NULL)
2639 emit_move_insn (object, zero);
2640 return NULL;
2643 if (COMPLEX_MODE_P (mode))
2645 zero = CONST0_RTX (GET_MODE_INNER (mode));
2646 if (zero != NULL)
2648 write_complex_part (object, zero, 0);
2649 write_complex_part (object, zero, 1);
2650 return NULL;
2655 if (size == const0_rtx)
2656 return NULL;
2658 align = MEM_ALIGN (object);
2660 if (CONST_INT_P (size)
2661 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2662 clear_by_pieces (object, INTVAL (size), align);
2663 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2664 expected_align, expected_size))
2666 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2667 return set_storage_via_libcall (object, size, const0_rtx,
2668 method == BLOCK_OP_TAILCALL);
2669 else
2670 gcc_unreachable ();
2672 return NULL;
2676 clear_storage (rtx object, rtx size, enum block_op_methods method)
2678 return clear_storage_hints (object, size, method, 0, -1);
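/* Illustrative sketch (hypothetical helper, not compiled): zeroing a
   BLKmode MEM of a known byte size with clear_storage, which then picks
   between clear_by_pieces, a setmem pattern and a memset libcall.  */
#if 0
static void
example_zero_object (rtx blk_mem, HOST_WIDE_INT size_in_bytes)
{
  clear_storage (blk_mem, GEN_INT (size_in_bytes), BLOCK_OP_NORMAL);
}
#endif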
2682 /* A subroutine of clear_storage. Expand a call to memset.
2683 Return the return value of memset, 0 otherwise. */
2686 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2688 tree call_expr, fn, object_tree, size_tree, val_tree;
2689 enum machine_mode size_mode;
2690 rtx retval;
2692 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2693 place those new pseudos into a VAR_DECL and use them later. */
2695 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2697 size_mode = TYPE_MODE (sizetype);
2698 size = convert_to_mode (size_mode, size, 1);
2699 size = copy_to_mode_reg (size_mode, size);
2701 /* It is incorrect to use the libcall calling conventions to call
2702 memset in this context. This could be a user call to memset and
2703 the user may wish to examine the return value from memset. For
2704 targets where libcalls and normal calls have different conventions
2705 for returning pointers, we could end up generating incorrect code. */
2707 object_tree = make_tree (ptr_type_node, object);
2708 if (!CONST_INT_P (val))
2709 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2710 size_tree = make_tree (sizetype, size);
2711 val_tree = make_tree (integer_type_node, val);
2713 fn = clear_storage_libcall_fn (true);
2714 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2715 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2717 retval = expand_normal (call_expr);
2719 return retval;
2722 /* A subroutine of set_storage_via_libcall. Create the tree node
2723 for the function we use for block clears. The first time FOR_CALL
2724 is true, we call assemble_external. */
2726 tree block_clear_fn;
2728 void
2729 init_block_clear_fn (const char *asmspec)
2731 if (!block_clear_fn)
2733 tree fn, args;
2735 fn = get_identifier ("memset");
2736 args = build_function_type_list (ptr_type_node, ptr_type_node,
2737 integer_type_node, sizetype,
2738 NULL_TREE);
2740 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2741 DECL_EXTERNAL (fn) = 1;
2742 TREE_PUBLIC (fn) = 1;
2743 DECL_ARTIFICIAL (fn) = 1;
2744 TREE_NOTHROW (fn) = 1;
2745 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2746 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2748 block_clear_fn = fn;
2751 if (asmspec)
2752 set_user_assembler_name (block_clear_fn, asmspec);
2755 static tree
2756 clear_storage_libcall_fn (int for_call)
2758 static bool emitted_extern;
2760 if (!block_clear_fn)
2761 init_block_clear_fn (NULL);
2763 if (for_call && !emitted_extern)
2765 emitted_extern = true;
2766 make_decl_rtl (block_clear_fn);
2767 assemble_external (block_clear_fn);
2770 return block_clear_fn;
2773 /* Expand a setmem pattern; return true if successful. */
2775 bool
2776 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2777 unsigned int expected_align, HOST_WIDE_INT expected_size)
2779 /* Try the most limited insn first, because there's no point
2780 including more than one in the machine description unless
2781 the more limited one has some advantage. */
2783 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2784 enum machine_mode mode;
2786 if (expected_align < align)
2787 expected_align = align;
2789 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2790 mode = GET_MODE_WIDER_MODE (mode))
2792 enum insn_code code = setmem_optab[(int) mode];
2793 insn_operand_predicate_fn pred;
2795 if (code != CODE_FOR_nothing
2796 /* We don't need MODE to be narrower than
2797 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2798 the mode mask, as it is returned by the macro, it will
2799 definitely be less than the actual mode mask. */
2800 && ((CONST_INT_P (size)
2801 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2802 <= (GET_MODE_MASK (mode) >> 1)))
2803 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2804 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2805 || (*pred) (object, BLKmode))
2806 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2807 || (*pred) (opalign, VOIDmode)))
2809 rtx opsize, opchar;
2810 enum machine_mode char_mode;
2811 rtx last = get_last_insn ();
2812 rtx pat;
2814 opsize = convert_to_mode (mode, size, 1);
2815 pred = insn_data[(int) code].operand[1].predicate;
2816 if (pred != 0 && ! (*pred) (opsize, mode))
2817 opsize = copy_to_mode_reg (mode, opsize);
2819 opchar = val;
2820 char_mode = insn_data[(int) code].operand[2].mode;
2821 if (char_mode != VOIDmode)
2823 opchar = convert_to_mode (char_mode, opchar, 1);
2824 pred = insn_data[(int) code].operand[2].predicate;
2825 if (pred != 0 && ! (*pred) (opchar, char_mode))
2826 opchar = copy_to_mode_reg (char_mode, opchar);
2829 if (insn_data[(int) code].n_operands == 4)
2830 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2831 else
2832 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2833 GEN_INT (expected_align
2834 / BITS_PER_UNIT),
2835 GEN_INT (expected_size));
2836 if (pat)
2838 emit_insn (pat);
2839 return true;
2841 else
2842 delete_insns_since (last);
2846 return false;
2850 /* Write to one of the components of the complex value CPLX. Write VAL to
2851 the real part if IMAG_P is false, and the imaginary part if it's true. */
2853 static void
2854 write_complex_part (rtx cplx, rtx val, bool imag_p)
2856 enum machine_mode cmode;
2857 enum machine_mode imode;
2858 unsigned ibitsize;
2860 if (GET_CODE (cplx) == CONCAT)
2862 emit_move_insn (XEXP (cplx, imag_p), val);
2863 return;
2866 cmode = GET_MODE (cplx);
2867 imode = GET_MODE_INNER (cmode);
2868 ibitsize = GET_MODE_BITSIZE (imode);
2870 /* For MEMs simplify_gen_subreg may generate an invalid new address
2871 because, e.g., the original address is considered mode-dependent
2872 by the target, which restricts simplify_subreg from invoking
2873 adjust_address_nv. Instead of preparing fallback support for an
2874 invalid address, we call adjust_address_nv directly. */
2875 if (MEM_P (cplx))
2877 emit_move_insn (adjust_address_nv (cplx, imode,
2878 imag_p ? GET_MODE_SIZE (imode) : 0),
2879 val);
2880 return;
2883 /* If the sub-object is at least word sized, then we know that subregging
2884 will work. This special case is important, since store_bit_field
2885 wants to operate on integer modes, and there's rarely an OImode to
2886 correspond to TCmode. */
2887 if (ibitsize >= BITS_PER_WORD
2888 /* For hard regs we have exact predicates. Assume we can split
2889 the original object if it spans an even number of hard regs.
2890 This special case is important for SCmode on 64-bit platforms
2891 where the natural size of floating-point regs is 32-bit. */
2892 || (REG_P (cplx)
2893 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2894 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2896 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2897 imag_p ? GET_MODE_SIZE (imode) : 0);
2898 if (part)
2900 emit_move_insn (part, val);
2901 return;
2903 else
2904 /* simplify_gen_subreg may fail for sub-word MEMs. */
2905 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2908 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2911 /* Extract one of the components of the complex value CPLX. Extract the
2912 real part if IMAG_P is false, and the imaginary part if it's true. */
2914 static rtx
2915 read_complex_part (rtx cplx, bool imag_p)
2917 enum machine_mode cmode, imode;
2918 unsigned ibitsize;
2920 if (GET_CODE (cplx) == CONCAT)
2921 return XEXP (cplx, imag_p);
2923 cmode = GET_MODE (cplx);
2924 imode = GET_MODE_INNER (cmode);
2925 ibitsize = GET_MODE_BITSIZE (imode);
2927 /* Special case reads from complex constants that got spilled to memory. */
2928 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2930 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2931 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2933 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2934 if (CONSTANT_CLASS_P (part))
2935 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2939 /* For MEMs simplify_gen_subreg may generate an invalid new address
2940 because, e.g., the original address is considered mode-dependent
2941 by the target, which restricts simplify_subreg from invoking
2942 adjust_address_nv. Instead of preparing fallback support for an
2943 invalid address, we call adjust_address_nv directly. */
2944 if (MEM_P (cplx))
2945 return adjust_address_nv (cplx, imode,
2946 imag_p ? GET_MODE_SIZE (imode) : 0);
2948 /* If the sub-object is at least word sized, then we know that subregging
2949 will work. This special case is important, since extract_bit_field
2950 wants to operate on integer modes, and there's rarely an OImode to
2951 correspond to TCmode. */
2952 if (ibitsize >= BITS_PER_WORD
2953 /* For hard regs we have exact predicates. Assume we can split
2954 the original object if it spans an even number of hard regs.
2955 This special case is important for SCmode on 64-bit platforms
2956 where the natural size of floating-point regs is 32-bit. */
2957 || (REG_P (cplx)
2958 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2959 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2961 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2962 imag_p ? GET_MODE_SIZE (imode) : 0);
2963 if (ret)
2964 return ret;
2965 else
2966 /* simplify_gen_subreg may fail for sub-word MEMs. */
2967 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2970 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2971 true, NULL_RTX, imode, imode);
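/* Illustrative sketch (hypothetical helper, not compiled): the two helpers
   above are naturally used as a pair, copying a complex value one
   component at a time -- essentially what emit_move_complex_parts below
   does.  */
#if 0
static void
example_copy_complex_by_parts (rtx dst, rtx src)
{
  write_complex_part (dst, read_complex_part (src, false), false);
  write_complex_part (dst, read_complex_part (src, true), true);
}
#endif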
2974 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2975 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2976 represented in NEW_MODE. If FORCE is true, this will never happen, as
2977 we'll force-create a SUBREG if needed. */
2979 static rtx
2980 emit_move_change_mode (enum machine_mode new_mode,
2981 enum machine_mode old_mode, rtx x, bool force)
2983 rtx ret;
2985 if (push_operand (x, GET_MODE (x)))
2987 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2988 MEM_COPY_ATTRIBUTES (ret, x);
2990 else if (MEM_P (x))
2992 /* We don't have to worry about changing the address since the
2993 size in bytes is supposed to be the same. */
2994 if (reload_in_progress)
2996 /* Copy the MEM to change the mode and move any
2997 substitutions from the old MEM to the new one. */
2998 ret = adjust_address_nv (x, new_mode, 0);
2999 copy_replacements (x, ret);
3001 else
3002 ret = adjust_address (x, new_mode, 0);
3004 else
3006 /* Note that we do want simplify_subreg's behavior of validating
3007 that the new mode is ok for a hard register. If we were to use
3008 simplify_gen_subreg, we would create the subreg, but would
3009 probably run into the target not being able to implement it. */
3010 /* Except, of course, when FORCE is true, when this is exactly what
3011 we want; that is needed for CCmodes on some targets. */
3012 if (force)
3013 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3014 else
3015 ret = simplify_subreg (new_mode, x, old_mode, 0);
3018 return ret;
3021 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3022 an integer mode of the same size as MODE. Returns the instruction
3023 emitted, or NULL if such a move could not be generated. */
3025 static rtx
3026 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3028 enum machine_mode imode;
3029 enum insn_code code;
3031 /* There must exist a mode of the exact size we require. */
3032 imode = int_mode_for_mode (mode);
3033 if (imode == BLKmode)
3034 return NULL_RTX;
3036 /* The target must support moves in this mode. */
3037 code = optab_handler (mov_optab, imode)->insn_code;
3038 if (code == CODE_FOR_nothing)
3039 return NULL_RTX;
3041 x = emit_move_change_mode (imode, mode, x, force);
3042 if (x == NULL_RTX)
3043 return NULL_RTX;
3044 y = emit_move_change_mode (imode, mode, y, force);
3045 if (y == NULL_RTX)
3046 return NULL_RTX;
3047 return emit_insn (GEN_FCN (code) (x, y));
3050 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3051 Return an equivalent MEM that does not use an auto-increment. */
3053 static rtx
3054 emit_move_resolve_push (enum machine_mode mode, rtx x)
3056 enum rtx_code code = GET_CODE (XEXP (x, 0));
3057 HOST_WIDE_INT adjust;
3058 rtx temp;
3060 adjust = GET_MODE_SIZE (mode);
3061 #ifdef PUSH_ROUNDING
3062 adjust = PUSH_ROUNDING (adjust);
3063 #endif
3064 if (code == PRE_DEC || code == POST_DEC)
3065 adjust = -adjust;
3066 else if (code == PRE_MODIFY || code == POST_MODIFY)
3068 rtx expr = XEXP (XEXP (x, 0), 1);
3069 HOST_WIDE_INT val;
3071 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3072 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3073 val = INTVAL (XEXP (expr, 1));
3074 if (GET_CODE (expr) == MINUS)
3075 val = -val;
3076 gcc_assert (adjust == val || adjust == -val);
3077 adjust = val;
3080 /* Do not use anti_adjust_stack, since we don't want to update
3081 stack_pointer_delta. */
3082 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3083 GEN_INT (adjust), stack_pointer_rtx,
3084 0, OPTAB_LIB_WIDEN);
3085 if (temp != stack_pointer_rtx)
3086 emit_move_insn (stack_pointer_rtx, temp);
3088 switch (code)
3090 case PRE_INC:
3091 case PRE_DEC:
3092 case PRE_MODIFY:
3093 temp = stack_pointer_rtx;
3094 break;
3095 case POST_INC:
3096 case POST_DEC:
3097 case POST_MODIFY:
3098 temp = plus_constant (stack_pointer_rtx, -adjust);
3099 break;
3100 default:
3101 gcc_unreachable ();
3104 return replace_equiv_address (x, temp);
3107 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3108 X is known to satisfy push_operand, and MODE is known to be complex.
3109 Returns the last instruction emitted. */
3112 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3114 enum machine_mode submode = GET_MODE_INNER (mode);
3115 bool imag_first;
3117 #ifdef PUSH_ROUNDING
3118 unsigned int submodesize = GET_MODE_SIZE (submode);
3120 /* If we are pushing to the stack but the size is smaller than what
3121 the machine can push exactly, we need to use move instructions. */
3122 if (PUSH_ROUNDING (submodesize) != submodesize)
3124 x = emit_move_resolve_push (mode, x);
3125 return emit_move_insn (x, y);
3127 #endif
3129 /* Note that the real part always precedes the imag part in memory
3130 regardless of machine's endianness. */
3131 switch (GET_CODE (XEXP (x, 0)))
3133 case PRE_DEC:
3134 case POST_DEC:
3135 imag_first = true;
3136 break;
3137 case PRE_INC:
3138 case POST_INC:
3139 imag_first = false;
3140 break;
3141 default:
3142 gcc_unreachable ();
3145 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3146 read_complex_part (y, imag_first));
3147 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3148 read_complex_part (y, !imag_first));
3151 /* A subroutine of emit_move_complex. Perform the move from Y to X
3152 via two moves of the parts. Returns the last instruction emitted. */
3155 emit_move_complex_parts (rtx x, rtx y)
3157 /* Show the output dies here. This is necessary for SUBREGs
3158 of pseudos since we cannot track their lifetimes correctly;
3159 hard regs shouldn't appear here except as return values. */
3160 if (!reload_completed && !reload_in_progress
3161 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3162 emit_clobber (x);
3164 write_complex_part (x, read_complex_part (y, false), false);
3165 write_complex_part (x, read_complex_part (y, true), true);
3167 return get_last_insn ();
3170 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3171 MODE is known to be complex. Returns the last instruction emitted. */
3173 static rtx
3174 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3176 bool try_int;
3178 /* Need to take special care for pushes, to maintain proper ordering
3179 of the data, and possibly extra padding. */
3180 if (push_operand (x, mode))
3181 return emit_move_complex_push (mode, x, y);
3183 /* See if we can coerce the target into moving both values at once. */
3185 /* Move floating point as parts. */
3186 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3187 && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
3188 try_int = false;
3189 /* Not possible if the values are inherently not adjacent. */
3190 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3191 try_int = false;
3192 /* Is possible if both are registers (or subregs of registers). */
3193 else if (register_operand (x, mode) && register_operand (y, mode))
3194 try_int = true;
3195 /* If one of the operands is a memory, and alignment constraints
3196 are friendly enough, we may be able to do combined memory operations.
3197 We do not attempt this if Y is a constant because that combination is
3198 usually better with the by-parts thing below. */
3199 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3200 && (!STRICT_ALIGNMENT
3201 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3202 try_int = true;
3203 else
3204 try_int = false;
3206 if (try_int)
3208 rtx ret;
3210 /* For memory to memory moves, optimal behavior can be had with the
3211 existing block move logic. */
3212 if (MEM_P (x) && MEM_P (y))
3214 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3215 BLOCK_OP_NO_LIBCALL);
3216 return get_last_insn ();
3219 ret = emit_move_via_integer (mode, x, y, true);
3220 if (ret)
3221 return ret;
3224 return emit_move_complex_parts (x, y);
3227 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3228 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3230 static rtx
3231 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3233 rtx ret;
3235 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3236 if (mode != CCmode)
3238 enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3239 if (code != CODE_FOR_nothing)
3241 x = emit_move_change_mode (CCmode, mode, x, true);
3242 y = emit_move_change_mode (CCmode, mode, y, true);
3243 return emit_insn (GEN_FCN (code) (x, y));
3247 /* Otherwise, find the MODE_INT mode of the same width. */
3248 ret = emit_move_via_integer (mode, x, y, false);
3249 gcc_assert (ret != NULL);
3250 return ret;
3253 /* Return true if word I of OP lies entirely in the
3254 undefined bits of a paradoxical subreg. */
3256 static bool
3257 undefined_operand_subword_p (const_rtx op, int i)
3259 enum machine_mode innermode, innermostmode;
3260 int offset;
3261 if (GET_CODE (op) != SUBREG)
3262 return false;
3263 innermode = GET_MODE (op);
3264 innermostmode = GET_MODE (SUBREG_REG (op));
3265 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3266 /* The SUBREG_BYTE represents offset, as if the value were stored in
3267 memory, except for a paradoxical subreg where we define
3268 SUBREG_BYTE to be 0; undo this exception as in
3269 simplify_subreg. */
3270 if (SUBREG_BYTE (op) == 0
3271 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3273 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3274 if (WORDS_BIG_ENDIAN)
3275 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3276 if (BYTES_BIG_ENDIAN)
3277 offset += difference % UNITS_PER_WORD;
3279 if (offset >= GET_MODE_SIZE (innermostmode)
3280 || offset <= -GET_MODE_SIZE (word_mode))
3281 return true;
3282 return false;
3285 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3286 MODE is any multi-word or full-word mode that lacks a move_insn
3287 pattern. Note that you will get better code if you define such
3288 patterns, even if they must turn into multiple assembler instructions. */
3290 static rtx
3291 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3293 rtx last_insn = 0;
3294 rtx seq, inner;
3295 bool need_clobber;
3296 int i;
3298 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3300 /* If X is a push on the stack, do the push now and replace
3301 X with a reference to the stack pointer. */
3302 if (push_operand (x, mode))
3303 x = emit_move_resolve_push (mode, x);
3305 /* If we are in reload, see if either operand is a MEM whose address
3306 is scheduled for replacement. */
3307 if (reload_in_progress && MEM_P (x)
3308 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3309 x = replace_equiv_address_nv (x, inner);
3310 if (reload_in_progress && MEM_P (y)
3311 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3312 y = replace_equiv_address_nv (y, inner);
3314 start_sequence ();
3316 need_clobber = false;
3317 for (i = 0;
3318 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3319 i++)
3321 rtx xpart = operand_subword (x, i, 1, mode);
3322 rtx ypart;
3324 /* Do not generate code for a move if it would come entirely
3325 from the undefined bits of a paradoxical subreg. */
3326 if (undefined_operand_subword_p (y, i))
3327 continue;
3329 ypart = operand_subword (y, i, 1, mode);
3331 /* If we can't get a part of Y, put Y into memory if it is a
3332 constant. Otherwise, force it into a register. Then we must
3333 be able to get a part of Y. */
3334 if (ypart == 0 && CONSTANT_P (y))
3336 y = use_anchored_address (force_const_mem (mode, y));
3337 ypart = operand_subword (y, i, 1, mode);
3339 else if (ypart == 0)
3340 ypart = operand_subword_force (y, i, mode);
3342 gcc_assert (xpart && ypart);
3344 need_clobber |= (GET_CODE (xpart) == SUBREG);
3346 last_insn = emit_move_insn (xpart, ypart);
3349 seq = get_insns ();
3350 end_sequence ();
3352 /* Show the output dies here. This is necessary for SUBREGs
3353 of pseudos since we cannot track their lifetimes correctly;
3354 hard regs shouldn't appear here except as return values.
3355 We never want to emit such a clobber after reload. */
3356 if (x != y
3357 && ! (reload_in_progress || reload_completed)
3358 && need_clobber != 0)
3359 emit_clobber (x);
3361 emit_insn (seq);
3363 return last_insn;
3366 /* Low level part of emit_move_insn.
3367 Called just like emit_move_insn, but assumes X and Y
3368 are basically valid. */
3371 emit_move_insn_1 (rtx x, rtx y)
3373 enum machine_mode mode = GET_MODE (x);
3374 enum insn_code code;
3376 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3378 code = optab_handler (mov_optab, mode)->insn_code;
3379 if (code != CODE_FOR_nothing)
3380 return emit_insn (GEN_FCN (code) (x, y));
3382 /* Expand complex moves by moving real part and imag part. */
3383 if (COMPLEX_MODE_P (mode))
3384 return emit_move_complex (mode, x, y);
3386 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3387 || ALL_FIXED_POINT_MODE_P (mode))
3389 rtx result = emit_move_via_integer (mode, x, y, true);
3391 /* If we can't find an integer mode, use multi words. */
3392 if (result)
3393 return result;
3394 else
3395 return emit_move_multi_word (mode, x, y);
3398 if (GET_MODE_CLASS (mode) == MODE_CC)
3399 return emit_move_ccmode (mode, x, y);
3401 /* Try using a move pattern for the corresponding integer mode. This is
3402 only safe when simplify_subreg can convert MODE constants into integer
3403 constants. At present, it can only do this reliably if the value
3404 fits within a HOST_WIDE_INT. */
3405 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3407 rtx ret = emit_move_via_integer (mode, x, y, false);
3408 if (ret)
3409 return ret;
3412 return emit_move_multi_word (mode, x, y);
3415 /* Generate code to copy Y into X.
3416 Both Y and X must have the same mode, except that
3417 Y can be a constant with VOIDmode.
3418 This mode cannot be BLKmode; use emit_block_move for that.
3420 Return the last instruction emitted. */
3423 emit_move_insn (rtx x, rtx y)
3425 enum machine_mode mode = GET_MODE (x);
3426 rtx y_cst = NULL_RTX;
3427 rtx last_insn, set;
3429 gcc_assert (mode != BLKmode
3430 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3432 if (CONSTANT_P (y))
3434 if (optimize
3435 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3436 && (last_insn = compress_float_constant (x, y)))
3437 return last_insn;
3439 y_cst = y;
3441 if (!LEGITIMATE_CONSTANT_P (y))
3443 y = force_const_mem (mode, y);
3445 /* If the target's cannot_force_const_mem prevented the spill,
3446 assume that the target's move expanders will also take care
3447 of the non-legitimate constant. */
3448 if (!y)
3449 y = y_cst;
3450 else
3451 y = use_anchored_address (y);
3455 /* If X or Y are memory references, verify that their addresses are valid
3456 for the machine. */
3457 if (MEM_P (x)
3458 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3459 MEM_ADDR_SPACE (x))
3460 && ! push_operand (x, GET_MODE (x))))
3461 x = validize_mem (x);
3463 if (MEM_P (y)
3464 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3465 MEM_ADDR_SPACE (y)))
3466 y = validize_mem (y);
3468 gcc_assert (mode != BLKmode);
3470 last_insn = emit_move_insn_1 (x, y);
3472 if (y_cst && REG_P (x)
3473 && (set = single_set (last_insn)) != NULL_RTX
3474 && SET_DEST (set) == x
3475 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3476 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3478 return last_insn;
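/* Illustrative sketch (hypothetical helper, not compiled): the common
   idiom of copying a value into a fresh pseudo of the same mode using
   emit_move_insn; an active RTL expansion context is assumed.  */
#if 0
static rtx
example_copy_into_pseudo (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  emit_move_insn (temp, x);
  return temp;
}
#endif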
3481 /* If Y is representable exactly in a narrower mode, and the target can
3482 perform the extension directly from constant or memory, then emit the
3483 move as an extension. */
3485 static rtx
3486 compress_float_constant (rtx x, rtx y)
3488 enum machine_mode dstmode = GET_MODE (x);
3489 enum machine_mode orig_srcmode = GET_MODE (y);
3490 enum machine_mode srcmode;
3491 REAL_VALUE_TYPE r;
3492 int oldcost, newcost;
3493 bool speed = optimize_insn_for_speed_p ();
3495 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3497 if (LEGITIMATE_CONSTANT_P (y))
3498 oldcost = rtx_cost (y, SET, speed);
3499 else
3500 oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);
3502 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3503 srcmode != orig_srcmode;
3504 srcmode = GET_MODE_WIDER_MODE (srcmode))
3506 enum insn_code ic;
3507 rtx trunc_y, last_insn;
3509 /* Skip if the target can't extend this way. */
3510 ic = can_extend_p (dstmode, srcmode, 0);
3511 if (ic == CODE_FOR_nothing)
3512 continue;
3514 /* Skip if the narrowed value isn't exact. */
3515 if (! exact_real_truncate (srcmode, &r))
3516 continue;
3518 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3520 if (LEGITIMATE_CONSTANT_P (trunc_y))
3522 /* Skip if the target needs extra instructions to perform
3523 the extension. */
3524 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3525 continue;
3526 /* This is valid, but may not be cheaper than the original. */
3527 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3528 if (oldcost < newcost)
3529 continue;
3531 else if (float_extend_from_mem[dstmode][srcmode])
3533 trunc_y = force_const_mem (srcmode, trunc_y);
3534 /* This is valid, but may not be cheaper than the original. */
3535 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3536 if (oldcost < newcost)
3537 continue;
3538 trunc_y = validize_mem (trunc_y);
3540 else
3541 continue;
3543 /* For CSE's benefit, force the compressed constant pool entry
3544 into a new pseudo. This constant may be used in different modes,
3545 and if not, combine will put things back together for us. */
3546 trunc_y = force_reg (srcmode, trunc_y);
3547 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3548 last_insn = get_last_insn ();
3550 if (REG_P (x))
3551 set_unique_reg_note (last_insn, REG_EQUAL, y);
3553 return last_insn;
3556 return NULL_RTX;
3559 /* Pushing data onto the stack. */
3561 /* Push a block of length SIZE (perhaps variable)
3562 and return an rtx to address the beginning of the block.
3563 The value may be virtual_outgoing_args_rtx.
3565 EXTRA is the number of bytes of padding to push in addition to SIZE.
3566 BELOW nonzero means this padding comes at low addresses;
3567 otherwise, the padding comes at high addresses. */
3570 push_block (rtx size, int extra, int below)
3572 rtx temp;
3574 size = convert_modes (Pmode, ptr_mode, size, 1);
3575 if (CONSTANT_P (size))
3576 anti_adjust_stack (plus_constant (size, extra));
3577 else if (REG_P (size) && extra == 0)
3578 anti_adjust_stack (size);
3579 else
3581 temp = copy_to_mode_reg (Pmode, size);
3582 if (extra != 0)
3583 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3584 temp, 0, OPTAB_LIB_WIDEN);
3585 anti_adjust_stack (temp);
3588 #ifndef STACK_GROWS_DOWNWARD
3589 if (0)
3590 #else
3591 if (1)
3592 #endif
3594 temp = virtual_outgoing_args_rtx;
3595 if (extra != 0 && below)
3596 temp = plus_constant (temp, extra);
3598 else
3600 if (CONST_INT_P (size))
3601 temp = plus_constant (virtual_outgoing_args_rtx,
3602 -INTVAL (size) - (below ? 0 : extra));
3603 else if (extra != 0 && !below)
3604 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3605 negate_rtx (Pmode, plus_constant (size, extra)));
3606 else
3607 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3608 negate_rtx (Pmode, size));
3611 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
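/* Illustrative sketch (hypothetical helper, not compiled): reserving SIZE
   bytes of outgoing-argument space with push_block and wrapping the
   returned address in a BLKmode MEM, with no extra padding requested.  */
#if 0
static rtx
example_push_argument_block (rtx size)
{
  rtx addr = push_block (size, 0, 0);
  return gen_rtx_MEM (BLKmode, addr);
}
#endif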
3614 #ifdef PUSH_ROUNDING
3616 /* Emit single push insn. */
3618 static void
3619 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3621 rtx dest_addr;
3622 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3623 rtx dest;
3624 enum insn_code icode;
3625 insn_operand_predicate_fn pred;
3627 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3628 /* If there is a push pattern, use it. Otherwise try the old way of
3629 throwing a MEM representing the push operation to the move expander. */
3630 icode = optab_handler (push_optab, mode)->insn_code;
3631 if (icode != CODE_FOR_nothing)
3633 if (((pred = insn_data[(int) icode].operand[0].predicate)
3634 && !((*pred) (x, mode))))
3635 x = force_reg (mode, x);
3636 emit_insn (GEN_FCN (icode) (x));
3637 return;
3639 if (GET_MODE_SIZE (mode) == rounded_size)
3640 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3641 /* If we are to pad downward, adjust the stack pointer first and
3642 then store X into the stack location using an offset. This is
3643 because emit_move_insn does not know how to pad; it does not have
3644 access to type. */
3645 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3647 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3648 HOST_WIDE_INT offset;
3650 emit_move_insn (stack_pointer_rtx,
3651 expand_binop (Pmode,
3652 #ifdef STACK_GROWS_DOWNWARD
3653 sub_optab,
3654 #else
3655 add_optab,
3656 #endif
3657 stack_pointer_rtx,
3658 GEN_INT (rounded_size),
3659 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3661 offset = (HOST_WIDE_INT) padding_size;
3662 #ifdef STACK_GROWS_DOWNWARD
3663 if (STACK_PUSH_CODE == POST_DEC)
3664 /* We have already decremented the stack pointer, so get the
3665 previous value. */
3666 offset += (HOST_WIDE_INT) rounded_size;
3667 #else
3668 if (STACK_PUSH_CODE == POST_INC)
3669 /* We have already incremented the stack pointer, so get the
3670 previous value. */
3671 offset -= (HOST_WIDE_INT) rounded_size;
3672 #endif
3673 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3675 else
3677 #ifdef STACK_GROWS_DOWNWARD
3678 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3679 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3680 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3681 #else
3682 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3683 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3684 GEN_INT (rounded_size));
3685 #endif
3686 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3689 dest = gen_rtx_MEM (mode, dest_addr);
3691 if (type != 0)
3693 set_mem_attributes (dest, type, 1);
3695 if (flag_optimize_sibling_calls)
3696 /* Function incoming arguments may overlap with sibling call
3697 outgoing arguments and we cannot allow reordering of reads
3698 from function arguments with stores to outgoing arguments
3699 of sibling calls. */
3700 set_mem_alias_set (dest, 0);
3702 emit_move_insn (dest, x);
3704 #endif
3706 /* Generate code to push X onto the stack, assuming it has mode MODE and
3707 type TYPE.
3708 MODE is redundant except when X is a CONST_INT (since they don't
3709 carry mode info).
3710 SIZE is an rtx for the size of data to be copied (in bytes),
3711 needed only if X is BLKmode.
3713 ALIGN (in bits) is maximum alignment we can assume.
3715 If PARTIAL and REG are both nonzero, then copy that many of the first
3716 bytes of X into registers starting with REG, and push the rest of X.
3717 The amount of space pushed is decreased by PARTIAL bytes.
3718 REG must be a hard register in this case.
3719 If REG is zero but PARTIAL is not, take all other actions for an
3720 argument partially in registers, but do not actually load any
3721 registers.
3723 EXTRA is the amount in bytes of extra space to leave next to this arg.
3724 This is ignored if an argument block has already been allocated.
3726 On a machine that lacks real push insns, ARGS_ADDR is the address of
3727 the bottom of the argument block for this call. We use indexing off there
 3728 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3729 argument block has not been preallocated.
3731 ARGS_SO_FAR is the size of args previously pushed for this call.
3733 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3734 for arguments passed in registers. If nonzero, it will be the number
3735 of bytes required. */
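/* Purely as an illustration (real callers compute these values from
   the target's argument layout), pushing one word-sized value with no
   partial-register part, no preallocated argument block and no extra
   padding might look like

       emit_push_insn (val, word_mode, NULL_TREE, NULL_RTX,
                       GET_MODE_ALIGNMENT (word_mode), 0, NULL_RTX,
                       0, NULL_RTX, const0_rtx, 0, NULL_RTX);

   which mirrors the word-by-word recursive call made below for a
   scalar that is partly in registers.  */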
3737 void
3738 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3739 unsigned int align, int partial, rtx reg, int extra,
3740 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3741 rtx alignment_pad)
3743 rtx xinner;
3744 enum direction stack_direction
3745 #ifdef STACK_GROWS_DOWNWARD
3746 = downward;
3747 #else
3748 = upward;
3749 #endif
3751 /* Decide where to pad the argument: `downward' for below,
3752 `upward' for above, or `none' for don't pad it.
3753 Default is below for small data on big-endian machines; else above. */
3754 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3756 /* Invert direction if stack is post-decrement.
3757 FIXME: why? */
3758 if (STACK_PUSH_CODE == POST_DEC)
3759 if (where_pad != none)
3760 where_pad = (where_pad == downward ? upward : downward);
3762 xinner = x;
3764 if (mode == BLKmode
3765 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3767 /* Copy a block into the stack, entirely or partially. */
3769 rtx temp;
3770 int used;
3771 int offset;
3772 int skip;
3774 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3775 used = partial - offset;
3777 if (mode != BLKmode)
3779 /* A value is to be stored in an insufficiently aligned
3780 stack slot; copy via a suitably aligned slot if
3781 necessary. */
3782 size = GEN_INT (GET_MODE_SIZE (mode));
3783 if (!MEM_P (xinner))
3785 temp = assign_temp (type, 0, 1, 1);
3786 emit_move_insn (temp, xinner);
3787 xinner = temp;
3791 gcc_assert (size);
3793 /* USED is now the # of bytes we need not copy to the stack
3794 because registers will take care of them. */
3796 if (partial != 0)
3797 xinner = adjust_address (xinner, BLKmode, used);
3799 /* If the partial register-part of the arg counts in its stack size,
3800 skip the part of stack space corresponding to the registers.
3801 Otherwise, start copying to the beginning of the stack space,
3802 by setting SKIP to 0. */
3803 skip = (reg_parm_stack_space == 0) ? 0 : used;
3805 #ifdef PUSH_ROUNDING
3806 /* Do it with several push insns if that doesn't take lots of insns
3807 and if there is no difficulty with push insns that skip bytes
3808 on the stack for alignment purposes. */
3809 if (args_addr == 0
3810 && PUSH_ARGS
3811 && CONST_INT_P (size)
3812 && skip == 0
3813 && MEM_ALIGN (xinner) >= align
3814 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3815 /* Here we avoid the case of a structure whose weak alignment
3816 forces many pushes of a small amount of data,
3817 and such small pushes do rounding that causes trouble. */
3818 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3819 || align >= BIGGEST_ALIGNMENT
3820 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3821 == (align / BITS_PER_UNIT)))
3822 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3824 /* Push padding now if padding above and stack grows down,
3825 or if padding below and stack grows up.
3826 But if space already allocated, this has already been done. */
3827 if (extra && args_addr == 0
3828 && where_pad != none && where_pad != stack_direction)
3829 anti_adjust_stack (GEN_INT (extra));
3831 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3833 else
3834 #endif /* PUSH_ROUNDING */
3836 rtx target;
3838 /* Otherwise make space on the stack and copy the data
3839 to the address of that space. */
3841 /* Deduct words put into registers from the size we must copy. */
3842 if (partial != 0)
3844 if (CONST_INT_P (size))
3845 size = GEN_INT (INTVAL (size) - used);
3846 else
3847 size = expand_binop (GET_MODE (size), sub_optab, size,
3848 GEN_INT (used), NULL_RTX, 0,
3849 OPTAB_LIB_WIDEN);
3852 /* Get the address of the stack space.
3853 In this case, we do not deal with EXTRA separately.
3854 A single stack adjust will do. */
3855 if (! args_addr)
3857 temp = push_block (size, extra, where_pad == downward);
3858 extra = 0;
3860 else if (CONST_INT_P (args_so_far))
3861 temp = memory_address (BLKmode,
3862 plus_constant (args_addr,
3863 skip + INTVAL (args_so_far)));
3864 else
3865 temp = memory_address (BLKmode,
3866 plus_constant (gen_rtx_PLUS (Pmode,
3867 args_addr,
3868 args_so_far),
3869 skip));
3871 if (!ACCUMULATE_OUTGOING_ARGS)
3873 /* If the source is referenced relative to the stack pointer,
3874 copy it to another register to stabilize it. We do not need
3875 to do this if we know that we won't be changing sp. */
3877 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3878 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3879 temp = copy_to_reg (temp);
3882 target = gen_rtx_MEM (BLKmode, temp);
3884 /* We do *not* set_mem_attributes here, because incoming arguments
3885 may overlap with sibling call outgoing arguments and we cannot
3886 allow reordering of reads from function arguments with stores
3887 to outgoing arguments of sibling calls. We do, however, want
3888 to record the alignment of the stack slot. */
3889 /* ALIGN may well be better aligned than TYPE, e.g. due to
3890 PARM_BOUNDARY. Assume the caller isn't lying. */
3891 set_mem_align (target, align);
3893 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3896 else if (partial > 0)
3898 /* Scalar partly in registers. */
3900 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3901 int i;
3902 int not_stack;
3903 /* # bytes of start of argument
3904 that we must make space for but need not store. */
3905 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3906 int args_offset = INTVAL (args_so_far);
3907 int skip;
3909 /* Push padding now if padding above and stack grows down,
3910 or if padding below and stack grows up.
3911 But if space already allocated, this has already been done. */
3912 if (extra && args_addr == 0
3913 && where_pad != none && where_pad != stack_direction)
3914 anti_adjust_stack (GEN_INT (extra));
3916 /* If we make space by pushing it, we might as well push
3917 the real data. Otherwise, we can leave OFFSET nonzero
3918 and leave the space uninitialized. */
3919 if (args_addr == 0)
3920 offset = 0;
3922 /* Now NOT_STACK gets the number of words that we don't need to
3923 allocate on the stack. Convert OFFSET to words too. */
3924 not_stack = (partial - offset) / UNITS_PER_WORD;
3925 offset /= UNITS_PER_WORD;
3927 /* If the partial register-part of the arg counts in its stack size,
3928 skip the part of stack space corresponding to the registers.
3929 Otherwise, start copying to the beginning of the stack space,
3930 by setting SKIP to 0. */
3931 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3933 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3934 x = validize_mem (force_const_mem (mode, x));
3936 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3937 SUBREGs of such registers are not allowed. */
3938 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3939 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3940 x = copy_to_reg (x);
3942 /* Loop over all the words allocated on the stack for this arg. */
 3943 /* We can do it by words, because any scalar bigger than a word
 3944 has a size that is a multiple of a word. */
3945 #ifndef PUSH_ARGS_REVERSED
3946 for (i = not_stack; i < size; i++)
3947 #else
3948 for (i = size - 1; i >= not_stack; i--)
3949 #endif
3950 if (i >= not_stack + offset)
3951 emit_push_insn (operand_subword_force (x, i, mode),
3952 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3953 0, args_addr,
3954 GEN_INT (args_offset + ((i - not_stack + skip)
3955 * UNITS_PER_WORD)),
3956 reg_parm_stack_space, alignment_pad);
3958 else
3960 rtx addr;
3961 rtx dest;
3963 /* Push padding now if padding above and stack grows down,
3964 or if padding below and stack grows up.
3965 But if space already allocated, this has already been done. */
3966 if (extra && args_addr == 0
3967 && where_pad != none && where_pad != stack_direction)
3968 anti_adjust_stack (GEN_INT (extra));
3970 #ifdef PUSH_ROUNDING
3971 if (args_addr == 0 && PUSH_ARGS)
3972 emit_single_push_insn (mode, x, type);
3973 else
3974 #endif
3976 if (CONST_INT_P (args_so_far))
3977 addr
3978 = memory_address (mode,
3979 plus_constant (args_addr,
3980 INTVAL (args_so_far)));
3981 else
3982 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3983 args_so_far));
3984 dest = gen_rtx_MEM (mode, addr);
3986 /* We do *not* set_mem_attributes here, because incoming arguments
3987 may overlap with sibling call outgoing arguments and we cannot
3988 allow reordering of reads from function arguments with stores
3989 to outgoing arguments of sibling calls. We do, however, want
3990 to record the alignment of the stack slot. */
3991 /* ALIGN may well be better aligned than TYPE, e.g. due to
3992 PARM_BOUNDARY. Assume the caller isn't lying. */
3993 set_mem_align (dest, align);
3995 emit_move_insn (dest, x);
3999 /* If part should go in registers, copy that part
4000 into the appropriate registers. Do this now, at the end,
4001 since mem-to-mem copies above may do function calls. */
4002 if (partial > 0 && reg != 0)
4004 /* Handle calls that pass values in multiple non-contiguous locations.
4005 The Irix 6 ABI has examples of this. */
4006 if (GET_CODE (reg) == PARALLEL)
4007 emit_group_load (reg, x, type, -1);
4008 else
4010 gcc_assert (partial % UNITS_PER_WORD == 0);
4011 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4015 if (extra && args_addr == 0 && where_pad == stack_direction)
4016 anti_adjust_stack (GEN_INT (extra));
4018 if (alignment_pad && args_addr == 0)
4019 anti_adjust_stack (alignment_pad);
4022 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4023 operations. */
4025 static rtx
4026 get_subtarget (rtx x)
4028 return (optimize
4029 || x == 0
4030 /* Only registers can be subtargets. */
4031 || !REG_P (x)
4032 /* Don't use hard regs to avoid extending their life. */
4033 || REGNO (x) < FIRST_PSEUDO_REGISTER
4034 ? 0 : x);
4037 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4038 FIELD is a bitfield. Returns true if the optimization was successful,
4039 and there's nothing else to do. */
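/* For instance (illustrative source code, not anything generated
   here), given

       struct S { unsigned lo : 5; unsigned hi : 3; } s;

   an update such as "s.lo |= 4" (IOR with a constant), or "s.hi += 1"
   when the field happens to end at the top of the word actually
   accessed, can be performed as a single read-modify-write of the
   containing word instead of an extract/modify/insert sequence; that
   is the transformation attempted below.  */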
4041 static bool
4042 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4043 unsigned HOST_WIDE_INT bitpos,
4044 enum machine_mode mode1, rtx str_rtx,
4045 tree to, tree src)
4047 enum machine_mode str_mode = GET_MODE (str_rtx);
4048 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4049 tree op0, op1;
4050 rtx value, result;
4051 optab binop;
4053 if (mode1 != VOIDmode
4054 || bitsize >= BITS_PER_WORD
4055 || str_bitsize > BITS_PER_WORD
4056 || TREE_SIDE_EFFECTS (to)
4057 || TREE_THIS_VOLATILE (to))
4058 return false;
4060 STRIP_NOPS (src);
4061 if (!BINARY_CLASS_P (src)
4062 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4063 return false;
4065 op0 = TREE_OPERAND (src, 0);
4066 op1 = TREE_OPERAND (src, 1);
4067 STRIP_NOPS (op0);
4069 if (!operand_equal_p (to, op0, 0))
4070 return false;
4072 if (MEM_P (str_rtx))
4074 unsigned HOST_WIDE_INT offset1;
4076 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4077 str_mode = word_mode;
4078 str_mode = get_best_mode (bitsize, bitpos,
4079 MEM_ALIGN (str_rtx), str_mode, 0);
4080 if (str_mode == VOIDmode)
4081 return false;
4082 str_bitsize = GET_MODE_BITSIZE (str_mode);
4084 offset1 = bitpos;
4085 bitpos %= str_bitsize;
4086 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4087 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4089 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4090 return false;
4092 /* If the bit field covers the whole REG/MEM, store_field
4093 will likely generate better code. */
4094 if (bitsize >= str_bitsize)
4095 return false;
4097 /* We can't handle fields split across multiple entities. */
4098 if (bitpos + bitsize > str_bitsize)
4099 return false;
4101 if (BYTES_BIG_ENDIAN)
4102 bitpos = str_bitsize - bitpos - bitsize;
4104 switch (TREE_CODE (src))
4106 case PLUS_EXPR:
4107 case MINUS_EXPR:
 4108 /* For now, just optimize the case of the topmost bitfield,
 4109 where we don't need to do any masking, and also
 4110 1-bit bitfields, where xor can be used.
4111 We might win by one instruction for the other bitfields
4112 too if insv/extv instructions aren't used, so that
4113 can be added later. */
4114 if (bitpos + bitsize != str_bitsize
4115 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4116 break;
4118 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4119 value = convert_modes (str_mode,
4120 TYPE_MODE (TREE_TYPE (op1)), value,
4121 TYPE_UNSIGNED (TREE_TYPE (op1)));
4123 /* We may be accessing data outside the field, which means
4124 we can alias adjacent data. */
4125 if (MEM_P (str_rtx))
4127 str_rtx = shallow_copy_rtx (str_rtx);
4128 set_mem_alias_set (str_rtx, 0);
4129 set_mem_expr (str_rtx, 0);
4132 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4133 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4135 value = expand_and (str_mode, value, const1_rtx, NULL);
4136 binop = xor_optab;
4138 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4139 build_int_cst (NULL_TREE, bitpos),
4140 NULL_RTX, 1);
4141 result = expand_binop (str_mode, binop, str_rtx,
4142 value, str_rtx, 1, OPTAB_WIDEN);
4143 if (result != str_rtx)
4144 emit_move_insn (str_rtx, result);
4145 return true;
4147 case BIT_IOR_EXPR:
4148 case BIT_XOR_EXPR:
4149 if (TREE_CODE (op1) != INTEGER_CST)
4150 break;
4151 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4152 value = convert_modes (GET_MODE (str_rtx),
4153 TYPE_MODE (TREE_TYPE (op1)), value,
4154 TYPE_UNSIGNED (TREE_TYPE (op1)));
4156 /* We may be accessing data outside the field, which means
4157 we can alias adjacent data. */
4158 if (MEM_P (str_rtx))
4160 str_rtx = shallow_copy_rtx (str_rtx);
4161 set_mem_alias_set (str_rtx, 0);
4162 set_mem_expr (str_rtx, 0);
4165 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4166 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4168 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4169 - 1);
4170 value = expand_and (GET_MODE (str_rtx), value, mask,
4171 NULL_RTX);
4173 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4174 build_int_cst (NULL_TREE, bitpos),
4175 NULL_RTX, 1);
4176 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4177 value, str_rtx, 1, OPTAB_WIDEN);
4178 if (result != str_rtx)
4179 emit_move_insn (str_rtx, result);
4180 return true;
4182 default:
4183 break;
4186 return false;
4190 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4191 is true, try generating a nontemporal store. */
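/* Illustration (hypothetical source fragment): for C code such as

       struct S { int x : 7; } *p;
       p->x = v;

   TO is a COMPONENT_REF, so the code below uses get_inner_reference
   to find the containing object, bit position and size, and the store
   then goes through store_field (after the bitfield read-modify-write
   shortcut above has been tried).  */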
4193 void
4194 expand_assignment (tree to, tree from, bool nontemporal)
4196 rtx to_rtx = 0;
4197 rtx result;
4199 /* Don't crash if the lhs of the assignment was erroneous. */
4200 if (TREE_CODE (to) == ERROR_MARK)
4202 result = expand_normal (from);
4203 return;
4206 /* Optimize away no-op moves without side-effects. */
4207 if (operand_equal_p (to, from, 0))
4208 return;
4210 /* Assignment of a structure component needs special treatment
4211 if the structure component's rtx is not simply a MEM.
4212 Assignment of an array element at a constant index, and assignment of
 4213 an array element in an unaligned packed structure field, have the same
4214 problem. */
4215 if (handled_component_p (to)
4216 /* ??? We only need to handle MEM_REF here if the access is not
4217 a full access of the base object. */
4218 || (TREE_CODE (to) == MEM_REF
4219 && TREE_CODE (TREE_OPERAND (to, 0)) == ADDR_EXPR)
4220 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4222 enum machine_mode mode1;
4223 HOST_WIDE_INT bitsize, bitpos;
4224 tree offset;
4225 int unsignedp;
4226 int volatilep = 0;
4227 tree tem;
4229 push_temp_slots ();
4230 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4231 &unsignedp, &volatilep, true);
4233 /* If we are going to use store_bit_field and extract_bit_field,
4234 make sure to_rtx will be safe for multiple use. */
4236 to_rtx = expand_normal (tem);
4238 /* If the bitfield is volatile, we want to access it in the
4239 field's mode, not the computed mode. */
4240 if (volatilep
4241 && GET_CODE (to_rtx) == MEM
4242 && flag_strict_volatile_bitfields > 0)
4243 to_rtx = adjust_address (to_rtx, mode1, 0);
4245 if (offset != 0)
4247 enum machine_mode address_mode;
4248 rtx offset_rtx;
4250 if (!MEM_P (to_rtx))
4252 /* We can get constant negative offsets into arrays with broken
4253 user code. Translate this to a trap instead of ICEing. */
4254 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4255 expand_builtin_trap ();
4256 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4259 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4260 address_mode
4261 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
4262 if (GET_MODE (offset_rtx) != address_mode)
4263 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
 4265 /* A constant address in TO_RTX can have VOIDmode; we must not try
 4266 to call force_reg for that case, so avoid it. */
4267 if (MEM_P (to_rtx)
4268 && GET_MODE (to_rtx) == BLKmode
4269 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4270 && bitsize > 0
4271 && (bitpos % bitsize) == 0
4272 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4273 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4275 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4276 bitpos = 0;
4279 to_rtx = offset_address (to_rtx, offset_rtx,
4280 highest_pow2_factor_for_target (to,
4281 offset));
4284 /* No action is needed if the target is not a memory and the field
4285 lies completely outside that target. This can occur if the source
4286 code contains an out-of-bounds access to a small array. */
4287 if (!MEM_P (to_rtx)
4288 && GET_MODE (to_rtx) != BLKmode
4289 && (unsigned HOST_WIDE_INT) bitpos
4290 >= GET_MODE_BITSIZE (GET_MODE (to_rtx)))
4292 expand_normal (from);
4293 result = NULL;
4295 /* Handle expand_expr of a complex value returning a CONCAT. */
4296 else if (GET_CODE (to_rtx) == CONCAT)
4298 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from))))
4300 gcc_assert (bitpos == 0);
4301 result = store_expr (from, to_rtx, false, nontemporal);
4303 else
4305 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4306 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4307 nontemporal);
4310 else
4312 if (MEM_P (to_rtx))
4314 /* If the field is at offset zero, we could have been given the
4315 DECL_RTX of the parent struct. Don't munge it. */
4316 to_rtx = shallow_copy_rtx (to_rtx);
4318 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4320 /* Deal with volatile and readonly fields. The former is only
4321 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4322 if (volatilep)
4323 MEM_VOLATILE_P (to_rtx) = 1;
4324 if (component_uses_parent_alias_set (to))
4325 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4328 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4329 to_rtx, to, from))
4330 result = NULL;
4331 else
4332 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4333 TREE_TYPE (tem), get_alias_set (to),
4334 nontemporal);
4337 if (result)
4338 preserve_temp_slots (result);
4339 free_temp_slots ();
4340 pop_temp_slots ();
4341 return;
4344 else if (TREE_CODE (to) == MISALIGNED_INDIRECT_REF)
4346 addr_space_t as = ADDR_SPACE_GENERIC;
4347 enum machine_mode mode, op_mode1;
4348 enum insn_code icode;
4349 rtx reg, addr, mem, insn;
4351 if (POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (to, 0))))
4352 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 0))));
4354 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4355 reg = force_not_mem (reg);
4357 mode = TYPE_MODE (TREE_TYPE (to));
4358 addr = expand_expr (TREE_OPERAND (to, 0), NULL_RTX, VOIDmode,
4359 EXPAND_SUM);
4360 addr = memory_address_addr_space (mode, addr, as);
4361 mem = gen_rtx_MEM (mode, addr);
4363 set_mem_attributes (mem, to, 0);
4364 set_mem_addr_space (mem, as);
4366 icode = movmisalign_optab->handlers[mode].insn_code;
4367 gcc_assert (icode != CODE_FOR_nothing);
4369 op_mode1 = insn_data[icode].operand[1].mode;
4370 if (! (*insn_data[icode].operand[1].predicate) (reg, op_mode1)
4371 && op_mode1 != VOIDmode)
4372 reg = copy_to_mode_reg (op_mode1, reg);
4374 insn = GEN_FCN (icode) (mem, reg);
4375 emit_insn (insn);
4376 return;
4379 /* If the rhs is a function call and its value is not an aggregate,
4380 call the function before we start to compute the lhs.
4381 This is needed for correct code for cases such as
4382 val = setjmp (buf) on machines where reference to val
4383 requires loading up part of an address in a separate insn.
4385 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4386 since it might be a promoted variable where the zero- or sign- extension
4387 needs to be done. Handling this in the normal way is safe because no
4388 computation is done before the call. The same is true for SSA names. */
4389 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4390 && COMPLETE_TYPE_P (TREE_TYPE (from))
4391 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4392 && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4393 && REG_P (DECL_RTL (to)))
4394 || TREE_CODE (to) == SSA_NAME))
4396 rtx value;
4398 push_temp_slots ();
4399 value = expand_normal (from);
4400 if (to_rtx == 0)
4401 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4403 /* Handle calls that return values in multiple non-contiguous locations.
4404 The Irix 6 ABI has examples of this. */
4405 if (GET_CODE (to_rtx) == PARALLEL)
4406 emit_group_load (to_rtx, value, TREE_TYPE (from),
4407 int_size_in_bytes (TREE_TYPE (from)));
4408 else if (GET_MODE (to_rtx) == BLKmode)
4409 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4410 else
4412 if (POINTER_TYPE_P (TREE_TYPE (to)))
4413 value = convert_memory_address_addr_space
4414 (GET_MODE (to_rtx), value,
4415 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4417 emit_move_insn (to_rtx, value);
4419 preserve_temp_slots (to_rtx);
4420 free_temp_slots ();
4421 pop_temp_slots ();
4422 return;
4425 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4426 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4428 if (to_rtx == 0)
4429 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4431 /* Don't move directly into a return register. */
4432 if (TREE_CODE (to) == RESULT_DECL
4433 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4435 rtx temp;
4437 push_temp_slots ();
4438 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4440 if (GET_CODE (to_rtx) == PARALLEL)
4441 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4442 int_size_in_bytes (TREE_TYPE (from)));
4443 else
4444 emit_move_insn (to_rtx, temp);
4446 preserve_temp_slots (to_rtx);
4447 free_temp_slots ();
4448 pop_temp_slots ();
4449 return;
4452 /* In case we are returning the contents of an object which overlaps
4453 the place the value is being stored, use a safe function when copying
4454 a value through a pointer into a structure value return block. */
4455 if (TREE_CODE (to) == RESULT_DECL
4456 && TREE_CODE (from) == INDIRECT_REF
4457 && ADDR_SPACE_GENERIC_P
4458 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4459 && refs_may_alias_p (to, from)
4460 && cfun->returns_struct
4461 && !cfun->returns_pcc_struct)
4463 rtx from_rtx, size;
4465 push_temp_slots ();
4466 size = expr_size (from);
4467 from_rtx = expand_normal (from);
4469 emit_library_call (memmove_libfunc, LCT_NORMAL,
4470 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4471 XEXP (from_rtx, 0), Pmode,
4472 convert_to_mode (TYPE_MODE (sizetype),
4473 size, TYPE_UNSIGNED (sizetype)),
4474 TYPE_MODE (sizetype));
4476 preserve_temp_slots (to_rtx);
4477 free_temp_slots ();
4478 pop_temp_slots ();
4479 return;
4482 /* Compute FROM and store the value in the rtx we got. */
4484 push_temp_slots ();
4485 result = store_expr (from, to_rtx, 0, nontemporal);
4486 preserve_temp_slots (result);
4487 free_temp_slots ();
4488 pop_temp_slots ();
4489 return;
4492 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4493 succeeded, false otherwise. */
4495 bool
4496 emit_storent_insn (rtx to, rtx from)
4498 enum machine_mode mode = GET_MODE (to), imode;
4499 enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4500 rtx pattern;
4502 if (code == CODE_FOR_nothing)
4503 return false;
4505 imode = insn_data[code].operand[0].mode;
4506 if (!insn_data[code].operand[0].predicate (to, imode))
4507 return false;
4509 imode = insn_data[code].operand[1].mode;
4510 if (!insn_data[code].operand[1].predicate (from, imode))
4512 from = copy_to_mode_reg (imode, from);
4513 if (!insn_data[code].operand[1].predicate (from, imode))
4514 return false;
4517 pattern = GEN_FCN (code) (to, from);
4518 if (pattern == NULL_RTX)
4519 return false;
4521 emit_insn (pattern);
4522 return true;
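/* Usage sketch (not lifted from any particular caller): code that
   prefers a nontemporal store but must cope with targets lacking a
   storent pattern can simply fall back to an ordinary move,

       if (!emit_storent_insn (dest, src))
         emit_move_insn (dest, src);

   which is essentially what store_expr below does when its
   NONTEMPORAL argument is set.  */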
4525 /* Generate code for computing expression EXP,
4526 and storing the value into TARGET.
4528 If the mode is BLKmode then we may return TARGET itself.
 4529 It turns out that in BLKmode it doesn't cause a problem,
4530 because C has no operators that could combine two different
4531 assignments into the same BLKmode object with different values
4532 with no sequence point. Will other languages need this to
4533 be more thorough?
4535 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4536 stack, and block moves may need to be treated specially.
4538 If NONTEMPORAL is true, try using a nontemporal store instruction. */
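/* Example of the STRING_CST fast path below (illustrative source,
   assuming the literal is about as long as the array so the path is
   actually taken): for an initialization like

       char buf[4] = "abc";

   the literal's bytes are stored with store_by_pieces and any
   remaining tail of the array is cleared with clear_storage, rather
   than expanding the constant into a temporary and block-copying
   it.  */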
4541 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4543 rtx temp;
4544 rtx alt_rtl = NULL_RTX;
4545 location_t loc = EXPR_LOCATION (exp);
4547 if (VOID_TYPE_P (TREE_TYPE (exp)))
4549 /* C++ can generate ?: expressions with a throw expression in one
4550 branch and an rvalue in the other. Here, we resolve attempts to
4551 store the throw expression's nonexistent result. */
4552 gcc_assert (!call_param_p);
4553 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4554 return NULL_RTX;
4556 if (TREE_CODE (exp) == COMPOUND_EXPR)
4558 /* Perform first part of compound expression, then assign from second
4559 part. */
4560 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4561 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4562 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4563 nontemporal);
4565 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
 4567 /* For a conditional expression, get a safe form of the target. Then
4568 test the condition, doing the appropriate assignment on either
4569 side. This avoids the creation of unnecessary temporaries.
4570 For non-BLKmode, it is more efficient not to do this. */
4572 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4574 do_pending_stack_adjust ();
4575 NO_DEFER_POP;
4576 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
4577 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4578 nontemporal);
4579 emit_jump_insn (gen_jump (lab2));
4580 emit_barrier ();
4581 emit_label (lab1);
4582 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4583 nontemporal);
4584 emit_label (lab2);
4585 OK_DEFER_POP;
4587 return NULL_RTX;
4589 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4590 /* If this is a scalar in a register that is stored in a wider mode
4591 than the declared mode, compute the result into its declared mode
4592 and then convert to the wider mode. Our value is the computed
4593 expression. */
4595 rtx inner_target = 0;
4597 /* We can do the conversion inside EXP, which will often result
4598 in some optimizations. Do the conversion in two steps: first
4599 change the signedness, if needed, then the extend. But don't
4600 do this if the type of EXP is a subtype of something else
4601 since then the conversion might involve more than just
4602 converting modes. */
4603 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4604 && TREE_TYPE (TREE_TYPE (exp)) == 0
4605 && GET_MODE_PRECISION (GET_MODE (target))
4606 == TYPE_PRECISION (TREE_TYPE (exp)))
4608 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4609 != SUBREG_PROMOTED_UNSIGNED_P (target))
4611 /* Some types, e.g. Fortran's logical*4, won't have a signed
4612 version, so use the mode instead. */
4613 tree ntype
4614 = (signed_or_unsigned_type_for
4615 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4616 if (ntype == NULL)
4617 ntype = lang_hooks.types.type_for_mode
4618 (TYPE_MODE (TREE_TYPE (exp)),
4619 SUBREG_PROMOTED_UNSIGNED_P (target));
4621 exp = fold_convert_loc (loc, ntype, exp);
4624 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
4625 (GET_MODE (SUBREG_REG (target)),
4626 SUBREG_PROMOTED_UNSIGNED_P (target)),
4627 exp);
4629 inner_target = SUBREG_REG (target);
4632 temp = expand_expr (exp, inner_target, VOIDmode,
4633 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4635 /* If TEMP is a VOIDmode constant, use convert_modes to make
4636 sure that we properly convert it. */
4637 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4639 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4640 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4641 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4642 GET_MODE (target), temp,
4643 SUBREG_PROMOTED_UNSIGNED_P (target));
4646 convert_move (SUBREG_REG (target), temp,
4647 SUBREG_PROMOTED_UNSIGNED_P (target));
4649 return NULL_RTX;
4651 else if (TREE_CODE (exp) == STRING_CST
4652 && !nontemporal && !call_param_p
4653 && TREE_STRING_LENGTH (exp) > 0
4654 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4656 /* Optimize initialization of an array with a STRING_CST. */
4657 HOST_WIDE_INT exp_len, str_copy_len;
4658 rtx dest_mem;
4660 exp_len = int_expr_size (exp);
4661 if (exp_len <= 0)
4662 goto normal_expr;
4664 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4665 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4666 goto normal_expr;
4668 str_copy_len = TREE_STRING_LENGTH (exp);
4669 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4671 str_copy_len += STORE_MAX_PIECES - 1;
4672 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4674 str_copy_len = MIN (str_copy_len, exp_len);
4675 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4676 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4677 MEM_ALIGN (target), false))
4678 goto normal_expr;
4680 dest_mem = target;
4682 dest_mem = store_by_pieces (dest_mem,
4683 str_copy_len, builtin_strncpy_read_str,
4684 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4685 MEM_ALIGN (target), false,
4686 exp_len > str_copy_len ? 1 : 0);
4687 if (exp_len > str_copy_len)
4688 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4689 GEN_INT (exp_len - str_copy_len),
4690 BLOCK_OP_NORMAL);
4691 return NULL_RTX;
4693 else if (TREE_CODE (exp) == MEM_REF
4694 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4695 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == STRING_CST
4696 && integer_zerop (TREE_OPERAND (exp, 1))
4697 && !nontemporal && !call_param_p
4698 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4700 /* Optimize initialization of an array with a STRING_CST. */
4701 HOST_WIDE_INT exp_len, str_copy_len;
4702 rtx dest_mem;
4703 tree str = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4705 exp_len = int_expr_size (exp);
4706 if (exp_len <= 0)
4707 goto normal_expr;
4709 str_copy_len = strlen (TREE_STRING_POINTER (str));
4710 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
4711 goto normal_expr;
4713 str_copy_len = TREE_STRING_LENGTH (str);
4714 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4716 str_copy_len += STORE_MAX_PIECES - 1;
4717 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4719 str_copy_len = MIN (str_copy_len, exp_len);
4720 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4721 CONST_CAST(char *, TREE_STRING_POINTER (str)),
4722 MEM_ALIGN (target), false))
4723 goto normal_expr;
4725 dest_mem = target;
4727 dest_mem = store_by_pieces (dest_mem,
4728 str_copy_len, builtin_strncpy_read_str,
4729 CONST_CAST(char *, TREE_STRING_POINTER (str)),
4730 MEM_ALIGN (target), false,
4731 exp_len > str_copy_len ? 1 : 0);
4732 if (exp_len > str_copy_len)
4733 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4734 GEN_INT (exp_len - str_copy_len),
4735 BLOCK_OP_NORMAL);
4736 return NULL_RTX;
4738 else
4740 rtx tmp_target;
4742 normal_expr:
4743 /* If we want to use a nontemporal store, force the value to
4744 register first. */
4745 tmp_target = nontemporal ? NULL_RTX : target;
4746 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4747 (call_param_p
4748 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4749 &alt_rtl);
4752 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4753 the same as that of TARGET, adjust the constant. This is needed, for
4754 example, in case it is a CONST_DOUBLE and we want only a word-sized
4755 value. */
4756 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4757 && TREE_CODE (exp) != ERROR_MARK
4758 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4759 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4760 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4762 /* If value was not generated in the target, store it there.
4763 Convert the value to TARGET's type first if necessary and emit the
4764 pending incrementations that have been queued when expanding EXP.
4765 Note that we cannot emit the whole queue blindly because this will
4766 effectively disable the POST_INC optimization later.
4768 If TEMP and TARGET compare equal according to rtx_equal_p, but
4769 one or both of them are volatile memory refs, we have to distinguish
4770 two cases:
4771 - expand_expr has used TARGET. In this case, we must not generate
4772 another copy. This can be detected by TARGET being equal according
4773 to == .
4774 - expand_expr has not used TARGET - that means that the source just
4775 happens to have the same RTX form. Since temp will have been created
4776 by expand_expr, it will compare unequal according to == .
4777 We must generate a copy in this case, to reach the correct number
4778 of volatile memory references. */
4780 if ((! rtx_equal_p (temp, target)
4781 || (temp != target && (side_effects_p (temp)
4782 || side_effects_p (target))))
4783 && TREE_CODE (exp) != ERROR_MARK
4784 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
 4785 but TARGET is not a valid memory reference, TEMP will differ
4786 from TARGET although it is really the same location. */
4787 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4788 /* If there's nothing to copy, don't bother. Don't call
 4789 expr_size unless necessary, because the expr_size hook of some
 4790 front ends (C++) must not be given objects that are not
4791 supposed to be bit-copied or bit-initialized. */
4792 && expr_size (exp) != const0_rtx)
4794 if (GET_MODE (temp) != GET_MODE (target)
4795 && GET_MODE (temp) != VOIDmode)
4797 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4798 if (GET_MODE (target) == BLKmode
4799 || GET_MODE (temp) == BLKmode)
4800 emit_block_move (target, temp, expr_size (exp),
4801 (call_param_p
4802 ? BLOCK_OP_CALL_PARM
4803 : BLOCK_OP_NORMAL));
4804 else
4805 convert_move (target, temp, unsignedp);
4808 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4810 /* Handle copying a string constant into an array. The string
4811 constant may be shorter than the array. So copy just the string's
4812 actual length, and clear the rest. First get the size of the data
4813 type of the string, which is actually the size of the target. */
4814 rtx size = expr_size (exp);
4816 if (CONST_INT_P (size)
4817 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4818 emit_block_move (target, temp, size,
4819 (call_param_p
4820 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4821 else
4823 enum machine_mode pointer_mode
4824 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
4825 enum machine_mode address_mode
4826 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));
4828 /* Compute the size of the data to copy from the string. */
4829 tree copy_size
4830 = size_binop_loc (loc, MIN_EXPR,
4831 make_tree (sizetype, size),
4832 size_int (TREE_STRING_LENGTH (exp)));
4833 rtx copy_size_rtx
4834 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4835 (call_param_p
4836 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4837 rtx label = 0;
4839 /* Copy that much. */
4840 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
4841 TYPE_UNSIGNED (sizetype));
4842 emit_block_move (target, temp, copy_size_rtx,
4843 (call_param_p
4844 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4846 /* Figure out how much is left in TARGET that we have to clear.
4847 Do all calculations in pointer_mode. */
4848 if (CONST_INT_P (copy_size_rtx))
4850 size = plus_constant (size, -INTVAL (copy_size_rtx));
4851 target = adjust_address (target, BLKmode,
4852 INTVAL (copy_size_rtx));
4854 else
4856 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4857 copy_size_rtx, NULL_RTX, 0,
4858 OPTAB_LIB_WIDEN);
4860 if (GET_MODE (copy_size_rtx) != address_mode)
4861 copy_size_rtx = convert_to_mode (address_mode,
4862 copy_size_rtx,
4863 TYPE_UNSIGNED (sizetype));
4865 target = offset_address (target, copy_size_rtx,
4866 highest_pow2_factor (copy_size));
4867 label = gen_label_rtx ();
4868 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4869 GET_MODE (size), 0, label);
4872 if (size != const0_rtx)
4873 clear_storage (target, size, BLOCK_OP_NORMAL);
4875 if (label)
4876 emit_label (label);
4879 /* Handle calls that return values in multiple non-contiguous locations.
4880 The Irix 6 ABI has examples of this. */
4881 else if (GET_CODE (target) == PARALLEL)
4882 emit_group_load (target, temp, TREE_TYPE (exp),
4883 int_size_in_bytes (TREE_TYPE (exp)));
4884 else if (GET_MODE (temp) == BLKmode)
4885 emit_block_move (target, temp, expr_size (exp),
4886 (call_param_p
4887 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4888 else if (nontemporal
4889 && emit_storent_insn (target, temp))
4890 /* If we managed to emit a nontemporal store, there is nothing else to
4891 do. */
4893 else
4895 temp = force_operand (temp, target);
4896 if (temp != target)
4897 emit_move_insn (target, temp);
4901 return NULL_RTX;
4904 /* Helper for categorize_ctor_elements. Identical interface. */
4906 static bool
4907 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4908 HOST_WIDE_INT *p_elt_count,
4909 bool *p_must_clear)
4911 unsigned HOST_WIDE_INT idx;
4912 HOST_WIDE_INT nz_elts, elt_count;
4913 tree value, purpose;
4915 /* Whether CTOR is a valid constant initializer, in accordance with what
4916 initializer_constant_valid_p does. If inferred from the constructor
4917 elements, true until proven otherwise. */
4918 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4919 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4921 nz_elts = 0;
4922 elt_count = 0;
4924 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4926 HOST_WIDE_INT mult = 1;
4928 if (TREE_CODE (purpose) == RANGE_EXPR)
4930 tree lo_index = TREE_OPERAND (purpose, 0);
4931 tree hi_index = TREE_OPERAND (purpose, 1);
4933 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4934 mult = (tree_low_cst (hi_index, 1)
4935 - tree_low_cst (lo_index, 1) + 1);
4938 switch (TREE_CODE (value))
4940 case CONSTRUCTOR:
4942 HOST_WIDE_INT nz = 0, ic = 0;
4944 bool const_elt_p
4945 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4947 nz_elts += mult * nz;
4948 elt_count += mult * ic;
4950 if (const_from_elts_p && const_p)
4951 const_p = const_elt_p;
4953 break;
4955 case INTEGER_CST:
4956 case REAL_CST:
4957 case FIXED_CST:
4958 if (!initializer_zerop (value))
4959 nz_elts += mult;
4960 elt_count += mult;
4961 break;
4963 case STRING_CST:
4964 nz_elts += mult * TREE_STRING_LENGTH (value);
4965 elt_count += mult * TREE_STRING_LENGTH (value);
4966 break;
4968 case COMPLEX_CST:
4969 if (!initializer_zerop (TREE_REALPART (value)))
4970 nz_elts += mult;
4971 if (!initializer_zerop (TREE_IMAGPART (value)))
4972 nz_elts += mult;
4973 elt_count += mult;
4974 break;
4976 case VECTOR_CST:
4978 tree v;
4979 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4981 if (!initializer_zerop (TREE_VALUE (v)))
4982 nz_elts += mult;
4983 elt_count += mult;
4986 break;
4988 default:
4990 HOST_WIDE_INT tc = count_type_elements (TREE_TYPE (value), true);
4991 if (tc < 1)
4992 tc = 1;
4993 nz_elts += mult * tc;
4994 elt_count += mult * tc;
4996 if (const_from_elts_p && const_p)
4997 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4998 != NULL_TREE;
5000 break;
5004 if (!*p_must_clear
5005 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
5006 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
5008 tree init_sub_type;
5009 bool clear_this = true;
5011 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
5013 /* We don't expect more than one element of the union to be
5014 initialized. Not sure what we should do otherwise... */
5015 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
5016 == 1);
5018 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
5019 CONSTRUCTOR_ELTS (ctor),
5020 0)->value);
5022 /* ??? We could look at each element of the union, and find the
 5023 largest element. That would avoid comparing the size of the
5024 initialized element against any tail padding in the union.
5025 Doesn't seem worth the effort... */
5026 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
5027 TYPE_SIZE (init_sub_type)) == 1)
5029 /* And now we have to find out if the element itself is fully
5030 constructed. E.g. for union { struct { int a, b; } s; } u
5031 = { .s = { .a = 1 } }. */
5032 if (elt_count == count_type_elements (init_sub_type, false))
5033 clear_this = false;
5037 *p_must_clear = clear_this;
5040 *p_nz_elts += nz_elts;
5041 *p_elt_count += elt_count;
5043 return const_p;
5046 /* Examine CTOR to discover:
5047 * how many scalar fields are set to nonzero values,
5048 and place it in *P_NZ_ELTS;
 5049 * how many scalar fields in total are in CTOR,
 5050 and place it in *P_ELT_COUNT;
 5051 * if the type is a union, and the initializer from the constructor
 5052 is not the largest element in the union, then set *P_MUST_CLEAR.
5054 Return whether or not CTOR is a valid static constant initializer, the same
5055 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5057 bool
5058 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5059 HOST_WIDE_INT *p_elt_count,
5060 bool *p_must_clear)
5062 *p_nz_elts = 0;
5063 *p_elt_count = 0;
5064 *p_must_clear = false;
5066 return
5067 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
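/* Worked example (illustrative; assumes the front end records all
   three explicit initializers): for

       struct { int a, b, c, d; } s = { 1, 0, 2 };

   the CONSTRUCTOR gives *P_NZ_ELTS == 2 and *P_ELT_COUNT == 3, while
   count_type_elements on the type returns 4; callers such as
   mostly_zeros_p below compare these numbers to decide whether to
   clear the whole object before storing the nonzero parts.  */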
 5070 /* Count the number of scalars in TYPE. Return -1 on overflow or
 5071 if TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a
 5072 flexible array member at the end of the structure. */
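/* For example (illustrative), count_type_elements applied to

       struct { int a; double d[3]; _Complex float c; }

   returns 1 + 3 + 2 == 6, whereas a union or a variably sized array
   gives -1.  */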
5074 HOST_WIDE_INT
5075 count_type_elements (const_tree type, bool allow_flexarr)
5077 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
5078 switch (TREE_CODE (type))
5080 case ARRAY_TYPE:
5082 tree telts = array_type_nelts (type);
5083 if (telts && host_integerp (telts, 1))
5085 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
5086 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
5087 if (n == 0)
5088 return 0;
5089 else if (max / n > m)
5090 return n * m;
5092 return -1;
5095 case RECORD_TYPE:
5097 HOST_WIDE_INT n = 0, t;
5098 tree f;
5100 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5101 if (TREE_CODE (f) == FIELD_DECL)
5103 t = count_type_elements (TREE_TYPE (f), false);
5104 if (t < 0)
5106 /* Check for structures with flexible array member. */
5107 tree tf = TREE_TYPE (f);
5108 if (allow_flexarr
5109 && TREE_CHAIN (f) == NULL
5110 && TREE_CODE (tf) == ARRAY_TYPE
5111 && TYPE_DOMAIN (tf)
5112 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5113 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5114 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5115 && int_size_in_bytes (type) >= 0)
5116 break;
5118 return -1;
5120 n += t;
5123 return n;
5126 case UNION_TYPE:
5127 case QUAL_UNION_TYPE:
5128 return -1;
5130 case COMPLEX_TYPE:
5131 return 2;
5133 case VECTOR_TYPE:
5134 return TYPE_VECTOR_SUBPARTS (type);
5136 case INTEGER_TYPE:
5137 case REAL_TYPE:
5138 case FIXED_POINT_TYPE:
5139 case ENUMERAL_TYPE:
5140 case BOOLEAN_TYPE:
5141 case POINTER_TYPE:
5142 case OFFSET_TYPE:
5143 case REFERENCE_TYPE:
5144 return 1;
5146 case ERROR_MARK:
5147 return 0;
5149 case VOID_TYPE:
5150 case METHOD_TYPE:
5151 case FUNCTION_TYPE:
5152 case LANG_TYPE:
5153 default:
5154 gcc_unreachable ();
 5158 /* Return 1 if EXP consists mostly (at least 3/4) of zeros. */
5160 static int
5161 mostly_zeros_p (const_tree exp)
5163 if (TREE_CODE (exp) == CONSTRUCTOR)
5166 HOST_WIDE_INT nz_elts, count, elts;
5167 bool must_clear;
5169 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5170 if (must_clear)
5171 return 1;
5173 elts = count_type_elements (TREE_TYPE (exp), false);
5175 return nz_elts < elts / 4;
5178 return initializer_zerop (exp);
5181 /* Return 1 if EXP contains all zeros. */
5183 static int
5184 all_zeros_p (const_tree exp)
5186 if (TREE_CODE (exp) == CONSTRUCTOR)
5189 HOST_WIDE_INT nz_elts, count;
5190 bool must_clear;
5192 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5193 return nz_elts == 0;
5196 return initializer_zerop (exp);
5199 /* Helper function for store_constructor.
5200 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5201 TYPE is the type of the CONSTRUCTOR, not the element type.
5202 CLEARED is as for store_constructor.
5203 ALIAS_SET is the alias set to use for any stores.
5205 This provides a recursive shortcut back to store_constructor when it isn't
5206 necessary to go through store_field. This is so that we can pass through
5207 the cleared field to let store_constructor know that we may not have to
5208 clear a substructure if the outer structure has already been cleared. */
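/* E.g. (illustrative) for a nested initializer such as

       struct { struct { int x, y; } inner; int tail[6]; } o = { { 1, 2 } };

   the value for "inner" is itself a CONSTRUCTOR starting on a byte
   boundary, so the code below recurses into store_constructor
   directly, passing CLEARED along so that the inner store does not
   clear the substructure again once the outer object has already been
   zeroed.  */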
5210 static void
5211 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5212 HOST_WIDE_INT bitpos, enum machine_mode mode,
5213 tree exp, tree type, int cleared,
5214 alias_set_type alias_set)
5216 if (TREE_CODE (exp) == CONSTRUCTOR
5217 /* We can only call store_constructor recursively if the size and
5218 bit position are on a byte boundary. */
5219 && bitpos % BITS_PER_UNIT == 0
5220 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5221 /* If we have a nonzero bitpos for a register target, then we just
5222 let store_field do the bitfield handling. This is unlikely to
 5223 generate unnecessary clear instructions anyway. */
5224 && (bitpos == 0 || MEM_P (target)))
5226 if (MEM_P (target))
5227 target
5228 = adjust_address (target,
5229 GET_MODE (target) == BLKmode
5230 || 0 != (bitpos
5231 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5232 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5235 /* Update the alias set, if required. */
5236 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5237 && MEM_ALIAS_SET (target) != 0)
5239 target = copy_rtx (target);
5240 set_mem_alias_set (target, alias_set);
5243 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5245 else
5246 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5249 /* Store the value of constructor EXP into the rtx TARGET.
5250 TARGET is either a REG or a MEM; we know it cannot conflict, since
5251 safe_from_p has been called.
5252 CLEARED is true if TARGET is known to have been zero'd.
5253 SIZE is the number of bytes of TARGET we are allowed to modify: this
5254 may not be the same as the size of EXP if we are assigning to a field
5255 which has been packed to exclude padding bits. */
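/* Illustration (hypothetical source): for

       struct { int a[8]; int b; } s = { { 1 } };

   the constructor names fewer fields than the structure has and is
   mostly zeros, so the whole object is cleared with clear_storage
   first and CLEARED is set; afterwards only the nonzero element a[0]
   is stored explicitly, since zero-valued elements are skipped once
   CLEARED is set.  */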
5257 static void
5258 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5260 tree type = TREE_TYPE (exp);
5261 #ifdef WORD_REGISTER_OPERATIONS
5262 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5263 #endif
5265 switch (TREE_CODE (type))
5267 case RECORD_TYPE:
5268 case UNION_TYPE:
5269 case QUAL_UNION_TYPE:
5271 unsigned HOST_WIDE_INT idx;
5272 tree field, value;
5274 /* If size is zero or the target is already cleared, do nothing. */
5275 if (size == 0 || cleared)
5276 cleared = 1;
5277 /* We either clear the aggregate or indicate the value is dead. */
5278 else if ((TREE_CODE (type) == UNION_TYPE
5279 || TREE_CODE (type) == QUAL_UNION_TYPE)
5280 && ! CONSTRUCTOR_ELTS (exp))
5281 /* If the constructor is empty, clear the union. */
5283 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5284 cleared = 1;
5287 /* If we are building a static constructor into a register,
5288 set the initial value as zero so we can fold the value into
5289 a constant. But if more than one register is involved,
5290 this probably loses. */
5291 else if (REG_P (target) && TREE_STATIC (exp)
5292 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5294 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5295 cleared = 1;
5298 /* If the constructor has fewer fields than the structure or
5299 if we are initializing the structure to mostly zeros, clear
5300 the whole structure first. Don't do this if TARGET is a
5301 register whose mode size isn't equal to SIZE since
5302 clear_storage can't handle this case. */
5303 else if (size > 0
5304 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5305 != fields_length (type))
5306 || mostly_zeros_p (exp))
5307 && (!REG_P (target)
5308 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5309 == size)))
5311 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5312 cleared = 1;
5315 if (REG_P (target) && !cleared)
5316 emit_clobber (target);
5318 /* Store each element of the constructor into the
5319 corresponding field of TARGET. */
5320 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5322 enum machine_mode mode;
5323 HOST_WIDE_INT bitsize;
5324 HOST_WIDE_INT bitpos = 0;
5325 tree offset;
5326 rtx to_rtx = target;
5328 /* Just ignore missing fields. We cleared the whole
5329 structure, above, if any fields are missing. */
5330 if (field == 0)
5331 continue;
5333 if (cleared && initializer_zerop (value))
5334 continue;
5336 if (host_integerp (DECL_SIZE (field), 1))
5337 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5338 else
5339 bitsize = -1;
5341 mode = DECL_MODE (field);
5342 if (DECL_BIT_FIELD (field))
5343 mode = VOIDmode;
5345 offset = DECL_FIELD_OFFSET (field);
5346 if (host_integerp (offset, 0)
5347 && host_integerp (bit_position (field), 0))
5349 bitpos = int_bit_position (field);
5350 offset = 0;
5352 else
5353 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5355 if (offset)
5357 enum machine_mode address_mode;
5358 rtx offset_rtx;
5360 offset
5361 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5362 make_tree (TREE_TYPE (exp),
5363 target));
5365 offset_rtx = expand_normal (offset);
5366 gcc_assert (MEM_P (to_rtx));
5368 address_mode
5369 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
5370 if (GET_MODE (offset_rtx) != address_mode)
5371 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5373 to_rtx = offset_address (to_rtx, offset_rtx,
5374 highest_pow2_factor (offset));
5377 #ifdef WORD_REGISTER_OPERATIONS
5378 /* If this initializes a field that is smaller than a
5379 word, at the start of a word, try to widen it to a full
5380 word. This special case allows us to output C++ member
5381 function initializations in a form that the optimizers
5382 can understand. */
5383 if (REG_P (target)
5384 && bitsize < BITS_PER_WORD
5385 && bitpos % BITS_PER_WORD == 0
5386 && GET_MODE_CLASS (mode) == MODE_INT
5387 && TREE_CODE (value) == INTEGER_CST
5388 && exp_size >= 0
5389 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5391 tree type = TREE_TYPE (value);
5393 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5395 type = lang_hooks.types.type_for_size
5396 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5397 value = fold_convert (type, value);
5400 if (BYTES_BIG_ENDIAN)
5401 value
5402 = fold_build2 (LSHIFT_EXPR, type, value,
5403 build_int_cst (type,
5404 BITS_PER_WORD - bitsize));
5405 bitsize = BITS_PER_WORD;
5406 mode = word_mode;
5408 #endif
5410 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5411 && DECL_NONADDRESSABLE_P (field))
5413 to_rtx = copy_rtx (to_rtx);
5414 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5417 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5418 value, type, cleared,
5419 get_alias_set (TREE_TYPE (field)));
5421 break;
5423 case ARRAY_TYPE:
5425 tree value, index;
5426 unsigned HOST_WIDE_INT i;
5427 int need_to_clear;
5428 tree domain;
5429 tree elttype = TREE_TYPE (type);
5430 int const_bounds_p;
5431 HOST_WIDE_INT minelt = 0;
5432 HOST_WIDE_INT maxelt = 0;
5434 domain = TYPE_DOMAIN (type);
5435 const_bounds_p = (TYPE_MIN_VALUE (domain)
5436 && TYPE_MAX_VALUE (domain)
5437 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5438 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5440 /* If we have constant bounds for the range of the type, get them. */
5441 if (const_bounds_p)
5443 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5444 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5447 /* If the constructor has fewer elements than the array, clear
 5448 the whole array first. Similarly if this is a static
5449 constructor of a non-BLKmode object. */
5450 if (cleared)
5451 need_to_clear = 0;
5452 else if (REG_P (target) && TREE_STATIC (exp))
5453 need_to_clear = 1;
5454 else
5456 unsigned HOST_WIDE_INT idx;
5457 tree index, value;
5458 HOST_WIDE_INT count = 0, zero_count = 0;
5459 need_to_clear = ! const_bounds_p;
5461 /* This loop is a more accurate version of the loop in
5462 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5463 is also needed to check for missing elements. */
5464 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5466 HOST_WIDE_INT this_node_count;
5468 if (need_to_clear)
5469 break;
5471 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5473 tree lo_index = TREE_OPERAND (index, 0);
5474 tree hi_index = TREE_OPERAND (index, 1);
5476 if (! host_integerp (lo_index, 1)
5477 || ! host_integerp (hi_index, 1))
5479 need_to_clear = 1;
5480 break;
5483 this_node_count = (tree_low_cst (hi_index, 1)
5484 - tree_low_cst (lo_index, 1) + 1);
5486 else
5487 this_node_count = 1;
5489 count += this_node_count;
5490 if (mostly_zeros_p (value))
5491 zero_count += this_node_count;
5494 /* Clear the entire array first if there are any missing
5495 elements, or if the incidence of zero elements is >=
5496 75%. */
5497 if (! need_to_clear
5498 && (count < maxelt - minelt + 1
5499 || 4 * zero_count >= 3 * count))
5500 need_to_clear = 1;
5503 if (need_to_clear && size > 0)
5505 if (REG_P (target))
5506 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5507 else
5508 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5509 cleared = 1;
5512 if (!cleared && REG_P (target))
5513 /* Inform later passes that the old value is dead. */
5514 emit_clobber (target);
5516 /* Store each element of the constructor into the
5517 corresponding element of TARGET, determined by counting the
5518 elements. */
5519 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5521 enum machine_mode mode;
5522 HOST_WIDE_INT bitsize;
5523 HOST_WIDE_INT bitpos;
5524 rtx xtarget = target;
5526 if (cleared && initializer_zerop (value))
5527 continue;
5529 mode = TYPE_MODE (elttype);
5530 if (mode == BLKmode)
5531 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5532 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5533 : -1);
5534 else
5535 bitsize = GET_MODE_BITSIZE (mode);
5537 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5539 tree lo_index = TREE_OPERAND (index, 0);
5540 tree hi_index = TREE_OPERAND (index, 1);
5541 rtx index_r, pos_rtx;
5542 HOST_WIDE_INT lo, hi, count;
5543 tree position;
5545 /* If the range is constant and "small", unroll the loop. */
5546 if (const_bounds_p
5547 && host_integerp (lo_index, 0)
5548 && host_integerp (hi_index, 0)
5549 && (lo = tree_low_cst (lo_index, 0),
5550 hi = tree_low_cst (hi_index, 0),
5551 count = hi - lo + 1,
5552 (!MEM_P (target)
5553 || count <= 2
5554 || (host_integerp (TYPE_SIZE (elttype), 1)
5555 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5556 <= 40 * 8)))))
5558 lo -= minelt; hi -= minelt;
5559 for (; lo <= hi; lo++)
5561 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5563 if (MEM_P (target)
5564 && !MEM_KEEP_ALIAS_SET_P (target)
5565 && TREE_CODE (type) == ARRAY_TYPE
5566 && TYPE_NONALIASED_COMPONENT (type))
5568 target = copy_rtx (target);
5569 MEM_KEEP_ALIAS_SET_P (target) = 1;
5572 store_constructor_field
5573 (target, bitsize, bitpos, mode, value, type, cleared,
5574 get_alias_set (elttype));
5577 else
5579 rtx loop_start = gen_label_rtx ();
5580 rtx loop_end = gen_label_rtx ();
5581 tree exit_cond;
5583 expand_normal (hi_index);
5585 index = build_decl (EXPR_LOCATION (exp),
5586 VAR_DECL, NULL_TREE, domain);
5587 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
5588 SET_DECL_RTL (index, index_r);
5589 store_expr (lo_index, index_r, 0, false);
5591 /* Build the head of the loop. */
5592 do_pending_stack_adjust ();
5593 emit_label (loop_start);
5595 /* Assign value to element index. */
5596 position =
5597 fold_convert (ssizetype,
5598 fold_build2 (MINUS_EXPR,
5599 TREE_TYPE (index),
5600 index,
5601 TYPE_MIN_VALUE (domain)));
5603 position =
5604 size_binop (MULT_EXPR, position,
5605 fold_convert (ssizetype,
5606 TYPE_SIZE_UNIT (elttype)));
5608 pos_rtx = expand_normal (position);
5609 xtarget = offset_address (target, pos_rtx,
5610 highest_pow2_factor (position));
5611 xtarget = adjust_address (xtarget, mode, 0);
5612 if (TREE_CODE (value) == CONSTRUCTOR)
5613 store_constructor (value, xtarget, cleared,
5614 bitsize / BITS_PER_UNIT);
5615 else
5616 store_expr (value, xtarget, 0, false);
5618 /* Generate a conditional jump to exit the loop. */
5619 exit_cond = build2 (LT_EXPR, integer_type_node,
5620 index, hi_index);
5621 jumpif (exit_cond, loop_end, -1);
5623 /* Update the loop counter, and jump to the head of
5624 the loop. */
5625 expand_assignment (index,
5626 build2 (PLUS_EXPR, TREE_TYPE (index),
5627 index, integer_one_node),
5628 false);
5630 emit_jump (loop_start);
5632 /* Build the end of the loop. */
5633 emit_label (loop_end);
5636 else if ((index != 0 && ! host_integerp (index, 0))
5637 || ! host_integerp (TYPE_SIZE (elttype), 1))
5639 tree position;
5641 if (index == 0)
5642 index = ssize_int (1);
5644 if (minelt)
5645 index = fold_convert (ssizetype,
5646 fold_build2 (MINUS_EXPR,
5647 TREE_TYPE (index),
5648 index,
5649 TYPE_MIN_VALUE (domain)));
5651 position =
5652 size_binop (MULT_EXPR, index,
5653 fold_convert (ssizetype,
5654 TYPE_SIZE_UNIT (elttype)));
5655 xtarget = offset_address (target,
5656 expand_normal (position),
5657 highest_pow2_factor (position));
5658 xtarget = adjust_address (xtarget, mode, 0);
5659 store_expr (value, xtarget, 0, false);
5661 else
5663 if (index != 0)
5664 bitpos = ((tree_low_cst (index, 0) - minelt)
5665 * tree_low_cst (TYPE_SIZE (elttype), 1));
5666 else
5667 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5669 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5670 && TREE_CODE (type) == ARRAY_TYPE
5671 && TYPE_NONALIASED_COMPONENT (type))
5673 target = copy_rtx (target);
5674 MEM_KEEP_ALIAS_SET_P (target) = 1;
5676 store_constructor_field (target, bitsize, bitpos, mode, value,
5677 type, cleared, get_alias_set (elttype));
5680 break;
5683 case VECTOR_TYPE:
5685 unsigned HOST_WIDE_INT idx;
5686 constructor_elt *ce;
5687 int i;
5688 int need_to_clear;
5689 int icode = 0;
5690 tree elttype = TREE_TYPE (type);
5691 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5692 enum machine_mode eltmode = TYPE_MODE (elttype);
5693 HOST_WIDE_INT bitsize;
5694 HOST_WIDE_INT bitpos;
5695 rtvec vector = NULL;
5696 unsigned n_elts;
5697 alias_set_type alias;
5699 gcc_assert (eltmode != BLKmode);
5701 n_elts = TYPE_VECTOR_SUBPARTS (type);
5702 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5704 enum machine_mode mode = GET_MODE (target);
5706 icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5707 if (icode != CODE_FOR_nothing)
5709 unsigned int i;
5711 vector = rtvec_alloc (n_elts);
5712 for (i = 0; i < n_elts; i++)
5713 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5717 /* If the constructor has fewer elements than the vector,
5718 clear the whole vector first. Similarly if this is a static
5719 constructor of a non-BLKmode object. */
5720 if (cleared)
5721 need_to_clear = 0;
5722 else if (REG_P (target) && TREE_STATIC (exp))
5723 need_to_clear = 1;
5724 else
5726 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5727 tree value;
5729 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5731 int n_elts_here = tree_low_cst
5732 (int_const_binop (TRUNC_DIV_EXPR,
5733 TYPE_SIZE (TREE_TYPE (value)),
5734 TYPE_SIZE (elttype), 0), 1);
5736 count += n_elts_here;
5737 if (mostly_zeros_p (value))
5738 zero_count += n_elts_here;
5741 /* Clear the entire vector first if there are any missing elements,
5742 or if the incidence of zero elements is >= 75%. */
5743 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5746 if (need_to_clear && size > 0 && !vector)
5748 if (REG_P (target))
5749 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5750 else
5751 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5752 cleared = 1;
5755 /* Inform later passes that the old value is dead. */
5756 if (!cleared && !vector && REG_P (target))
5757 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5759 if (MEM_P (target))
5760 alias = MEM_ALIAS_SET (target);
5761 else
5762 alias = get_alias_set (elttype);
5764 /* Store each element of the constructor into the corresponding
5765 element of TARGET, determined by counting the elements. */
5766 for (idx = 0, i = 0;
5767 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5768 idx++, i += bitsize / elt_size)
5770 HOST_WIDE_INT eltpos;
5771 tree value = ce->value;
5773 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5774 if (cleared && initializer_zerop (value))
5775 continue;
5777 if (ce->index)
5778 eltpos = tree_low_cst (ce->index, 1);
5779 else
5780 eltpos = i;
5782 if (vector)
5784 /* Vector CONSTRUCTORs should only be built from smaller
5785 vectors in the case of BLKmode vectors. */
5786 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5787 RTVEC_ELT (vector, eltpos)
5788 = expand_normal (value);
5790 else
5792 enum machine_mode value_mode =
5793 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5794 ? TYPE_MODE (TREE_TYPE (value))
5795 : eltmode;
5796 bitpos = eltpos * elt_size;
5797 store_constructor_field (target, bitsize, bitpos,
5798 value_mode, value, type,
5799 cleared, alias);
5803 if (vector)
5804 emit_insn (GEN_FCN (icode)
5805 (target,
5806 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5807 break;
5810 default:
5811 gcc_unreachable ();
5815 /* Store the value of EXP (an expression tree)
5816 into a subfield of TARGET which has mode MODE and occupies
5817 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5818 If MODE is VOIDmode, it means that we are storing into a bit-field.
5820 Always return const0_rtx unless we have something particular to
5821 return.
5823 TYPE is the type of the underlying object,
5825 ALIAS_SET is the alias set for the destination. This value will
5826 (in general) be different from that for TARGET, since TARGET is a
5827 reference to the containing structure.
5829 If NONTEMPORAL is true, try generating a nontemporal store. */
5831 static rtx
5832 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5833 enum machine_mode mode, tree exp, tree type,
5834 alias_set_type alias_set, bool nontemporal)
5836 if (TREE_CODE (exp) == ERROR_MARK)
5837 return const0_rtx;
5839 /* If we have nothing to store, do nothing unless the expression has
5840 side-effects. */
5841 if (bitsize == 0)
5842 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5844 /* If we are storing into an unaligned field of an aligned union that is
5845 in a register, we may have the mode of TARGET being an integer mode but
5846 MODE == BLKmode. In that case, get an aligned object whose size and
5847 alignment are the same as TARGET and store TARGET into it (we can avoid
5848 the store if the field being stored is the entire width of TARGET). Then
5849 call ourselves recursively to store the field into a BLKmode version of
5850 that object. Finally, load from the object into TARGET. This is not
5851 very efficient in general, but should only be slightly more expensive
5852 than the otherwise-required unaligned accesses. Perhaps this can be
5853 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5854 twice, once with emit_move_insn and once via store_field. */
5856 if (mode == BLKmode
5857 && (REG_P (target) || GET_CODE (target) == SUBREG))
5859 rtx object = assign_temp (type, 0, 1, 1);
5860 rtx blk_object = adjust_address (object, BLKmode, 0);
5862 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5863 emit_move_insn (object, target);
5865 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5866 nontemporal);
5868 emit_move_insn (target, object);
5870 /* We want to return the BLKmode version of the data. */
5871 return blk_object;
5874 if (GET_CODE (target) == CONCAT)
5876 /* We're storing into a struct containing a single __complex. */
5878 gcc_assert (!bitpos);
5879 return store_expr (exp, target, 0, nontemporal);
5882 /* If the structure is in a register or if the component
5883 is a bit field, we cannot use addressing to access it.
5884 Use bit-field techniques or SUBREG to store in it. */
5886 if (mode == VOIDmode
5887 || (mode != BLKmode && ! direct_store[(int) mode]
5888 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5889 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5890 || REG_P (target)
5891 || GET_CODE (target) == SUBREG
5892 /* If the field isn't aligned enough to store as an ordinary memref,
5893 store it as a bit field. */
5894 || (mode != BLKmode
5895 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5896 || bitpos % GET_MODE_ALIGNMENT (mode))
5897 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5898 || (bitpos % BITS_PER_UNIT != 0)))
5899 /* If the RHS and field are a constant size and the size of the
5900 RHS isn't the same size as the bitfield, we must use bitfield
5901 operations. */
5902 || (bitsize >= 0
5903 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5904 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
5905 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
5906 decl we must use bitfield operations. */
5907 || (bitsize >= 0
5908 && TREE_CODE (exp) == MEM_REF
5909 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5910 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5911 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
5912 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
5914 rtx temp;
5915 gimple nop_def;
5917 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5918 implies a mask operation. If the precision is the same size as
5919 the field we're storing into, that mask is redundant. This is
5920 particularly common with bit field assignments generated by the
5921 C front end. */
5922 nop_def = get_def_for_expr (exp, NOP_EXPR);
5923 if (nop_def)
5925 tree type = TREE_TYPE (exp);
5926 if (INTEGRAL_TYPE_P (type)
5927 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5928 && bitsize == TYPE_PRECISION (type))
5930 tree op = gimple_assign_rhs1 (nop_def);
5931 type = TREE_TYPE (op);
5932 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5933 exp = op;
5937 temp = expand_normal (exp);
5939 /* If BITSIZE is narrower than the size of the type of EXP
5940 we will be narrowing TEMP. Normally, what's wanted are the
5941 low-order bits. However, if EXP's type is a record and this is a
5942 big-endian machine, we want the upper BITSIZE bits. */
5943 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5944 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5945 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5946 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5947 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5948 - bitsize),
5949 NULL_RTX, 1);
5951 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5952 MODE. */
5953 if (mode != VOIDmode && mode != BLKmode
5954 && mode != TYPE_MODE (TREE_TYPE (exp)))
5955 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5957 /* If the modes of TEMP and TARGET are both BLKmode, both
5958 must be in memory and BITPOS must be aligned on a byte
5959 boundary. If so, we simply do a block copy. Likewise
5960 for a BLKmode-like TARGET. */
5961 if (GET_MODE (temp) == BLKmode
5962 && (GET_MODE (target) == BLKmode
5963 || (MEM_P (target)
5964 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
5965 && (bitpos % BITS_PER_UNIT) == 0
5966 && (bitsize % BITS_PER_UNIT) == 0)))
5968 gcc_assert (MEM_P (target) && MEM_P (temp)
5969 && (bitpos % BITS_PER_UNIT) == 0);
5971 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5972 emit_block_move (target, temp,
5973 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5974 / BITS_PER_UNIT),
5975 BLOCK_OP_NORMAL);
5977 return const0_rtx;
5980 /* Store the value in the bitfield. */
5981 store_bit_field (target, bitsize, bitpos, mode, temp);
5983 return const0_rtx;
5985 else
5987 /* Now build a reference to just the desired component. */
5988 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5990 if (to_rtx == target)
5991 to_rtx = copy_rtx (to_rtx);
5993 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5994 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5995 set_mem_alias_set (to_rtx, alias_set);
5997 return store_expr (exp, to_rtx, 0, nontemporal);
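/* Illustrative note (an addition, not part of the original file): a C-level
   bit-field store such as

       struct s { unsigned int a : 3; unsigned int b : 5; } x;
       x.b = v;

   typically reaches store_field with MODE == VOIDmode (the bit-field case
   described in the comment above the function) and BITSIZE == 5, with
   BITPOS depending on the target's bit-field layout, so the value is
   written with store_bit_field rather than through an ordinary memref.  */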
6001 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6002 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6003 codes and find the ultimate containing object, which we return.
6005 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6006 bit position, and *PUNSIGNEDP to the signedness of the field.
6007 If the position of the field is variable, we store a tree
6008 giving the variable offset (in units) in *POFFSET.
6009 This offset is in addition to the bit position.
6010 If the position is not variable, we store 0 in *POFFSET.
6012 If any of the extraction expressions is volatile,
6013 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6015 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6016 Otherwise, it is a mode that can be used to access the field.
6018 If the field describes a variable-sized object, *PMODE is set to
6019 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6020 this case, but the address of the object can be found.
6022 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6023 look through nodes that serve as markers of a greater alignment than
6024 the one that can be deduced from the expression. These nodes make it
6025 possible for front-ends to prevent temporaries from being created by
6026 the middle-end on alignment considerations. For that purpose, the
6027 normal operating mode at the high level is to always pass FALSE so that
6028 the ultimate containing object is really returned; moreover, the
6029 associated predicate handled_component_p will always return TRUE
6030 on these nodes, thus indicating that they are essentially handled
6031 by get_inner_reference. TRUE should only be passed when the caller
6032 is scanning the expression in order to build another representation
6033 and specifically knows how to handle these nodes; as such, this is
6034 the normal operating mode in the RTL expanders. */
6036 tree
6037 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6038 HOST_WIDE_INT *pbitpos, tree *poffset,
6039 enum machine_mode *pmode, int *punsignedp,
6040 int *pvolatilep, bool keep_aligning)
6042 tree size_tree = 0;
6043 enum machine_mode mode = VOIDmode;
6044 bool blkmode_bitfield = false;
6045 tree offset = size_zero_node;
6046 double_int bit_offset = double_int_zero;
6048 /* First get the mode, signedness, and size. We do this from just the
6049 outermost expression. */
6050 *pbitsize = -1;
6051 if (TREE_CODE (exp) == COMPONENT_REF)
6053 tree field = TREE_OPERAND (exp, 1);
6054 size_tree = DECL_SIZE (field);
6055 if (!DECL_BIT_FIELD (field))
6056 mode = DECL_MODE (field);
6057 else if (DECL_MODE (field) == BLKmode)
6058 blkmode_bitfield = true;
6059 else if (TREE_THIS_VOLATILE (exp)
6060 && flag_strict_volatile_bitfields > 0)
6061 /* Volatile bitfields should be accessed in the mode of the
6062 field's type, not the mode computed based on the bit
6063 size. */
6064 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6066 *punsignedp = DECL_UNSIGNED (field);
6068 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6070 size_tree = TREE_OPERAND (exp, 1);
6071 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6072 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6074 /* For vector types, with the correct size of access, use the mode of
6075 inner type. */
6076 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6077 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6078 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6079 mode = TYPE_MODE (TREE_TYPE (exp));
6081 else
6083 mode = TYPE_MODE (TREE_TYPE (exp));
6084 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6086 if (mode == BLKmode)
6087 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6088 else
6089 *pbitsize = GET_MODE_BITSIZE (mode);
6092 if (size_tree != 0)
6094 if (! host_integerp (size_tree, 1))
6095 mode = BLKmode, *pbitsize = -1;
6096 else
6097 *pbitsize = tree_low_cst (size_tree, 1);
6100 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6101 and find the ultimate containing object. */
6102 while (1)
6104 switch (TREE_CODE (exp))
6106 case BIT_FIELD_REF:
6107 bit_offset
6108 = double_int_add (bit_offset,
6109 tree_to_double_int (TREE_OPERAND (exp, 2)));
6110 break;
6112 case COMPONENT_REF:
6114 tree field = TREE_OPERAND (exp, 1);
6115 tree this_offset = component_ref_field_offset (exp);
6117 /* If this field hasn't been filled in yet, don't go past it.
6118 This should only happen when folding expressions made during
6119 type construction. */
6120 if (this_offset == 0)
6121 break;
6123 offset = size_binop (PLUS_EXPR, offset, this_offset);
6124 bit_offset = double_int_add (bit_offset,
6125 tree_to_double_int
6126 (DECL_FIELD_BIT_OFFSET (field)));
6128 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6130 break;
6132 case ARRAY_REF:
6133 case ARRAY_RANGE_REF:
6135 tree index = TREE_OPERAND (exp, 1);
6136 tree low_bound = array_ref_low_bound (exp);
6137 tree unit_size = array_ref_element_size (exp);
6139 /* We assume all arrays have sizes that are a multiple of a byte.
6140 First subtract the lower bound, if any, in the type of the
6141 index, then convert to sizetype and multiply by the size of
6142 the array element. */
6143 if (! integer_zerop (low_bound))
6144 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6145 index, low_bound);
6147 offset = size_binop (PLUS_EXPR, offset,
6148 size_binop (MULT_EXPR,
6149 fold_convert (sizetype, index),
6150 unit_size));
6152 break;
6154 case REALPART_EXPR:
6155 break;
6157 case IMAGPART_EXPR:
6158 bit_offset = double_int_add (bit_offset,
6159 uhwi_to_double_int (*pbitsize));
6160 break;
6162 case VIEW_CONVERT_EXPR:
6163 if (keep_aligning && STRICT_ALIGNMENT
6164 && (TYPE_ALIGN (TREE_TYPE (exp))
6165 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6166 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6167 < BIGGEST_ALIGNMENT)
6168 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6169 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6170 goto done;
6171 break;
6173 case MEM_REF:
6174 /* Hand back the decl for MEM[&decl, off]. */
6175 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6177 tree off = TREE_OPERAND (exp, 1);
6178 if (!integer_zerop (off))
6180 double_int boff, coff = mem_ref_offset (exp);
6181 boff = double_int_lshift (coff,
6182 BITS_PER_UNIT == 8
6183 ? 3 : exact_log2 (BITS_PER_UNIT),
6184 HOST_BITS_PER_DOUBLE_INT, true);
6185 bit_offset = double_int_add (bit_offset, boff);
6187 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6189 goto done;
6191 default:
6192 goto done;
6195 /* If any reference in the chain is volatile, the effect is volatile. */
6196 if (TREE_THIS_VOLATILE (exp))
6197 *pvolatilep = 1;
6199 exp = TREE_OPERAND (exp, 0);
6201 done:
6203 /* If OFFSET is constant, see if we can return the whole thing as a
6204 constant bit position. Make sure to handle overflow during
6205 this conversion. */
6206 if (host_integerp (offset, 0))
6208 double_int tem = double_int_lshift (tree_to_double_int (offset),
6209 BITS_PER_UNIT == 8
6210 ? 3 : exact_log2 (BITS_PER_UNIT),
6211 HOST_BITS_PER_DOUBLE_INT, true);
6212 tem = double_int_add (tem, bit_offset);
6213 if (double_int_fits_in_shwi_p (tem))
6215 *pbitpos = double_int_to_shwi (tem);
6216 *poffset = offset = NULL_TREE;
6220 /* Otherwise, split it up. */
6221 if (offset)
6223 *pbitpos = double_int_to_shwi (bit_offset);
6224 *poffset = offset;
6227 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6228 if (mode == VOIDmode
6229 && blkmode_bitfield
6230 && (*pbitpos % BITS_PER_UNIT) == 0
6231 && (*pbitsize % BITS_PER_UNIT) == 0)
6232 *pmode = BLKmode;
6233 else
6234 *pmode = mode;
6236 return exp;
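/* A minimal usage sketch (an addition; EXAMPLE_DECOMPOSE is hypothetical
   and kept under #if 0; it only restates the contract documented above).  */
#if 0
static void
example_decompose (tree ref)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep = 0;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &offset,
				   &mode, &unsignedp, &volatilep, false);

  /* BASE is the ultimate containing object; REF denotes the BITSIZE bits
     that start BITPOS bits (plus OFFSET units, when OFFSET is non-null)
     into BASE.  OFFSET is 0 when the position is entirely constant.  */
}
#endif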
6239 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6240 ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
6241 EXP is marked as PACKED. */
6243 bool
6244 contains_packed_reference (const_tree exp)
6246 bool packed_p = false;
6248 while (1)
6250 switch (TREE_CODE (exp))
6252 case COMPONENT_REF:
6254 tree field = TREE_OPERAND (exp, 1);
6255 packed_p = DECL_PACKED (field)
6256 || TYPE_PACKED (TREE_TYPE (field))
6257 || TYPE_PACKED (TREE_TYPE (exp));
6258 if (packed_p)
6259 goto done;
6261 break;
6263 case BIT_FIELD_REF:
6264 case ARRAY_REF:
6265 case ARRAY_RANGE_REF:
6266 case REALPART_EXPR:
6267 case IMAGPART_EXPR:
6268 case VIEW_CONVERT_EXPR:
6269 break;
6271 default:
6272 goto done;
6274 exp = TREE_OPERAND (exp, 0);
6276 done:
6277 return packed_p;
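/* Illustrative example (an addition, not in the original comment): a
   COMPONENT_REF into a struct declared with __attribute__ ((packed))
   makes this function return true, while references into ordinary,
   unpacked aggregates and plain array or complex-part accesses leave
   PACKED_P false.  */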
6280 /* Return a tree of sizetype representing the size, in bytes, of the element
6281 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6283 tree
6284 array_ref_element_size (tree exp)
6286 tree aligned_size = TREE_OPERAND (exp, 3);
6287 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6288 location_t loc = EXPR_LOCATION (exp);
6290 /* If a size was specified in the ARRAY_REF, it's the size measured
6291 in alignment units of the element type. So multiply by that value. */
6292 if (aligned_size)
6294 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6295 sizetype from another type of the same width and signedness. */
6296 if (TREE_TYPE (aligned_size) != sizetype)
6297 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6298 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6299 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6302 /* Otherwise, take the size from that of the element type. Substitute
6303 any PLACEHOLDER_EXPR that we have. */
6304 else
6305 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6308 /* Return a tree representing the lower bound of the array mentioned in
6309 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6311 tree
6312 array_ref_low_bound (tree exp)
6314 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6316 /* If a lower bound is specified in EXP, use it. */
6317 if (TREE_OPERAND (exp, 2))
6318 return TREE_OPERAND (exp, 2);
6320 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6321 substituting for a PLACEHOLDER_EXPR as needed. */
6322 if (domain_type && TYPE_MIN_VALUE (domain_type))
6323 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6325 /* Otherwise, return a zero of the appropriate type. */
6326 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6329 /* Return a tree representing the upper bound of the array mentioned in
6330 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6332 tree
6333 array_ref_up_bound (tree exp)
6335 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6337 /* If there is a domain type and it has an upper bound, use it, substituting
6338 for a PLACEHOLDER_EXPR as needed. */
6339 if (domain_type && TYPE_MAX_VALUE (domain_type))
6340 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6342 /* Otherwise fail. */
6343 return NULL_TREE;
6346 /* Return a tree representing the offset, in bytes, of the field referenced
6347 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6349 tree
6350 component_ref_field_offset (tree exp)
6352 tree aligned_offset = TREE_OPERAND (exp, 2);
6353 tree field = TREE_OPERAND (exp, 1);
6354 location_t loc = EXPR_LOCATION (exp);
6356 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6357 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6358 value. */
6359 if (aligned_offset)
6361 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6362 sizetype from another type of the same width and signedness. */
6363 if (TREE_TYPE (aligned_offset) != sizetype)
6364 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6365 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6366 size_int (DECL_OFFSET_ALIGN (field)
6367 / BITS_PER_UNIT));
6370 /* Otherwise, take the offset from that of the field. Substitute
6371 any PLACEHOLDER_EXPR that we have. */
6372 else
6373 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
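/* A small worked example (an addition, hedged): if the COMPONENT_REF has
   an explicit operand 2 equal to 1 and the field's DECL_OFFSET_ALIGN is
   64, the result is 1 * (64 / BITS_PER_UNIT), i.e. 8 bytes on a target
   with 8-bit units.  */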
6376 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6378 static unsigned HOST_WIDE_INT
6379 target_align (const_tree target)
6381 /* We might have a chain of nested references with intermediate misaligning
6382 bitfield components, so we need to recurse to find out. */
6384 unsigned HOST_WIDE_INT this_align, outer_align;
6386 switch (TREE_CODE (target))
6388 case BIT_FIELD_REF:
6389 return 1;
6391 case COMPONENT_REF:
6392 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6393 outer_align = target_align (TREE_OPERAND (target, 0));
6394 return MIN (this_align, outer_align);
6396 case ARRAY_REF:
6397 case ARRAY_RANGE_REF:
6398 this_align = TYPE_ALIGN (TREE_TYPE (target));
6399 outer_align = target_align (TREE_OPERAND (target, 0));
6400 return MIN (this_align, outer_align);
6402 CASE_CONVERT:
6403 case NON_LVALUE_EXPR:
6404 case VIEW_CONVERT_EXPR:
6405 this_align = TYPE_ALIGN (TREE_TYPE (target));
6406 outer_align = target_align (TREE_OPERAND (target, 0));
6407 return MAX (this_align, outer_align);
6409 default:
6410 return TYPE_ALIGN (TREE_TYPE (target));
6415 /* Given an rtx VALUE that may contain additions and multiplications, return
6416 an equivalent value that just refers to a register, memory, or constant.
6417 This is done by generating instructions to perform the arithmetic and
6418 returning a pseudo-register containing the value.
6420 The returned value may be a REG, SUBREG, MEM or constant. */
6423 force_operand (rtx value, rtx target)
6425 rtx op1, op2;
6426 /* Use subtarget as the target for operand 0 of a binary operation. */
6427 rtx subtarget = get_subtarget (target);
6428 enum rtx_code code = GET_CODE (value);
6430 /* Check for subreg applied to an expression produced by loop optimizer. */
6431 if (code == SUBREG
6432 && !REG_P (SUBREG_REG (value))
6433 && !MEM_P (SUBREG_REG (value)))
6435 value
6436 = simplify_gen_subreg (GET_MODE (value),
6437 force_reg (GET_MODE (SUBREG_REG (value)),
6438 force_operand (SUBREG_REG (value),
6439 NULL_RTX)),
6440 GET_MODE (SUBREG_REG (value)),
6441 SUBREG_BYTE (value));
6442 code = GET_CODE (value);
6445 /* Check for a PIC address load. */
6446 if ((code == PLUS || code == MINUS)
6447 && XEXP (value, 0) == pic_offset_table_rtx
6448 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6449 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6450 || GET_CODE (XEXP (value, 1)) == CONST))
6452 if (!subtarget)
6453 subtarget = gen_reg_rtx (GET_MODE (value));
6454 emit_move_insn (subtarget, value);
6455 return subtarget;
6458 if (ARITHMETIC_P (value))
6460 op2 = XEXP (value, 1);
6461 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6462 subtarget = 0;
6463 if (code == MINUS && CONST_INT_P (op2))
6465 code = PLUS;
6466 op2 = negate_rtx (GET_MODE (value), op2);
6469 /* Check for an addition with OP2 a constant integer and our first
6470 operand a PLUS of a virtual register and something else. In that
6471 case, we want to emit the sum of the virtual register and the
6472 constant first and then add the other value. This allows virtual
6473 register instantiation to simply modify the constant rather than
6474 creating another one around this addition. */
6475 if (code == PLUS && CONST_INT_P (op2)
6476 && GET_CODE (XEXP (value, 0)) == PLUS
6477 && REG_P (XEXP (XEXP (value, 0), 0))
6478 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6479 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6481 rtx temp = expand_simple_binop (GET_MODE (value), code,
6482 XEXP (XEXP (value, 0), 0), op2,
6483 subtarget, 0, OPTAB_LIB_WIDEN);
6484 return expand_simple_binop (GET_MODE (value), code, temp,
6485 force_operand (XEXP (XEXP (value,
6486 0), 1), 0),
6487 target, 0, OPTAB_LIB_WIDEN);
6490 op1 = force_operand (XEXP (value, 0), subtarget);
6491 op2 = force_operand (op2, NULL_RTX);
6492 switch (code)
6494 case MULT:
6495 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6496 case DIV:
6497 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6498 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6499 target, 1, OPTAB_LIB_WIDEN);
6500 else
6501 return expand_divmod (0,
6502 FLOAT_MODE_P (GET_MODE (value))
6503 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6504 GET_MODE (value), op1, op2, target, 0);
6505 case MOD:
6506 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6507 target, 0);
6508 case UDIV:
6509 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6510 target, 1);
6511 case UMOD:
6512 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6513 target, 1);
6514 case ASHIFTRT:
6515 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6516 target, 0, OPTAB_LIB_WIDEN);
6517 default:
6518 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6519 target, 1, OPTAB_LIB_WIDEN);
6522 if (UNARY_P (value))
6524 if (!target)
6525 target = gen_reg_rtx (GET_MODE (value));
6526 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6527 switch (code)
6529 case ZERO_EXTEND:
6530 case SIGN_EXTEND:
6531 case TRUNCATE:
6532 case FLOAT_EXTEND:
6533 case FLOAT_TRUNCATE:
6534 convert_move (target, op1, code == ZERO_EXTEND);
6535 return target;
6537 case FIX:
6538 case UNSIGNED_FIX:
6539 expand_fix (target, op1, code == UNSIGNED_FIX);
6540 return target;
6542 case FLOAT:
6543 case UNSIGNED_FLOAT:
6544 expand_float (target, op1, code == UNSIGNED_FLOAT);
6545 return target;
6547 default:
6548 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6552 #ifdef INSN_SCHEDULING
6553 /* On machines that have insn scheduling, we want all memory references to be
6554 explicit, so we need to deal with such paradoxical SUBREGs. */
6555 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6556 && (GET_MODE_SIZE (GET_MODE (value))
6557 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6558 value
6559 = simplify_gen_subreg (GET_MODE (value),
6560 force_reg (GET_MODE (SUBREG_REG (value)),
6561 force_operand (SUBREG_REG (value),
6562 NULL_RTX)),
6563 GET_MODE (SUBREG_REG (value)),
6564 SUBREG_BYTE (value));
6565 #endif
6567 return value;
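/* A minimal sketch of a typical use (an addition; EXAMPLE_ADDRESS is
   hypothetical and kept under #if 0).  */
#if 0
static rtx
example_address (rtx reg)
{
  /* (plus (reg) (const_int 16)) need not be a valid general operand on
     its own; force_operand emits whatever arithmetic is required and
     returns a register, memory reference or constant that is.  */
  rtx sum = gen_rtx_PLUS (Pmode, reg, GEN_INT (16));
  return force_operand (sum, NULL_RTX);
}
#endif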
6570 /* Subroutine of expand_expr: return nonzero iff there is no way that
6571 EXP can reference X, which is being modified. TOP_P is nonzero if this
6572 call is going to be used to determine whether we need a temporary
6573 for EXP, as opposed to a recursive call to this function.
6575 It is always safe for this routine to return zero since it merely
6576 searches for optimization opportunities. */
6579 safe_from_p (const_rtx x, tree exp, int top_p)
6581 rtx exp_rtl = 0;
6582 int i, nops;
6584 if (x == 0
6585 /* If EXP has varying size, we MUST use a target since we currently
6586 have no way of allocating temporaries of variable size
6587 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6588 So we assume here that something at a higher level has prevented a
6589 clash. This is somewhat bogus, but the best we can do. Only
6590 do this when X is BLKmode and when we are at the top level. */
6591 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6592 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6593 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6594 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6595 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6596 != INTEGER_CST)
6597 && GET_MODE (x) == BLKmode)
6598 /* If X is in the outgoing argument area, it is always safe. */
6599 || (MEM_P (x)
6600 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6601 || (GET_CODE (XEXP (x, 0)) == PLUS
6602 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6603 return 1;
6605 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6606 find the underlying pseudo. */
6607 if (GET_CODE (x) == SUBREG)
6609 x = SUBREG_REG (x);
6610 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6611 return 0;
6614 /* Now look at our tree code and possibly recurse. */
6615 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6617 case tcc_declaration:
6618 exp_rtl = DECL_RTL_IF_SET (exp);
6619 break;
6621 case tcc_constant:
6622 return 1;
6624 case tcc_exceptional:
6625 if (TREE_CODE (exp) == TREE_LIST)
6627 while (1)
6629 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6630 return 0;
6631 exp = TREE_CHAIN (exp);
6632 if (!exp)
6633 return 1;
6634 if (TREE_CODE (exp) != TREE_LIST)
6635 return safe_from_p (x, exp, 0);
6638 else if (TREE_CODE (exp) == CONSTRUCTOR)
6640 constructor_elt *ce;
6641 unsigned HOST_WIDE_INT idx;
6643 for (idx = 0;
6644 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6645 idx++)
6646 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6647 || !safe_from_p (x, ce->value, 0))
6648 return 0;
6649 return 1;
6651 else if (TREE_CODE (exp) == ERROR_MARK)
6652 return 1; /* An already-visited SAVE_EXPR? */
6653 else
6654 return 0;
6656 case tcc_statement:
6657 /* The only case we look at here is the DECL_INITIAL inside a
6658 DECL_EXPR. */
6659 return (TREE_CODE (exp) != DECL_EXPR
6660 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6661 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6662 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6664 case tcc_binary:
6665 case tcc_comparison:
6666 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6667 return 0;
6668 /* Fall through. */
6670 case tcc_unary:
6671 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6673 case tcc_expression:
6674 case tcc_reference:
6675 case tcc_vl_exp:
6676 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6677 the expression. If it is set, we conflict iff we are that rtx or
6678 both are in memory. Otherwise, we check all operands of the
6679 expression recursively. */
6681 switch (TREE_CODE (exp))
6683 case ADDR_EXPR:
6684 /* If the operand is static or we are static, we can't conflict.
6685 Likewise if we don't conflict with the operand at all. */
6686 if (staticp (TREE_OPERAND (exp, 0))
6687 || TREE_STATIC (exp)
6688 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6689 return 1;
6691 /* Otherwise, the only way this can conflict is if we are taking
6692 the address of a DECL whose address is part of X, which is
6693 very rare. */
6694 exp = TREE_OPERAND (exp, 0);
6695 if (DECL_P (exp))
6697 if (!DECL_RTL_SET_P (exp)
6698 || !MEM_P (DECL_RTL (exp)))
6699 return 0;
6700 else
6701 exp_rtl = XEXP (DECL_RTL (exp), 0);
6703 break;
6705 case MISALIGNED_INDIRECT_REF:
6706 case ALIGN_INDIRECT_REF:
6707 case INDIRECT_REF:
6708 if (MEM_P (x)
6709 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6710 get_alias_set (exp)))
6711 return 0;
6712 break;
6714 case CALL_EXPR:
6715 /* Assume that the call will clobber all hard registers and
6716 all of memory. */
6717 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6718 || MEM_P (x))
6719 return 0;
6720 break;
6722 case WITH_CLEANUP_EXPR:
6723 case CLEANUP_POINT_EXPR:
6724 /* Lowered by gimplify.c. */
6725 gcc_unreachable ();
6727 case SAVE_EXPR:
6728 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6730 default:
6731 break;
6734 /* If we have an rtx, we do not need to scan our operands. */
6735 if (exp_rtl)
6736 break;
6738 nops = TREE_OPERAND_LENGTH (exp);
6739 for (i = 0; i < nops; i++)
6740 if (TREE_OPERAND (exp, i) != 0
6741 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6742 return 0;
6744 break;
6746 case tcc_type:
6747 /* Should never get a type here. */
6748 gcc_unreachable ();
6751 /* If we have an rtl, find any enclosed object. Then see if we conflict
6752 with it. */
6753 if (exp_rtl)
6755 if (GET_CODE (exp_rtl) == SUBREG)
6757 exp_rtl = SUBREG_REG (exp_rtl);
6758 if (REG_P (exp_rtl)
6759 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6760 return 0;
6763 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6764 are memory and they conflict. */
6765 return ! (rtx_equal_p (x, exp_rtl)
6766 || (MEM_P (x) && MEM_P (exp_rtl)
6767 && true_dependence (exp_rtl, VOIDmode, x,
6768 rtx_addr_varies_p)));
6771 /* If we reach here, it is safe. */
6772 return 1;
6776 /* Return the highest power of two that EXP is known to be a multiple of.
6777 This is used in updating alignment of MEMs in array references. */
6779 unsigned HOST_WIDE_INT
6780 highest_pow2_factor (const_tree exp)
6782 unsigned HOST_WIDE_INT c0, c1;
6784 switch (TREE_CODE (exp))
6786 case INTEGER_CST:
6787 /* We can find the lowest bit that's a one. If the low
6788 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6789 We need to handle this case since we can find it in a COND_EXPR,
6790 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6791 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6792 later ICE. */
6793 if (TREE_OVERFLOW (exp))
6794 return BIGGEST_ALIGNMENT;
6795 else
6797 /* Note: tree_low_cst is intentionally not used here;
6798 we don't care about the upper bits. */
6799 c0 = TREE_INT_CST_LOW (exp);
6800 c0 &= -c0;
6801 return c0 ? c0 : BIGGEST_ALIGNMENT;
6803 break;
6805 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6806 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6807 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6808 return MIN (c0, c1);
6810 case MULT_EXPR:
6811 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6812 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6813 return c0 * c1;
6815 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6816 case CEIL_DIV_EXPR:
6817 if (integer_pow2p (TREE_OPERAND (exp, 1))
6818 && host_integerp (TREE_OPERAND (exp, 1), 1))
6820 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6821 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6822 return MAX (1, c0 / c1);
6824 break;
6826 case BIT_AND_EXPR:
6827 /* The highest power of two of a bit-and expression is the maximum of
6828 that of its operands. We typically get here for a complex LHS and
6829 a constant negative power of two on the RHS to force an explicit
6830 alignment, so don't bother looking at the LHS. */
6831 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6833 CASE_CONVERT:
6834 case SAVE_EXPR:
6835 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6837 case COMPOUND_EXPR:
6838 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6840 case COND_EXPR:
6841 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6842 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6843 return MIN (c0, c1);
6845 default:
6846 break;
6849 return 1;
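/* Worked example (an addition, derived from the cases above): for the
   tree (i * 12) + 16 with I unknown at compile time, the MULT_EXPR gives
   1 * 4 = 4 (an unrecognized operand contributes 1 and the low set bit of
   12 is 4), the INTEGER_CST 16 gives 16, and the PLUS_EXPR takes
   MIN (4, 16), so the function returns 4.  */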
6852 /* Similar, except that the alignment requirements of TARGET are
6853 taken into account. Assume it is at least as aligned as its
6854 type, unless it is a COMPONENT_REF in which case the layout of
6855 the structure gives the alignment. */
6857 static unsigned HOST_WIDE_INT
6858 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6860 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
6861 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
6863 return MAX (factor, talign);
6866 /* Return &VAR expression for emulated thread local VAR. */
6868 static tree
6869 emutls_var_address (tree var)
6871 tree emuvar = emutls_decl (var);
6872 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6873 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6874 tree arglist = build_tree_list (NULL_TREE, arg);
6875 tree call = build_function_call_expr (UNKNOWN_LOCATION, fn, arglist);
6876 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
6880 /* Subroutine of expand_expr. Expand the two operands of a binary
6881 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6882 The value may be stored in TARGET if TARGET is nonzero. The
6883 MODIFIER argument is as documented by expand_expr. */
6885 static void
6886 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6887 enum expand_modifier modifier)
6889 if (! safe_from_p (target, exp1, 1))
6890 target = 0;
6891 if (operand_equal_p (exp0, exp1, 0))
6893 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6894 *op1 = copy_rtx (*op0);
6896 else
6898 /* If we need to preserve evaluation order, copy exp0 into its own
6899 temporary variable so that it can't be clobbered by exp1. */
6900 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6901 exp0 = save_expr (exp0);
6902 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6903 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6908 /* Return a MEM that contains constant EXP. DEFER is as for
6909 output_constant_def and MODIFIER is as for expand_expr. */
6911 static rtx
6912 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6914 rtx mem;
6916 mem = output_constant_def (exp, defer);
6917 if (modifier != EXPAND_INITIALIZER)
6918 mem = use_anchored_address (mem);
6919 return mem;
6922 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6923 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6925 static rtx
6926 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6927 enum expand_modifier modifier, addr_space_t as)
6929 rtx result, subtarget;
6930 tree inner, offset;
6931 HOST_WIDE_INT bitsize, bitpos;
6932 int volatilep, unsignedp;
6933 enum machine_mode mode1;
6935 /* If we are taking the address of a constant and are at the top level,
6936 we have to use output_constant_def since we can't call force_const_mem
6937 at top level. */
6938 /* ??? This should be considered a front-end bug. We should not be
6939 generating ADDR_EXPR of something that isn't an LVALUE. The only
6940 exception here is STRING_CST. */
6941 if (CONSTANT_CLASS_P (exp))
6942 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6944 /* Everything must be something allowed by is_gimple_addressable. */
6945 switch (TREE_CODE (exp))
6947 case INDIRECT_REF:
6948 /* This case will happen via recursion for &a->b. */
6949 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6951 case MEM_REF:
6953 tree tem = TREE_OPERAND (exp, 0);
6954 if (!integer_zerop (TREE_OPERAND (exp, 1)))
6955 tem = build2 (POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6956 tem,
6957 double_int_to_tree (sizetype, mem_ref_offset (exp)));
6958 return expand_expr (tem, target, tmode, modifier);
6961 case CONST_DECL:
6962 /* Expand the initializer like constants above. */
6963 return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0);
6965 case REALPART_EXPR:
6966 /* The real part of the complex number is always first, therefore
6967 the address is the same as the address of the parent object. */
6968 offset = 0;
6969 bitpos = 0;
6970 inner = TREE_OPERAND (exp, 0);
6971 break;
6973 case IMAGPART_EXPR:
6974 /* The imaginary part of the complex number is always second.
6975 The expression is therefore always offset by the size of the
6976 scalar type. */
6977 offset = 0;
6978 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6979 inner = TREE_OPERAND (exp, 0);
6980 break;
6982 case VAR_DECL:
6983 /* TLS emulation hook - replace __thread VAR's &VAR with
6984 __emutls_get_address (&_emutls.VAR). */
6985 if (! targetm.have_tls
6986 && TREE_CODE (exp) == VAR_DECL
6987 && DECL_THREAD_LOCAL_P (exp))
6989 exp = emutls_var_address (exp);
6990 return expand_expr (exp, target, tmode, modifier);
6992 /* Fall through. */
6994 default:
6995 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6996 expand_expr, as that can have various side effects; LABEL_DECLs, for
6997 example, may not have their DECL_RTL set yet. Expand the rtl of
6998 CONSTRUCTORs too, which should yield a memory reference for the
6999 constructor's contents. Assume language specific tree nodes can
7000 be expanded in some interesting way. */
7001 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7002 if (DECL_P (exp)
7003 || TREE_CODE (exp) == CONSTRUCTOR
7004 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7006 result = expand_expr (exp, target, tmode,
7007 modifier == EXPAND_INITIALIZER
7008 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7010 /* If the DECL isn't in memory, then the DECL wasn't properly
7011 marked TREE_ADDRESSABLE, which will be either a front-end
7012 or a tree optimizer bug. */
7013 gcc_assert (MEM_P (result));
7014 result = XEXP (result, 0);
7016 /* ??? Is this needed anymore? */
7017 if (DECL_P (exp) && !TREE_USED (exp) == 0)
7019 assemble_external (exp);
7020 TREE_USED (exp) = 1;
7023 if (modifier != EXPAND_INITIALIZER
7024 && modifier != EXPAND_CONST_ADDRESS)
7025 result = force_operand (result, target);
7026 return result;
7029 /* Pass FALSE as the last argument to get_inner_reference although
7030 we are expanding to RTL. The rationale is that we know how to
7031 handle "aligning nodes" here: we can just bypass them because
7032 they won't change the final object whose address will be returned
7033 (they actually exist only for that purpose). */
7034 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7035 &mode1, &unsignedp, &volatilep, false);
7036 break;
7039 /* We must have made progress. */
7040 gcc_assert (inner != exp);
7042 subtarget = offset || bitpos ? NULL_RTX : target;
7043 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7044 inner alignment, force the inner to be sufficiently aligned. */
7045 if (CONSTANT_CLASS_P (inner)
7046 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7048 inner = copy_node (inner);
7049 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7050 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7051 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7053 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7055 if (offset)
7057 rtx tmp;
7059 if (modifier != EXPAND_NORMAL)
7060 result = force_operand (result, NULL);
7061 tmp = expand_expr (offset, NULL_RTX, tmode,
7062 modifier == EXPAND_INITIALIZER
7063 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7065 result = convert_memory_address_addr_space (tmode, result, as);
7066 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7068 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7069 result = gen_rtx_PLUS (tmode, result, tmp);
7070 else
7072 subtarget = bitpos ? NULL_RTX : target;
7073 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7074 1, OPTAB_LIB_WIDEN);
7078 if (bitpos)
7080 /* Someone beforehand should have rejected taking the address
7081 of such an object. */
7082 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7084 result = plus_constant (result, bitpos / BITS_PER_UNIT);
7085 if (modifier < EXPAND_SUM)
7086 result = force_operand (result, target);
7089 return result;
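/* An illustrative trace (an addition, hedged): for &s.x with the field x
   at a constant 32-bit offset inside S, get_inner_reference hands back S
   with BITPOS == 32 and no variable OFFSET, so the code above only adds
   32 / BITS_PER_UNIT == 4 bytes (assuming 8-bit units) to the address of
   S and, for a normal modifier, forces the sum into an operand.  */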
7092 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7093 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7095 static rtx
7096 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7097 enum expand_modifier modifier)
7099 addr_space_t as = ADDR_SPACE_GENERIC;
7100 enum machine_mode address_mode = Pmode;
7101 enum machine_mode pointer_mode = ptr_mode;
7102 enum machine_mode rmode;
7103 rtx result;
7105 /* Target mode of VOIDmode says "whatever's natural". */
7106 if (tmode == VOIDmode)
7107 tmode = TYPE_MODE (TREE_TYPE (exp));
7109 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7111 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7112 address_mode = targetm.addr_space.address_mode (as);
7113 pointer_mode = targetm.addr_space.pointer_mode (as);
7116 /* We can get called with some Weird Things if the user does silliness
7117 like "(short) &a". In that case, convert_memory_address won't do
7118 the right thing, so ignore the given target mode. */
7119 if (tmode != address_mode && tmode != pointer_mode)
7120 tmode = address_mode;
7122 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7123 tmode, modifier, as);
7125 /* Despite expand_expr's claims concerning ignoring TMODE when not
7126 strictly convenient, stuff breaks if we don't honor it. Note
7127 that combined with the above, we only do this for pointer modes. */
7128 rmode = GET_MODE (result);
7129 if (rmode == VOIDmode)
7130 rmode = tmode;
7131 if (rmode != tmode)
7132 result = convert_memory_address_addr_space (tmode, result, as);
7134 return result;
7137 /* Generate code for computing CONSTRUCTOR EXP.
7138 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7139 is TRUE, instead of creating a temporary variable in memory,
7140 NULL is returned and the caller needs to handle it differently. */
7142 static rtx
7143 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7144 bool avoid_temp_mem)
7146 tree type = TREE_TYPE (exp);
7147 enum machine_mode mode = TYPE_MODE (type);
7149 /* Try to avoid creating a temporary at all. This is possible
7150 if all of the initializer is zero.
7151 FIXME: try to handle all [0..255] initializers we can handle
7152 with memset. */
7153 if (TREE_STATIC (exp)
7154 && !TREE_ADDRESSABLE (exp)
7155 && target != 0 && mode == BLKmode
7156 && all_zeros_p (exp))
7158 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7159 return target;
7162 /* All elts simple constants => refer to a constant in memory. But
7163 if this is a non-BLKmode mode, let it store a field at a time
7164 since that should make a CONST_INT or CONST_DOUBLE when we
7165 fold. Likewise, if we have a target we can use, it is best to
7166 store directly into the target unless the type is large enough
7167 that memcpy will be used. If we are making an initializer and
7168 all operands are constant, put it in memory as well.
7170 FIXME: Avoid trying to fill vector constructors piece-meal.
7171 Output them with output_constant_def below unless we're sure
7172 they're zeros. This should go away when vector initializers
7173 are treated like VECTOR_CST instead of arrays. */
7174 if ((TREE_STATIC (exp)
7175 && ((mode == BLKmode
7176 && ! (target != 0 && safe_from_p (target, exp, 1)))
7177 || TREE_ADDRESSABLE (exp)
7178 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7179 && (! MOVE_BY_PIECES_P
7180 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7181 TYPE_ALIGN (type)))
7182 && ! mostly_zeros_p (exp))))
7183 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7184 && TREE_CONSTANT (exp)))
7186 rtx constructor;
7188 if (avoid_temp_mem)
7189 return NULL_RTX;
7191 constructor = expand_expr_constant (exp, 1, modifier);
7193 if (modifier != EXPAND_CONST_ADDRESS
7194 && modifier != EXPAND_INITIALIZER
7195 && modifier != EXPAND_SUM)
7196 constructor = validize_mem (constructor);
7198 return constructor;
7201 /* Handle calls that pass values in multiple non-contiguous
7202 locations. The Irix 6 ABI has examples of this. */
7203 if (target == 0 || ! safe_from_p (target, exp, 1)
7204 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7206 if (avoid_temp_mem)
7207 return NULL_RTX;
7209 target
7210 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7211 | (TREE_READONLY (exp)
7212 * TYPE_QUAL_CONST))),
7213 0, TREE_ADDRESSABLE (exp), 1);
7216 store_constructor (exp, target, 0, int_expr_size (exp));
7217 return target;
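/* A hedged illustration (an addition): an all-zero static constructor
   stored to BLKmode memory takes the early clear_storage path above and
   never builds a temporary, while a constructor of mixed nonzero
   constants generally ends up as a pooled constant via
   expand_expr_constant, unless AVOID_TEMP_MEM makes us return NULL_RTX
   so the caller can handle it differently.  */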
7221 /* expand_expr: generate code for computing expression EXP.
7222 An rtx for the computed value is returned. The value is never null.
7223 In the case of a void EXP, const0_rtx is returned.
7225 The value may be stored in TARGET if TARGET is nonzero.
7226 TARGET is just a suggestion; callers must assume that
7227 the rtx returned may not be the same as TARGET.
7229 If TARGET is CONST0_RTX, it means that the value will be ignored.
7231 If TMODE is not VOIDmode, it suggests generating the
7232 result in mode TMODE. But this is done only when convenient.
7233 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7234 TMODE is just a suggestion; callers must assume that
7235 the rtx returned may not have mode TMODE.
7237 Note that TARGET may have neither TMODE nor MODE. In that case, it
7238 probably will not be used.
7240 If MODIFIER is EXPAND_SUM then when EXP is an addition
7241 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7242 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7243 products as above, or REG or MEM, or constant.
7244 Ordinarily in such cases we would output mul or add instructions
7245 and then return a pseudo reg containing the sum.
7247 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7248 it also marks a label as absolutely required (it can't be dead).
7249 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7250 This is used for outputting expressions used in initializers.
7252 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7253 with a constant address even if that address is not normally legitimate.
7254 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7256 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7257 a call parameter. Such targets require special care as we haven't yet
7258 marked TARGET so that it's safe from being trashed by libcalls. We
7259 don't want to use TARGET for anything but the final result;
7260 intermediate values must go elsewhere. Additionally, calls to
7261 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7263 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7264 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7265 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7266 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7267 recursively. */
7270 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7271 enum expand_modifier modifier, rtx *alt_rtl)
7273 rtx ret;
7275 /* Handle ERROR_MARK before anybody tries to access its type. */
7276 if (TREE_CODE (exp) == ERROR_MARK
7277 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7279 ret = CONST0_RTX (tmode);
7280 return ret ? ret : const0_rtx;
7283 /* If this is an expression of some kind and it has an associated line
7284 number, then emit the line number before expanding the expression.
7286 We need to save and restore the file and line information so that
7287 errors discovered during expansion are emitted with the right
7288 information. It would be better of the diagnostic routines
7289 used the file/line information embedded in the tree nodes rather
7290 than globals. */
7291 if (cfun && EXPR_HAS_LOCATION (exp))
7293 location_t saved_location = input_location;
7294 location_t saved_curr_loc = get_curr_insn_source_location ();
7295 tree saved_block = get_curr_insn_block ();
7296 input_location = EXPR_LOCATION (exp);
7297 set_curr_insn_source_location (input_location);
7299 /* Record where the insns produced belong. */
7300 set_curr_insn_block (TREE_BLOCK (exp));
7302 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7304 input_location = saved_location;
7305 set_curr_insn_block (saved_block);
7306 set_curr_insn_source_location (saved_curr_loc);
7308 else
7310 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7313 return ret;
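/* A minimal usage sketch (an addition; EXAMPLE_EXPAND is hypothetical and
   kept under #if 0; expand_expr itself is the wrapper declared in expr.h,
   so this only restates the contract documented above expand_expr_real).  */
#if 0
static void
example_expand (tree exp)
{
  /* Expand EXP for its value, letting the expander choose target and mode.  */
  rtx val ATTRIBUTE_UNUSED = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

  /* Expand EXP only for its side effects; the value is ignored.  */
  expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
#endif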
7317 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7318 enum expand_modifier modifier)
7320 rtx op0, op1, op2, temp;
7321 tree type;
7322 int unsignedp;
7323 enum machine_mode mode;
7324 enum tree_code code = ops->code;
7325 optab this_optab;
7326 rtx subtarget, original_target;
7327 int ignore;
7328 bool reduce_bit_field;
7329 location_t loc = ops->location;
7330 tree treeop0, treeop1;
7331 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7332 ? reduce_to_bit_field_precision ((expr), \
7333 target, \
7334 type) \
7335 : (expr))
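/* Illustrative example (not from the original sources): if TYPE is a
   narrow integer type such as a 3-bit unsigned bit-field type, its mode
   is still a full machine mode (QImode here), so GET_MODE_PRECISION (8)
   exceeds TYPE_PRECISION (3).  reduce_bit_field is then set and
   REDUCE_BIT_FIELD wraps each result in reduce_to_bit_field_precision,
   so that, e.g., 7 + 1 comes back as 0 rather than 8.  */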
7337 type = ops->type;
7338 mode = TYPE_MODE (type);
7339 unsignedp = TYPE_UNSIGNED (type);
7341 treeop0 = ops->op0;
7342 treeop1 = ops->op1;
7344 /* We should be called only on simple (binary or unary) expressions,
7345 exactly those that are valid in gimple expressions that aren't
7346 GIMPLE_SINGLE_RHS (or invalid). */
7347 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7348 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7349 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
7351 ignore = (target == const0_rtx
7352 || ((CONVERT_EXPR_CODE_P (code)
7353 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7354 && TREE_CODE (type) == VOID_TYPE));
7356 /* We should be called only if we need the result. */
7357 gcc_assert (!ignore);
7359 /* An operation in what may be a bit-field type needs the
7360 result to be reduced to the precision of the bit-field type,
7361 which is narrower than that of the type's mode. */
7362 reduce_bit_field = (TREE_CODE (type) == INTEGER_TYPE
7363 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7365 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7366 target = 0;
7368 /* Use subtarget as the target for operand 0 of a binary operation. */
7369 subtarget = get_subtarget (target);
7370 original_target = target;
7372 switch (code)
7374 case NON_LVALUE_EXPR:
7375 case PAREN_EXPR:
7376 CASE_CONVERT:
7377 if (treeop0 == error_mark_node)
7378 return const0_rtx;
7380 if (TREE_CODE (type) == UNION_TYPE)
7382 tree valtype = TREE_TYPE (treeop0);
7384 /* If both input and output are BLKmode, this conversion isn't doing
7385 anything except possibly changing memory attributes. */
7386 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7388 rtx result = expand_expr (treeop0, target, tmode,
7389 modifier);
7391 result = copy_rtx (result);
7392 set_mem_attributes (result, type, 0);
7393 return result;
7396 if (target == 0)
7398 if (TYPE_MODE (type) != BLKmode)
7399 target = gen_reg_rtx (TYPE_MODE (type));
7400 else
7401 target = assign_temp (type, 0, 1, 1);
7404 if (MEM_P (target))
7405 /* Store data into beginning of memory target. */
7406 store_expr (treeop0,
7407 adjust_address (target, TYPE_MODE (valtype), 0),
7408 modifier == EXPAND_STACK_PARM,
7409 false);
7411 else
7413 gcc_assert (REG_P (target));
7415 /* Store this field into a union of the proper type. */
7416 store_field (target,
7417 MIN ((int_size_in_bytes (TREE_TYPE
7418 (treeop0))
7419 * BITS_PER_UNIT),
7420 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7421 0, TYPE_MODE (valtype), treeop0,
7422 type, 0, false);
7425 /* Return the entire union. */
7426 return target;
7429 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
7431 op0 = expand_expr (treeop0, target, VOIDmode,
7432 modifier);
7434 /* If the signedness of the conversion differs and OP0 is
7435 a promoted SUBREG, clear that indication since we now
7436 have to do the proper extension. */
7437 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
7438 && GET_CODE (op0) == SUBREG)
7439 SUBREG_PROMOTED_VAR_P (op0) = 0;
7441 return REDUCE_BIT_FIELD (op0);
7444 op0 = expand_expr (treeop0, NULL_RTX, mode,
7445 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7446 if (GET_MODE (op0) == mode)
7449 /* If OP0 is a constant, just convert it into the proper mode. */
7450 else if (CONSTANT_P (op0))
7452 tree inner_type = TREE_TYPE (treeop0);
7453 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7455 if (modifier == EXPAND_INITIALIZER)
7456 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7457 subreg_lowpart_offset (mode,
7458 inner_mode));
7459 else
7460 op0 = convert_modes (mode, inner_mode, op0,
7461 TYPE_UNSIGNED (inner_type));
7464 else if (modifier == EXPAND_INITIALIZER)
7465 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7467 else if (target == 0)
7468 op0 = convert_to_mode (mode, op0,
7469 TYPE_UNSIGNED (TREE_TYPE
7470 (treeop0)));
7471 else
7473 convert_move (target, op0,
7474 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7475 op0 = target;
7478 return REDUCE_BIT_FIELD (op0);
7480 case ADDR_SPACE_CONVERT_EXPR:
7482 tree treeop0_type = TREE_TYPE (treeop0);
7483 addr_space_t as_to;
7484 addr_space_t as_from;
7486 gcc_assert (POINTER_TYPE_P (type));
7487 gcc_assert (POINTER_TYPE_P (treeop0_type));
7489 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
7490 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
7492 /* Conversions between pointers to the same address space should
7493 have been implemented via CONVERT_EXPR / NOP_EXPR. */
7494 gcc_assert (as_to != as_from);
7496 /* Ask target code to handle conversion between pointers
7497 to overlapping address spaces. */
7498 if (targetm.addr_space.subset_p (as_to, as_from)
7499 || targetm.addr_space.subset_p (as_from, as_to))
7501 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
7502 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
7503 gcc_assert (op0);
7504 return op0;
7507 /* For disjoint address spaces, converting anything but
7508 a null pointer invokes undefined behaviour. We simply
7509 always return a null pointer here. */
7510 return CONST0_RTX (mode);
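/* Illustrative note (not from the original sources): named address
   spaces are a target extension (in the spirit of ISO/IEC TR 18037).
   When one space is a subset of the other, targetm.addr_space.convert
   emits whatever extension or truncation the target needs; for two
   unrelated spaces only a null pointer converts meaningfully, which is
   why a bare CONST0_RTX is returned above.  */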
7513 case POINTER_PLUS_EXPR:
7514 /* Even though the sizetype mode and the pointer's mode can be different,
7515 expand is able to handle this correctly and get the correct result out
7516 of the PLUS_EXPR code. */
7517 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
7518 if sizetype precision is smaller than pointer precision. */
7519 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
7520 treeop1 = fold_convert_loc (loc, type,
7521 fold_convert_loc (loc, ssizetype,
7522 treeop1));
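/* Illustrative example (not from the original sources): on a target with
   64-bit pointers but a 32-bit sizetype, the offset "(sizetype) -4"
   produced for "p - 1" with int *p must be widened by sign extension,
   not zero extension.  Converting through ssizetype above achieves that,
   so the pointer moves back by 4 bytes instead of forward by
   0xfffffffc.  */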
7523 case PLUS_EXPR:
7524 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7525 something else, make sure we add the register to the constant and
7526 then to the other thing. This case can occur during strength
7527 reduction and doing it this way will produce better code if the
7528 frame pointer or argument pointer is eliminated.
7530 fold-const.c will ensure that the constant is always in the inner
7531 PLUS_EXPR, so the only case we need to do anything about is if
7532 sp, ap, or fp is our second argument, in which case we must swap
7533 the innermost first argument and our second argument. */
7535 if (TREE_CODE (treeop0) == PLUS_EXPR
7536 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
7537 && TREE_CODE (treeop1) == VAR_DECL
7538 && (DECL_RTL (treeop1) == frame_pointer_rtx
7539 || DECL_RTL (treeop1) == stack_pointer_rtx
7540 || DECL_RTL (treeop1) == arg_pointer_rtx))
7542 tree t = treeop1;
7544 treeop1 = TREE_OPERAND (treeop0, 0);
7545 TREE_OPERAND (treeop0, 0) = t;
7548 /* If the result is to be ptr_mode and we are adding an integer to
7549 something, we might be forming a constant. So try to use
7550 plus_constant. If it produces a sum and we can't accept it,
7551 use force_operand. This allows P = &ARR[const] to generate
7552 efficient code on machines where a SYMBOL_REF is not a valid
7553 address.
7555 If this is an EXPAND_SUM call, always return the sum. */
7556 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7557 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7559 if (modifier == EXPAND_STACK_PARM)
7560 target = 0;
7561 if (TREE_CODE (treeop0) == INTEGER_CST
7562 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7563 && TREE_CONSTANT (treeop1))
7565 rtx constant_part;
7567 op1 = expand_expr (treeop1, subtarget, VOIDmode,
7568 EXPAND_SUM);
7569 /* Use immed_double_const to ensure that the constant is
7570 truncated according to the mode of OP1, then sign extended
7571 to a HOST_WIDE_INT. Using the constant directly can result
7572 in non-canonical RTL in a 64x32 cross compile. */
7573 constant_part
7574 = immed_double_const (TREE_INT_CST_LOW (treeop0),
7575 (HOST_WIDE_INT) 0,
7576 TYPE_MODE (TREE_TYPE (treeop1)));
7577 op1 = plus_constant (op1, INTVAL (constant_part));
7578 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7579 op1 = force_operand (op1, target);
7580 return REDUCE_BIT_FIELD (op1);
7583 else if (TREE_CODE (treeop1) == INTEGER_CST
7584 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7585 && TREE_CONSTANT (treeop0))
7587 rtx constant_part;
7589 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7590 (modifier == EXPAND_INITIALIZER
7591 ? EXPAND_INITIALIZER : EXPAND_SUM));
7592 if (! CONSTANT_P (op0))
7594 op1 = expand_expr (treeop1, NULL_RTX,
7595 VOIDmode, modifier);
7596 /* Return a PLUS if modifier says it's OK. */
7597 if (modifier == EXPAND_SUM
7598 || modifier == EXPAND_INITIALIZER)
7599 return simplify_gen_binary (PLUS, mode, op0, op1);
7600 goto binop2;
7602 /* Use immed_double_const to ensure that the constant is
7603 truncated according to the mode of OP1, then sign extended
7604 to a HOST_WIDE_INT. Using the constant directly can result
7605 in non-canonical RTL in a 64x32 cross compile. */
7606 constant_part
7607 = immed_double_const (TREE_INT_CST_LOW (treeop1),
7608 (HOST_WIDE_INT) 0,
7609 TYPE_MODE (TREE_TYPE (treeop0)));
7610 op0 = plus_constant (op0, INTVAL (constant_part));
7611 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7612 op0 = force_operand (op0, target);
7613 return REDUCE_BIT_FIELD (op0);
7617 /* No sense saving up arithmetic to be done
7618 if it's all in the wrong mode to form part of an address.
7619 And force_operand won't know whether to sign-extend or
7620 zero-extend. */
7621 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7622 || mode != ptr_mode)
7624 expand_operands (treeop0, treeop1,
7625 subtarget, &op0, &op1, EXPAND_NORMAL);
7626 if (op0 == const0_rtx)
7627 return op1;
7628 if (op1 == const0_rtx)
7629 return op0;
7630 goto binop2;
7633 expand_operands (treeop0, treeop1,
7634 subtarget, &op0, &op1, modifier);
7635 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7637 case MINUS_EXPR:
7638 /* For initializers, we are allowed to return a MINUS of two
7639 symbolic constants. Here we handle all cases when both operands
7640 are constant. */
7641 /* Handle difference of two symbolic constants,
7642 for the sake of an initializer. */
7643 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7644 && really_constant_p (treeop0)
7645 && really_constant_p (treeop1))
7647 expand_operands (treeop0, treeop1,
7648 NULL_RTX, &op0, &op1, modifier);
7650 /* If the last operand is a CONST_INT, use plus_constant of
7651 the negated constant. Else make the MINUS. */
7652 if (CONST_INT_P (op1))
7653 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7654 else
7655 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7658 /* No sense saving up arithmetic to be done
7659 if it's all in the wrong mode to form part of an address.
7660 And force_operand won't know whether to sign-extend or
7661 zero-extend. */
7662 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7663 || mode != ptr_mode)
7664 goto binop;
7666 expand_operands (treeop0, treeop1,
7667 subtarget, &op0, &op1, modifier);
7669 /* Convert A - const to A + (-const). */
7670 if (CONST_INT_P (op1))
7672 op1 = negate_rtx (mode, op1);
7673 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7676 goto binop2;
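/* Illustrative example (not from the original sources): for "x - 5" the
   conversion above turns the subtraction into an addition of the negated
   constant, so the result is typically

       (plus (reg x) (const_int -5))

   which matches the canonical RTL form (PLUS with the constant last) and
   lets later passes combine it with other address arithmetic.  */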
7678 case WIDEN_MULT_PLUS_EXPR:
7679 case WIDEN_MULT_MINUS_EXPR:
7680 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
7681 op2 = expand_normal (ops->op2);
7682 target = expand_widen_pattern_expr (ops, op0, op1, op2,
7683 target, unsignedp);
7684 return target;
7686 case WIDEN_MULT_EXPR:
7687 /* If first operand is constant, swap them.
7688 Thus the following special case checks need only
7689 check the second operand. */
7690 if (TREE_CODE (treeop0) == INTEGER_CST)
7692 tree t1 = treeop0;
7693 treeop0 = treeop1;
7694 treeop1 = t1;
7697 /* First, check if we have a multiplication of one signed and one
7698 unsigned operand. */
7699 if (TREE_CODE (treeop1) != INTEGER_CST
7700 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
7701 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
7703 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
7704 this_optab = usmul_widen_optab;
7705 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7707 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
7709 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7710 expand_operands (treeop0, treeop1, subtarget, &op0, &op1,
7711 EXPAND_NORMAL);
7712 else
7713 expand_operands (treeop0, treeop1, subtarget, &op1, &op0,
7714 EXPAND_NORMAL);
7715 goto binop3;
7719 /* Check for a multiplication with matching signedness. */
7720 else if ((TREE_CODE (treeop1) == INTEGER_CST
7721 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
7722 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
7723 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
7725 tree op0type = TREE_TYPE (treeop0);
7726 enum machine_mode innermode = TYPE_MODE (op0type);
7727 bool zextend_p = TYPE_UNSIGNED (op0type);
7728 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7729 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7731 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7733 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
7735 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
7736 EXPAND_NORMAL);
7737 temp = expand_widening_mult (mode, op0, op1, target,
7738 unsignedp, this_optab);
7739 return REDUCE_BIT_FIELD (temp);
7741 if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
7742 && innermode == word_mode)
7744 rtx htem, hipart;
7745 op0 = expand_normal (treeop0);
7746 if (TREE_CODE (treeop1) == INTEGER_CST)
7747 op1 = convert_modes (innermode, mode,
7748 expand_normal (treeop1), unsignedp);
7749 else
7750 op1 = expand_normal (treeop1);
7751 temp = expand_binop (mode, other_optab, op0, op1, target,
7752 unsignedp, OPTAB_LIB_WIDEN);
7753 hipart = gen_highpart (innermode, temp);
7754 htem = expand_mult_highpart_adjust (innermode, hipart,
7755 op0, op1, hipart,
7756 zextend_p);
7757 if (htem != hipart)
7758 emit_move_insn (hipart, htem);
7759 return REDUCE_BIT_FIELD (temp);
7763 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
7764 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
7765 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7766 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7768 case MULT_EXPR:
7769 /* If this is a fixed-point operation, then we cannot use the code
7770 below because "expand_mult" doesn't support sat/no-sat fixed-point
7771 multiplications. */
7772 if (ALL_FIXED_POINT_MODE_P (mode))
7773 goto binop;
7775 /* If first operand is constant, swap them.
7776 Thus the following special case checks need only
7777 check the second operand. */
7778 if (TREE_CODE (treeop0) == INTEGER_CST)
7780 tree t1 = treeop0;
7781 treeop0 = treeop1;
7782 treeop1 = t1;
7785 /* Attempt to return something suitable for generating an
7786 indexed address, for machines that support that. */
7788 if (modifier == EXPAND_SUM && mode == ptr_mode
7789 && host_integerp (treeop1, 0))
7791 tree exp1 = treeop1;
7793 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7794 EXPAND_SUM);
7796 if (!REG_P (op0))
7797 op0 = force_operand (op0, NULL_RTX);
7798 if (!REG_P (op0))
7799 op0 = copy_to_mode_reg (mode, op0);
7801 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7802 gen_int_mode (tree_low_cst (exp1, 0),
7803 TYPE_MODE (TREE_TYPE (exp1)))));
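/* Illustrative example (not from the original sources): under EXPAND_SUM
   with a small constant multiplier, e.g. the "i * 4" scaling of an int
   array index, the code above returns a bare

       (mult (reg I) (const_int 4))

   rather than multiplying into a pseudo, so that a caller building an
   address can use a scaled-index addressing mode where the target
   provides one.  */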
7806 if (modifier == EXPAND_STACK_PARM)
7807 target = 0;
7809 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7810 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7812 case TRUNC_DIV_EXPR:
7813 case FLOOR_DIV_EXPR:
7814 case CEIL_DIV_EXPR:
7815 case ROUND_DIV_EXPR:
7816 case EXACT_DIV_EXPR:
7817 /* If this is a fixed-point operation, then we cannot use the code
7818 below because "expand_divmod" doesn't support sat/no-sat fixed-point
7819 divisions. */
7820 if (ALL_FIXED_POINT_MODE_P (mode))
7821 goto binop;
7823 if (modifier == EXPAND_STACK_PARM)
7824 target = 0;
7825 /* Possible optimization: compute the dividend with EXPAND_SUM;
7826 then, if the divisor is constant, optimize the case
7827 where some terms of the dividend have coefficients divisible by it. */
7828 expand_operands (treeop0, treeop1,
7829 subtarget, &op0, &op1, EXPAND_NORMAL);
7830 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7832 case RDIV_EXPR:
7833 goto binop;
7835 case TRUNC_MOD_EXPR:
7836 case FLOOR_MOD_EXPR:
7837 case CEIL_MOD_EXPR:
7838 case ROUND_MOD_EXPR:
7839 if (modifier == EXPAND_STACK_PARM)
7840 target = 0;
7841 expand_operands (treeop0, treeop1,
7842 subtarget, &op0, &op1, EXPAND_NORMAL);
7843 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7845 case FIXED_CONVERT_EXPR:
7846 op0 = expand_normal (treeop0);
7847 if (target == 0 || modifier == EXPAND_STACK_PARM)
7848 target = gen_reg_rtx (mode);
7850 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
7851 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7852 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
7853 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
7854 else
7855 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
7856 return target;
7858 case FIX_TRUNC_EXPR:
7859 op0 = expand_normal (treeop0);
7860 if (target == 0 || modifier == EXPAND_STACK_PARM)
7861 target = gen_reg_rtx (mode);
7862 expand_fix (target, op0, unsignedp);
7863 return target;
7865 case FLOAT_EXPR:
7866 op0 = expand_normal (treeop0);
7867 if (target == 0 || modifier == EXPAND_STACK_PARM)
7868 target = gen_reg_rtx (mode);
7869 /* expand_float can't figure out what to do if FROM has VOIDmode.
7870 So give it the correct mode. With -O, cse will optimize this. */
7871 if (GET_MODE (op0) == VOIDmode)
7872 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
7873 op0);
7874 expand_float (target, op0,
7875 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7876 return target;
7878 case NEGATE_EXPR:
7879 op0 = expand_expr (treeop0, subtarget,
7880 VOIDmode, EXPAND_NORMAL);
7881 if (modifier == EXPAND_STACK_PARM)
7882 target = 0;
7883 temp = expand_unop (mode,
7884 optab_for_tree_code (NEGATE_EXPR, type,
7885 optab_default),
7886 op0, target, 0);
7887 gcc_assert (temp);
7888 return REDUCE_BIT_FIELD (temp);
7890 case ABS_EXPR:
7891 op0 = expand_expr (treeop0, subtarget,
7892 VOIDmode, EXPAND_NORMAL);
7893 if (modifier == EXPAND_STACK_PARM)
7894 target = 0;
7896 /* ABS_EXPR is not valid for complex arguments. */
7897 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7898 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7900 /* Unsigned abs is simply the operand. Testing here means we don't
7901 risk generating incorrect code below. */
7902 if (TYPE_UNSIGNED (type))
7903 return op0;
7905 return expand_abs (mode, op0, target, unsignedp,
7906 safe_from_p (target, treeop0, 1));
7908 case MAX_EXPR:
7909 case MIN_EXPR:
7910 target = original_target;
7911 if (target == 0
7912 || modifier == EXPAND_STACK_PARM
7913 || (MEM_P (target) && MEM_VOLATILE_P (target))
7914 || GET_MODE (target) != mode
7915 || (REG_P (target)
7916 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7917 target = gen_reg_rtx (mode);
7918 expand_operands (treeop0, treeop1,
7919 target, &op0, &op1, EXPAND_NORMAL);
7921 /* First try to do it with a special MIN or MAX instruction.
7922 If that does not win, use a conditional jump to select the proper
7923 value. */
7924 this_optab = optab_for_tree_code (code, type, optab_default);
7925 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7926 OPTAB_WIDEN);
7927 if (temp != 0)
7928 return temp;
7930 /* At this point, a MEM target is no longer useful; we will get better
7931 code without it. */
7933 if (! REG_P (target))
7934 target = gen_reg_rtx (mode);
7936 /* If op1 was placed in target, swap op0 and op1. */
7937 if (target != op0 && target == op1)
7939 temp = op0;
7940 op0 = op1;
7941 op1 = temp;
7944 /* We generate better code and avoid problems with op1 mentioning
7945 target by forcing op1 into a pseudo if it isn't a constant. */
7946 if (! CONSTANT_P (op1))
7947 op1 = force_reg (mode, op1);
7950 enum rtx_code comparison_code;
7951 rtx cmpop1 = op1;
7953 if (code == MAX_EXPR)
7954 comparison_code = unsignedp ? GEU : GE;
7955 else
7956 comparison_code = unsignedp ? LEU : LE;
7958 /* Canonicalize to comparisons against 0. */
7959 if (op1 == const1_rtx)
7961 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
7962 or (a != 0 ? a : 1) for unsigned.
7963 For MIN we are safe converting (a <= 1 ? a : 1)
7964 into (a <= 0 ? a : 1) */
7965 cmpop1 = const0_rtx;
7966 if (code == MAX_EXPR)
7967 comparison_code = unsignedp ? NE : GT;
7969 if (op1 == constm1_rtx && !unsignedp)
7971 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
7972 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
7973 cmpop1 = const0_rtx;
7974 if (code == MIN_EXPR)
7975 comparison_code = LT;
7977 #ifdef HAVE_conditional_move
7978 /* Use a conditional move if possible. */
7979 if (can_conditionally_move_p (mode))
7981 rtx insn;
7983 /* ??? Same problem as in expmed.c: emit_conditional_move
7984 forces a stack adjustment via compare_from_rtx, and we
7985 lose the stack adjustment if the sequence we are about
7986 to create is discarded. */
7987 do_pending_stack_adjust ();
7989 start_sequence ();
7991 /* Try to emit the conditional move. */
7992 insn = emit_conditional_move (target, comparison_code,
7993 op0, cmpop1, mode,
7994 op0, op1, mode,
7995 unsignedp);
7997 /* If we could do the conditional move, emit the sequence,
7998 and return. */
7999 if (insn)
8001 rtx seq = get_insns ();
8002 end_sequence ();
8003 emit_insn (seq);
8004 return target;
8007 /* Otherwise discard the sequence and fall back to code with
8008 branches. */
8009 end_sequence ();
8011 #endif
8012 if (target != op0)
8013 emit_move_insn (target, op0);
8015 temp = gen_label_rtx ();
8016 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8017 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8018 -1);
8020 emit_move_insn (target, op1);
8021 emit_label (temp);
8022 return target;
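/* Illustrative sketch (not from the original sources) of the branchy
   fallback emitted just above when neither a min/max optab nor a
   conditional move is available; for a signed MAX_EXPR it is roughly:

       target = op0;
       if (target >= op1)
         goto done;
       target = op1;
     done:

   with the comparison adjusted for MIN, for unsigned types, and for the
   canonicalized comparisons against zero computed earlier.  */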
8024 case BIT_NOT_EXPR:
8025 op0 = expand_expr (treeop0, subtarget,
8026 VOIDmode, EXPAND_NORMAL);
8027 if (modifier == EXPAND_STACK_PARM)
8028 target = 0;
8029 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8030 gcc_assert (temp);
8031 return temp;
8033 /* ??? Can optimize bitwise operations with one arg constant.
8034 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8035 and (a bitwise1 b) bitwise2 b (etc)
8036 but that is probably not worth while. */
8038 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8039 boolean values when we want in all cases to compute both of them. In
8040 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8041 as actual zero-or-1 values and then bitwise anding. In cases where
8042 there cannot be any side effects, better code would be made by
8043 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8044 how to recognize those cases. */
8046 case TRUTH_AND_EXPR:
8047 code = BIT_AND_EXPR;
8048 case BIT_AND_EXPR:
8049 goto binop;
8051 case TRUTH_OR_EXPR:
8052 code = BIT_IOR_EXPR;
8053 case BIT_IOR_EXPR:
8054 goto binop;
8056 case TRUTH_XOR_EXPR:
8057 code = BIT_XOR_EXPR;
8058 case BIT_XOR_EXPR:
8059 goto binop;
8061 case LROTATE_EXPR:
8062 case RROTATE_EXPR:
8063 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8064 || (GET_MODE_PRECISION (TYPE_MODE (type))
8065 == TYPE_PRECISION (type)));
8066 /* fall through */
8068 case LSHIFT_EXPR:
8069 case RSHIFT_EXPR:
8070 /* If this is a fixed-point operation, then we cannot use the code
8071 below because "expand_shift" doesn't support sat/no-sat fixed-point
8072 shifts. */
8073 if (ALL_FIXED_POINT_MODE_P (mode))
8074 goto binop;
8076 if (! safe_from_p (subtarget, treeop1, 1))
8077 subtarget = 0;
8078 if (modifier == EXPAND_STACK_PARM)
8079 target = 0;
8080 op0 = expand_expr (treeop0, subtarget,
8081 VOIDmode, EXPAND_NORMAL);
8082 temp = expand_shift (code, mode, op0, treeop1, target,
8083 unsignedp);
8084 if (code == LSHIFT_EXPR)
8085 temp = REDUCE_BIT_FIELD (temp);
8086 return temp;
8088 /* Could determine the answer when only additive constants differ. Also,
8089 the addition of one can be handled by changing the condition. */
8090 case LT_EXPR:
8091 case LE_EXPR:
8092 case GT_EXPR:
8093 case GE_EXPR:
8094 case EQ_EXPR:
8095 case NE_EXPR:
8096 case UNORDERED_EXPR:
8097 case ORDERED_EXPR:
8098 case UNLT_EXPR:
8099 case UNLE_EXPR:
8100 case UNGT_EXPR:
8101 case UNGE_EXPR:
8102 case UNEQ_EXPR:
8103 case LTGT_EXPR:
8104 temp = do_store_flag (ops,
8105 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8106 tmode != VOIDmode ? tmode : mode);
8107 if (temp)
8108 return temp;
8110 /* Use a compare and a jump for BLKmode comparisons, or for function
8111 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8113 if ((target == 0
8114 || modifier == EXPAND_STACK_PARM
8115 || ! safe_from_p (target, treeop0, 1)
8116 || ! safe_from_p (target, treeop1, 1)
8117 /* Make sure we don't have a hard reg (such as function's return
8118 value) live across basic blocks, if not optimizing. */
8119 || (!optimize && REG_P (target)
8120 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8121 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8123 emit_move_insn (target, const0_rtx);
8125 op1 = gen_label_rtx ();
8126 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8128 emit_move_insn (target, const1_rtx);
8130 emit_label (op1);
8131 return target;
8133 case TRUTH_NOT_EXPR:
8134 if (modifier == EXPAND_STACK_PARM)
8135 target = 0;
8136 op0 = expand_expr (treeop0, target,
8137 VOIDmode, EXPAND_NORMAL);
8138 /* The parser is careful to generate TRUTH_NOT_EXPR
8139 only with operands that are always zero or one. */
8140 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8141 target, 1, OPTAB_LIB_WIDEN);
8142 gcc_assert (temp);
8143 return temp;
8145 case COMPLEX_EXPR:
8146 /* Get the rtx code of the operands. */
8147 op0 = expand_normal (treeop0);
8148 op1 = expand_normal (treeop1);
8150 if (!target)
8151 target = gen_reg_rtx (TYPE_MODE (type));
8153 /* Move the real (op0) and imaginary (op1) parts to their location. */
8154 write_complex_part (target, op0, false);
8155 write_complex_part (target, op1, true);
8157 return target;
8159 case WIDEN_SUM_EXPR:
8161 tree oprnd0 = treeop0;
8162 tree oprnd1 = treeop1;
8164 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8165 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8166 target, unsignedp);
8167 return target;
8170 case REDUC_MAX_EXPR:
8171 case REDUC_MIN_EXPR:
8172 case REDUC_PLUS_EXPR:
8174 op0 = expand_normal (treeop0);
8175 this_optab = optab_for_tree_code (code, type, optab_default);
8176 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8177 gcc_assert (temp);
8178 return temp;
8181 case VEC_EXTRACT_EVEN_EXPR:
8182 case VEC_EXTRACT_ODD_EXPR:
8184 expand_operands (treeop0, treeop1,
8185 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8186 this_optab = optab_for_tree_code (code, type, optab_default);
8187 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8188 OPTAB_WIDEN);
8189 gcc_assert (temp);
8190 return temp;
8193 case VEC_INTERLEAVE_HIGH_EXPR:
8194 case VEC_INTERLEAVE_LOW_EXPR:
8196 expand_operands (treeop0, treeop1,
8197 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8198 this_optab = optab_for_tree_code (code, type, optab_default);
8199 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8200 OPTAB_WIDEN);
8201 gcc_assert (temp);
8202 return temp;
8205 case VEC_LSHIFT_EXPR:
8206 case VEC_RSHIFT_EXPR:
8208 target = expand_vec_shift_expr (ops, target);
8209 return target;
8212 case VEC_UNPACK_HI_EXPR:
8213 case VEC_UNPACK_LO_EXPR:
8215 op0 = expand_normal (treeop0);
8216 this_optab = optab_for_tree_code (code, type, optab_default);
8217 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8218 target, unsignedp);
8219 gcc_assert (temp);
8220 return temp;
8223 case VEC_UNPACK_FLOAT_HI_EXPR:
8224 case VEC_UNPACK_FLOAT_LO_EXPR:
8226 op0 = expand_normal (treeop0);
8227 /* The signedness is determined from the input operand. */
8228 this_optab = optab_for_tree_code (code,
8229 TREE_TYPE (treeop0),
8230 optab_default);
8231 temp = expand_widen_pattern_expr
8232 (ops, op0, NULL_RTX, NULL_RTX,
8233 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8235 gcc_assert (temp);
8236 return temp;
8239 case VEC_WIDEN_MULT_HI_EXPR:
8240 case VEC_WIDEN_MULT_LO_EXPR:
8242 tree oprnd0 = treeop0;
8243 tree oprnd1 = treeop1;
8245 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8246 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8247 target, unsignedp);
8248 gcc_assert (target);
8249 return target;
8252 case VEC_PACK_TRUNC_EXPR:
8253 case VEC_PACK_SAT_EXPR:
8254 case VEC_PACK_FIX_TRUNC_EXPR:
8255 mode = TYPE_MODE (TREE_TYPE (treeop0));
8256 goto binop;
8258 default:
8259 gcc_unreachable ();
8262 /* Here to do an ordinary binary operator. */
8263 binop:
8264 expand_operands (treeop0, treeop1,
8265 subtarget, &op0, &op1, EXPAND_NORMAL);
8266 binop2:
8267 this_optab = optab_for_tree_code (code, type, optab_default);
8268 binop3:
8269 if (modifier == EXPAND_STACK_PARM)
8270 target = 0;
8271 temp = expand_binop (mode, this_optab, op0, op1, target,
8272 unsignedp, OPTAB_LIB_WIDEN);
8273 gcc_assert (temp);
8274 return REDUCE_BIT_FIELD (temp);
8276 #undef REDUCE_BIT_FIELD
8279 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
8280 enum expand_modifier modifier, rtx *alt_rtl)
8282 rtx op0, op1, temp, decl_rtl;
8283 tree type;
8284 int unsignedp;
8285 enum machine_mode mode;
8286 enum tree_code code = TREE_CODE (exp);
8287 optab this_optab;
8288 rtx subtarget, original_target;
8289 int ignore;
8290 tree context;
8291 bool reduce_bit_field;
8292 location_t loc = EXPR_LOCATION (exp);
8293 struct separate_ops ops;
8294 tree treeop0, treeop1, treeop2;
8295 tree ssa_name = NULL_TREE;
8296 gimple g;
8298 type = TREE_TYPE (exp);
8299 mode = TYPE_MODE (type);
8300 unsignedp = TYPE_UNSIGNED (type);
8302 treeop0 = treeop1 = treeop2 = NULL_TREE;
8303 if (!VL_EXP_CLASS_P (exp))
8304 switch (TREE_CODE_LENGTH (code))
8306 default:
8307 case 3: treeop2 = TREE_OPERAND (exp, 2);
8308 case 2: treeop1 = TREE_OPERAND (exp, 1);
8309 case 1: treeop0 = TREE_OPERAND (exp, 0);
8310 case 0: break;
8312 ops.code = code;
8313 ops.type = type;
8314 ops.op0 = treeop0;
8315 ops.op1 = treeop1;
8316 ops.op2 = treeop2;
8317 ops.location = loc;
8319 ignore = (target == const0_rtx
8320 || ((CONVERT_EXPR_CODE_P (code)
8321 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8322 && TREE_CODE (type) == VOID_TYPE));
8324 /* An operation in what may be a bit-field type needs the
8325 result to be reduced to the precision of the bit-field type,
8326 which is narrower than that of the type's mode. */
8327 reduce_bit_field = (!ignore
8328 && TREE_CODE (type) == INTEGER_TYPE
8329 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8331 /* If we are going to ignore this result, we need only do something
8332 if there is a side-effect somewhere in the expression. If there
8333 is, short-circuit the most common cases here. Note that we must
8334 not call expand_expr with anything but const0_rtx in case this
8335 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
8337 if (ignore)
8339 if (! TREE_SIDE_EFFECTS (exp))
8340 return const0_rtx;
8342 /* Ensure we reference a volatile object even if its value is ignored, but
8343 don't do this if all we are doing is taking its address. */
8344 if (TREE_THIS_VOLATILE (exp)
8345 && TREE_CODE (exp) != FUNCTION_DECL
8346 && mode != VOIDmode && mode != BLKmode
8347 && modifier != EXPAND_CONST_ADDRESS)
8349 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
8350 if (MEM_P (temp))
8351 temp = copy_to_reg (temp);
8352 return const0_rtx;
8355 if (TREE_CODE_CLASS (code) == tcc_unary
8356 || code == COMPONENT_REF || code == INDIRECT_REF)
8357 return expand_expr (treeop0, const0_rtx, VOIDmode,
8358 modifier);
8360 else if (TREE_CODE_CLASS (code) == tcc_binary
8361 || TREE_CODE_CLASS (code) == tcc_comparison
8362 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
8364 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8365 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8366 return const0_rtx;
8368 else if (code == BIT_FIELD_REF)
8370 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8371 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8372 expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
8373 return const0_rtx;
8376 target = 0;
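/* Illustrative example (not from the original sources): an expression
   statement whose value is never used, say "(void) (a + b);", arrives
   here with TARGET == const0_rtx.  If it has no side effects nothing is
   emitted and const0_rtx is returned immediately; otherwise only the
   side-effecting subexpressions are expanded, still for value
   const0_rtx.  */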
8379 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8380 target = 0;
8382 /* Use subtarget as the target for operand 0 of a binary operation. */
8383 subtarget = get_subtarget (target);
8384 original_target = target;
8386 switch (code)
8388 case LABEL_DECL:
8390 tree function = decl_function_context (exp);
8392 temp = label_rtx (exp);
8393 temp = gen_rtx_LABEL_REF (Pmode, temp);
8395 if (function != current_function_decl
8396 && function != 0)
8397 LABEL_REF_NONLOCAL_P (temp) = 1;
8399 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
8400 return temp;
8403 case SSA_NAME:
8404 /* ??? ivopts calls the expander without any preparation from
8405 out-of-ssa. So fake instructions as if this were an access to the
8406 base variable. This unnecessarily allocates a pseudo; see whether we
8407 can reuse it if partition base vars have it set already. */
8408 if (!currently_expanding_to_rtl)
8409 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
8410 NULL);
8412 g = get_gimple_for_ssa_name (exp);
8413 if (g)
8414 return expand_expr_real (gimple_assign_rhs_to_tree (g), target, tmode,
8415 modifier, NULL);
8417 ssa_name = exp;
8418 decl_rtl = get_rtx_for_ssa_name (ssa_name);
8419 exp = SSA_NAME_VAR (ssa_name);
8420 goto expand_decl_rtl;
8422 case PARM_DECL:
8423 case VAR_DECL:
8424 /* If a static var's type was incomplete when the decl was written,
8425 but the type is complete now, lay out the decl now. */
8426 if (DECL_SIZE (exp) == 0
8427 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
8428 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
8429 layout_decl (exp, 0);
8431 /* TLS emulation hook - replace __thread vars with
8432 *__emutls_get_address (&_emutls.var). */
8433 if (! targetm.have_tls
8434 && TREE_CODE (exp) == VAR_DECL
8435 && DECL_THREAD_LOCAL_P (exp))
8437 exp = build_fold_indirect_ref_loc (loc, emutls_var_address (exp));
8438 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
8441 /* ... fall through ... */
8443 case FUNCTION_DECL:
8444 case RESULT_DECL:
8445 decl_rtl = DECL_RTL (exp);
8446 expand_decl_rtl:
8447 gcc_assert (decl_rtl);
8448 decl_rtl = copy_rtx (decl_rtl);
8449 /* Record writes to register variables. */
8450 if (modifier == EXPAND_WRITE && REG_P (decl_rtl)
8451 && REGNO (decl_rtl) < FIRST_PSEUDO_REGISTER)
8453 int i = REGNO (decl_rtl);
8454 int nregs = hard_regno_nregs[i][GET_MODE (decl_rtl)];
8455 while (nregs)
8457 SET_HARD_REG_BIT (crtl->asm_clobbers, i);
8458 i++;
8459 nregs--;
8463 /* Ensure the variable is marked as used even if it doesn't go through
8464 a parser. If it hasn't been used yet, write out an external
8465 definition. */
8466 if (! TREE_USED (exp))
8468 assemble_external (exp);
8469 TREE_USED (exp) = 1;
8472 /* Show we haven't gotten RTL for this yet. */
8473 temp = 0;
8475 /* Variables inherited from containing functions should have
8476 been lowered by this point. */
8477 context = decl_function_context (exp);
8478 gcc_assert (!context
8479 || context == current_function_decl
8480 || TREE_STATIC (exp)
8481 /* ??? C++ creates functions that are not TREE_STATIC. */
8482 || TREE_CODE (exp) == FUNCTION_DECL);
8484 /* This is the case of an array whose size is to be determined
8485 from its initializer, while the initializer is still being parsed.
8486 See expand_decl. */
8488 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
8489 temp = validize_mem (decl_rtl);
8491 /* If DECL_RTL is memory, we are in the normal case and the
8492 address is not valid, get the address into a register. */
8494 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
8496 if (alt_rtl)
8497 *alt_rtl = decl_rtl;
8498 decl_rtl = use_anchored_address (decl_rtl);
8499 if (modifier != EXPAND_CONST_ADDRESS
8500 && modifier != EXPAND_SUM
8501 && !memory_address_addr_space_p (DECL_MODE (exp),
8502 XEXP (decl_rtl, 0),
8503 MEM_ADDR_SPACE (decl_rtl)))
8504 temp = replace_equiv_address (decl_rtl,
8505 copy_rtx (XEXP (decl_rtl, 0)));
8508 /* If we got something, return it. But first, set the alignment
8509 if the address is a register. */
8510 if (temp != 0)
8512 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
8513 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
8515 return temp;
8518 /* If the mode of DECL_RTL does not match that of the decl, it
8519 must be a promoted value. We return a SUBREG of the wanted mode,
8520 but mark it so that we know that it was already extended. */
8521 if (REG_P (decl_rtl) && GET_MODE (decl_rtl) != DECL_MODE (exp))
8523 enum machine_mode pmode;
8525 /* Get the signedness to be used for this variable. Ensure we get
8526 the same mode we got when the variable was declared. */
8527 if (code == SSA_NAME
8528 && (g = SSA_NAME_DEF_STMT (ssa_name))
8529 && gimple_code (g) == GIMPLE_CALL)
8530 pmode = promote_function_mode (type, mode, &unsignedp,
8531 TREE_TYPE
8532 (TREE_TYPE (gimple_call_fn (g))),
8534 else
8535 pmode = promote_decl_mode (exp, &unsignedp);
8536 gcc_assert (GET_MODE (decl_rtl) == pmode);
8538 temp = gen_lowpart_SUBREG (mode, decl_rtl);
8539 SUBREG_PROMOTED_VAR_P (temp) = 1;
8540 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
8541 return temp;
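/* Illustrative example (not from the original sources): on targets whose
   PROMOTE_MODE widens sub-word variables, a "short" local may live in a
   word_mode pseudo.  DECL_RTL then has the promoted mode, and the code
   above returns a lowpart SUBREG with SUBREG_PROMOTED_VAR_P set, so
   later conversions know the value is already correctly extended and can
   skip a redundant extension.  */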
8544 return decl_rtl;
8546 case INTEGER_CST:
8547 temp = immed_double_const (TREE_INT_CST_LOW (exp),
8548 TREE_INT_CST_HIGH (exp), mode);
8550 return temp;
8552 case VECTOR_CST:
8554 tree tmp = NULL_TREE;
8555 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
8556 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
8557 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
8558 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
8559 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
8560 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
8561 return const_vector_from_tree (exp);
8562 if (GET_MODE_CLASS (mode) == MODE_INT)
8564 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
8565 if (type_for_mode)
8566 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
8568 if (!tmp)
8569 tmp = build_constructor_from_list (type,
8570 TREE_VECTOR_CST_ELTS (exp));
8571 return expand_expr (tmp, ignore ? const0_rtx : target,
8572 tmode, modifier);
8575 case CONST_DECL:
8576 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
8578 case REAL_CST:
8579 /* If optimized, generate immediate CONST_DOUBLE
8580 which will be turned into memory by reload if necessary.
8582 We used to force a register so that loop.c could see it. But
8583 this does not allow gen_* patterns to perform optimizations with
8584 the constants. It also produces two insns in cases like "x = 1.0;".
8585 On most machines, floating-point constants are not permitted in
8586 many insns, so we'd end up copying it to a register in any case.
8588 Now, we do the copying in expand_binop, if appropriate. */
8589 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
8590 TYPE_MODE (TREE_TYPE (exp)));
8592 case FIXED_CST:
8593 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
8594 TYPE_MODE (TREE_TYPE (exp)));
8596 case COMPLEX_CST:
8597 /* Handle evaluating a complex constant in a CONCAT target. */
8598 if (original_target && GET_CODE (original_target) == CONCAT)
8600 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8601 rtx rtarg, itarg;
8603 rtarg = XEXP (original_target, 0);
8604 itarg = XEXP (original_target, 1);
8606 /* Move the real and imaginary parts separately. */
8607 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
8608 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
8610 if (op0 != rtarg)
8611 emit_move_insn (rtarg, op0);
8612 if (op1 != itarg)
8613 emit_move_insn (itarg, op1);
8615 return original_target;
8618 /* ... fall through ... */
8620 case STRING_CST:
8621 temp = expand_expr_constant (exp, 1, modifier);
8623 /* temp contains a constant address.
8624 On RISC machines where a constant address isn't valid,
8625 make some insns to get that address into a register. */
8626 if (modifier != EXPAND_CONST_ADDRESS
8627 && modifier != EXPAND_INITIALIZER
8628 && modifier != EXPAND_SUM
8629 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
8630 MEM_ADDR_SPACE (temp)))
8631 return replace_equiv_address (temp,
8632 copy_rtx (XEXP (temp, 0)));
8633 return temp;
8635 case SAVE_EXPR:
8637 tree val = treeop0;
8638 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
8640 if (!SAVE_EXPR_RESOLVED_P (exp))
8642 /* We can indeed still hit this case, typically via builtin
8643 expanders calling save_expr immediately before expanding
8644 something. Assume this means that we only have to deal
8645 with non-BLKmode values. */
8646 gcc_assert (GET_MODE (ret) != BLKmode);
8648 val = build_decl (EXPR_LOCATION (exp),
8649 VAR_DECL, NULL, TREE_TYPE (exp));
8650 DECL_ARTIFICIAL (val) = 1;
8651 DECL_IGNORED_P (val) = 1;
8652 treeop0 = val;
8653 TREE_OPERAND (exp, 0) = treeop0;
8654 SAVE_EXPR_RESOLVED_P (exp) = 1;
8656 if (!CONSTANT_P (ret))
8657 ret = copy_to_reg (ret);
8658 SET_DECL_RTL (val, ret);
8661 return ret;
8665 case CONSTRUCTOR:
8666 /* If we don't need the result, just ensure we evaluate any
8667 subexpressions. */
8668 if (ignore)
8670 unsigned HOST_WIDE_INT idx;
8671 tree value;
8673 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
8674 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
8676 return const0_rtx;
8679 return expand_constructor (exp, target, modifier, false);
8681 case MISALIGNED_INDIRECT_REF:
8682 case ALIGN_INDIRECT_REF:
8683 case INDIRECT_REF:
8685 tree exp1 = treeop0;
8686 addr_space_t as = ADDR_SPACE_GENERIC;
8687 enum machine_mode address_mode = Pmode;
8689 if (modifier != EXPAND_WRITE)
8691 tree t;
8693 t = fold_read_from_constant_string (exp);
8694 if (t)
8695 return expand_expr (t, target, tmode, modifier);
8698 if (POINTER_TYPE_P (TREE_TYPE (exp1)))
8700 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp1)));
8701 address_mode = targetm.addr_space.address_mode (as);
8704 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
8705 op0 = memory_address_addr_space (mode, op0, as);
8707 if (code == ALIGN_INDIRECT_REF)
8709 int align = TYPE_ALIGN_UNIT (type);
8710 op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align));
8711 op0 = memory_address_addr_space (mode, op0, as);
8714 temp = gen_rtx_MEM (mode, op0);
8716 set_mem_attributes (temp, exp, 0);
8717 set_mem_addr_space (temp, as);
8719 /* Resolve the misalignment now, so that we don't have to remember
8720 to resolve it later. Of course, this only works for reads. */
8721 if (code == MISALIGNED_INDIRECT_REF)
8723 int icode;
8724 rtx reg, insn;
8726 gcc_assert (modifier == EXPAND_NORMAL
8727 || modifier == EXPAND_STACK_PARM);
8729 /* The vectorizer should have already checked the mode. */
8730 icode = optab_handler (movmisalign_optab, mode)->insn_code;
8731 gcc_assert (icode != CODE_FOR_nothing);
8733 /* We've already validated the memory, and we're creating a
8734 new pseudo destination. The predicates really can't fail. */
8735 reg = gen_reg_rtx (mode);
8737 /* Nor can the insn generator. */
8738 insn = GEN_FCN (icode) (reg, temp);
8739 emit_insn (insn);
8741 return reg;
8744 return temp;
8747 case TARGET_MEM_REF:
8749 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
8750 struct mem_address addr;
8751 tree base;
8753 get_address_description (exp, &addr);
8754 op0 = addr_for_mem_ref (&addr, as, true);
8755 op0 = memory_address_addr_space (mode, op0, as);
8756 temp = gen_rtx_MEM (mode, op0);
8757 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
8758 set_mem_addr_space (temp, as);
8759 base = get_base_address (TMR_ORIGINAL (exp));
8760 if (INDIRECT_REF_P (base)
8761 && TMR_BASE (exp)
8762 && TREE_CODE (TMR_BASE (exp)) == SSA_NAME
8763 && POINTER_TYPE_P (TREE_TYPE (TMR_BASE (exp))))
8765 set_mem_expr (temp, build1 (INDIRECT_REF,
8766 TREE_TYPE (exp), TMR_BASE (exp)));
8767 set_mem_offset (temp, NULL_RTX);
8770 return temp;
8772 case MEM_REF:
8774 addr_space_t as
8775 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8776 enum machine_mode address_mode;
8777 tree base = TREE_OPERAND (exp, 0);
8778 /* Handle expansion of non-aliased memory with non-BLKmode. That
8779 might end up in a register. */
8780 if (TREE_CODE (base) == ADDR_EXPR)
8782 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
8783 tree bit_offset;
8784 base = TREE_OPERAND (base, 0);
8785 if (!DECL_P (base))
8787 HOST_WIDE_INT off;
8788 base = get_addr_base_and_unit_offset (base, &off);
8789 gcc_assert (base);
8790 offset += off;
8792 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
8793 decl we must use bitfield operations. */
8794 if (DECL_P (base)
8795 && !TREE_ADDRESSABLE (base)
8796 && DECL_MODE (base) != BLKmode
8797 && DECL_RTL_SET_P (base)
8798 && !MEM_P (DECL_RTL (base)))
8800 tree bftype;
8801 if (offset == 0
8802 && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
8803 && (GET_MODE_BITSIZE (DECL_MODE (base))
8804 == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
8805 return expand_expr (build1 (VIEW_CONVERT_EXPR,
8806 TREE_TYPE (exp), base),
8807 target, tmode, modifier);
8808 bit_offset = bitsize_int (offset * BITS_PER_UNIT);
8809 bftype = TREE_TYPE (base);
8810 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
8811 bftype = TREE_TYPE (exp);
8812 return expand_expr (build3 (BIT_FIELD_REF, bftype,
8813 base,
8814 TYPE_SIZE (TREE_TYPE (exp)),
8815 bit_offset),
8816 target, tmode, modifier);
8819 address_mode = targetm.addr_space.address_mode (as);
8820 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, address_mode,
8821 EXPAND_NORMAL);
8822 if (!integer_zerop (TREE_OPERAND (exp, 1)))
8824 rtx off;
8825 off = immed_double_int_const (mem_ref_offset (exp), address_mode);
8826 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
8828 op0 = memory_address_addr_space (mode, op0, as);
8829 temp = gen_rtx_MEM (mode, op0);
8830 set_mem_attributes (temp, exp, 0);
8831 set_mem_addr_space (temp, as);
8832 if (TREE_THIS_VOLATILE (exp))
8833 MEM_VOLATILE_P (temp) = 1;
8834 return temp;
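/* Illustrative example (not from the original sources): a MEM_REF whose
   base is a pointer P and whose second operand encodes a constant offset
   of 16 takes the path above: P is expanded in the address mode, the
   offset is folded in with simplify_gen_binary, and the reference
   becomes roughly

       (mem (plus (reg P) (const_int 16)))

   with the alias and address-space attributes taken from the MEM_REF
   itself.  */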
8837 case ARRAY_REF:
8840 tree array = treeop0;
8841 tree index = treeop1;
8843 /* Fold an expression like: "foo"[2].
8844 This is not done in fold so it won't happen inside &.
8845 Don't fold if this is for wide characters since it's too
8846 difficult to do correctly and this is a very rare case. */
8848 if (modifier != EXPAND_CONST_ADDRESS
8849 && modifier != EXPAND_INITIALIZER
8850 && modifier != EXPAND_MEMORY)
8852 tree t = fold_read_from_constant_string (exp);
8854 if (t)
8855 return expand_expr (t, target, tmode, modifier);
8858 /* If this is a constant index into a constant array,
8859 just get the value from the array. Handle both the cases when
8860 we have an explicit constructor and when our operand is a variable
8861 that was declared const. */
8863 if (modifier != EXPAND_CONST_ADDRESS
8864 && modifier != EXPAND_INITIALIZER
8865 && modifier != EXPAND_MEMORY
8866 && TREE_CODE (array) == CONSTRUCTOR
8867 && ! TREE_SIDE_EFFECTS (array)
8868 && TREE_CODE (index) == INTEGER_CST)
8870 unsigned HOST_WIDE_INT ix;
8871 tree field, value;
8873 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
8874 field, value)
8875 if (tree_int_cst_equal (field, index))
8877 if (!TREE_SIDE_EFFECTS (value))
8878 return expand_expr (fold (value), target, tmode, modifier);
8879 break;
8883 else if (optimize >= 1
8884 && modifier != EXPAND_CONST_ADDRESS
8885 && modifier != EXPAND_INITIALIZER
8886 && modifier != EXPAND_MEMORY
8887 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8888 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8889 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
8890 && targetm.binds_local_p (array))
8892 if (TREE_CODE (index) == INTEGER_CST)
8894 tree init = DECL_INITIAL (array);
8896 if (TREE_CODE (init) == CONSTRUCTOR)
8898 unsigned HOST_WIDE_INT ix;
8899 tree field, value;
8901 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
8902 field, value)
8903 if (tree_int_cst_equal (field, index))
8905 if (TREE_SIDE_EFFECTS (value))
8906 break;
8908 if (TREE_CODE (value) == CONSTRUCTOR)
8910 /* If VALUE is a CONSTRUCTOR, this
8911 optimization is only useful if
8912 this doesn't store the CONSTRUCTOR
8913 into memory. If it does, it is more
8914 efficient to just load the data from
8915 the array directly. */
8916 rtx ret = expand_constructor (value, target,
8917 modifier, true);
8918 if (ret == NULL_RTX)
8919 break;
8922 return expand_expr (fold (value), target, tmode,
8923 modifier);
8926 else if (TREE_CODE (init) == STRING_CST)
8928 tree index1 = index;
8929 tree low_bound = array_ref_low_bound (exp);
8930 index1 = fold_convert_loc (loc, sizetype,
8931 treeop1);
8933 /* Optimize the special-case of a zero lower bound.
8935 We convert the low_bound to sizetype to avoid some problems
8936 with constant folding. (E.g. suppose the lower bound is 1,
8937 and its mode is QI. Without the conversion, (ARRAY
8938 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8939 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8941 if (! integer_zerop (low_bound))
8942 index1 = size_diffop_loc (loc, index1,
8943 fold_convert_loc (loc, sizetype,
8944 low_bound));
8946 if (0 > compare_tree_int (index1,
8947 TREE_STRING_LENGTH (init)))
8949 tree type = TREE_TYPE (TREE_TYPE (init));
8950 enum machine_mode mode = TYPE_MODE (type);
8952 if (GET_MODE_CLASS (mode) == MODE_INT
8953 && GET_MODE_SIZE (mode) == 1)
8954 return gen_int_mode (TREE_STRING_POINTER (init)
8955 [TREE_INT_CST_LOW (index1)],
8956 mode);
8962 goto normal_inner_ref;
8964 case COMPONENT_REF:
8965 /* If the operand is a CONSTRUCTOR, we can just extract the
8966 appropriate field if it is present. */
8967 if (TREE_CODE (treeop0) == CONSTRUCTOR)
8969 unsigned HOST_WIDE_INT idx;
8970 tree field, value;
8972 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
8973 idx, field, value)
8974 if (field == treeop1
8975 /* We can normally use the value of the field in the
8976 CONSTRUCTOR. However, if this is a bitfield in
8977 an integral mode that we can fit in a HOST_WIDE_INT,
8978 we must mask only the number of bits in the bitfield,
8979 since this is done implicitly by the constructor. If
8980 the bitfield does not meet either of those conditions,
8981 we can't do this optimization. */
8982 && (! DECL_BIT_FIELD (field)
8983 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
8984 && (GET_MODE_BITSIZE (DECL_MODE (field))
8985 <= HOST_BITS_PER_WIDE_INT))))
8987 if (DECL_BIT_FIELD (field)
8988 && modifier == EXPAND_STACK_PARM)
8989 target = 0;
8990 op0 = expand_expr (value, target, tmode, modifier);
8991 if (DECL_BIT_FIELD (field))
8993 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
8994 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
8996 if (TYPE_UNSIGNED (TREE_TYPE (field)))
8998 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
8999 op0 = expand_and (imode, op0, op1, target);
9001 else
9003 tree count
9004 = build_int_cst (NULL_TREE,
9005 GET_MODE_BITSIZE (imode) - bitsize);
9007 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9008 target, 0);
9009 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9010 target, 0);
9014 return op0;
9017 goto normal_inner_ref;
9019 case BIT_FIELD_REF:
9020 case ARRAY_RANGE_REF:
9021 normal_inner_ref:
9023 enum machine_mode mode1, mode2;
9024 HOST_WIDE_INT bitsize, bitpos;
9025 tree offset;
9026 int volatilep = 0, must_force_mem;
9027 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9028 &mode1, &unsignedp, &volatilep, true);
9029 rtx orig_op0, memloc;
9031 /* If we got back the original object, something is wrong. Perhaps
9032 we are evaluating an expression too early. In any event, don't
9033 infinitely recurse. */
9034 gcc_assert (tem != exp);
9036 /* If TEM's type is a union of variable size, pass TARGET to the inner
9037 computation, since it will need a temporary and TARGET is known
9038 to be able to serve as one. This occurs in unchecked conversion in Ada. */
9039 orig_op0 = op0
9040 = expand_expr (tem,
9041 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9042 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9043 != INTEGER_CST)
9044 && modifier != EXPAND_STACK_PARM
9045 ? target : NULL_RTX),
9046 VOIDmode,
9047 (modifier == EXPAND_INITIALIZER
9048 || modifier == EXPAND_CONST_ADDRESS
9049 || modifier == EXPAND_STACK_PARM)
9050 ? modifier : EXPAND_NORMAL);
9053 /* If the bitfield is volatile, we want to access it in the
9054 field's mode, not the computed mode. */
9055 if (volatilep
9056 && GET_CODE (op0) == MEM
9057 && flag_strict_volatile_bitfields > 0)
9058 op0 = adjust_address (op0, mode1, 0);
9060 mode2
9061 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9063 /* If we have either an offset, a BLKmode result, or a reference
9064 outside the underlying object, we must force it to memory.
9065 Such a case can occur in Ada if we have unchecked conversion
9066 of an expression from a scalar type to an aggregate type or
9067 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9068 passed a partially uninitialized object or a view-conversion
9069 to a larger size. */
9070 must_force_mem = (offset
9071 || mode1 == BLKmode
9072 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9074 /* Handle CONCAT first. */
9075 if (GET_CODE (op0) == CONCAT && !must_force_mem)
9077 if (bitpos == 0
9078 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9079 return op0;
9080 if (bitpos == 0
9081 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9082 && bitsize)
9084 op0 = XEXP (op0, 0);
9085 mode2 = GET_MODE (op0);
9087 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9088 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9089 && bitpos
9090 && bitsize)
9092 op0 = XEXP (op0, 1);
9093 bitpos = 0;
9094 mode2 = GET_MODE (op0);
9096 else
9097 /* Otherwise force into memory. */
9098 must_force_mem = 1;
9101 /* If this is a constant, put it in a register if it is a legitimate
9102 constant and we don't need a memory reference. */
9103 if (CONSTANT_P (op0)
9104 && mode2 != BLKmode
9105 && LEGITIMATE_CONSTANT_P (op0)
9106 && !must_force_mem)
9107 op0 = force_reg (mode2, op0);
9109 /* Otherwise, if this is a constant, try to force it to the constant
9110 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9111 is a legitimate constant. */
9112 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9113 op0 = validize_mem (memloc);
9115 /* Otherwise, if this is a constant or the object is not in memory
9116 and needs to be, put it there. */
9117 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9119 tree nt = build_qualified_type (TREE_TYPE (tem),
9120 (TYPE_QUALS (TREE_TYPE (tem))
9121 | TYPE_QUAL_CONST));
9122 memloc = assign_temp (nt, 1, 1, 1);
9123 emit_move_insn (memloc, op0);
9124 op0 = memloc;
9127 if (offset)
9129 enum machine_mode address_mode;
9130 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9131 EXPAND_SUM);
9133 gcc_assert (MEM_P (op0));
9135 address_mode
9136 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0));
9137 if (GET_MODE (offset_rtx) != address_mode)
9138 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9140 if (GET_MODE (op0) == BLKmode
9141 /* A constant address in OP0 can have VOIDmode; we must
9142 not try to call force_reg in that case. */
9143 && GET_MODE (XEXP (op0, 0)) != VOIDmode
9144 && bitsize != 0
9145 && (bitpos % bitsize) == 0
9146 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9147 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9149 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9150 bitpos = 0;
9153 op0 = offset_address (op0, offset_rtx,
9154 highest_pow2_factor (offset));
9157 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9158 record its alignment as BIGGEST_ALIGNMENT. */
9159 if (MEM_P (op0) && bitpos == 0 && offset != 0
9160 && is_aligning_offset (offset, tem))
9161 set_mem_align (op0, BIGGEST_ALIGNMENT);
9163 /* Don't forget about volatility even if this is a bitfield. */
9164 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9166 if (op0 == orig_op0)
9167 op0 = copy_rtx (op0);
9169 MEM_VOLATILE_P (op0) = 1;
9172 /* In cases where an aligned union has an unaligned object
9173 as a field, we might be extracting a BLKmode value from
9174 an integer-mode (e.g., SImode) object. Handle this case
9175 by doing the extract into an object as wide as the field
9176 (which we know to be the width of a basic mode), then
9177 storing into memory, and changing the mode to BLKmode. */
9178 if (mode1 == VOIDmode
9179 || REG_P (op0) || GET_CODE (op0) == SUBREG
9180 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9181 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9182 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9183 && modifier != EXPAND_CONST_ADDRESS
9184 && modifier != EXPAND_INITIALIZER)
9185 /* If the field is volatile, we always want an aligned
9186 access. */
9187 || (volatilep && flag_strict_volatile_bitfields > 0)
9188 /* If the field isn't aligned enough to fetch as a memref,
9189 fetch it as a bit field. */
9190 || (mode1 != BLKmode
9191 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9192 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9193 || (MEM_P (op0)
9194 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9195 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9196 && ((modifier == EXPAND_CONST_ADDRESS
9197 || modifier == EXPAND_INITIALIZER)
9198 ? STRICT_ALIGNMENT
9199 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9200 || (bitpos % BITS_PER_UNIT != 0)))
9201 /* If the type and the field are a constant size and the
9202 size of the type isn't the same size as the bitfield,
9203 we must use bitfield operations. */
9204 || (bitsize >= 0
9205 && TYPE_SIZE (TREE_TYPE (exp))
9206 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9207 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
9208 bitsize)))
9210 enum machine_mode ext_mode = mode;
9212 if (ext_mode == BLKmode
9213 && ! (target != 0 && MEM_P (op0)
9214 && MEM_P (target)
9215 && bitpos % BITS_PER_UNIT == 0))
9216 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9218 if (ext_mode == BLKmode)
9220 if (target == 0)
9221 target = assign_temp (type, 0, 1, 1);
9223 if (bitsize == 0)
9224 return target;
9226 /* In this case, BITPOS must start at a byte boundary and
9227 TARGET, if specified, must be a MEM. */
9228 gcc_assert (MEM_P (op0)
9229 && (!target || MEM_P (target))
9230 && !(bitpos % BITS_PER_UNIT));
9232 emit_block_move (target,
9233 adjust_address (op0, VOIDmode,
9234 bitpos / BITS_PER_UNIT),
9235 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
9236 / BITS_PER_UNIT),
9237 (modifier == EXPAND_STACK_PARM
9238 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9240 return target;
9243 op0 = validize_mem (op0);
9245 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
9246 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9248 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
9249 (modifier == EXPAND_STACK_PARM
9250 ? NULL_RTX : target),
9251 ext_mode, ext_mode);
9253 /* If the result is a record type and BITSIZE is narrower than
9254 the mode of OP0, an integral mode, and this is a big endian
9255 machine, we must put the field into the high-order bits. */
9256 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9257 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9258 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
9259 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9260 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
9261 - bitsize),
9262 op0, 1);
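/* (Illustrative example: a 24-bit field extracted into a 32-bit
   integral OP0 on a big-endian target is shifted left by 8 above so
   that it ends up occupying the high-order bits.)  */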
9264 /* If the result type is BLKmode, store the data into a temporary
9265 of the appropriate type, but with the mode corresponding to the
9266 mode for the data we have (op0's mode). It's tempting to make
9267 this a constant type, since we know it's only being stored once,
9268 but that can cause problems if we are taking the address of this
9269 COMPONENT_REF because the MEM of any reference via that address
9270 will have flags corresponding to the type, which will not
9271 necessarily be constant. */
9272 if (mode == BLKmode)
9274 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
9275 rtx new_rtx;
9277 /* If the reference doesn't use the alias set of its type,
9278 we cannot create the temporary using that type. */
9279 if (component_uses_parent_alias_set (exp))
9281 new_rtx = assign_stack_local (ext_mode, size, 0);
9282 set_mem_alias_set (new_rtx, get_alias_set (exp));
9284 else
9285 new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
9287 emit_move_insn (new_rtx, op0);
9288 op0 = copy_rtx (new_rtx);
9289 PUT_MODE (op0, BLKmode);
9290 set_mem_attributes (op0, exp, 1);
9293 return op0;
9296 /* If the result is BLKmode, use that to access the object
9297 now as well. */
9298 if (mode == BLKmode)
9299 mode1 = BLKmode;
9301 /* Get a reference to just this component. */
9302 if (modifier == EXPAND_CONST_ADDRESS
9303 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9304 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
9305 else
9306 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9308 if (op0 == orig_op0)
9309 op0 = copy_rtx (op0);
9311 set_mem_attributes (op0, exp, 0);
9312 if (REG_P (XEXP (op0, 0)))
9313 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9315 MEM_VOLATILE_P (op0) |= volatilep;
9316 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
9317 || modifier == EXPAND_CONST_ADDRESS
9318 || modifier == EXPAND_INITIALIZER)
9319 return op0;
9320 else if (target == 0)
9321 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9323 convert_move (target, op0, unsignedp);
9324 return target;
9327 case OBJ_TYPE_REF:
9328 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
9330 case CALL_EXPR:
9331 /* All valid uses of __builtin_va_arg_pack () are removed during
9332 inlining. */
9333 if (CALL_EXPR_VA_ARG_PACK (exp))
9334 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
9336 tree fndecl = get_callee_fndecl (exp), attr;
9338 if (fndecl
9339 && (attr = lookup_attribute ("error",
9340 DECL_ATTRIBUTES (fndecl))) != NULL)
9341 error ("%Kcall to %qs declared with attribute error: %s",
9342 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9343 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9344 if (fndecl
9345 && (attr = lookup_attribute ("warning",
9346 DECL_ATTRIBUTES (fndecl))) != NULL)
9347 warning_at (tree_nonartificial_location (exp),
9348 0, "%Kcall to %qs declared with attribute warning: %s",
9349 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9350 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9352 /* Check for a built-in function. */
9353 if (fndecl && DECL_BUILT_IN (fndecl))
9355 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
9356 return expand_builtin (exp, target, subtarget, tmode, ignore);
9359 return expand_call (exp, target, ignore);
9361 case VIEW_CONVERT_EXPR:
9362 op0 = NULL_RTX;
9364 /* If we are converting to BLKmode, try to avoid an intermediate
9365 temporary by fetching an inner memory reference. */
9366 if (mode == BLKmode
9367 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9368 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
9369 && handled_component_p (treeop0))
9371 enum machine_mode mode1;
9372 HOST_WIDE_INT bitsize, bitpos;
9373 tree offset;
9374 int unsignedp;
9375 int volatilep = 0;
9376 tree tem
9377 = get_inner_reference (treeop0, &bitsize, &bitpos,
9378 &offset, &mode1, &unsignedp, &volatilep,
9379 true);
9380 rtx orig_op0;
9382 /* ??? We should work harder and deal with non-zero offsets. */
9383 if (!offset
9384 && (bitpos % BITS_PER_UNIT) == 0
9385 && bitsize >= 0
9386 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
9388 /* See the normal_inner_ref case for the rationale. */
9389 orig_op0
9390 = expand_expr (tem,
9391 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9392 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9393 != INTEGER_CST)
9394 && modifier != EXPAND_STACK_PARM
9395 ? target : NULL_RTX),
9396 VOIDmode,
9397 (modifier == EXPAND_INITIALIZER
9398 || modifier == EXPAND_CONST_ADDRESS
9399 || modifier == EXPAND_STACK_PARM)
9400 ? modifier : EXPAND_NORMAL);
9402 if (MEM_P (orig_op0))
9404 op0 = orig_op0;
9406 /* Get a reference to just this component. */
9407 if (modifier == EXPAND_CONST_ADDRESS
9408 || modifier == EXPAND_SUM
9409 || modifier == EXPAND_INITIALIZER)
9410 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
9411 else
9412 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
9414 if (op0 == orig_op0)
9415 op0 = copy_rtx (op0);
9417 set_mem_attributes (op0, treeop0, 0);
9418 if (REG_P (XEXP (op0, 0)))
9419 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9421 MEM_VOLATILE_P (op0) |= volatilep;
9426 if (!op0)
9427 op0 = expand_expr (treeop0,
9428 NULL_RTX, VOIDmode, modifier);
9430 /* If the input and output modes are both the same, we are done. */
9431 if (mode == GET_MODE (op0))
9433 /* If neither mode is BLKmode, and both modes are the same size
9434 then we can use gen_lowpart. */
9435 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
9436 && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0))
9437 && !COMPLEX_MODE_P (GET_MODE (op0)))
9439 if (GET_CODE (op0) == SUBREG)
9440 op0 = force_reg (GET_MODE (op0), op0);
9441 op0 = gen_lowpart (mode, op0);
9443 /* If both types are integral, convert from one mode to the other. */
9444 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
9445 op0 = convert_modes (mode, GET_MODE (op0), op0,
9446 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9447 /* As a last resort, spill op0 to memory, and reload it in a
9448 different mode. */
9449 else if (!MEM_P (op0))
9451 /* If the operand is not a MEM, force it into memory. Since we
9452 are going to be changing the mode of the MEM, don't call
9453 force_const_mem for constants because we don't allow pool
9454 constants to change mode. */
9455 tree inner_type = TREE_TYPE (treeop0);
9457 gcc_assert (!TREE_ADDRESSABLE (exp));
9459 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
9460 target
9461 = assign_stack_temp_for_type
9462 (TYPE_MODE (inner_type),
9463 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
9465 emit_move_insn (target, op0);
9466 op0 = target;
9469 /* At this point, OP0 is in the correct mode. If the output type is
9470 such that the operand is known to be aligned, indicate that it is.
9471 Otherwise, we need only be concerned about alignment for non-BLKmode
9472 results. */
9473 if (MEM_P (op0))
9475 op0 = copy_rtx (op0);
9477 if (TYPE_ALIGN_OK (type))
9478 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
9479 else if (STRICT_ALIGNMENT
9480 && mode != BLKmode
9481 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
9483 tree inner_type = TREE_TYPE (treeop0);
9484 HOST_WIDE_INT temp_size
9485 = MAX (int_size_in_bytes (inner_type),
9486 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
9487 rtx new_rtx
9488 = assign_stack_temp_for_type (mode, temp_size, 0, type);
9489 rtx new_with_op0_mode
9490 = adjust_address (new_rtx, GET_MODE (op0), 0);
9492 gcc_assert (!TREE_ADDRESSABLE (exp));
9494 if (GET_MODE (op0) == BLKmode)
9495 emit_block_move (new_with_op0_mode, op0,
9496 GEN_INT (GET_MODE_SIZE (mode)),
9497 (modifier == EXPAND_STACK_PARM
9498 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9499 else
9500 emit_move_insn (new_with_op0_mode, op0);
9502 op0 = new_rtx;
9505 op0 = adjust_address (op0, mode, 0);
9508 return op0;
9510 /* Use a compare and a jump for BLKmode comparisons, or for function
9511 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
9513 /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
9514 are occasionally created by folding during expansion. */
9515 case TRUTH_ANDIF_EXPR:
9516 case TRUTH_ORIF_EXPR:
9517 if (! ignore
9518 && (target == 0
9519 || modifier == EXPAND_STACK_PARM
9520 || ! safe_from_p (target, treeop0, 1)
9521 || ! safe_from_p (target, treeop1, 1)
9522 /* Make sure we don't have a hard reg (such as function's return
9523 value) live across basic blocks, if not optimizing. */
9524 || (!optimize && REG_P (target)
9525 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9526 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
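/* (Shape of the expansion that follows when TARGET is nonzero, written
   here for illustration with "op" standing for the && or || selected
   by CODE:
       target = 0;  if (!(treeop0 op treeop1)) goto op1;  target = 1;
     op1:
   so the short-circuited truth value materializes as 0 or 1.)  */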
9528 if (target)
9529 emit_move_insn (target, const0_rtx);
9531 op1 = gen_label_rtx ();
9532 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
9534 if (target)
9535 emit_move_insn (target, const1_rtx);
9537 emit_label (op1);
9538 return ignore ? const0_rtx : target;
9540 case STATEMENT_LIST:
9542 tree_stmt_iterator iter;
9544 gcc_assert (ignore);
9546 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9547 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9549 return const0_rtx;
9551 case COND_EXPR:
9552 /* A COND_EXPR with its type being VOID_TYPE represents a
9553 conditional jump and is handled in
9554 expand_gimple_cond_expr. */
9555 gcc_assert (!VOID_TYPE_P (type));
9557 /* Note that COND_EXPRs whose type is a structure or union
9558 are required to be constructed to contain assignments of
9559 a temporary variable, so that we can evaluate them here
9560 for side effect only. If type is void, we must do likewise. */
9562 gcc_assert (!TREE_ADDRESSABLE (type)
9563 && !ignore
9564 && TREE_TYPE (treeop1) != void_type_node
9565 && TREE_TYPE (treeop2) != void_type_node);
9567 /* If we are not to produce a result, we have no target. Otherwise,
9568 if a target was specified use it; it will not be used as an
9569 intermediate target unless it is safe. If no target, use a
9570 temporary. */
9572 if (modifier != EXPAND_STACK_PARM
9573 && original_target
9574 && safe_from_p (original_target, treeop0, 1)
9575 && GET_MODE (original_target) == mode
9576 #ifdef HAVE_conditional_move
9577 && (! can_conditionally_move_p (mode)
9578 || REG_P (original_target))
9579 #endif
9580 && !MEM_P (original_target))
9581 temp = original_target;
9582 else
9583 temp = assign_temp (type, 0, 0, 1);
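/* (Sketch of the code emitted below, for illustration:
       if (!treeop0) goto op0;
       temp = treeop1;  goto op1;
     op0:
       temp = treeop2;
     op1:
   so TEMP ends up holding whichever arm of the COND_EXPR was selected.)  */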
9585 do_pending_stack_adjust ();
9586 NO_DEFER_POP;
9587 op0 = gen_label_rtx ();
9588 op1 = gen_label_rtx ();
9589 jumpifnot (treeop0, op0, -1);
9590 store_expr (treeop1, temp,
9591 modifier == EXPAND_STACK_PARM,
9592 false);
9594 emit_jump_insn (gen_jump (op1));
9595 emit_barrier ();
9596 emit_label (op0);
9597 store_expr (treeop2, temp,
9598 modifier == EXPAND_STACK_PARM,
9599 false);
9601 emit_label (op1);
9602 OK_DEFER_POP;
9603 return temp;
9605 case VEC_COND_EXPR:
9606 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9607 return target;
9609 case MODIFY_EXPR:
9611 tree lhs = treeop0;
9612 tree rhs = treeop1;
9613 gcc_assert (ignore);
9615 /* Check for |= or &= of a bitfield of size one into another bitfield
9616 of size 1. In this case, (unless we need the result of the
9617 assignment) we can do this more efficiently with a
9618 test followed by an assignment, if necessary.
9620 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9621 things change so we do, this code should be enhanced to
9622 support it. */
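/* (Illustrative case, not from the original comment: for s.a |= s.b
   with one-bit fields, the code below tests s.b and jumps around the
   store when it is zero, so the common path is a single bit test
   rather than a read-modify-write of the destination bit-field;
   s.a &= s.b likewise skips the store when s.b is set.)  */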
9623 if (TREE_CODE (lhs) == COMPONENT_REF
9624 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9625 || TREE_CODE (rhs) == BIT_AND_EXPR)
9626 && TREE_OPERAND (rhs, 0) == lhs
9627 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9628 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9629 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9631 rtx label = gen_label_rtx ();
9632 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9633 do_jump (TREE_OPERAND (rhs, 1),
9634 value ? label : 0,
9635 value ? 0 : label, -1);
9636 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9637 MOVE_NONTEMPORAL (exp));
9638 do_pending_stack_adjust ();
9639 emit_label (label);
9640 return const0_rtx;
9643 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9644 return const0_rtx;
9647 case ADDR_EXPR:
9648 return expand_expr_addr_expr (exp, target, tmode, modifier);
9650 case REALPART_EXPR:
9651 op0 = expand_normal (treeop0);
9652 return read_complex_part (op0, false);
9654 case IMAGPART_EXPR:
9655 op0 = expand_normal (treeop0);
9656 return read_complex_part (op0, true);
9658 case RETURN_EXPR:
9659 case LABEL_EXPR:
9660 case GOTO_EXPR:
9661 case SWITCH_EXPR:
9662 case ASM_EXPR:
9663 /* Expanded in cfgexpand.c. */
9664 gcc_unreachable ();
9666 case TRY_CATCH_EXPR:
9667 case CATCH_EXPR:
9668 case EH_FILTER_EXPR:
9669 case TRY_FINALLY_EXPR:
9670 /* Lowered by tree-eh.c. */
9671 gcc_unreachable ();
9673 case WITH_CLEANUP_EXPR:
9674 case CLEANUP_POINT_EXPR:
9675 case TARGET_EXPR:
9676 case CASE_LABEL_EXPR:
9677 case VA_ARG_EXPR:
9678 case BIND_EXPR:
9679 case INIT_EXPR:
9680 case CONJ_EXPR:
9681 case COMPOUND_EXPR:
9682 case PREINCREMENT_EXPR:
9683 case PREDECREMENT_EXPR:
9684 case POSTINCREMENT_EXPR:
9685 case POSTDECREMENT_EXPR:
9686 case LOOP_EXPR:
9687 case EXIT_EXPR:
9688 /* Lowered by gimplify.c. */
9689 gcc_unreachable ();
9691 case FDESC_EXPR:
9692 /* Function descriptors are not valid except as
9693 initialization constants, and should not be expanded. */
9694 gcc_unreachable ();
9696 case WITH_SIZE_EXPR:
9697 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9698 have pulled out the size to use in whatever context it needed. */
9699 return expand_expr_real (treeop0, original_target, tmode,
9700 modifier, alt_rtl);
9702 case REALIGN_LOAD_EXPR:
9704 tree oprnd0 = treeop0;
9705 tree oprnd1 = treeop1;
9706 tree oprnd2 = treeop2;
9707 rtx op2;
9709 this_optab = optab_for_tree_code (code, type, optab_default);
9710 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9711 op2 = expand_normal (oprnd2);
9712 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9713 target, unsignedp);
9714 gcc_assert (temp);
9715 return temp;
9718 case DOT_PROD_EXPR:
9720 tree oprnd0 = treeop0;
9721 tree oprnd1 = treeop1;
9722 tree oprnd2 = treeop2;
9723 rtx op2;
9725 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9726 op2 = expand_normal (oprnd2);
9727 target = expand_widen_pattern_expr (&ops, op0, op1, op2,
9728 target, unsignedp);
9729 return target;
9732 case COMPOUND_LITERAL_EXPR:
9734 /* Initialize the anonymous variable declared in the compound
9735 literal, then return the variable. */
9736 tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
9738 /* Create RTL for this variable. */
9739 if (!DECL_RTL_SET_P (decl))
9741 if (DECL_HARD_REGISTER (decl))
9742 /* The user specified an assembler name for this variable.
9743 Set that up now. */
9744 rest_of_decl_compilation (decl, 0, 0);
9745 else
9746 expand_decl (decl);
9749 return expand_expr_real (decl, original_target, tmode,
9750 modifier, alt_rtl);
9753 default:
9754 return expand_expr_real_2 (&ops, target, tmode, modifier);
9758 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9759 signedness of TYPE), possibly returning the result in TARGET. */
9760 static rtx
9761 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9763 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9764 if (target && GET_MODE (target) != GET_MODE (exp))
9765 target = 0;
9766 /* For constant values, reduce using build_int_cst_type. */
9767 if (CONST_INT_P (exp))
9769 HOST_WIDE_INT value = INTVAL (exp);
9770 tree t = build_int_cst_type (type, value);
9771 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9773 else if (TYPE_UNSIGNED (type))
9775 rtx mask = immed_double_int_const (double_int_mask (prec),
9776 GET_MODE (exp));
9777 return expand_and (GET_MODE (exp), exp, mask, target);
9779 else
9781 tree count = build_int_cst (NULL_TREE,
9782 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9783 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9784 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
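/* (Worked example, purely illustrative: with EXP held in a 32-bit mode,
   reducing the value 5 to a signed 3-bit precision shifts left by 29
   and arithmetically right by 29, yielding -3, while the unsigned path
   simply ANDs with the mask 0x7 and yields 5.)  */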
9788 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
9789 when applied to the address of EXP, produces an address known to be
9790 aligned to more than BIGGEST_ALIGNMENT. */
9792 static int
9793 is_aligning_offset (const_tree offset, const_tree exp)
9795 /* Strip off any conversions. */
9796 while (CONVERT_EXPR_P (offset))
9797 offset = TREE_OPERAND (offset, 0);
9799 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9800 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9801 if (TREE_CODE (offset) != BIT_AND_EXPR
9802 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9803 || compare_tree_int (TREE_OPERAND (offset, 1),
9804 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9805 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9806 return 0;
9808 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9809 It must be NEGATE_EXPR. Then strip any more conversions. */
9810 offset = TREE_OPERAND (offset, 0);
9811 while (CONVERT_EXPR_P (offset))
9812 offset = TREE_OPERAND (offset, 0);
9814 if (TREE_CODE (offset) != NEGATE_EXPR)
9815 return 0;
9817 offset = TREE_OPERAND (offset, 0);
9818 while (CONVERT_EXPR_P (offset))
9819 offset = TREE_OPERAND (offset, 0);
9821 /* This must now be the address of EXP. */
9822 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
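/* (Informal restatement, added for illustration: the shape recognized
   above is roughly  offset = (-(sizetype) &EXP) & (N - 1)  with N a
   power of 2; adding such an offset to the address of EXP rounds it up
   to an N-byte boundary, which is why the caller may then record
   BIGGEST_ALIGNMENT for the resulting MEM.)  */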
9825 /* Return the tree node if an ARG corresponds to a string constant or zero
9826 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9827 in bytes within the string that ARG is accessing. The type of the
9828 offset will be `sizetype'. */
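/* (Illustrative example, not part of the original comment: an access
   such as "abc"[2] usually reaches here with ARG folded to the
   POINTER_PLUS_EXPR &"abc" + 2, in which case the STRING_CST "abc" is
   returned and *PTR_OFFSET is set to 2.)  */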
9830 tree
9831 string_constant (tree arg, tree *ptr_offset)
9833 tree array, offset, lower_bound;
9834 STRIP_NOPS (arg);
9836 if (TREE_CODE (arg) == ADDR_EXPR)
9838 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9840 *ptr_offset = size_zero_node;
9841 return TREE_OPERAND (arg, 0);
9843 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9845 array = TREE_OPERAND (arg, 0);
9846 offset = size_zero_node;
9848 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9850 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9851 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9852 if (TREE_CODE (array) != STRING_CST
9853 && TREE_CODE (array) != VAR_DECL)
9854 return 0;
9856 /* Check if the array has a nonzero lower bound. */
9857 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9858 if (!integer_zerop (lower_bound))
9860 /* If the offset and base aren't both constants, return 0. */
9861 if (TREE_CODE (lower_bound) != INTEGER_CST)
9862 return 0;
9863 if (TREE_CODE (offset) != INTEGER_CST)
9864 return 0;
9865 /* Adjust offset by the lower bound. */
9866 offset = size_diffop (fold_convert (sizetype, offset),
9867 fold_convert (sizetype, lower_bound));
9870 else
9871 return 0;
9873 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9875 tree arg0 = TREE_OPERAND (arg, 0);
9876 tree arg1 = TREE_OPERAND (arg, 1);
9878 STRIP_NOPS (arg0);
9879 STRIP_NOPS (arg1);
9881 if (TREE_CODE (arg0) == ADDR_EXPR
9882 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9883 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9885 array = TREE_OPERAND (arg0, 0);
9886 offset = arg1;
9888 else if (TREE_CODE (arg1) == ADDR_EXPR
9889 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9890 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9892 array = TREE_OPERAND (arg1, 0);
9893 offset = arg0;
9895 else
9896 return 0;
9898 else
9899 return 0;
9901 if (TREE_CODE (array) == STRING_CST)
9903 *ptr_offset = fold_convert (sizetype, offset);
9904 return array;
9906 else if (TREE_CODE (array) == VAR_DECL)
9908 int length;
9910 /* Variables initialized to string literals can be handled too. */
9911 if (DECL_INITIAL (array) == NULL_TREE
9912 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9913 return 0;
9915 /* They must be read-only, non-volatile, and bind locally. */
9916 if (! TREE_READONLY (array)
9917 || TREE_SIDE_EFFECTS (array)
9918 || ! targetm.binds_local_p (array))
9919 return 0;
9921 /* Avoid const char foo[4] = "abcde"; */
9922 if (DECL_SIZE_UNIT (array) == NULL_TREE
9923 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9924 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9925 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9926 return 0;
9928 /* If the variable is bigger than the string literal, OFFSET must be
9929 constant and lie inside the bounds of the string literal. */
9930 offset = fold_convert (sizetype, offset);
9931 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9932 && (! host_integerp (offset, 1)
9933 || compare_tree_int (offset, length) >= 0))
9934 return 0;
9936 *ptr_offset = offset;
9937 return DECL_INITIAL (array);
9940 return 0;
9943 /* Generate code to calculate OPS, an exploded expression,
9944 using a store-flag instruction and return an rtx for the result.
9945 OPS reflects a comparison.
9947 If TARGET is nonzero, store the result there if convenient.
9949 Return zero if there is no suitable set-flag instruction
9950 available on this machine.
9952 Once expand_expr has been called on the arguments of the comparison,
9953 we are committed to doing the store flag, since it is not safe to
9954 re-evaluate the expression. We emit the store-flag insn by calling
9955 emit_store_flag, but only expand the arguments if we have a reason
9956 to believe that emit_store_flag will be successful. If we think that
9957 it will, but it isn't, we have to simulate the store-flag with a
9958 set/jump/set sequence. */
9960 static rtx
9961 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
9963 enum rtx_code code;
9964 tree arg0, arg1, type;
9965 tree tem;
9966 enum machine_mode operand_mode;
9967 int unsignedp;
9968 rtx op0, op1;
9969 rtx subtarget = target;
9970 location_t loc = ops->location;
9972 arg0 = ops->op0;
9973 arg1 = ops->op1;
9975 /* Don't crash if the comparison was erroneous. */
9976 if (arg0 == error_mark_node || arg1 == error_mark_node)
9977 return const0_rtx;
9979 type = TREE_TYPE (arg0);
9980 operand_mode = TYPE_MODE (type);
9981 unsignedp = TYPE_UNSIGNED (type);
9983 /* We won't bother with BLKmode store-flag operations because it would mean
9984 passing a lot of information to emit_store_flag. */
9985 if (operand_mode == BLKmode)
9986 return 0;
9988 /* We won't bother with store-flag operations involving function pointers
9989 when function pointers must be canonicalized before comparisons. */
9990 #ifdef HAVE_canonicalize_funcptr_for_compare
9991 if (HAVE_canonicalize_funcptr_for_compare
9992 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
9993 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
9994 == FUNCTION_TYPE))
9995 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
9996 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
9997 == FUNCTION_TYPE))))
9998 return 0;
9999 #endif
10001 STRIP_NOPS (arg0);
10002 STRIP_NOPS (arg1);
10004 /* Get the rtx comparison code to use. We know that EXP is a comparison
10005 operation of some type. Some comparisons against 1 and -1 can be
10006 converted to comparisons with zero. Do so here so that the tests
10007 below will be aware that we have a comparison with zero. These
10008 tests will not catch constants in the first operand, but constants
10009 are rarely passed as the first operand. */
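/* (Examples of the conversions described above, for illustration:
   x < 1 becomes x <= 0, and for signed operands x > -1 becomes x >= 0
   while x <= -1 becomes x < 0.)  */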
10011 switch (ops->code)
10013 case EQ_EXPR:
10014 code = EQ;
10015 break;
10016 case NE_EXPR:
10017 code = NE;
10018 break;
10019 case LT_EXPR:
10020 if (integer_onep (arg1))
10021 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10022 else
10023 code = unsignedp ? LTU : LT;
10024 break;
10025 case LE_EXPR:
10026 if (! unsignedp && integer_all_onesp (arg1))
10027 arg1 = integer_zero_node, code = LT;
10028 else
10029 code = unsignedp ? LEU : LE;
10030 break;
10031 case GT_EXPR:
10032 if (! unsignedp && integer_all_onesp (arg1))
10033 arg1 = integer_zero_node, code = GE;
10034 else
10035 code = unsignedp ? GTU : GT;
10036 break;
10037 case GE_EXPR:
10038 if (integer_onep (arg1))
10039 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10040 else
10041 code = unsignedp ? GEU : GE;
10042 break;
10044 case UNORDERED_EXPR:
10045 code = UNORDERED;
10046 break;
10047 case ORDERED_EXPR:
10048 code = ORDERED;
10049 break;
10050 case UNLT_EXPR:
10051 code = UNLT;
10052 break;
10053 case UNLE_EXPR:
10054 code = UNLE;
10055 break;
10056 case UNGT_EXPR:
10057 code = UNGT;
10058 break;
10059 case UNGE_EXPR:
10060 code = UNGE;
10061 break;
10062 case UNEQ_EXPR:
10063 code = UNEQ;
10064 break;
10065 case LTGT_EXPR:
10066 code = LTGT;
10067 break;
10069 default:
10070 gcc_unreachable ();
10073 /* Put a constant second. */
10074 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10075 || TREE_CODE (arg0) == FIXED_CST)
10077 tem = arg0; arg0 = arg1; arg1 = tem;
10078 code = swap_condition (code);
10081 /* If this is an equality or inequality test of a single bit, we can
10082 do this by shifting the bit being tested to the low-order bit and
10083 masking the result with the constant 1. If the condition was EQ,
10084 we xor it with 1. This does not require an scc insn and is faster
10085 than an scc insn even if we have it.
10087 The code to make this transformation was moved into fold_single_bit_test,
10088 so we just call into the folder and expand its result. */
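/* (Source-level illustration of that transformation, using 8 as an
   example mask:  (x & 8) != 0  becomes  (x >> 3) & 1,  and
   (x & 8) == 0  becomes  ((x >> 3) & 1) ^ 1.)  */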
10090 if ((code == NE || code == EQ)
10091 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10092 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10094 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10095 return expand_expr (fold_single_bit_test (loc,
10096 code == NE ? NE_EXPR : EQ_EXPR,
10097 arg0, arg1, type),
10098 target, VOIDmode, EXPAND_NORMAL);
10101 if (! get_subtarget (target)
10102 || GET_MODE (subtarget) != operand_mode)
10103 subtarget = 0;
10105 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10107 if (target == 0)
10108 target = gen_reg_rtx (mode);
10110 /* Try a cstore if possible. */
10111 return emit_store_flag_force (target, code, op0, op1,
10112 operand_mode, unsignedp, 1);
10116 /* Stubs in case we haven't got a casesi insn. */
10117 #ifndef HAVE_casesi
10118 # define HAVE_casesi 0
10119 # define gen_casesi(a, b, c, d, e) (0)
10120 # define CODE_FOR_casesi CODE_FOR_nothing
10121 #endif
10123 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10124 0 otherwise (i.e. if there is no casesi instruction). */
10125 int
10126 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10127 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
10128 rtx fallback_label ATTRIBUTE_UNUSED)
10130 enum machine_mode index_mode = SImode;
10131 int index_bits = GET_MODE_BITSIZE (index_mode);
10132 rtx op1, op2, index;
10133 enum machine_mode op_mode;
10135 if (! HAVE_casesi)
10136 return 0;
10138 /* Convert the index to SImode. */
10139 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10141 enum machine_mode omode = TYPE_MODE (index_type);
10142 rtx rangertx = expand_normal (range);
10144 /* We must handle the endpoints in the original mode. */
10145 index_expr = build2 (MINUS_EXPR, index_type,
10146 index_expr, minval);
10147 minval = integer_zero_node;
10148 index = expand_normal (index_expr);
10149 if (default_label)
10150 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10151 omode, 1, default_label);
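/* (Note added for clarity: the comparison just emitted is performed in
   the original wide mode, so an index whose excess over RANGE lies
   only in the high part still branches to DEFAULT_LABEL before the
   truncation below could discard those bits.)  */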
10152 /* Now we can safely truncate. */
10153 index = convert_to_mode (index_mode, index, 0);
10155 else
10157 if (TYPE_MODE (index_type) != index_mode)
10159 index_type = lang_hooks.types.type_for_size (index_bits, 0);
10160 index_expr = fold_convert (index_type, index_expr);
10163 index = expand_normal (index_expr);
10166 do_pending_stack_adjust ();
10168 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10169 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10170 (index, op_mode))
10171 index = copy_to_mode_reg (op_mode, index);
10173 op1 = expand_normal (minval);
10175 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10176 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10177 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
10178 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10179 (op1, op_mode))
10180 op1 = copy_to_mode_reg (op_mode, op1);
10182 op2 = expand_normal (range);
10184 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10185 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10186 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
10187 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10188 (op2, op_mode))
10189 op2 = copy_to_mode_reg (op_mode, op2);
10191 emit_jump_insn (gen_casesi (index, op1, op2,
10192 table_label, !default_label
10193 ? fallback_label : default_label));
10194 return 1;
10197 /* Attempt to generate a tablejump instruction; same concept. */
10198 #ifndef HAVE_tablejump
10199 #define HAVE_tablejump 0
10200 #define gen_tablejump(x, y) (0)
10201 #endif
10203 /* Subroutine of the next function.
10205 INDEX is the value being switched on, with the lowest value
10206 in the table already subtracted.
10207 MODE is its expected mode (needed if INDEX is constant).
10208 RANGE is the length of the jump table.
10209 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10211 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10212 index value is out of range. */
10214 static void
10215 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10216 rtx default_label)
10218 rtx temp, vector;
10220 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10221 cfun->cfg->max_jumptable_ents = INTVAL (range);
10223 /* Do an unsigned comparison (in the proper mode) between the index
10224 expression and the value which represents the length of the range.
10225 Since we just finished subtracting the lower bound of the range
10226 from the index expression, this comparison allows us to simultaneously
10227 check that the original index expression value is both greater than
10228 or equal to the minimum value of the range and less than or equal to
10229 the maximum value of the range. */
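/* (Illustrative values: for case labels 5 .. 12 the caller has already
   subtracted 5, so RANGE is 7; an original index of 3 wraps to a huge
   unsigned value while an index of 20 becomes 15, both of which
   compare above 7, so the single GTU test below rejects either kind of
   out-of-range value.)  */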
10231 if (default_label)
10232 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10233 default_label);
10235 /* If index is in range, it must fit in Pmode.
10236 Convert to Pmode so we can index with it. */
10237 if (mode != Pmode)
10238 index = convert_to_mode (Pmode, index, 1);
10240 /* Don't let a MEM slip through, because then INDEX that comes
10241 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10242 and break_out_memory_refs will go to work on it and mess it up. */
10243 #ifdef PIC_CASE_VECTOR_ADDRESS
10244 if (flag_pic && !REG_P (index))
10245 index = copy_to_mode_reg (Pmode, index);
10246 #endif
10248 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10249 GET_MODE_SIZE, because this indicates how large insns are. The other
10250 uses should all be Pmode, because they are addresses. This code
10251 could fail if addresses and insns are not the same size. */
10252 index = gen_rtx_PLUS (Pmode,
10253 gen_rtx_MULT (Pmode, index,
10254 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10255 gen_rtx_LABEL_REF (Pmode, table_label));
10256 #ifdef PIC_CASE_VECTOR_ADDRESS
10257 if (flag_pic)
10258 index = PIC_CASE_VECTOR_ADDRESS (index);
10259 else
10260 #endif
10261 index = memory_address (CASE_VECTOR_MODE, index);
10262 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10263 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10264 convert_move (temp, vector, 0);
10266 emit_jump_insn (gen_tablejump (temp, table_label));
10268 /* If we are generating PIC code or if the table is PC-relative, the
10269 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10270 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10271 emit_barrier ();
10274 int
10275 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10276 rtx table_label, rtx default_label)
10278 rtx index;
10280 if (! HAVE_tablejump)
10281 return 0;
10283 index_expr = fold_build2 (MINUS_EXPR, index_type,
10284 fold_convert (index_type, index_expr),
10285 fold_convert (index_type, minval));
10286 index = expand_normal (index_expr);
10287 do_pending_stack_adjust ();
10289 do_tablejump (index, TYPE_MODE (index_type),
10290 convert_modes (TYPE_MODE (index_type),
10291 TYPE_MODE (TREE_TYPE (range)),
10292 expand_normal (range),
10293 TYPE_UNSIGNED (TREE_TYPE (range))),
10294 table_label, default_label);
10295 return 1;
10298 /* Nonzero if the mode is a valid vector mode for this architecture.
10299 This returns nonzero even if there is no hardware support for the
10300 vector mode, but we can emulate with narrower modes. */
10302 int
10303 vector_mode_valid_p (enum machine_mode mode)
10305 enum mode_class mclass = GET_MODE_CLASS (mode);
10306 enum machine_mode innermode;
10308 /* Doh! What's going on? */
10309 if (mclass != MODE_VECTOR_INT
10310 && mclass != MODE_VECTOR_FLOAT
10311 && mclass != MODE_VECTOR_FRACT
10312 && mclass != MODE_VECTOR_UFRACT
10313 && mclass != MODE_VECTOR_ACCUM
10314 && mclass != MODE_VECTOR_UACCUM)
10315 return 0;
10317 /* Hardware support. Woo hoo! */
10318 if (targetm.vector_mode_supported_p (mode))
10319 return 1;
10321 innermode = GET_MODE_INNER (mode);
10323 /* We should probably return 1 if requesting V4DI and we have no DI
10324 but do have V2DI, though that case is probably very unlikely. */
10326 /* If we have support for the inner mode, we can safely emulate it.
10327 We may not have V2DI, but we can emulate with a pair of DIs. */
10328 return targetm.scalar_mode_supported_p (innermode);
10331 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10332 static rtx
10333 const_vector_from_tree (tree exp)
10335 rtvec v;
10336 int units, i;
10337 tree link, elt;
10338 enum machine_mode inner, mode;
10340 mode = TYPE_MODE (TREE_TYPE (exp));
10342 if (initializer_zerop (exp))
10343 return CONST0_RTX (mode);
10345 units = GET_MODE_NUNITS (mode);
10346 inner = GET_MODE_INNER (mode);
10348 v = rtvec_alloc (units);
10350 link = TREE_VECTOR_CST_ELTS (exp);
10351 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10353 elt = TREE_VALUE (link);
10355 if (TREE_CODE (elt) == REAL_CST)
10356 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10357 inner);
10358 else if (TREE_CODE (elt) == FIXED_CST)
10359 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10360 inner);
10361 else
10362 RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
10363 inner);
10366 /* Initialize remaining elements to 0. */
10367 for (; i < units; ++i)
10368 RTVEC_ELT (v, i) = CONST0_RTX (inner);
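/* (Example, for illustration: a V4SImode VECTOR_CST written with only
   two elements reaches this point with i == 2, so its last two lanes
   are filled in with CONST0_RTX (SImode) here.)  */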
10370 return gen_rtx_CONST_VECTOR (mode, v);
10374 /* Build a decl for an EH personality function named NAME. */
10376 tree
10377 build_personality_function (const char *name)
10379 tree decl, type;
10381 type = build_function_type_list (integer_type_node, integer_type_node,
10382 long_long_unsigned_type_node,
10383 ptr_type_node, ptr_type_node, NULL_TREE);
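/* (Reading of the call above, for illustration: the type built here
   corresponds roughly to  int NAME (int, long long unsigned, void *,
   void *),  i.e. an int return and int first argument, then an
   unsigned long long and two pointer arguments.)  */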
10384 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
10385 get_identifier (name), type);
10386 DECL_ARTIFICIAL (decl) = 1;
10387 DECL_EXTERNAL (decl) = 1;
10388 TREE_PUBLIC (decl) = 1;
10390 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
10391 are the flags assigned by targetm.encode_section_info. */
10392 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
10394 return decl;
10397 /* Extracts the personality function of DECL and returns the corresponding
10398 libfunc. */
10400 rtx
10401 get_personality_function (tree decl)
10403 tree personality = DECL_FUNCTION_PERSONALITY (decl);
10404 enum eh_personality_kind pk;
10406 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
10407 if (pk == eh_personality_none)
10408 return NULL;
10410 if (!personality
10411 && pk == eh_personality_any)
10412 personality = lang_hooks.eh_personality ();
10414 if (pk == eh_personality_lang)
10415 gcc_assert (personality != NULL_TREE);
10417 return XEXP (DECL_RTL (personality), 0);
10420 #include "gt-expr.h"