Make build_check_stmt accept an SSA_NAME for its base
[official-gcc.git] / gcc / expr.c
blob: 1e41625b2edc659b434d7769e6f95da0b3f63066
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "reload.h"
42 #include "typeclass.h"
43 #include "toplev.h"
44 #include "langhooks.h"
45 #include "intl.h"
46 #include "tm_p.h"
47 #include "tree-iterator.h"
48 #include "tree-flow.h"
49 #include "target.h"
50 #include "common/common-target.h"
51 #include "timevar.h"
52 #include "df.h"
53 #include "diagnostic.h"
54 #include "ssaexpand.h"
55 #include "target-globals.h"
56 #include "params.h"
58 /* Decide whether a function's arguments should be processed
59 from first to last or from last to first.
61 They should if the stack and args grow in opposite directions, but
62 only if we have push insns. */
64 #ifdef PUSH_ROUNDING
66 #ifndef PUSH_ARGS_REVERSED
67 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
68 #define PUSH_ARGS_REVERSED /* If it's last to first. */
69 #endif
70 #endif
72 #endif
74 #ifndef STACK_PUSH_CODE
75 #ifdef STACK_GROWS_DOWNWARD
76 #define STACK_PUSH_CODE PRE_DEC
77 #else
78 #define STACK_PUSH_CODE PRE_INC
79 #endif
80 #endif
83 /* If this is nonzero, we do not bother generating VOLATILE
84 around volatile memory references, and we are willing to
85 output indirect addresses. If cse is to follow, we reject
86 indirect addresses so a useful potential cse is generated;
87 if it is used only once, instruction combination will produce
88 the same indirect address eventually. */
89 int cse_not_expected;
91 /* This structure is used by move_by_pieces to describe the move to
92 be performed. */
93 struct move_by_pieces_d
95 rtx to;
96 rtx to_addr;
97 int autinc_to;
98 int explicit_inc_to;
99 rtx from;
100 rtx from_addr;
101 int autinc_from;
102 int explicit_inc_from;
103 unsigned HOST_WIDE_INT len;
104 HOST_WIDE_INT offset;
105 int reverse;
108 /* This structure is used by store_by_pieces to describe the store to
109 be performed. */
111 struct store_by_pieces_d
113 rtx to;
114 rtx to_addr;
115 int autinc_to;
116 int explicit_inc_to;
117 unsigned HOST_WIDE_INT len;
118 HOST_WIDE_INT offset;
119 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
120 void *constfundata;
121 int reverse;
124 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
125 struct move_by_pieces_d *);
126 static bool block_move_libcall_safe_for_call_parm (void);
127 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
128 static tree emit_block_move_libcall_fn (int);
129 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
130 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
131 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
132 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
133 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
134 struct store_by_pieces_d *);
135 static tree clear_storage_libcall_fn (int);
136 static rtx compress_float_constant (rtx, rtx);
137 static rtx get_subtarget (rtx);
138 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
139 HOST_WIDE_INT, enum machine_mode,
140 tree, tree, int, alias_set_type);
141 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
142 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
143 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
144 enum machine_mode,
145 tree, tree, alias_set_type, bool);
147 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
149 static int is_aligning_offset (const_tree, const_tree);
150 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
151 enum expand_modifier);
152 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
153 static rtx do_store_flag (sepops, rtx, enum machine_mode);
154 #ifdef PUSH_ROUNDING
155 static void emit_single_push_insn (enum machine_mode, rtx, tree);
156 #endif
157 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx, int);
158 static rtx const_vector_from_tree (tree);
159 static void write_complex_part (rtx, rtx, bool);
161 /* This macro is used to determine whether move_by_pieces should be called
162 to perform a structure copy. */
163 #ifndef MOVE_BY_PIECES_P
164 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
165 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
166 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
167 #endif
169 /* This macro is used to determine whether clear_by_pieces should be
170 called to clear storage. */
171 #ifndef CLEAR_BY_PIECES_P
172 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
173 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
174 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
175 #endif
177 /* This macro is used to determine whether store_by_pieces should be
178 called to "memset" storage with byte values other than zero. */
179 #ifndef SET_BY_PIECES_P
180 #define SET_BY_PIECES_P(SIZE, ALIGN) \
181 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
182 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
183 #endif
185 /* This macro is used to determine whether store_by_pieces should be
186 called to "memcpy" storage when the source is a constant string. */
187 #ifndef STORE_BY_PIECES_P
188 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
189 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
190 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
191 #endif
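
/* Editor's note: the following sketch is illustrative only and not part of
   expr.c.  It models the decision the *_BY_PIECES_P macros above encode:
   count how many power-of-two sized pieces are needed to cover SIZE bytes
   (ignoring the alignment and mov-optab checks the real
   move_by_pieces_ninsns performs) and use the by-pieces strategy only when
   that count beats the target's ratio.  The names "max_piece" and "ratio"
   are hypothetical stand-ins for MOVE_MAX_PIECES and MOVE_RATIO.  */

static unsigned int
sketch_by_pieces_ninsns (unsigned long len, unsigned int max_piece)
{
  unsigned int n_insns = 0;
  unsigned int piece = max_piece;	/* assumed to be a power of two >= 1 */

  /* Greedily cover LEN with the largest pieces first, the same order in
     which move_by_pieces walks from the widest integer mode downward.  */
  while (piece >= 1 && len > 0)
    {
      n_insns += len / piece;
      len %= piece;
      piece /= 2;
    }
  return n_insns;
}

static int
sketch_use_by_pieces_p (unsigned long len, unsigned int max_piece,
			unsigned int ratio)
{
  /* Same shape as MOVE_BY_PIECES_P: inline piecewise copying wins only
     when its insn count stays below the ratio.  */
  return sketch_by_pieces_ninsns (len, max_piece) < ratio;
}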
193 /* This is run to set up which modes can be used
194 directly in memory and to initialize the block move optab. It is run
195 at the beginning of compilation and when the target is reinitialized. */
197 void
198 init_expr_target (void)
200 rtx insn, pat;
201 enum machine_mode mode;
202 int num_clobbers;
203 rtx mem, mem1;
204 rtx reg;
206 /* Try indexing by frame ptr and try by stack ptr.
207 It is known that on the Convex the stack ptr isn't a valid index.
208 With luck, one or the other is valid on any machine. */
209 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
210 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
212 /* A scratch register we can modify in-place below to avoid
213 useless RTL allocations. */
214 reg = gen_rtx_REG (VOIDmode, -1);
216 insn = rtx_alloc (INSN);
217 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
218 PATTERN (insn) = pat;
220 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
221 mode = (enum machine_mode) ((int) mode + 1))
223 int regno;
225 direct_load[(int) mode] = direct_store[(int) mode] = 0;
226 PUT_MODE (mem, mode);
227 PUT_MODE (mem1, mode);
228 PUT_MODE (reg, mode);
230 /* See if there is some register that can be used in this mode and
231 directly loaded or stored from memory. */
233 if (mode != VOIDmode && mode != BLKmode)
234 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
235 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
236 regno++)
238 if (! HARD_REGNO_MODE_OK (regno, mode))
239 continue;
241 SET_REGNO (reg, regno);
243 SET_SRC (pat) = mem;
244 SET_DEST (pat) = reg;
245 if (recog (pat, insn, &num_clobbers) >= 0)
246 direct_load[(int) mode] = 1;
248 SET_SRC (pat) = mem1;
249 SET_DEST (pat) = reg;
250 if (recog (pat, insn, &num_clobbers) >= 0)
251 direct_load[(int) mode] = 1;
253 SET_SRC (pat) = reg;
254 SET_DEST (pat) = mem;
255 if (recog (pat, insn, &num_clobbers) >= 0)
256 direct_store[(int) mode] = 1;
258 SET_SRC (pat) = reg;
259 SET_DEST (pat) = mem1;
260 if (recog (pat, insn, &num_clobbers) >= 0)
261 direct_store[(int) mode] = 1;
265 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
267 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
268 mode = GET_MODE_WIDER_MODE (mode))
270 enum machine_mode srcmode;
271 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
272 srcmode = GET_MODE_WIDER_MODE (srcmode))
274 enum insn_code ic;
276 ic = can_extend_p (mode, srcmode, 0);
277 if (ic == CODE_FOR_nothing)
278 continue;
280 PUT_MODE (mem, srcmode);
282 if (insn_operand_matches (ic, 1, mem))
283 float_extend_from_mem[mode][srcmode] = true;
288 /* This is run at the start of compiling a function. */
290 void
291 init_expr (void)
293 memset (&crtl->expr, 0, sizeof (crtl->expr));
296 /* Copy data from FROM to TO, where the machine modes are not the same.
297 Both modes may be integer, or both may be floating, or both may be
298 fixed-point.
299 UNSIGNEDP should be nonzero if FROM is an unsigned type.
300 This causes zero-extension instead of sign-extension. */
302 void
303 convert_move (rtx to, rtx from, int unsignedp)
305 enum machine_mode to_mode = GET_MODE (to);
306 enum machine_mode from_mode = GET_MODE (from);
307 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
308 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
309 enum insn_code code;
310 rtx libcall;
312 /* rtx code for making an equivalent value. */
313 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
314 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
317 gcc_assert (to_real == from_real);
318 gcc_assert (to_mode != BLKmode);
319 gcc_assert (from_mode != BLKmode);
321 /* If the source and destination are already the same, then there's
322 nothing to do. */
323 if (to == from)
324 return;
326 /* If FROM is a SUBREG that indicates that we have already done at least
327 the required extension, strip it. We don't handle such SUBREGs as
328 TO here. */
330 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
331 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
332 >= GET_MODE_PRECISION (to_mode))
333 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
334 from = gen_lowpart (to_mode, from), from_mode = to_mode;
336 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
338 if (to_mode == from_mode
339 || (from_mode == VOIDmode && CONSTANT_P (from)))
341 emit_move_insn (to, from);
342 return;
345 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
347 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
349 if (VECTOR_MODE_P (to_mode))
350 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
351 else
352 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
354 emit_move_insn (to, from);
355 return;
358 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
360 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
361 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
362 return;
365 if (to_real)
367 rtx value, insns;
368 convert_optab tab;
370 gcc_assert ((GET_MODE_PRECISION (from_mode)
371 != GET_MODE_PRECISION (to_mode))
372 || (DECIMAL_FLOAT_MODE_P (from_mode)
373 != DECIMAL_FLOAT_MODE_P (to_mode)));
375 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
376 /* Conversion between decimal float and binary float, same size. */
377 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
378 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
379 tab = sext_optab;
380 else
381 tab = trunc_optab;
383 /* Try converting directly if the insn is supported. */
385 code = convert_optab_handler (tab, to_mode, from_mode);
386 if (code != CODE_FOR_nothing)
388 emit_unop_insn (code, to, from,
389 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
390 return;
393 /* Otherwise use a libcall. */
394 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
396 /* Is this conversion implemented yet? */
397 gcc_assert (libcall);
399 start_sequence ();
400 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
401 1, from, from_mode);
402 insns = get_insns ();
403 end_sequence ();
404 emit_libcall_block (insns, to, value,
405 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
406 from)
407 : gen_rtx_FLOAT_EXTEND (to_mode, from));
408 return;
411 /* Handle pointer conversion. */ /* SPEE 900220. */
412 /* Targets are expected to provide conversion insns between PxImode and
413 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
414 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
416 enum machine_mode full_mode
417 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
419 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
420 != CODE_FOR_nothing);
422 if (full_mode != from_mode)
423 from = convert_to_mode (full_mode, from, unsignedp);
424 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
425 to, from, UNKNOWN);
426 return;
428 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
430 rtx new_from;
431 enum machine_mode full_mode
432 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
433 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
434 enum insn_code icode;
436 icode = convert_optab_handler (ctab, full_mode, from_mode);
437 gcc_assert (icode != CODE_FOR_nothing);
439 if (to_mode == full_mode)
441 emit_unop_insn (icode, to, from, UNKNOWN);
442 return;
445 new_from = gen_reg_rtx (full_mode);
446 emit_unop_insn (icode, new_from, from, UNKNOWN);
448 /* else proceed to integer conversions below. */
449 from_mode = full_mode;
450 from = new_from;
453 /* Make sure both are fixed-point modes or both are not. */
454 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
455 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
456 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
458 /* If we widen from_mode to to_mode and they are in the same class,
459 we won't saturate the result.
460 Otherwise, always saturate the result to play safe. */
461 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
462 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
463 expand_fixed_convert (to, from, 0, 0);
464 else
465 expand_fixed_convert (to, from, 0, 1);
466 return;
469 /* Now both modes are integers. */
471 /* Handle expanding beyond a word. */
472 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
473 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
475 rtx insns;
476 rtx lowpart;
477 rtx fill_value;
478 rtx lowfrom;
479 int i;
480 enum machine_mode lowpart_mode;
481 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
483 /* Try converting directly if the insn is supported. */
484 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
485 != CODE_FOR_nothing)
487 /* If FROM is a SUBREG, put it into a register. Do this
488 so that we always generate the same set of insns for
489 better cse'ing; if an intermediate assignment occurred,
490 we won't be doing the operation directly on the SUBREG. */
491 if (optimize > 0 && GET_CODE (from) == SUBREG)
492 from = force_reg (from_mode, from);
493 emit_unop_insn (code, to, from, equiv_code);
494 return;
496 /* Next, try converting via full word. */
497 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
498 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
499 != CODE_FOR_nothing))
501 rtx word_to = gen_reg_rtx (word_mode);
502 if (REG_P (to))
504 if (reg_overlap_mentioned_p (to, from))
505 from = force_reg (from_mode, from);
506 emit_clobber (to);
508 convert_move (word_to, from, unsignedp);
509 emit_unop_insn (code, to, word_to, equiv_code);
510 return;
513 /* No special multiword conversion insn; do it by hand. */
514 start_sequence ();
516 /* Since we will turn this into a no conflict block, we must ensure that
517 the source does not overlap the target, so force it into an isolated
518 register when it might. Likewise for any MEM input, since the
519 conversion sequence might require several references to it and we
520 must ensure we're getting the same value every time. */
522 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
523 from = force_reg (from_mode, from);
525 /* Get a copy of FROM widened to a word, if necessary. */
526 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
527 lowpart_mode = word_mode;
528 else
529 lowpart_mode = from_mode;
531 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
533 lowpart = gen_lowpart (lowpart_mode, to);
534 emit_move_insn (lowpart, lowfrom);
536 /* Compute the value to put in each remaining word. */
537 if (unsignedp)
538 fill_value = const0_rtx;
539 else
540 fill_value = emit_store_flag (gen_reg_rtx (word_mode),
541 LT, lowfrom, const0_rtx,
542 VOIDmode, 0, -1);
544 /* Fill the remaining words. */
545 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
547 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
548 rtx subword = operand_subword (to, index, 1, to_mode);
550 gcc_assert (subword);
552 if (fill_value != subword)
553 emit_move_insn (subword, fill_value);
556 insns = get_insns ();
557 end_sequence ();
559 emit_insn (insns);
560 return;
563 /* Truncating multi-word to a word or less. */
564 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
565 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
567 if (!((MEM_P (from)
568 && ! MEM_VOLATILE_P (from)
569 && direct_load[(int) to_mode]
570 && ! mode_dependent_address_p (XEXP (from, 0),
571 MEM_ADDR_SPACE (from)))
572 || REG_P (from)
573 || GET_CODE (from) == SUBREG))
574 from = force_reg (from_mode, from);
575 convert_move (to, gen_lowpart (word_mode, from), 0);
576 return;
579 /* Now follow all the conversions between integers
580 no more than a word long. */
582 /* For truncation, usually we can just refer to FROM in a narrower mode. */
583 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
584 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
586 if (!((MEM_P (from)
587 && ! MEM_VOLATILE_P (from)
588 && direct_load[(int) to_mode]
589 && ! mode_dependent_address_p (XEXP (from, 0),
590 MEM_ADDR_SPACE (from)))
591 || REG_P (from)
592 || GET_CODE (from) == SUBREG))
593 from = force_reg (from_mode, from);
594 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
595 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
596 from = copy_to_reg (from);
597 emit_move_insn (to, gen_lowpart (to_mode, from));
598 return;
601 /* Handle extension. */
602 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
604 /* Convert directly if that works. */
605 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
606 != CODE_FOR_nothing)
608 emit_unop_insn (code, to, from, equiv_code);
609 return;
611 else
613 enum machine_mode intermediate;
614 rtx tmp;
615 int shift_amount;
617 /* Search for a mode to convert via. */
618 for (intermediate = from_mode; intermediate != VOIDmode;
619 intermediate = GET_MODE_WIDER_MODE (intermediate))
620 if (((can_extend_p (to_mode, intermediate, unsignedp)
621 != CODE_FOR_nothing)
622 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
623 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
624 && (can_extend_p (intermediate, from_mode, unsignedp)
625 != CODE_FOR_nothing))
627 convert_move (to, convert_to_mode (intermediate, from,
628 unsignedp), unsignedp);
629 return;
632 /* No suitable intermediate mode.
633 Generate what we need with shifts. */
634 shift_amount = (GET_MODE_PRECISION (to_mode)
635 - GET_MODE_PRECISION (from_mode));
636 from = gen_lowpart (to_mode, force_reg (from_mode, from));
637 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
638 to, unsignedp);
639 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
640 to, unsignedp);
641 if (tmp != to)
642 emit_move_insn (to, tmp);
643 return;
647 /* Support special truncate insns for certain modes. */
648 if (convert_optab_handler (trunc_optab, to_mode,
649 from_mode) != CODE_FOR_nothing)
651 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
652 to, from, UNKNOWN);
653 return;
656 /* Handle truncation of volatile memrefs, and so on;
657 the things that couldn't be truncated directly,
658 and for which there was no special instruction.
660 ??? Code above formerly short-circuited this, for most integer
661 mode pairs, with a force_reg in from_mode followed by a recursive
662 call to this routine. Appears always to have been wrong. */
663 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
665 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
666 emit_move_insn (to, temp);
667 return;
670 /* Mode combination is not recognized. */
671 gcc_unreachable ();
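
/* Editor's note: an illustrative, self-contained sketch (not part of
   expr.c) of what the ZERO_EXTEND and SIGN_EXTEND conversions handled
   above compute, for a value held in the low 16 bits of a word.
   convert_move's last-resort path produces the signed case with an
   LSHIFT_EXPR/RSHIFT_EXPR pair at the RTL level, where the right shift's
   signedness is explicit; the C version below avoids relying on
   implementation-defined shifts of negative values.  The function names
   are hypothetical.  */

static unsigned int
sketch_zero_extend_16 (unsigned int raw)
{
  return raw & 0xffffu;			/* upper bits become zero */
}

static int
sketch_sign_extend_16 (unsigned int raw)
{
  unsigned int low = raw & 0xffffu;

  /* Flip the narrow sign bit and subtract it back: the upper bits end up
     as copies of bit 15, which is exactly SIGN_EXTEND.  */
  return (int) (low ^ 0x8000u) - 0x8000;
}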
674 /* Return an rtx for a value that would result
675 from converting X to mode MODE.
676 Both X and MODE may be floating, or both integer.
677 UNSIGNEDP is nonzero if X is an unsigned value.
678 This can be done by referring to a part of X in place
679 or by copying to a new temporary with conversion. */
682 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
684 return convert_modes (mode, VOIDmode, x, unsignedp);
687 /* Return an rtx for a value that would result
688 from converting X from mode OLDMODE to mode MODE.
689 Both modes may be floating, or both integer.
690 UNSIGNEDP is nonzero if X is an unsigned value.
692 This can be done by referring to a part of X in place
693 or by copying to a new temporary with conversion.
695 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
698 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
700 rtx temp;
702 /* If FROM is a SUBREG that indicates that we have already done at least
703 the required extension, strip it. */
705 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
706 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
707 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
708 x = gen_lowpart (mode, x);
710 if (GET_MODE (x) != VOIDmode)
711 oldmode = GET_MODE (x);
713 if (mode == oldmode)
714 return x;
716 /* There is one case that we must handle specially: If we are converting
717 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
718 we are to interpret the constant as unsigned, gen_lowpart will do
719 the wrong thing if the constant appears negative. What we want to do is
720 make the high-order word of the constant zero, not all ones. */
722 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
723 && GET_MODE_BITSIZE (mode) == HOST_BITS_PER_DOUBLE_INT
724 && CONST_INT_P (x) && INTVAL (x) < 0)
726 double_int val = double_int::from_uhwi (INTVAL (x));
728 /* We need to zero extend VAL. */
729 if (oldmode != VOIDmode)
730 val = val.zext (GET_MODE_BITSIZE (oldmode));
732 return immed_double_int_const (val, mode);
735 /* We can do this with a gen_lowpart if both desired and current modes
736 are integer, and this is either a constant integer, a register, or a
737 non-volatile MEM. Except for the constant case where MODE is no
738 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
740 if ((CONST_INT_P (x)
741 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT)
742 || (GET_MODE_CLASS (mode) == MODE_INT
743 && GET_MODE_CLASS (oldmode) == MODE_INT
744 && (CONST_DOUBLE_AS_INT_P (x)
745 || (GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
746 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
747 && direct_load[(int) mode])
748 || (REG_P (x)
749 && (! HARD_REGISTER_P (x)
750 || HARD_REGNO_MODE_OK (REGNO (x), mode))
751 && TRULY_NOOP_TRUNCATION_MODES_P (mode,
752 GET_MODE (x))))))))
754 /* ?? If we don't know OLDMODE, we have to assume here that
755 X does not need sign- or zero-extension. This may not be
756 the case, but it's the best we can do. */
757 if (CONST_INT_P (x) && oldmode != VOIDmode
758 && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (oldmode))
760 HOST_WIDE_INT val = INTVAL (x);
762 /* We must sign or zero-extend in this case. Start by
763 zero-extending, then sign extend if we need to. */
764 val &= GET_MODE_MASK (oldmode);
765 if (! unsignedp
766 && val_signbit_known_set_p (oldmode, val))
767 val |= ~GET_MODE_MASK (oldmode);
769 return gen_int_mode (val, mode);
772 return gen_lowpart (mode, x);
775 /* Converting an integer constant into a vector mode is always
776 equivalent to a subreg operation. */
777 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
779 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
780 return simplify_gen_subreg (mode, x, oldmode, 0);
783 temp = gen_reg_rtx (mode);
784 convert_move (temp, x, unsignedp);
785 return temp;
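
/* Editor's note: an illustrative, self-contained sketch (not part of
   expr.c) of the CONST_INT fix-up convert_modes performs above: mask the
   constant to OLDMODE's width, then, when a signed interpretation is
   wanted, propagate the old sign bit into the upper bits.  "bits" is a
   hypothetical stand-in for GET_MODE_PRECISION (oldmode) and is assumed
   to be in the range 1..63 here.  */

static unsigned long long
sketch_narrow_const (unsigned long long val, unsigned int bits,
		     int unsignedp)
{
  unsigned long long mask = (1ULL << bits) - 1;
  unsigned long long narrowed = val & mask;	/* zero-extend first */

  if (!unsignedp && (narrowed & (1ULL << (bits - 1))) != 0)
    narrowed |= ~mask;				/* then sign-extend */

  /* The result is the two's-complement bit pattern of the constant as it
     would appear in the wider mode.  */
  return narrowed;
}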
788 /* Return the largest alignment we can use for doing a move (or store)
789 of MAX_PIECES. ALIGN is the largest alignment we could use. */
791 static unsigned int
792 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
794 enum machine_mode tmode;
796 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
797 if (align >= GET_MODE_ALIGNMENT (tmode))
798 align = GET_MODE_ALIGNMENT (tmode);
799 else
801 enum machine_mode tmode, xmode;
803 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
804 tmode != VOIDmode;
805 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
806 if (GET_MODE_SIZE (tmode) > max_pieces
807 || SLOW_UNALIGNED_ACCESS (tmode, align))
808 break;
810 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
813 return align;
816 /* Return the widest integer mode no wider than SIZE. If no such mode
817 can be found, return VOIDmode. */
819 static enum machine_mode
820 widest_int_mode_for_size (unsigned int size)
822 enum machine_mode tmode, mode = VOIDmode;
824 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
825 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
826 if (GET_MODE_SIZE (tmode) < size)
827 mode = tmode;
829 return mode;
832 /* STORE_MAX_PIECES is the number of bytes at a time that we can
833 store efficiently. Due to internal GCC limitations, this is
834 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
835 for an immediate constant. */
837 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
839 /* Determine whether the LEN bytes can be moved by using several move
840 instructions. Return nonzero if a call to move_by_pieces should
841 succeed. */
844 can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED,
845 unsigned int align ATTRIBUTE_UNUSED)
847 return MOVE_BY_PIECES_P (len, align);
850 /* Generate several move instructions to copy LEN bytes from block FROM to
851 block TO. (These are MEM rtx's with BLKmode).
853 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
854 used to push FROM to the stack.
856 ALIGN is maximum stack alignment we can assume.
858 If ENDP is 0, return TO; if ENDP is 1, return the memory at the end,
859 a la mempcpy; and if ENDP is 2, return the memory at the end minus one
860 byte, a la stpcpy. */
863 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
864 unsigned int align, int endp)
866 struct move_by_pieces_d data;
867 enum machine_mode to_addr_mode;
868 enum machine_mode from_addr_mode = get_address_mode (from);
869 rtx to_addr, from_addr = XEXP (from, 0);
870 unsigned int max_size = MOVE_MAX_PIECES + 1;
871 enum insn_code icode;
873 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
875 data.offset = 0;
876 data.from_addr = from_addr;
877 if (to)
879 to_addr_mode = get_address_mode (to);
880 to_addr = XEXP (to, 0);
881 data.to = to;
882 data.autinc_to
883 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
884 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
885 data.reverse
886 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
888 else
890 to_addr_mode = VOIDmode;
891 to_addr = NULL_RTX;
892 data.to = NULL_RTX;
893 data.autinc_to = 1;
894 #ifdef STACK_GROWS_DOWNWARD
895 data.reverse = 1;
896 #else
897 data.reverse = 0;
898 #endif
900 data.to_addr = to_addr;
901 data.from = from;
902 data.autinc_from
903 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
904 || GET_CODE (from_addr) == POST_INC
905 || GET_CODE (from_addr) == POST_DEC);
907 data.explicit_inc_from = 0;
908 data.explicit_inc_to = 0;
909 if (data.reverse) data.offset = len;
910 data.len = len;
912 /* If copying requires more than two move insns,
913 copy addresses to registers (to make displacements shorter)
914 and use post-increment if available. */
915 if (!(data.autinc_from && data.autinc_to)
916 && move_by_pieces_ninsns (len, align, max_size) > 2)
918 /* Find the mode of the largest move...
919 MODE might not be used depending on the definitions of the
920 USE_* macros below. */
921 enum machine_mode mode ATTRIBUTE_UNUSED
922 = widest_int_mode_for_size (max_size);
924 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
926 data.from_addr = copy_to_mode_reg (from_addr_mode,
927 plus_constant (from_addr_mode,
928 from_addr, len));
929 data.autinc_from = 1;
930 data.explicit_inc_from = -1;
932 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
934 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
935 data.autinc_from = 1;
936 data.explicit_inc_from = 1;
938 if (!data.autinc_from && CONSTANT_P (from_addr))
939 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
940 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
942 data.to_addr = copy_to_mode_reg (to_addr_mode,
943 plus_constant (to_addr_mode,
944 to_addr, len));
945 data.autinc_to = 1;
946 data.explicit_inc_to = -1;
948 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
950 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
951 data.autinc_to = 1;
952 data.explicit_inc_to = 1;
954 if (!data.autinc_to && CONSTANT_P (to_addr))
955 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
958 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
960 /* First move what we can in the largest integer mode, then go to
961 successively smaller modes. */
963 while (max_size > 1 && data.len > 0)
965 enum machine_mode mode = widest_int_mode_for_size (max_size);
967 if (mode == VOIDmode)
968 break;
970 icode = optab_handler (mov_optab, mode);
971 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
972 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
974 max_size = GET_MODE_SIZE (mode);
977 /* The code above should have handled everything. */
978 gcc_assert (!data.len);
980 if (endp)
982 rtx to1;
984 gcc_assert (!data.reverse);
985 if (data.autinc_to)
987 if (endp == 2)
989 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
990 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
991 else
992 data.to_addr = copy_to_mode_reg (to_addr_mode,
993 plus_constant (to_addr_mode,
994 data.to_addr,
995 -1));
997 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
998 data.offset);
1000 else
1002 if (endp == 2)
1003 --data.offset;
1004 to1 = adjust_address (data.to, QImode, data.offset);
1006 return to1;
1008 else
1009 return data.to;
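
/* Editor's note: a trivial, self-contained sketch (not part of expr.c) of
   what the ENDP argument documented above selects, expressed on plain
   pointers: 0 yields the destination itself (memcpy style), 1 yields one
   past the last byte written (mempcpy style), 2 yields the last byte
   written (stpcpy style).  The function name is hypothetical.  */

static char *
sketch_endp_result (char *dst, unsigned long len, int endp)
{
  if (endp == 1)
    return dst + len;
  if (endp == 2)
    return dst + len - 1;
  return dst;
}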
1012 /* Return number of insns required to move L bytes by pieces.
1013 ALIGN (in bits) is maximum alignment we can assume. */
1015 unsigned HOST_WIDE_INT
1016 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1017 unsigned int max_size)
1019 unsigned HOST_WIDE_INT n_insns = 0;
1021 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1023 while (max_size > 1 && l > 0)
1025 enum machine_mode mode;
1026 enum insn_code icode;
1028 mode = widest_int_mode_for_size (max_size);
1030 if (mode == VOIDmode)
1031 break;
1033 icode = optab_handler (mov_optab, mode);
1034 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1035 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1037 max_size = GET_MODE_SIZE (mode);
1040 gcc_assert (!l);
1041 return n_insns;
1044 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1045 with move instructions for mode MODE. GENFUN is the gen_... function
1046 to make a move insn for that mode. DATA has all the other info. */
1048 static void
1049 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1050 struct move_by_pieces_d *data)
1052 unsigned int size = GET_MODE_SIZE (mode);
1053 rtx to1 = NULL_RTX, from1;
1055 while (data->len >= size)
1057 if (data->reverse)
1058 data->offset -= size;
1060 if (data->to)
1062 if (data->autinc_to)
1063 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1064 data->offset);
1065 else
1066 to1 = adjust_address (data->to, mode, data->offset);
1069 if (data->autinc_from)
1070 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1071 data->offset);
1072 else
1073 from1 = adjust_address (data->from, mode, data->offset);
1075 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1076 emit_insn (gen_add2_insn (data->to_addr,
1077 GEN_INT (-(HOST_WIDE_INT)size)));
1078 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1079 emit_insn (gen_add2_insn (data->from_addr,
1080 GEN_INT (-(HOST_WIDE_INT)size)));
1082 if (data->to)
1083 emit_insn ((*genfun) (to1, from1));
1084 else
1086 #ifdef PUSH_ROUNDING
1087 emit_single_push_insn (mode, from1, NULL);
1088 #else
1089 gcc_unreachable ();
1090 #endif
1093 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1094 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1095 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1096 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1098 if (! data->reverse)
1099 data->offset += size;
1101 data->len -= size;
1105 /* Emit code to move a block Y to a block X. This may be done with
1106 string-move instructions, with multiple scalar move instructions,
1107 or with a library call.
1109 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1110 SIZE is an rtx that says how long they are.
1111 ALIGN is the maximum alignment we can assume they have.
1112 METHOD describes what kind of copy this is, and what mechanisms may be used.
1114 Return the address of the new block, if memcpy is called and returns it,
1115 0 otherwise. */
1118 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1119 unsigned int expected_align, HOST_WIDE_INT expected_size)
1121 bool may_use_call;
1122 rtx retval = 0;
1123 unsigned int align;
1125 gcc_assert (size);
1126 if (CONST_INT_P (size)
1127 && INTVAL (size) == 0)
1128 return 0;
1130 switch (method)
1132 case BLOCK_OP_NORMAL:
1133 case BLOCK_OP_TAILCALL:
1134 may_use_call = true;
1135 break;
1137 case BLOCK_OP_CALL_PARM:
1138 may_use_call = block_move_libcall_safe_for_call_parm ();
1140 /* Make inhibit_defer_pop nonzero around the library call
1141 to force it to pop the arguments right away. */
1142 NO_DEFER_POP;
1143 break;
1145 case BLOCK_OP_NO_LIBCALL:
1146 may_use_call = false;
1147 break;
1149 default:
1150 gcc_unreachable ();
1153 gcc_assert (MEM_P (x) && MEM_P (y));
1154 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1155 gcc_assert (align >= BITS_PER_UNIT);
1157 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1158 block copy is more efficient for other large modes, e.g. DCmode. */
1159 x = adjust_address (x, BLKmode, 0);
1160 y = adjust_address (y, BLKmode, 0);
1162 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1163 can be incorrect is coming from __builtin_memcpy. */
1164 if (CONST_INT_P (size))
1166 x = shallow_copy_rtx (x);
1167 y = shallow_copy_rtx (y);
1168 set_mem_size (x, INTVAL (size));
1169 set_mem_size (y, INTVAL (size));
1172 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1173 move_by_pieces (x, y, INTVAL (size), align, 0);
1174 else if (emit_block_move_via_movmem (x, y, size, align,
1175 expected_align, expected_size))
1177 else if (may_use_call
1178 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1179 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1181 /* Since x and y are passed to a libcall, mark the corresponding
1182 tree EXPR as addressable. */
1183 tree y_expr = MEM_EXPR (y);
1184 tree x_expr = MEM_EXPR (x);
1185 if (y_expr)
1186 mark_addressable (y_expr);
1187 if (x_expr)
1188 mark_addressable (x_expr);
1189 retval = emit_block_move_via_libcall (x, y, size,
1190 method == BLOCK_OP_TAILCALL);
1193 else
1194 emit_block_move_via_loop (x, y, size, align);
1196 if (method == BLOCK_OP_CALL_PARM)
1197 OK_DEFER_POP;
1199 return retval;
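
/* Editor's note: an illustrative, self-contained sketch (not part of
   expr.c) of the strategy order emit_block_move_hints uses above.  The
   boolean parameters are hypothetical stand-ins for the real checks
   (MOVE_BY_PIECES_P on a constant size, a successful movmem expansion,
   and the may_use_call/address-space conditions).  */

enum sketch_block_move_strategy
{
  SKETCH_BY_PIECES,		/* inline piecewise moves */
  SKETCH_MOVMEM_PATTERN,	/* target's movmem insn */
  SKETCH_LIBCALL,		/* call memcpy */
  SKETCH_BYTE_LOOP		/* explicit QImode loop */
};

static enum sketch_block_move_strategy
sketch_pick_block_move (int constant_size_and_cheap, int movmem_ok,
			int may_use_call)
{
  if (constant_size_and_cheap)
    return SKETCH_BY_PIECES;
  if (movmem_ok)
    return SKETCH_MOVMEM_PATTERN;
  if (may_use_call)
    return SKETCH_LIBCALL;
  return SKETCH_BYTE_LOOP;
}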
1203 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1205 return emit_block_move_hints (x, y, size, method, 0, -1);
1208 /* A subroutine of emit_block_move. Returns true if calling the
1209 block move libcall will not clobber any parameters which may have
1210 already been placed on the stack. */
1212 static bool
1213 block_move_libcall_safe_for_call_parm (void)
1215 #if defined (REG_PARM_STACK_SPACE)
1216 tree fn;
1217 #endif
1219 /* If arguments are pushed on the stack, then they're safe. */
1220 if (PUSH_ARGS)
1221 return true;
1223 /* If registers go on the stack anyway, any argument is sure to clobber
1224 an outgoing argument. */
1225 #if defined (REG_PARM_STACK_SPACE)
1226 fn = emit_block_move_libcall_fn (false);
1227 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1228 depend on its argument. */
1229 (void) fn;
1230 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1231 && REG_PARM_STACK_SPACE (fn) != 0)
1232 return false;
1233 #endif
1235 /* If any argument goes in memory, then it might clobber an outgoing
1236 argument. */
1238 CUMULATIVE_ARGS args_so_far_v;
1239 cumulative_args_t args_so_far;
1240 tree fn, arg;
1242 fn = emit_block_move_libcall_fn (false);
1243 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1244 args_so_far = pack_cumulative_args (&args_so_far_v);
1246 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1247 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1249 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1250 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1251 NULL_TREE, true);
1252 if (!tmp || !REG_P (tmp))
1253 return false;
1254 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1255 return false;
1256 targetm.calls.function_arg_advance (args_so_far, mode,
1257 NULL_TREE, true);
1260 return true;
1263 /* A subroutine of emit_block_move. Expand a movmem pattern;
1264 return true if successful. */
1266 static bool
1267 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1268 unsigned int expected_align, HOST_WIDE_INT expected_size)
1270 int save_volatile_ok = volatile_ok;
1271 enum machine_mode mode;
1273 if (expected_align < align)
1274 expected_align = align;
1276 /* Since this is a move insn, we don't care about volatility. */
1277 volatile_ok = 1;
1279 /* Try the most limited insn first, because there's no point
1280 including more than one in the machine description unless
1281 the more limited one has some advantage. */
1283 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1284 mode = GET_MODE_WIDER_MODE (mode))
1286 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1288 if (code != CODE_FOR_nothing
1289 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1290 here because if SIZE is less than the mode mask, as it is
1291 returned by the macro, it will definitely be less than the
1292 actual mode mask. */
1293 && ((CONST_INT_P (size)
1294 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1295 <= (GET_MODE_MASK (mode) >> 1)))
1296 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
1298 struct expand_operand ops[6];
1299 unsigned int nops;
1301 /* ??? When called via emit_block_move_for_call, it'd be
1302 nice if there were some way to inform the backend, so
1303 that it doesn't fail the expansion because it thinks
1304 emitting the libcall would be more efficient. */
1305 nops = insn_data[(int) code].n_generator_args;
1306 gcc_assert (nops == 4 || nops == 6);
1308 create_fixed_operand (&ops[0], x);
1309 create_fixed_operand (&ops[1], y);
1310 /* The check above guarantees that this size conversion is valid. */
1311 create_convert_operand_to (&ops[2], size, mode, true);
1312 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1313 if (nops == 6)
1315 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1316 create_integer_operand (&ops[5], expected_size);
1318 if (maybe_expand_insn (code, nops, ops))
1320 volatile_ok = save_volatile_ok;
1321 return true;
1326 volatile_ok = save_volatile_ok;
1327 return false;
1330 /* A subroutine of emit_block_move. Expand a call to memcpy.
1331 Return the return value from memcpy, 0 otherwise. */
1334 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1336 rtx dst_addr, src_addr;
1337 tree call_expr, fn, src_tree, dst_tree, size_tree;
1338 enum machine_mode size_mode;
1339 rtx retval;
1341 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1342 pseudos. We can then place those new pseudos into a VAR_DECL and
1343 use them later. */
1345 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1346 src_addr = copy_addr_to_reg (XEXP (src, 0));
1348 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1349 src_addr = convert_memory_address (ptr_mode, src_addr);
1351 dst_tree = make_tree (ptr_type_node, dst_addr);
1352 src_tree = make_tree (ptr_type_node, src_addr);
1354 size_mode = TYPE_MODE (sizetype);
1356 size = convert_to_mode (size_mode, size, 1);
1357 size = copy_to_mode_reg (size_mode, size);
1359 /* It is incorrect to use the libcall calling conventions to call
1360 memcpy in this context. This could be a user call to memcpy and
1361 the user may wish to examine the return value from memcpy. For
1362 targets where libcalls and normal calls have different conventions
1363 for returning pointers, we could end up generating incorrect code. */
1365 size_tree = make_tree (sizetype, size);
1367 fn = emit_block_move_libcall_fn (true);
1368 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1369 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1371 retval = expand_normal (call_expr);
1373 return retval;
1376 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1377 for the function we use for block copies. */
1379 static GTY(()) tree block_move_fn;
1381 void
1382 init_block_move_fn (const char *asmspec)
1384 if (!block_move_fn)
1386 tree args, fn, attrs, attr_args;
1388 fn = get_identifier ("memcpy");
1389 args = build_function_type_list (ptr_type_node, ptr_type_node,
1390 const_ptr_type_node, sizetype,
1391 NULL_TREE);
1393 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1394 DECL_EXTERNAL (fn) = 1;
1395 TREE_PUBLIC (fn) = 1;
1396 DECL_ARTIFICIAL (fn) = 1;
1397 TREE_NOTHROW (fn) = 1;
1398 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1399 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1401 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1402 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1404 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1406 block_move_fn = fn;
1409 if (asmspec)
1410 set_user_assembler_name (block_move_fn, asmspec);
1413 static tree
1414 emit_block_move_libcall_fn (int for_call)
1416 static bool emitted_extern;
1418 if (!block_move_fn)
1419 init_block_move_fn (NULL);
1421 if (for_call && !emitted_extern)
1423 emitted_extern = true;
1424 make_decl_rtl (block_move_fn);
1427 return block_move_fn;
1430 /* A subroutine of emit_block_move. Copy the data via an explicit
1431 loop. This is used only when libcalls are forbidden. */
1432 /* ??? It'd be nice to copy in hunks larger than QImode. */
1434 static void
1435 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1436 unsigned int align ATTRIBUTE_UNUSED)
1438 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1439 enum machine_mode x_addr_mode = get_address_mode (x);
1440 enum machine_mode y_addr_mode = get_address_mode (y);
1441 enum machine_mode iter_mode;
1443 iter_mode = GET_MODE (size);
1444 if (iter_mode == VOIDmode)
1445 iter_mode = word_mode;
1447 top_label = gen_label_rtx ();
1448 cmp_label = gen_label_rtx ();
1449 iter = gen_reg_rtx (iter_mode);
1451 emit_move_insn (iter, const0_rtx);
1453 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1454 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1455 do_pending_stack_adjust ();
1457 emit_jump (cmp_label);
1458 emit_label (top_label);
1460 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1461 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1463 if (x_addr_mode != y_addr_mode)
1464 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1465 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1467 x = change_address (x, QImode, x_addr);
1468 y = change_address (y, QImode, y_addr);
1470 emit_move_insn (x, y);
1472 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1473 true, OPTAB_LIB_WIDEN);
1474 if (tmp != iter)
1475 emit_move_insn (iter, tmp);
1477 emit_label (cmp_label);
1479 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1480 true, top_label, REG_BR_PROB_BASE * 90 / 100);
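
/* Editor's note: an illustrative, self-contained sketch (not part of
   expr.c) of the control flow emit_block_move_via_loop emits above,
   written as equivalent C.  The jump to the comparison comes first, so a
   SIZE of zero copies nothing.  The function name is hypothetical.  */

static void
sketch_block_move_loop (unsigned char *x, const unsigned char *y,
			unsigned long size)
{
  unsigned long iter = 0;

  goto cmp;
 top:
  x[iter] = y[iter];		/* one QImode move per iteration */
  iter += 1;
 cmp:
  if (iter < size)
    goto top;
}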
1483 /* Copy all or part of a value X into registers starting at REGNO.
1484 The number of registers to be filled is NREGS. */
1486 void
1487 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1489 int i;
1490 #ifdef HAVE_load_multiple
1491 rtx pat;
1492 rtx last;
1493 #endif
1495 if (nregs == 0)
1496 return;
1498 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1499 x = validize_mem (force_const_mem (mode, x));
1501 /* See if the machine can do this with a load multiple insn. */
1502 #ifdef HAVE_load_multiple
1503 if (HAVE_load_multiple)
1505 last = get_last_insn ();
1506 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1507 GEN_INT (nregs));
1508 if (pat)
1510 emit_insn (pat);
1511 return;
1513 else
1514 delete_insns_since (last);
1516 #endif
1518 for (i = 0; i < nregs; i++)
1519 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1520 operand_subword_force (x, i, mode));
1523 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1524 The number of registers to be filled is NREGS. */
1526 void
1527 move_block_from_reg (int regno, rtx x, int nregs)
1529 int i;
1531 if (nregs == 0)
1532 return;
1534 /* See if the machine can do this with a store multiple insn. */
1535 #ifdef HAVE_store_multiple
1536 if (HAVE_store_multiple)
1538 rtx last = get_last_insn ();
1539 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1540 GEN_INT (nregs));
1541 if (pat)
1543 emit_insn (pat);
1544 return;
1546 else
1547 delete_insns_since (last);
1549 #endif
1551 for (i = 0; i < nregs; i++)
1553 rtx tem = operand_subword (x, i, 1, BLKmode);
1555 gcc_assert (tem);
1557 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1561 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1562 ORIG, where ORIG is a non-consecutive group of registers represented by
1563 a PARALLEL. The clone is identical to the original except in that the
1564 original set of registers is replaced by a new set of pseudo registers.
1565 The new set has the same modes as the original set. */
1568 gen_group_rtx (rtx orig)
1570 int i, length;
1571 rtx *tmps;
1573 gcc_assert (GET_CODE (orig) == PARALLEL);
1575 length = XVECLEN (orig, 0);
1576 tmps = XALLOCAVEC (rtx, length);
1578 /* Skip a NULL entry in first slot. */
1579 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1581 if (i)
1582 tmps[0] = 0;
1584 for (; i < length; i++)
1586 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1587 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1589 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1592 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1595 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1596 except that values are placed in TMPS[i], and must later be moved
1597 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1599 static void
1600 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1602 rtx src;
1603 int start, i;
1604 enum machine_mode m = GET_MODE (orig_src);
1606 gcc_assert (GET_CODE (dst) == PARALLEL);
1608 if (m != VOIDmode
1609 && !SCALAR_INT_MODE_P (m)
1610 && !MEM_P (orig_src)
1611 && GET_CODE (orig_src) != CONCAT)
1613 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1614 if (imode == BLKmode)
1615 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1616 else
1617 src = gen_reg_rtx (imode);
1618 if (imode != BLKmode)
1619 src = gen_lowpart (GET_MODE (orig_src), src);
1620 emit_move_insn (src, orig_src);
1621 /* ...and back again. */
1622 if (imode != BLKmode)
1623 src = gen_lowpart (imode, src);
1624 emit_group_load_1 (tmps, dst, src, type, ssize);
1625 return;
1628 /* Check for a NULL entry, used to indicate that the parameter goes
1629 both on the stack and in registers. */
1630 if (XEXP (XVECEXP (dst, 0, 0), 0))
1631 start = 0;
1632 else
1633 start = 1;
1635 /* Process the pieces. */
1636 for (i = start; i < XVECLEN (dst, 0); i++)
1638 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1639 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1640 unsigned int bytelen = GET_MODE_SIZE (mode);
1641 int shift = 0;
1643 /* Handle trailing fragments that run over the size of the struct. */
1644 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1646 /* Arrange to shift the fragment to where it belongs.
1647 extract_bit_field loads to the lsb of the reg. */
1648 if (
1649 #ifdef BLOCK_REG_PADDING
1650 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1651 == (BYTES_BIG_ENDIAN ? upward : downward)
1652 #else
1653 BYTES_BIG_ENDIAN
1654 #endif
1656 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1657 bytelen = ssize - bytepos;
1658 gcc_assert (bytelen > 0);
1661 /* If we won't be loading directly from memory, protect the real source
1662 from strange tricks we might play; but make sure that the source can
1663 be loaded directly into the destination. */
1664 src = orig_src;
1665 if (!MEM_P (orig_src)
1666 && (!CONSTANT_P (orig_src)
1667 || (GET_MODE (orig_src) != mode
1668 && GET_MODE (orig_src) != VOIDmode)))
1670 if (GET_MODE (orig_src) == VOIDmode)
1671 src = gen_reg_rtx (mode);
1672 else
1673 src = gen_reg_rtx (GET_MODE (orig_src));
1675 emit_move_insn (src, orig_src);
1678 /* Optimize the access just a bit. */
1679 if (MEM_P (src)
1680 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1681 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1682 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1683 && bytelen == GET_MODE_SIZE (mode))
1685 tmps[i] = gen_reg_rtx (mode);
1686 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1688 else if (COMPLEX_MODE_P (mode)
1689 && GET_MODE (src) == mode
1690 && bytelen == GET_MODE_SIZE (mode))
1691 /* Let emit_move_complex do the bulk of the work. */
1692 tmps[i] = src;
1693 else if (GET_CODE (src) == CONCAT)
1695 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1696 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1698 if ((bytepos == 0 && bytelen == slen0)
1699 || (bytepos != 0 && bytepos + bytelen <= slen))
1701 /* The following assumes that the concatenated objects all
1702 have the same size. In this case, a simple calculation
1703 can be used to determine the object and the bit field
1704 to be extracted. */
1705 tmps[i] = XEXP (src, bytepos / slen0);
1706 if (! CONSTANT_P (tmps[i])
1707 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1708 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1709 (bytepos % slen0) * BITS_PER_UNIT,
1710 1, false, NULL_RTX, mode, mode);
1712 else
1714 rtx mem;
1716 gcc_assert (!bytepos);
1717 mem = assign_stack_temp (GET_MODE (src), slen);
1718 emit_move_insn (mem, src);
1719 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1720 0, 1, false, NULL_RTX, mode, mode);
1723 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1724 SIMD register, which is currently broken. Until we get GCC
1725 to emit proper RTL for these cases, let's dump to memory. */
1726 else if (VECTOR_MODE_P (GET_MODE (dst))
1727 && REG_P (src))
1729 int slen = GET_MODE_SIZE (GET_MODE (src));
1730 rtx mem;
1732 mem = assign_stack_temp (GET_MODE (src), slen);
1733 emit_move_insn (mem, src);
1734 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1736 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1737 && XVECLEN (dst, 0) > 1)
1738 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1739 else if (CONSTANT_P (src))
1741 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1743 if (len == ssize)
1744 tmps[i] = src;
1745 else
1747 rtx first, second;
1749 gcc_assert (2 * len == ssize);
1750 split_double (src, &first, &second);
1751 if (i)
1752 tmps[i] = second;
1753 else
1754 tmps[i] = first;
1757 else if (REG_P (src) && GET_MODE (src) == mode)
1758 tmps[i] = src;
1759 else
1760 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1761 bytepos * BITS_PER_UNIT, 1, false, NULL_RTX,
1762 mode, mode);
1764 if (shift)
1765 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1766 shift, tmps[i], 0);
1770 /* Emit code to move a block SRC of type TYPE to a block DST,
1771 where DST is non-consecutive registers represented by a PARALLEL.
1772 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1773 if not known. */
1775 void
1776 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1778 rtx *tmps;
1779 int i;
1781 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1782 emit_group_load_1 (tmps, dst, src, type, ssize);
1784 /* Copy the extracted pieces into the proper (probable) hard regs. */
1785 for (i = 0; i < XVECLEN (dst, 0); i++)
1787 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1788 if (d == NULL)
1789 continue;
1790 emit_move_insn (d, tmps[i]);
1794 /* Similar, but load SRC into new pseudos in a format that looks like
1795 PARALLEL. This can later be fed to emit_group_move to get things
1796 in the right place. */
1799 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1801 rtvec vec;
1802 int i;
1804 vec = rtvec_alloc (XVECLEN (parallel, 0));
1805 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1807 /* Convert the vector to look just like the original PARALLEL, except
1808 with the computed values. */
1809 for (i = 0; i < XVECLEN (parallel, 0); i++)
1811 rtx e = XVECEXP (parallel, 0, i);
1812 rtx d = XEXP (e, 0);
1814 if (d)
1816 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1817 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1819 RTVEC_ELT (vec, i) = e;
1822 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1825 /* Emit code to move a block SRC to block DST, where SRC and DST are
1826 non-consecutive groups of registers, each represented by a PARALLEL. */
1828 void
1829 emit_group_move (rtx dst, rtx src)
1831 int i;
1833 gcc_assert (GET_CODE (src) == PARALLEL
1834 && GET_CODE (dst) == PARALLEL
1835 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1837 /* Skip first entry if NULL. */
1838 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1839 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1840 XEXP (XVECEXP (src, 0, i), 0));
1843 /* Move a group of registers represented by a PARALLEL into pseudos. */
1846 emit_group_move_into_temps (rtx src)
1848 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1849 int i;
1851 for (i = 0; i < XVECLEN (src, 0); i++)
1853 rtx e = XVECEXP (src, 0, i);
1854 rtx d = XEXP (e, 0);
1856 if (d)
1857 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1858 RTVEC_ELT (vec, i) = e;
1861 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1864 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1865 where SRC is non-consecutive registers represented by a PARALLEL.
1866 SSIZE represents the total size of block ORIG_DST, or -1 if not
1867 known. */
1869 void
1870 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1872 rtx *tmps, dst;
1873 int start, finish, i;
1874 enum machine_mode m = GET_MODE (orig_dst);
1876 gcc_assert (GET_CODE (src) == PARALLEL);
1878 if (!SCALAR_INT_MODE_P (m)
1879 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1881 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1882 if (imode == BLKmode)
1883 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1884 else
1885 dst = gen_reg_rtx (imode);
1886 emit_group_store (dst, src, type, ssize);
1887 if (imode != BLKmode)
1888 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1889 emit_move_insn (orig_dst, dst);
1890 return;
1893 /* Check for a NULL entry, used to indicate that the parameter goes
1894 both on the stack and in registers. */
1895 if (XEXP (XVECEXP (src, 0, 0), 0))
1896 start = 0;
1897 else
1898 start = 1;
1899 finish = XVECLEN (src, 0);
1901 tmps = XALLOCAVEC (rtx, finish);
1903 /* Copy the (probable) hard regs into pseudos. */
1904 for (i = start; i < finish; i++)
1906 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1907 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1909 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1910 emit_move_insn (tmps[i], reg);
1912 else
1913 tmps[i] = reg;
1916 /* If we won't be storing directly into memory, protect the real destination
1917 from strange tricks we might play. */
1918 dst = orig_dst;
1919 if (GET_CODE (dst) == PARALLEL)
1921 rtx temp;
1923 /* We can get a PARALLEL dst if there is a conditional expression in
1924 a return statement. In that case, the dst and src are the same,
1925 so no action is necessary. */
1926 if (rtx_equal_p (dst, src))
1927 return;
1929 /* It is unclear if we can ever reach here, but we may as well handle
1930 it. Allocate a temporary, and split this into a store/load to/from
1931 the temporary. */
1933 temp = assign_stack_temp (GET_MODE (dst), ssize);
1934 emit_group_store (temp, src, type, ssize);
1935 emit_group_load (dst, temp, type, ssize);
1936 return;
1938 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1940 enum machine_mode outer = GET_MODE (dst);
1941 enum machine_mode inner;
1942 HOST_WIDE_INT bytepos;
1943 bool done = false;
1944 rtx temp;
1946 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1947 dst = gen_reg_rtx (outer);
1949 /* Make life a bit easier for combine. */
1950 /* If the first element of the vector is the low part
1951 of the destination mode, use a paradoxical subreg to
1952 initialize the destination. */
1953 if (start < finish)
1955 inner = GET_MODE (tmps[start]);
1956 bytepos = subreg_lowpart_offset (inner, outer);
1957 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1959 temp = simplify_gen_subreg (outer, tmps[start],
1960 inner, 0);
1961 if (temp)
1963 emit_move_insn (dst, temp);
1964 done = true;
1965 start++;
1970 /* If the first element wasn't the low part, try the last. */
1971 if (!done
1972 && start < finish - 1)
1974 inner = GET_MODE (tmps[finish - 1]);
1975 bytepos = subreg_lowpart_offset (inner, outer);
1976 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1978 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1979 inner, 0);
1980 if (temp)
1982 emit_move_insn (dst, temp);
1983 done = true;
1984 finish--;
1989 /* Otherwise, simply initialize the result to zero. */
1990 if (!done)
1991 emit_move_insn (dst, CONST0_RTX (outer));
1994 /* Process the pieces. */
1995 for (i = start; i < finish; i++)
1997 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1998 enum machine_mode mode = GET_MODE (tmps[i]);
1999 unsigned int bytelen = GET_MODE_SIZE (mode);
2000 unsigned int adj_bytelen = bytelen;
2001 rtx dest = dst;
2003 /* Handle trailing fragments that run over the size of the struct. */
2004 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2005 adj_bytelen = ssize - bytepos;
2007 if (GET_CODE (dst) == CONCAT)
2009 if (bytepos + adj_bytelen
2010 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2011 dest = XEXP (dst, 0);
2012 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2014 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2015 dest = XEXP (dst, 1);
2017 else
2019 enum machine_mode dest_mode = GET_MODE (dest);
2020 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2022 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2024 if (GET_MODE_ALIGNMENT (dest_mode)
2025 >= GET_MODE_ALIGNMENT (tmp_mode))
2027 dest = assign_stack_temp (dest_mode,
2028 GET_MODE_SIZE (dest_mode));
2029 emit_move_insn (adjust_address (dest,
2030 tmp_mode,
2031 bytepos),
2032 tmps[i]);
2033 dst = dest;
2035 else
2037 dest = assign_stack_temp (tmp_mode,
2038 GET_MODE_SIZE (tmp_mode));
2039 emit_move_insn (dest, tmps[i]);
2040 dst = adjust_address (dest, dest_mode, bytepos);
2042 break;
2046 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2048 /* store_bit_field always takes its value from the lsb.
2049 Move the fragment to the lsb if it's not already there. */
2050 if (
2051 #ifdef BLOCK_REG_PADDING
2052 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2053 == (BYTES_BIG_ENDIAN ? upward : downward)
2054 #else
2055 BYTES_BIG_ENDIAN
2056 #endif
2059 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2060 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2061 shift, tmps[i], 0);
2063 bytelen = adj_bytelen;
2066 /* Optimize the access just a bit. */
2067 if (MEM_P (dest)
2068 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2069 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2070 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2071 && bytelen == GET_MODE_SIZE (mode))
2072 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2073 else
2074 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2075 0, 0, mode, tmps[i]);
2078 /* Copy from the pseudo into the (probable) hard reg. */
2079 if (orig_dst != dst)
2080 emit_move_insn (orig_dst, dst);
2083 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2085 This is used on targets that return BLKmode values in registers. */
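/* Editor's note: an illustrative sketch only; the register number and
   variable names are hypothetical.  A caller expanding a call whose
   BLKmode result comes back in a hard register might copy it out with:

       rtx ret_reg = gen_rtx_REG (DImode, 0);
       copy_blkmode_from_reg (target_mem, ret_reg, TREE_TYPE (exp));

   where TARGET_MEM is a MEM at least int_size_in_bytes (TREE_TYPE (exp))
   bytes wide.  */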
2087 void
2088 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2090 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2091 rtx src = NULL, dst = NULL;
2092 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2093 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2094 enum machine_mode mode = GET_MODE (srcreg);
2095 enum machine_mode tmode = GET_MODE (target);
2096 enum machine_mode copy_mode;
2098 /* BLKmode registers created in the back-end shouldn't have survived. */
2099 gcc_assert (mode != BLKmode);
2101 /* If the structure doesn't take up a whole number of words, see whether
2102 SRCREG is padded on the left or on the right. If it's on the left,
2103 set PADDING_CORRECTION to the number of bits to skip.
2105 In most ABIs, the structure will be returned at the least significant end of
2106 the register, which translates to right padding on little-endian
2107 targets and left padding on big-endian targets. The opposite
2108 holds if the structure is returned at the most significant
2109 end of the register. */
2110 if (bytes % UNITS_PER_WORD != 0
2111 && (targetm.calls.return_in_msb (type)
2112 ? !BYTES_BIG_ENDIAN
2113 : BYTES_BIG_ENDIAN))
2114 padding_correction
2115 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2117 /* We can use a single move if we have an exact mode for the size. */
2118 else if (MEM_P (target)
2119 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2120 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2121 && bytes == GET_MODE_SIZE (mode))
2123 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2124 return;
2127 /* And if we additionally have the same mode for a register. */
2128 else if (REG_P (target)
2129 && GET_MODE (target) == mode
2130 && bytes == GET_MODE_SIZE (mode))
2132 emit_move_insn (target, srcreg);
2133 return;
2136 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2137 into a new pseudo which is a full word. */
2138 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2140 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2141 mode = word_mode;
2144 /* Copy the structure BITSIZE bits at a time. If the target lives in
2145 memory, take care of not reading/writing past its end by selecting
2146 a copy mode suited to BITSIZE. This should always be possible given
2147 how it is computed.
2149 If the target lives in register, make sure not to select a copy mode
2150 larger than the mode of the register.
2152 We could probably emit more efficient code for machines which do not use
2153 strict alignment, but it doesn't seem worth the effort at the current
2154 time. */
2156 copy_mode = word_mode;
2157 if (MEM_P (target))
2159 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2160 if (mem_mode != BLKmode)
2161 copy_mode = mem_mode;
2163 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2164 copy_mode = tmode;
2166 for (bitpos = 0, xbitpos = padding_correction;
2167 bitpos < bytes * BITS_PER_UNIT;
2168 bitpos += bitsize, xbitpos += bitsize)
2170 /* We need a new source operand each time xbitpos is on a
2171 word boundary and when xbitpos == padding_correction
2172 (the first time through). */
2173 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2174 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2176 /* We need a new destination operand each time bitpos is on
2177 a word boundary. */
2178 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2179 dst = target;
2180 else if (bitpos % BITS_PER_WORD == 0)
2181 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2183 /* Use xbitpos for the source extraction (right justified) and
2184 bitpos for the destination store (left justified). */
2185 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2186 extract_bit_field (src, bitsize,
2187 xbitpos % BITS_PER_WORD, 1, false,
2188 NULL_RTX, copy_mode, copy_mode));
2192 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2193 register if it contains any data, otherwise return null.
2195 This is used on targets that return BLKmode values in registers. */
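/* Editor's note: an illustrative sketch, not part of the original source.
   This is the direction used when returning a small BLKmode aggregate in
   a register; MODE would normally come from the target's return-value
   convention, and RETVAL_TREE / RETURN_REG are hypothetical names:

       rtx val = copy_blkmode_to_reg (word_mode, retval_tree);
       if (val)
         emit_move_insn (return_reg, val);  */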
2198 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2200 int i, n_regs;
2201 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2202 unsigned int bitsize;
2203 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2204 enum machine_mode dst_mode;
2206 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2208 x = expand_normal (src);
2210 bytes = int_size_in_bytes (TREE_TYPE (src));
2211 if (bytes == 0)
2212 return NULL_RTX;
2214 /* If the structure doesn't take up a whole number of words, see
2215 whether the register value should be padded on the left or on
2216 the right. Set PADDING_CORRECTION to the number of padding
2217 bits needed on the left side.
2219 In most ABIs, the structure will be returned at the least significant end of
2220 the register, which translates to right padding on little-endian
2221 targets and left padding on big-endian targets. The opposite
2222 holds if the structure is returned at the most significant
2223 end of the register. */
2224 if (bytes % UNITS_PER_WORD != 0
2225 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2226 ? !BYTES_BIG_ENDIAN
2227 : BYTES_BIG_ENDIAN))
2228 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2229 * BITS_PER_UNIT));
2231 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2232 dst_words = XALLOCAVEC (rtx, n_regs);
2233 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2235 /* Copy the structure BITSIZE bits at a time. */
2236 for (bitpos = 0, xbitpos = padding_correction;
2237 bitpos < bytes * BITS_PER_UNIT;
2238 bitpos += bitsize, xbitpos += bitsize)
2240 /* We need a new destination pseudo each time xbitpos is
2241 on a word boundary and when xbitpos == padding_correction
2242 (the first time through). */
2243 if (xbitpos % BITS_PER_WORD == 0
2244 || xbitpos == padding_correction)
2246 /* Generate an appropriate register. */
2247 dst_word = gen_reg_rtx (word_mode);
2248 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2250 /* Clear the destination before we move anything into it. */
2251 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2254 /* We need a new source operand each time bitpos is on a word
2255 boundary. */
2256 if (bitpos % BITS_PER_WORD == 0)
2257 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2259 /* Use bitpos for the source extraction (left justified) and
2260 xbitpos for the destination store (right justified). */
2261 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2262 0, 0, word_mode,
2263 extract_bit_field (src_word, bitsize,
2264 bitpos % BITS_PER_WORD, 1, false,
2265 NULL_RTX, word_mode, word_mode));
2268 if (mode == BLKmode)
2270 /* Find the smallest integer mode large enough to hold the
2271 entire structure. */
2272 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2273 mode != VOIDmode;
2274 mode = GET_MODE_WIDER_MODE (mode))
2275 /* Have we found a large enough mode? */
2276 if (GET_MODE_SIZE (mode) >= bytes)
2277 break;
2279 /* A suitable mode should have been found. */
2280 gcc_assert (mode != VOIDmode);
2283 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2284 dst_mode = word_mode;
2285 else
2286 dst_mode = mode;
2287 dst = gen_reg_rtx (dst_mode);
2289 for (i = 0; i < n_regs; i++)
2290 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2292 if (mode != dst_mode)
2293 dst = gen_lowpart (mode, dst);
2295 return dst;
2298 /* Add a USE expression for REG to the (possibly empty) list pointed
2299 to by CALL_FUSAGE. REG must denote a hard register. */
2301 void
2302 use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2304 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2306 *call_fusage
2307 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2310 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2311 starting at REGNO. All of these registers must be hard registers. */
2313 void
2314 use_regs (rtx *call_fusage, int regno, int nregs)
2316 int i;
2318 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2320 for (i = 0; i < nregs; i++)
2321 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2324 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2325 PARALLEL REGS. This is for calls that pass values in multiple
2326 non-contiguous locations. The Irix 6 ABI has examples of this. */
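/* Editor's note: an illustrative sketch; the register numbers are
   hypothetical.  These helpers accumulate an EXPR_LIST of USEs that is
   later attached to the call insn's CALL_INSN_FUNCTION_USAGE:

       rtx call_fusage = NULL_RTX;
       use_reg (&call_fusage, gen_rtx_REG (Pmode, 5));
       use_regs (&call_fusage, 8, 2);
       use_group_regs (&call_fusage, arg_parallel);

   The second call marks hard registers 8 and 9; ARG_PARALLEL stands for
   a PARALLEL describing a multi-register argument.  */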
2328 void
2329 use_group_regs (rtx *call_fusage, rtx regs)
2331 int i;
2333 for (i = 0; i < XVECLEN (regs, 0); i++)
2335 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2337 /* A NULL entry means the parameter goes both on the stack and in
2338 registers. This can also be a MEM for targets that pass values
2339 partially on the stack and partially in registers. */
2340 if (reg != 0 && REG_P (reg))
2341 use_reg (call_fusage, reg);
2345 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2346 assignment and the code of the expression on the RHS is CODE.  Return
2347 NULL otherwise. */
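/* Editor's note: a minimal usage sketch, not from the original source.
   Expanders use this to look through an SSA name at its defining
   statement, e.g. to spot a multiplication feeding the expression being
   expanded (OP0 and the locals are hypothetical):

       gimple def = get_def_for_expr (op0, MULT_EXPR);
       if (def)
         {
           tree rhs1 = gimple_assign_rhs1 (def);
           tree rhs2 = gimple_assign_rhs2 (def);
           ...
         }  */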
2349 static gimple
2350 get_def_for_expr (tree name, enum tree_code code)
2352 gimple def_stmt;
2354 if (TREE_CODE (name) != SSA_NAME)
2355 return NULL;
2357 def_stmt = get_gimple_for_ssa_name (name);
2358 if (!def_stmt
2359 || gimple_assign_rhs_code (def_stmt) != code)
2360 return NULL;
2362 return def_stmt;
2365 #ifdef HAVE_conditional_move
2366 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2367 assignment and the class of the expression on the RHS is CLASS.  Return
2368 NULL otherwise. */
2370 static gimple
2371 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2373 gimple def_stmt;
2375 if (TREE_CODE (name) != SSA_NAME)
2376 return NULL;
2378 def_stmt = get_gimple_for_ssa_name (name);
2379 if (!def_stmt
2380 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2381 return NULL;
2383 return def_stmt;
2385 #endif
2388 /* Determine whether the LEN bytes generated by CONSTFUN can be
2389 stored to memory using several move instructions. CONSTFUNDATA is
2390 a pointer which will be passed as argument in every CONSTFUN call.
2391 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2392 a memset operation and false if it's a copy of a constant string.
2393 Return nonzero if a call to store_by_pieces should succeed. */
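/* Editor's note: an illustrative sketch, not part of the original source.
   CONSTFUN has the same shape as the constfun field of struct
   store_by_pieces_d; the hypothetical callback below simply produces
   zeros, mirroring clear_by_pieces_1 further down:

       static rtx
       zero_piece (void *data ATTRIBUTE_UNUSED,
                   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                   enum machine_mode mode ATTRIBUTE_UNUSED)
       {
         return const0_rtx;
       }

       if (can_store_by_pieces (len, zero_piece, NULL, align, true))
         store_by_pieces (dest_mem, len, zero_piece, NULL, align, true, 0);  */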
2396 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2397 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2398 void *constfundata, unsigned int align, bool memsetp)
2400 unsigned HOST_WIDE_INT l;
2401 unsigned int max_size;
2402 HOST_WIDE_INT offset = 0;
2403 enum machine_mode mode;
2404 enum insn_code icode;
2405 int reverse;
2406 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2407 rtx cst ATTRIBUTE_UNUSED;
2409 if (len == 0)
2410 return 1;
2412 if (! (memsetp
2413 ? SET_BY_PIECES_P (len, align)
2414 : STORE_BY_PIECES_P (len, align)))
2415 return 0;
2417 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2419 /* We would first store what we can in the largest integer mode, then go to
2420 successively smaller modes. */
2422 for (reverse = 0;
2423 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2424 reverse++)
2426 l = len;
2427 max_size = STORE_MAX_PIECES + 1;
2428 while (max_size > 1 && l > 0)
2430 mode = widest_int_mode_for_size (max_size);
2432 if (mode == VOIDmode)
2433 break;
2435 icode = optab_handler (mov_optab, mode);
2436 if (icode != CODE_FOR_nothing
2437 && align >= GET_MODE_ALIGNMENT (mode))
2439 unsigned int size = GET_MODE_SIZE (mode);
2441 while (l >= size)
2443 if (reverse)
2444 offset -= size;
2446 cst = (*constfun) (constfundata, offset, mode);
2447 if (!targetm.legitimate_constant_p (mode, cst))
2448 return 0;
2450 if (!reverse)
2451 offset += size;
2453 l -= size;
2457 max_size = GET_MODE_SIZE (mode);
2460 /* The code above should have handled everything. */
2461 gcc_assert (!l);
2464 return 1;
2467 /* Generate several move instructions to store LEN bytes generated by
2468 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2469 pointer which will be passed as argument in every CONSTFUN call.
2470 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2471 a memset operation and false if it's a copy of a constant string.
2472 If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
2473 mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
2474 stpcpy.  */
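/* Editor's note: an illustrative sketch; DEST_MEM, LEN, CONSTFUN, DATA
   and ALIGN are hypothetical.  Passing ENDP == 1 yields the address just
   past the last byte written, which is what a mempcpy-style expansion
   wants:

       rtx end = store_by_pieces (dest_mem, len, constfun, data,
                                  align, false, 1);  */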
2477 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2478 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2479 void *constfundata, unsigned int align, bool memsetp, int endp)
2481 enum machine_mode to_addr_mode = get_address_mode (to);
2482 struct store_by_pieces_d data;
2484 if (len == 0)
2486 gcc_assert (endp != 2);
2487 return to;
2490 gcc_assert (memsetp
2491 ? SET_BY_PIECES_P (len, align)
2492 : STORE_BY_PIECES_P (len, align));
2493 data.constfun = constfun;
2494 data.constfundata = constfundata;
2495 data.len = len;
2496 data.to = to;
2497 store_by_pieces_1 (&data, align);
2498 if (endp)
2500 rtx to1;
2502 gcc_assert (!data.reverse);
2503 if (data.autinc_to)
2505 if (endp == 2)
2507 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2508 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2509 else
2510 data.to_addr = copy_to_mode_reg (to_addr_mode,
2511 plus_constant (to_addr_mode,
2512 data.to_addr,
2513 -1));
2515 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2516 data.offset);
2518 else
2520 if (endp == 2)
2521 --data.offset;
2522 to1 = adjust_address (data.to, QImode, data.offset);
2524 return to1;
2526 else
2527 return data.to;
2530 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2531 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2533 static void
2534 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2536 struct store_by_pieces_d data;
2538 if (len == 0)
2539 return;
2541 data.constfun = clear_by_pieces_1;
2542 data.constfundata = NULL;
2543 data.len = len;
2544 data.to = to;
2545 store_by_pieces_1 (&data, align);
2548 /* Callback routine for clear_by_pieces.
2549 Return const0_rtx unconditionally. */
2551 static rtx
2552 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2553 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2554 enum machine_mode mode ATTRIBUTE_UNUSED)
2556 return const0_rtx;
2559 /* Subroutine of clear_by_pieces and store_by_pieces.
2560 Generate several move instructions to store LEN bytes of block TO. (A MEM
2561 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2563 static void
2564 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2565 unsigned int align ATTRIBUTE_UNUSED)
2567 enum machine_mode to_addr_mode = get_address_mode (data->to);
2568 rtx to_addr = XEXP (data->to, 0);
2569 unsigned int max_size = STORE_MAX_PIECES + 1;
2570 enum insn_code icode;
2572 data->offset = 0;
2573 data->to_addr = to_addr;
2574 data->autinc_to
2575 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2576 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2578 data->explicit_inc_to = 0;
2579 data->reverse
2580 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2581 if (data->reverse)
2582 data->offset = data->len;
2584 /* If storing requires more than two move insns,
2585 copy addresses to registers (to make displacements shorter)
2586 and use post-increment if available. */
2587 if (!data->autinc_to
2588 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2590 /* Determine the main mode we'll be using.
2591 MODE might not be used depending on the definitions of the
2592 USE_* macros below. */
2593 enum machine_mode mode ATTRIBUTE_UNUSED
2594 = widest_int_mode_for_size (max_size);
2596 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2598 data->to_addr = copy_to_mode_reg (to_addr_mode,
2599 plus_constant (to_addr_mode,
2600 to_addr,
2601 data->len));
2602 data->autinc_to = 1;
2603 data->explicit_inc_to = -1;
2606 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2607 && ! data->autinc_to)
2609 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2610 data->autinc_to = 1;
2611 data->explicit_inc_to = 1;
2614 if ( !data->autinc_to && CONSTANT_P (to_addr))
2615 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2618 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2620 /* First store what we can in the largest integer mode, then go to
2621 successively smaller modes. */
2623 while (max_size > 1 && data->len > 0)
2625 enum machine_mode mode = widest_int_mode_for_size (max_size);
2627 if (mode == VOIDmode)
2628 break;
2630 icode = optab_handler (mov_optab, mode);
2631 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2632 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2634 max_size = GET_MODE_SIZE (mode);
2637 /* The code above should have handled everything. */
2638 gcc_assert (!data->len);
2641 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2642 with move instructions for mode MODE. GENFUN is the gen_... function
2643 to make a move insn for that mode. DATA has all the other info. */
2645 static void
2646 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2647 struct store_by_pieces_d *data)
2649 unsigned int size = GET_MODE_SIZE (mode);
2650 rtx to1, cst;
2652 while (data->len >= size)
2654 if (data->reverse)
2655 data->offset -= size;
2657 if (data->autinc_to)
2658 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2659 data->offset);
2660 else
2661 to1 = adjust_address (data->to, mode, data->offset);
2663 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2664 emit_insn (gen_add2_insn (data->to_addr,
2665 GEN_INT (-(HOST_WIDE_INT) size)));
2667 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2668 emit_insn ((*genfun) (to1, cst));
2670 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2671 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2673 if (! data->reverse)
2674 data->offset += size;
2676 data->len -= size;
2680 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2681 its length in bytes. */
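/* Editor's note: an illustrative sketch, not part of the original source.
   A typical caller zero-fills a BLKmode temporary through the plain
   clear_storage wrapper below, which lets this routine choose between
   clear_by_pieces, a setmem pattern and a memset libcall:

       rtx slot = assign_stack_temp (BLKmode, 64);
       clear_storage (slot, GEN_INT (64), BLOCK_OP_NORMAL);  */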
2684 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2685 unsigned int expected_align, HOST_WIDE_INT expected_size)
2687 enum machine_mode mode = GET_MODE (object);
2688 unsigned int align;
2690 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2692 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2693 just move a zero. Otherwise, do this a piece at a time. */
2694 if (mode != BLKmode
2695 && CONST_INT_P (size)
2696 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2698 rtx zero = CONST0_RTX (mode);
2699 if (zero != NULL)
2701 emit_move_insn (object, zero);
2702 return NULL;
2705 if (COMPLEX_MODE_P (mode))
2707 zero = CONST0_RTX (GET_MODE_INNER (mode));
2708 if (zero != NULL)
2710 write_complex_part (object, zero, 0);
2711 write_complex_part (object, zero, 1);
2712 return NULL;
2717 if (size == const0_rtx)
2718 return NULL;
2720 align = MEM_ALIGN (object);
2722 if (CONST_INT_P (size)
2723 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2724 clear_by_pieces (object, INTVAL (size), align);
2725 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2726 expected_align, expected_size))
2728 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2729 return set_storage_via_libcall (object, size, const0_rtx,
2730 method == BLOCK_OP_TAILCALL);
2731 else
2732 gcc_unreachable ();
2734 return NULL;
2738 clear_storage (rtx object, rtx size, enum block_op_methods method)
2740 return clear_storage_hints (object, size, method, 0, -1);
2744 /* A subroutine of clear_storage. Expand a call to memset.
2745 Return the return value of memset, 0 otherwise. */
2748 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2750 tree call_expr, fn, object_tree, size_tree, val_tree;
2751 enum machine_mode size_mode;
2752 rtx retval;
2754 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2755 place those pseudos into a VAR_DECL and use them later.  */
2757 object = copy_addr_to_reg (XEXP (object, 0));
2759 size_mode = TYPE_MODE (sizetype);
2760 size = convert_to_mode (size_mode, size, 1);
2761 size = copy_to_mode_reg (size_mode, size);
2763 /* It is incorrect to use the libcall calling conventions to call
2764 memset in this context. This could be a user call to memset and
2765 the user may wish to examine the return value from memset. For
2766 targets where libcalls and normal calls have different conventions
2767 for returning pointers, we could end up generating incorrect code. */
2769 object_tree = make_tree (ptr_type_node, object);
2770 if (!CONST_INT_P (val))
2771 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2772 size_tree = make_tree (sizetype, size);
2773 val_tree = make_tree (integer_type_node, val);
2775 fn = clear_storage_libcall_fn (true);
2776 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2777 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2779 retval = expand_normal (call_expr);
2781 return retval;
2784 /* A subroutine of set_storage_via_libcall. Create the tree node
2785 for the function we use for block clears. */
2787 tree block_clear_fn;
2789 void
2790 init_block_clear_fn (const char *asmspec)
2792 if (!block_clear_fn)
2794 tree fn, args;
2796 fn = get_identifier ("memset");
2797 args = build_function_type_list (ptr_type_node, ptr_type_node,
2798 integer_type_node, sizetype,
2799 NULL_TREE);
2801 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2802 DECL_EXTERNAL (fn) = 1;
2803 TREE_PUBLIC (fn) = 1;
2804 DECL_ARTIFICIAL (fn) = 1;
2805 TREE_NOTHROW (fn) = 1;
2806 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2807 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2809 block_clear_fn = fn;
2812 if (asmspec)
2813 set_user_assembler_name (block_clear_fn, asmspec);
2816 static tree
2817 clear_storage_libcall_fn (int for_call)
2819 static bool emitted_extern;
2821 if (!block_clear_fn)
2822 init_block_clear_fn (NULL);
2824 if (for_call && !emitted_extern)
2826 emitted_extern = true;
2827 make_decl_rtl (block_clear_fn);
2830 return block_clear_fn;
2833 /* Expand a setmem pattern; return true if successful. */
2835 bool
2836 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2837 unsigned int expected_align, HOST_WIDE_INT expected_size)
2839 /* Try the most limited insn first, because there's no point
2840 including more than one in the machine description unless
2841 the more limited one has some advantage. */
2843 enum machine_mode mode;
2845 if (expected_align < align)
2846 expected_align = align;
2848 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2849 mode = GET_MODE_WIDER_MODE (mode))
2851 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2853 if (code != CODE_FOR_nothing
2854 /* We don't need MODE to be narrower than
2855 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2856 the mode mask, as it is returned by the macro, it will
2857 definitely be less than the actual mode mask. */
2858 && ((CONST_INT_P (size)
2859 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2860 <= (GET_MODE_MASK (mode) >> 1)))
2861 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
2863 struct expand_operand ops[6];
2864 unsigned int nops;
2866 nops = insn_data[(int) code].n_generator_args;
2867 gcc_assert (nops == 4 || nops == 6);
2869 create_fixed_operand (&ops[0], object);
2870 /* The check above guarantees that this size conversion is valid. */
2871 create_convert_operand_to (&ops[1], size, mode, true);
2872 create_convert_operand_from (&ops[2], val, byte_mode, true);
2873 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2874 if (nops == 6)
2876 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2877 create_integer_operand (&ops[5], expected_size);
2879 if (maybe_expand_insn (code, nops, ops))
2880 return true;
2884 return false;
2888 /* Write to one of the components of the complex value CPLX. Write VAL to
2889 the real part if IMAG_P is false, and the imaginary part if it's true.  */
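/* Editor's note: read_complex_part and write_complex_part are the
   building blocks used by emit_move_complex_parts further down; a
   piecewise complex copy from Y to X is simply:

       write_complex_part (x, read_complex_part (y, false), false);
       write_complex_part (x, read_complex_part (y, true), true);  */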
2891 static void
2892 write_complex_part (rtx cplx, rtx val, bool imag_p)
2894 enum machine_mode cmode;
2895 enum machine_mode imode;
2896 unsigned ibitsize;
2898 if (GET_CODE (cplx) == CONCAT)
2900 emit_move_insn (XEXP (cplx, imag_p), val);
2901 return;
2904 cmode = GET_MODE (cplx);
2905 imode = GET_MODE_INNER (cmode);
2906 ibitsize = GET_MODE_BITSIZE (imode);
2908 /* For MEMs simplify_gen_subreg may generate an invalid new address
2909 because, e.g., the original address is considered mode-dependent
2910 by the target, which restricts simplify_subreg from invoking
2911 adjust_address_nv. Instead of preparing fallback support for an
2912 invalid address, we call adjust_address_nv directly. */
2913 if (MEM_P (cplx))
2915 emit_move_insn (adjust_address_nv (cplx, imode,
2916 imag_p ? GET_MODE_SIZE (imode) : 0),
2917 val);
2918 return;
2921 /* If the sub-object is at least word sized, then we know that subregging
2922 will work. This special case is important, since store_bit_field
2923 wants to operate on integer modes, and there's rarely an OImode to
2924 correspond to TCmode. */
2925 if (ibitsize >= BITS_PER_WORD
2926 /* For hard regs we have exact predicates. Assume we can split
2927 the original object if it spans an even number of hard regs.
2928 This special case is important for SCmode on 64-bit platforms
2929 where the natural size of floating-point regs is 32-bit. */
2930 || (REG_P (cplx)
2931 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2932 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2934 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2935 imag_p ? GET_MODE_SIZE (imode) : 0);
2936 if (part)
2938 emit_move_insn (part, val);
2939 return;
2941 else
2942 /* simplify_gen_subreg may fail for sub-word MEMs. */
2943 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2946 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
2949 /* Extract one of the components of the complex value CPLX. Extract the
2950 real part if IMAG_P is false, and the imaginary part if it's true. */
2952 static rtx
2953 read_complex_part (rtx cplx, bool imag_p)
2955 enum machine_mode cmode, imode;
2956 unsigned ibitsize;
2958 if (GET_CODE (cplx) == CONCAT)
2959 return XEXP (cplx, imag_p);
2961 cmode = GET_MODE (cplx);
2962 imode = GET_MODE_INNER (cmode);
2963 ibitsize = GET_MODE_BITSIZE (imode);
2965 /* Special case reads from complex constants that got spilled to memory. */
2966 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2968 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2969 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2971 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2972 if (CONSTANT_CLASS_P (part))
2973 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2977 /* For MEMs simplify_gen_subreg may generate an invalid new address
2978 because, e.g., the original address is considered mode-dependent
2979 by the target, which restricts simplify_subreg from invoking
2980 adjust_address_nv. Instead of preparing fallback support for an
2981 invalid address, we call adjust_address_nv directly. */
2982 if (MEM_P (cplx))
2983 return adjust_address_nv (cplx, imode,
2984 imag_p ? GET_MODE_SIZE (imode) : 0);
2986 /* If the sub-object is at least word sized, then we know that subregging
2987 will work. This special case is important, since extract_bit_field
2988 wants to operate on integer modes, and there's rarely an OImode to
2989 correspond to TCmode. */
2990 if (ibitsize >= BITS_PER_WORD
2991 /* For hard regs we have exact predicates. Assume we can split
2992 the original object if it spans an even number of hard regs.
2993 This special case is important for SCmode on 64-bit platforms
2994 where the natural size of floating-point regs is 32-bit. */
2995 || (REG_P (cplx)
2996 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2997 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2999 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3000 imag_p ? GET_MODE_SIZE (imode) : 0);
3001 if (ret)
3002 return ret;
3003 else
3004 /* simplify_gen_subreg may fail for sub-word MEMs. */
3005 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3008 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3009 true, false, NULL_RTX, imode, imode);
3012 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3013 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3014 represented in NEW_MODE. If FORCE is true, this will never happen, as
3015 we'll force-create a SUBREG if needed. */
3017 static rtx
3018 emit_move_change_mode (enum machine_mode new_mode,
3019 enum machine_mode old_mode, rtx x, bool force)
3021 rtx ret;
3023 if (push_operand (x, GET_MODE (x)))
3025 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3026 MEM_COPY_ATTRIBUTES (ret, x);
3028 else if (MEM_P (x))
3030 /* We don't have to worry about changing the address since the
3031 size in bytes is supposed to be the same. */
3032 if (reload_in_progress)
3034 /* Copy the MEM to change the mode and move any
3035 substitutions from the old MEM to the new one. */
3036 ret = adjust_address_nv (x, new_mode, 0);
3037 copy_replacements (x, ret);
3039 else
3040 ret = adjust_address (x, new_mode, 0);
3042 else
3044 /* Note that we do want simplify_subreg's behavior of validating
3045 that the new mode is ok for a hard register. If we were to use
3046 simplify_gen_subreg, we would create the subreg, but would
3047 probably run into the target not being able to implement it. */
3048 /* Except, of course, when FORCE is true, when this is exactly what
3049 we want. Which is needed for CCmodes on some targets. */
3050 if (force)
3051 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3052 else
3053 ret = simplify_subreg (new_mode, x, old_mode, 0);
3056 return ret;
3059 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3060 an integer mode of the same size as MODE. Returns the instruction
3061 emitted, or NULL if such a move could not be generated. */
3063 static rtx
3064 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3066 enum machine_mode imode;
3067 enum insn_code code;
3069 /* There must exist a mode of the exact size we require. */
3070 imode = int_mode_for_mode (mode);
3071 if (imode == BLKmode)
3072 return NULL_RTX;
3074 /* The target must support moves in this mode. */
3075 code = optab_handler (mov_optab, imode);
3076 if (code == CODE_FOR_nothing)
3077 return NULL_RTX;
3079 x = emit_move_change_mode (imode, mode, x, force);
3080 if (x == NULL_RTX)
3081 return NULL_RTX;
3082 y = emit_move_change_mode (imode, mode, y, force);
3083 if (y == NULL_RTX)
3084 return NULL_RTX;
3085 return emit_insn (GEN_FCN (code) (x, y));
3088 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3089 Return an equivalent MEM that does not use an auto-increment. */
3091 static rtx
3092 emit_move_resolve_push (enum machine_mode mode, rtx x)
3094 enum rtx_code code = GET_CODE (XEXP (x, 0));
3095 HOST_WIDE_INT adjust;
3096 rtx temp;
3098 adjust = GET_MODE_SIZE (mode);
3099 #ifdef PUSH_ROUNDING
3100 adjust = PUSH_ROUNDING (adjust);
3101 #endif
3102 if (code == PRE_DEC || code == POST_DEC)
3103 adjust = -adjust;
3104 else if (code == PRE_MODIFY || code == POST_MODIFY)
3106 rtx expr = XEXP (XEXP (x, 0), 1);
3107 HOST_WIDE_INT val;
3109 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3110 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3111 val = INTVAL (XEXP (expr, 1));
3112 if (GET_CODE (expr) == MINUS)
3113 val = -val;
3114 gcc_assert (adjust == val || adjust == -val);
3115 adjust = val;
3118 /* Do not use anti_adjust_stack, since we don't want to update
3119 stack_pointer_delta. */
3120 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3121 GEN_INT (adjust), stack_pointer_rtx,
3122 0, OPTAB_LIB_WIDEN);
3123 if (temp != stack_pointer_rtx)
3124 emit_move_insn (stack_pointer_rtx, temp);
3126 switch (code)
3128 case PRE_INC:
3129 case PRE_DEC:
3130 case PRE_MODIFY:
3131 temp = stack_pointer_rtx;
3132 break;
3133 case POST_INC:
3134 case POST_DEC:
3135 case POST_MODIFY:
3136 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3137 break;
3138 default:
3139 gcc_unreachable ();
3142 return replace_equiv_address (x, temp);
3145 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3146 X is known to satisfy push_operand, and MODE is known to be complex.
3147 Returns the last instruction emitted. */
3150 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3152 enum machine_mode submode = GET_MODE_INNER (mode);
3153 bool imag_first;
3155 #ifdef PUSH_ROUNDING
3156 unsigned int submodesize = GET_MODE_SIZE (submode);
3158 /* If we are pushing to the stack but the size is not something the
3159 machine can push exactly, we need to use move instructions.  */
3160 if (PUSH_ROUNDING (submodesize) != submodesize)
3162 x = emit_move_resolve_push (mode, x);
3163 return emit_move_insn (x, y);
3165 #endif
3167 /* Note that the real part always precedes the imag part in memory
3168 regardless of machine's endianness. */
3169 switch (GET_CODE (XEXP (x, 0)))
3171 case PRE_DEC:
3172 case POST_DEC:
3173 imag_first = true;
3174 break;
3175 case PRE_INC:
3176 case POST_INC:
3177 imag_first = false;
3178 break;
3179 default:
3180 gcc_unreachable ();
3183 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3184 read_complex_part (y, imag_first));
3185 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3186 read_complex_part (y, !imag_first));
3189 /* A subroutine of emit_move_complex. Perform the move from Y to X
3190 via two moves of the parts. Returns the last instruction emitted. */
3193 emit_move_complex_parts (rtx x, rtx y)
3195 /* Show the output dies here. This is necessary for SUBREGs
3196 of pseudos since we cannot track their lifetimes correctly;
3197 hard regs shouldn't appear here except as return values. */
3198 if (!reload_completed && !reload_in_progress
3199 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3200 emit_clobber (x);
3202 write_complex_part (x, read_complex_part (y, false), false);
3203 write_complex_part (x, read_complex_part (y, true), true);
3205 return get_last_insn ();
3208 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3209 MODE is known to be complex. Returns the last instruction emitted. */
3211 static rtx
3212 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3214 bool try_int;
3216 /* Need to take special care for pushes, to maintain proper ordering
3217 of the data, and possibly extra padding. */
3218 if (push_operand (x, mode))
3219 return emit_move_complex_push (mode, x, y);
3221 /* See if we can coerce the target into moving both values at once. */
3223 /* Move floating point as parts. */
3224 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3225 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing)
3226 try_int = false;
3227 /* Not possible if the values are inherently not adjacent. */
3228 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3229 try_int = false;
3230 /* Is possible if both are registers (or subregs of registers). */
3231 else if (register_operand (x, mode) && register_operand (y, mode))
3232 try_int = true;
3233 /* If one of the operands is a memory, and alignment constraints
3234 are friendly enough, we may be able to do combined memory operations.
3235 We do not attempt this if Y is a constant because that combination is
3236 usually better with the by-parts approach below.  */
3237 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3238 && (!STRICT_ALIGNMENT
3239 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3240 try_int = true;
3241 else
3242 try_int = false;
3244 if (try_int)
3246 rtx ret;
3248 /* For memory to memory moves, optimal behavior can be had with the
3249 existing block move logic. */
3250 if (MEM_P (x) && MEM_P (y))
3252 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3253 BLOCK_OP_NO_LIBCALL);
3254 return get_last_insn ();
3257 ret = emit_move_via_integer (mode, x, y, true);
3258 if (ret)
3259 return ret;
3262 return emit_move_complex_parts (x, y);
3265 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3266 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3268 static rtx
3269 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3271 rtx ret;
3273 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3274 if (mode != CCmode)
3276 enum insn_code code = optab_handler (mov_optab, CCmode);
3277 if (code != CODE_FOR_nothing)
3279 x = emit_move_change_mode (CCmode, mode, x, true);
3280 y = emit_move_change_mode (CCmode, mode, y, true);
3281 return emit_insn (GEN_FCN (code) (x, y));
3285 /* Otherwise, find the MODE_INT mode of the same width. */
3286 ret = emit_move_via_integer (mode, x, y, false);
3287 gcc_assert (ret != NULL);
3288 return ret;
3291 /* Return true if word I of OP lies entirely in the
3292 undefined bits of a paradoxical subreg. */
3294 static bool
3295 undefined_operand_subword_p (const_rtx op, int i)
3297 enum machine_mode innermode, innermostmode;
3298 int offset;
3299 if (GET_CODE (op) != SUBREG)
3300 return false;
3301 innermode = GET_MODE (op);
3302 innermostmode = GET_MODE (SUBREG_REG (op));
3303 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3304 /* The SUBREG_BYTE represents offset, as if the value were stored in
3305 memory, except for a paradoxical subreg where we define
3306 SUBREG_BYTE to be 0; undo this exception as in
3307 simplify_subreg. */
3308 if (SUBREG_BYTE (op) == 0
3309 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3311 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3312 if (WORDS_BIG_ENDIAN)
3313 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3314 if (BYTES_BIG_ENDIAN)
3315 offset += difference % UNITS_PER_WORD;
3317 if (offset >= GET_MODE_SIZE (innermostmode)
3318 || offset <= -GET_MODE_SIZE (word_mode))
3319 return true;
3320 return false;
3323 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3324 MODE is any multi-word or full-word mode that lacks a move_insn
3325 pattern. Note that you will get better code if you define such
3326 patterns, even if they must turn into multiple assembler instructions. */
3328 static rtx
3329 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3331 rtx last_insn = 0;
3332 rtx seq, inner;
3333 bool need_clobber;
3334 int i;
3336 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3338 /* If X is a push on the stack, do the push now and replace
3339 X with a reference to the stack pointer. */
3340 if (push_operand (x, mode))
3341 x = emit_move_resolve_push (mode, x);
3343 /* If we are in reload, see if either operand is a MEM whose address
3344 is scheduled for replacement. */
3345 if (reload_in_progress && MEM_P (x)
3346 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3347 x = replace_equiv_address_nv (x, inner);
3348 if (reload_in_progress && MEM_P (y)
3349 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3350 y = replace_equiv_address_nv (y, inner);
3352 start_sequence ();
3354 need_clobber = false;
3355 for (i = 0;
3356 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3357 i++)
3359 rtx xpart = operand_subword (x, i, 1, mode);
3360 rtx ypart;
3362 /* Do not generate code for a move if it would come entirely
3363 from the undefined bits of a paradoxical subreg. */
3364 if (undefined_operand_subword_p (y, i))
3365 continue;
3367 ypart = operand_subword (y, i, 1, mode);
3369 /* If we can't get a part of Y, put Y into memory if it is a
3370 constant. Otherwise, force it into a register. Then we must
3371 be able to get a part of Y. */
3372 if (ypart == 0 && CONSTANT_P (y))
3374 y = use_anchored_address (force_const_mem (mode, y));
3375 ypart = operand_subword (y, i, 1, mode);
3377 else if (ypart == 0)
3378 ypart = operand_subword_force (y, i, mode);
3380 gcc_assert (xpart && ypart);
3382 need_clobber |= (GET_CODE (xpart) == SUBREG);
3384 last_insn = emit_move_insn (xpart, ypart);
3387 seq = get_insns ();
3388 end_sequence ();
3390 /* Show the output dies here. This is necessary for SUBREGs
3391 of pseudos since we cannot track their lifetimes correctly;
3392 hard regs shouldn't appear here except as return values.
3393 We never want to emit such a clobber after reload. */
3394 if (x != y
3395 && ! (reload_in_progress || reload_completed)
3396 && need_clobber != 0)
3397 emit_clobber (x);
3399 emit_insn (seq);
3401 return last_insn;
3404 /* Low level part of emit_move_insn.
3405 Called just like emit_move_insn, but assumes X and Y
3406 are basically valid. */
3409 emit_move_insn_1 (rtx x, rtx y)
3411 enum machine_mode mode = GET_MODE (x);
3412 enum insn_code code;
3414 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3416 code = optab_handler (mov_optab, mode);
3417 if (code != CODE_FOR_nothing)
3418 return emit_insn (GEN_FCN (code) (x, y));
3420 /* Expand complex moves by moving real part and imag part. */
3421 if (COMPLEX_MODE_P (mode))
3422 return emit_move_complex (mode, x, y);
3424 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3425 || ALL_FIXED_POINT_MODE_P (mode))
3427 rtx result = emit_move_via_integer (mode, x, y, true);
3429 /* If we can't find an integer mode, use multi words. */
3430 if (result)
3431 return result;
3432 else
3433 return emit_move_multi_word (mode, x, y);
3436 if (GET_MODE_CLASS (mode) == MODE_CC)
3437 return emit_move_ccmode (mode, x, y);
3439 /* Try using a move pattern for the corresponding integer mode. This is
3440 only safe when simplify_subreg can convert MODE constants into integer
3441 constants. At present, it can only do this reliably if the value
3442 fits within a HOST_WIDE_INT. */
3443 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3445 rtx ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3447 if (ret)
3449 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3450 return ret;
3454 return emit_move_multi_word (mode, x, y);
3457 /* Generate code to copy Y into X.
3458 Both Y and X must have the same mode, except that
3459 Y can be a constant with VOIDmode.
3460 This mode cannot be BLKmode; use emit_block_move for that.
3462 Return the last instruction emitted. */
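/* Editor's note: a minimal usage sketch; DEST_MEM is a hypothetical
   SImode MEM.  Loading a constant into a fresh pseudo and storing it:

       rtx tmp = gen_reg_rtx (SImode);
       emit_move_insn (tmp, GEN_INT (42));
       emit_move_insn (dest_mem, tmp);

   The CONST_INT is VOIDmode, which the comment above explicitly
   allows.  */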
3465 emit_move_insn (rtx x, rtx y)
3467 enum machine_mode mode = GET_MODE (x);
3468 rtx y_cst = NULL_RTX;
3469 rtx last_insn, set;
3471 gcc_assert (mode != BLKmode
3472 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3474 if (CONSTANT_P (y))
3476 if (optimize
3477 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3478 && (last_insn = compress_float_constant (x, y)))
3479 return last_insn;
3481 y_cst = y;
3483 if (!targetm.legitimate_constant_p (mode, y))
3485 y = force_const_mem (mode, y);
3487 /* If the target's cannot_force_const_mem prevented the spill,
3488 assume that the target's move expanders will also take care
3489 of the non-legitimate constant. */
3490 if (!y)
3491 y = y_cst;
3492 else
3493 y = use_anchored_address (y);
3497 /* If X or Y are memory references, verify that their addresses are valid
3498 for the machine. */
3499 if (MEM_P (x)
3500 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3501 MEM_ADDR_SPACE (x))
3502 && ! push_operand (x, GET_MODE (x))))
3503 x = validize_mem (x);
3505 if (MEM_P (y)
3506 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3507 MEM_ADDR_SPACE (y)))
3508 y = validize_mem (y);
3510 gcc_assert (mode != BLKmode);
3512 last_insn = emit_move_insn_1 (x, y);
3514 if (y_cst && REG_P (x)
3515 && (set = single_set (last_insn)) != NULL_RTX
3516 && SET_DEST (set) == x
3517 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3518 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3520 return last_insn;
3523 /* If Y is representable exactly in a narrower mode, and the target can
3524 perform the extension directly from constant or memory, then emit the
3525 move as an extension. */
3527 static rtx
3528 compress_float_constant (rtx x, rtx y)
3530 enum machine_mode dstmode = GET_MODE (x);
3531 enum machine_mode orig_srcmode = GET_MODE (y);
3532 enum machine_mode srcmode;
3533 REAL_VALUE_TYPE r;
3534 int oldcost, newcost;
3535 bool speed = optimize_insn_for_speed_p ();
3537 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3539 if (targetm.legitimate_constant_p (dstmode, y))
3540 oldcost = set_src_cost (y, speed);
3541 else
3542 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3544 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3545 srcmode != orig_srcmode;
3546 srcmode = GET_MODE_WIDER_MODE (srcmode))
3548 enum insn_code ic;
3549 rtx trunc_y, last_insn;
3551 /* Skip if the target can't extend this way. */
3552 ic = can_extend_p (dstmode, srcmode, 0);
3553 if (ic == CODE_FOR_nothing)
3554 continue;
3556 /* Skip if the narrowed value isn't exact. */
3557 if (! exact_real_truncate (srcmode, &r))
3558 continue;
3560 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3562 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3564 /* Skip if the target needs extra instructions to perform
3565 the extension. */
3566 if (!insn_operand_matches (ic, 1, trunc_y))
3567 continue;
3568 /* This is valid, but may not be cheaper than the original. */
3569 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3570 speed);
3571 if (oldcost < newcost)
3572 continue;
3574 else if (float_extend_from_mem[dstmode][srcmode])
3576 trunc_y = force_const_mem (srcmode, trunc_y);
3577 /* This is valid, but may not be cheaper than the original. */
3578 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3579 speed);
3580 if (oldcost < newcost)
3581 continue;
3582 trunc_y = validize_mem (trunc_y);
3584 else
3585 continue;
3587 /* For CSE's benefit, force the compressed constant pool entry
3588 into a new pseudo. This constant may be used in different modes,
3589 and if not, combine will put things back together for us. */
3590 trunc_y = force_reg (srcmode, trunc_y);
3591 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3592 last_insn = get_last_insn ();
3594 if (REG_P (x))
3595 set_unique_reg_note (last_insn, REG_EQUAL, y);
3597 return last_insn;
3600 return NULL_RTX;
3603 /* Pushing data onto the stack. */
3605 /* Push a block of length SIZE (perhaps variable)
3606 and return an rtx to address the beginning of the block.
3607 The value may be virtual_outgoing_args_rtx.
3609 EXTRA is the number of bytes of padding to push in addition to SIZE.
3610 BELOW nonzero means this padding comes at low addresses;
3611 otherwise, the padding comes at high addresses. */
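/* Editor's note: an illustrative sketch, not part of the original source.
   Reserving 32 bytes of outgoing-argument space and wrapping the
   returned address in a BLKmode MEM:

       rtx addr = push_block (GEN_INT (32), 0, 0);
       rtx block = gen_rtx_MEM (BLKmode, addr);  */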
3614 push_block (rtx size, int extra, int below)
3616 rtx temp;
3618 size = convert_modes (Pmode, ptr_mode, size, 1);
3619 if (CONSTANT_P (size))
3620 anti_adjust_stack (plus_constant (Pmode, size, extra));
3621 else if (REG_P (size) && extra == 0)
3622 anti_adjust_stack (size);
3623 else
3625 temp = copy_to_mode_reg (Pmode, size);
3626 if (extra != 0)
3627 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3628 temp, 0, OPTAB_LIB_WIDEN);
3629 anti_adjust_stack (temp);
3632 #ifndef STACK_GROWS_DOWNWARD
3633 if (0)
3634 #else
3635 if (1)
3636 #endif
3638 temp = virtual_outgoing_args_rtx;
3639 if (extra != 0 && below)
3640 temp = plus_constant (Pmode, temp, extra);
3642 else
3644 if (CONST_INT_P (size))
3645 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3646 -INTVAL (size) - (below ? 0 : extra));
3647 else if (extra != 0 && !below)
3648 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3649 negate_rtx (Pmode, plus_constant (Pmode, size,
3650 extra)));
3651 else
3652 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3653 negate_rtx (Pmode, size));
3656 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3659 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3661 static rtx
3662 mem_autoinc_base (rtx mem)
3664 if (MEM_P (mem))
3666 rtx addr = XEXP (mem, 0);
3667 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3668 return XEXP (addr, 0);
3670 return NULL;
3673 /* A utility routine used here, in reload, and in try_split. The insns
3674 after PREV up to and including LAST are known to adjust the stack,
3675 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3676 placing notes as appropriate. PREV may be NULL, indicating the
3677 entire insn sequence prior to LAST should be scanned.
3679 The set of allowed stack pointer modifications is small:
3680 (1) One or more auto-inc style memory references (aka pushes),
3681 (2) One or more addition/subtraction with the SP as destination,
3682 (3) A single move insn with the SP as destination,
3683 (4) A call_pop insn,
3684 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3686 Insns in the sequence that do not modify the SP are ignored,
3687 except for noreturn calls.
3689 The return value is the amount of adjustment that can be trivially
3690 verified, via immediate operand or auto-inc. If the adjustment
3691 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN.  */
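/* Editor's note: illustrative examples of the returned values, not part
   of the original source.  For a direct adjustment such as

       (set (reg sp) (plus (reg sp) (const_int -16)))

   the result is -16; for a push through (pre_dec (reg sp)) of an SImode
   value it is -GET_MODE_SIZE (SImode), i.e. -4 on common targets.  */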
3693 HOST_WIDE_INT
3694 find_args_size_adjust (rtx insn)
3696 rtx dest, set, pat;
3697 int i;
3699 pat = PATTERN (insn);
3700 set = NULL;
3702 /* Look for a call_pop pattern. */
3703 if (CALL_P (insn))
3705 /* We have to allow non-call_pop patterns for the case
3706 of emit_single_push_insn of a TLS address. */
3707 if (GET_CODE (pat) != PARALLEL)
3708 return 0;
3710 /* All call_pop have a stack pointer adjust in the parallel.
3711 The call itself is always first, and the stack adjust is
3712 usually last, so search from the end. */
3713 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3715 set = XVECEXP (pat, 0, i);
3716 if (GET_CODE (set) != SET)
3717 continue;
3718 dest = SET_DEST (set);
3719 if (dest == stack_pointer_rtx)
3720 break;
3722 /* We'd better have found the stack pointer adjust. */
3723 if (i == 0)
3724 return 0;
3725 /* Fall through to process the extracted SET and DEST
3726 as if it was a standalone insn. */
3728 else if (GET_CODE (pat) == SET)
3729 set = pat;
3730 else if ((set = single_set (insn)) != NULL)
3732 else if (GET_CODE (pat) == PARALLEL)
3734 /* ??? Some older ports use a parallel with a stack adjust
3735 and a store for a PUSH_ROUNDING pattern, rather than a
3736 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3737 /* ??? See h8300 and m68k, pushqi1. */
3738 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3740 set = XVECEXP (pat, 0, i);
3741 if (GET_CODE (set) != SET)
3742 continue;
3743 dest = SET_DEST (set);
3744 if (dest == stack_pointer_rtx)
3745 break;
3747 /* We do not expect an auto-inc of the sp in the parallel. */
3748 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3749 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3750 != stack_pointer_rtx);
3752 if (i < 0)
3753 return 0;
3755 else
3756 return 0;
3758 dest = SET_DEST (set);
3760 /* Look for direct modifications of the stack pointer. */
3761 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3763 /* Look for a trivial adjustment, otherwise assume nothing. */
3764 /* Note that the SPU restore_stack_block pattern refers to
3765 the stack pointer in V4SImode. Consider that non-trivial. */
3766 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3767 && GET_CODE (SET_SRC (set)) == PLUS
3768 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3769 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3770 return INTVAL (XEXP (SET_SRC (set), 1));
3771 /* ??? Reload can generate no-op moves, which will be cleaned
3772 up later. Recognize it and continue searching. */
3773 else if (rtx_equal_p (dest, SET_SRC (set)))
3774 return 0;
3775 else
3776 return HOST_WIDE_INT_MIN;
3778 else
3780 rtx mem, addr;
3782 /* Otherwise only think about autoinc patterns. */
3783 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3785 mem = dest;
3786 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3787 != stack_pointer_rtx);
3789 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3790 mem = SET_SRC (set);
3791 else
3792 return 0;
3794 addr = XEXP (mem, 0);
3795 switch (GET_CODE (addr))
3797 case PRE_INC:
3798 case POST_INC:
3799 return GET_MODE_SIZE (GET_MODE (mem));
3800 case PRE_DEC:
3801 case POST_DEC:
3802 return -GET_MODE_SIZE (GET_MODE (mem));
3803 case PRE_MODIFY:
3804 case POST_MODIFY:
3805 addr = XEXP (addr, 1);
3806 gcc_assert (GET_CODE (addr) == PLUS);
3807 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3808 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3809 return INTVAL (XEXP (addr, 1));
3810 default:
3811 gcc_unreachable ();
3816 int
3817 fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
3819 int args_size = end_args_size;
3820 bool saw_unknown = false;
3821 rtx insn;
3823 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3825 HOST_WIDE_INT this_delta;
3827 if (!NONDEBUG_INSN_P (insn))
3828 continue;
3830 this_delta = find_args_size_adjust (insn);
3831 if (this_delta == 0)
3833 if (!CALL_P (insn)
3834 || ACCUMULATE_OUTGOING_ARGS
3835 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3836 continue;
3839 gcc_assert (!saw_unknown);
3840 if (this_delta == HOST_WIDE_INT_MIN)
3841 saw_unknown = true;
3843 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3844 #ifdef STACK_GROWS_DOWNWARD
3845 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3846 #endif
3847 args_size -= this_delta;
3850 return saw_unknown ? INT_MIN : args_size;
3853 #ifdef PUSH_ROUNDING
3854 /* Emit a single push insn. */
3856 static void
3857 emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
3859 rtx dest_addr;
3860 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3861 rtx dest;
3862 enum insn_code icode;
3864 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3865 /* If there is a push pattern, use it. Otherwise try the old way of
3866 throwing a MEM representing the push operation to the move expander. */
3867 icode = optab_handler (push_optab, mode);
3868 if (icode != CODE_FOR_nothing)
3870 struct expand_operand ops[1];
3872 create_input_operand (&ops[0], x, mode);
3873 if (maybe_expand_insn (icode, 1, ops))
3874 return;
3876 if (GET_MODE_SIZE (mode) == rounded_size)
3877 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3878 /* If we are to pad downward, adjust the stack pointer first and
3879 then store X into the stack location using an offset. This is
3880 because emit_move_insn does not know how to pad; it does not have
3881 access to type. */
3882 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3884 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3885 HOST_WIDE_INT offset;
3887 emit_move_insn (stack_pointer_rtx,
3888 expand_binop (Pmode,
3889 #ifdef STACK_GROWS_DOWNWARD
3890 sub_optab,
3891 #else
3892 add_optab,
3893 #endif
3894 stack_pointer_rtx,
3895 GEN_INT (rounded_size),
3896 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3898 offset = (HOST_WIDE_INT) padding_size;
3899 #ifdef STACK_GROWS_DOWNWARD
3900 if (STACK_PUSH_CODE == POST_DEC)
3901 /* We have already decremented the stack pointer, so get the
3902 previous value. */
3903 offset += (HOST_WIDE_INT) rounded_size;
3904 #else
3905 if (STACK_PUSH_CODE == POST_INC)
3906 /* We have already incremented the stack pointer, so get the
3907 previous value. */
3908 offset -= (HOST_WIDE_INT) rounded_size;
3909 #endif
3910 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
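/* Worked example (assuming a downward-growing stack with plain PRE_DEC
   pushes): for a QImode value whose 1 byte is rounded up to 4 by
   PUSH_ROUNDING, padding_size is 3.  The stack pointer is first dropped
   by 4 and the byte is then stored at sp + 3, so the three padding bytes
   sit below the value, as downward padding requires.  */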
3912 else
3914 #ifdef STACK_GROWS_DOWNWARD
3915 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3916 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3917 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3918 #else
3919 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3920 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3921 GEN_INT (rounded_size));
3922 #endif
3923 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3926 dest = gen_rtx_MEM (mode, dest_addr);
3928 if (type != 0)
3930 set_mem_attributes (dest, type, 1);
3932 if (flag_optimize_sibling_calls)
3933 /* Function incoming arguments may overlap with sibling call
3934 outgoing arguments and we cannot allow reordering of reads
3935 from function arguments with stores to outgoing arguments
3936 of sibling calls. */
3937 set_mem_alias_set (dest, 0);
3939 emit_move_insn (dest, x);
3942 /* Emit and annotate a single push insn. */
3944 static void
3945 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3947 int delta, old_delta = stack_pointer_delta;
3948 rtx prev = get_last_insn ();
3949 rtx last;
3951 emit_single_push_insn_1 (mode, x, type);
3953 last = get_last_insn ();
3955 /* Notice the common case where we emitted exactly one insn. */
3956 if (PREV_INSN (last) == prev)
3958 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
3959 return;
3962 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
3963 gcc_assert (delta == INT_MIN || delta == old_delta);
3965 #endif
3967 /* Generate code to push X onto the stack, assuming it has mode MODE and
3968 type TYPE.
3969 MODE is redundant except when X is a CONST_INT (since they don't
3970 carry mode info).
3971 SIZE is an rtx for the size of data to be copied (in bytes),
3972 needed only if X is BLKmode.
3974 ALIGN (in bits) is maximum alignment we can assume.
3976 If PARTIAL and REG are both nonzero, then copy the first PARTIAL
3977 bytes of X into registers starting with REG, and push the rest of X.
3978 The amount of space pushed is decreased by PARTIAL bytes.
3979 REG must be a hard register in this case.
3980 If REG is zero but PARTIAL is not, take all other actions for an
3981 argument partially in registers, but do not actually load any
3982 registers.
3984 EXTRA is the amount in bytes of extra space to leave next to this arg.
3985 This is ignored if an argument block has already been allocated.
3987 On a machine that lacks real push insns, ARGS_ADDR is the address of
3988 the bottom of the argument block for this call. We use indexing off there
3989 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3990 argument block has not been preallocated.
3992 ARGS_SO_FAR is the size of args previously pushed for this call.
3994 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3995 for arguments passed in registers. If nonzero, it will be the number
3996 of bytes required. */
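/* A hypothetical call, for illustration only (the argument values are
   made up rather than taken from a real caller): pushing a word-sized
   scalar on a PUSH_ROUNDING target with no preallocated argument block
   and no partial-register part might look like

     emit_push_insn (x, word_mode, integer_type_node, NULL_RTX,
                     BITS_PER_WORD, 0, NULL_RTX, 0, NULL_RTX,
                     GEN_INT (0), 0, NULL_RTX);

   SIZE is NULL_RTX because the mode is not BLKmode, and ARGS_ADDR is 0
   because real push insns are used.  */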
3998 void
3999 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
4000 unsigned int align, int partial, rtx reg, int extra,
4001 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4002 rtx alignment_pad)
4004 rtx xinner;
4005 enum direction stack_direction
4006 #ifdef STACK_GROWS_DOWNWARD
4007 = downward;
4008 #else
4009 = upward;
4010 #endif
4012 /* Decide where to pad the argument: `downward' for below,
4013 `upward' for above, or `none' for don't pad it.
4014 Default is below for small data on big-endian machines; else above. */
4015 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4017 /* Invert direction if stack is post-decrement.
4018 FIXME: why? */
4019 if (STACK_PUSH_CODE == POST_DEC)
4020 if (where_pad != none)
4021 where_pad = (where_pad == downward ? upward : downward);
4023 xinner = x;
4025 if (mode == BLKmode
4026 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4028 /* Copy a block into the stack, entirely or partially. */
4030 rtx temp;
4031 int used;
4032 int offset;
4033 int skip;
4035 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4036 used = partial - offset;
4038 if (mode != BLKmode)
4040 /* A value is to be stored in an insufficiently aligned
4041 stack slot; copy via a suitably aligned slot if
4042 necessary. */
4043 size = GEN_INT (GET_MODE_SIZE (mode));
4044 if (!MEM_P (xinner))
4046 temp = assign_temp (type, 1, 1);
4047 emit_move_insn (temp, xinner);
4048 xinner = temp;
4052 gcc_assert (size);
4054 /* USED is now the # of bytes we need not copy to the stack
4055 because registers will take care of them. */
4057 if (partial != 0)
4058 xinner = adjust_address (xinner, BLKmode, used);
4060 /* If the partial register-part of the arg counts in its stack size,
4061 skip the part of stack space corresponding to the registers.
4062 Otherwise, start copying to the beginning of the stack space,
4063 by setting SKIP to 0. */
4064 skip = (reg_parm_stack_space == 0) ? 0 : used;
4066 #ifdef PUSH_ROUNDING
4067 /* Do it with several push insns if that doesn't take lots of insns
4068 and if there is no difficulty with push insns that skip bytes
4069 on the stack for alignment purposes. */
4070 if (args_addr == 0
4071 && PUSH_ARGS
4072 && CONST_INT_P (size)
4073 && skip == 0
4074 && MEM_ALIGN (xinner) >= align
4075 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4076 /* Here we avoid the case of a structure whose weak alignment
4077 forces many pushes of a small amount of data,
4078 and such small pushes do rounding that causes trouble. */
4079 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4080 || align >= BIGGEST_ALIGNMENT
4081 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4082 == (align / BITS_PER_UNIT)))
4083 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4085 /* Push padding now if padding above and stack grows down,
4086 or if padding below and stack grows up.
4087 But if space already allocated, this has already been done. */
4088 if (extra && args_addr == 0
4089 && where_pad != none && where_pad != stack_direction)
4090 anti_adjust_stack (GEN_INT (extra));
4092 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4094 else
4095 #endif /* PUSH_ROUNDING */
4097 rtx target;
4099 /* Otherwise make space on the stack and copy the data
4100 to the address of that space. */
4102 /* Deduct words put into registers from the size we must copy. */
4103 if (partial != 0)
4105 if (CONST_INT_P (size))
4106 size = GEN_INT (INTVAL (size) - used);
4107 else
4108 size = expand_binop (GET_MODE (size), sub_optab, size,
4109 GEN_INT (used), NULL_RTX, 0,
4110 OPTAB_LIB_WIDEN);
4113 /* Get the address of the stack space.
4114 In this case, we do not deal with EXTRA separately.
4115 A single stack adjust will do. */
4116 if (! args_addr)
4118 temp = push_block (size, extra, where_pad == downward);
4119 extra = 0;
4121 else if (CONST_INT_P (args_so_far))
4122 temp = memory_address (BLKmode,
4123 plus_constant (Pmode, args_addr,
4124 skip + INTVAL (args_so_far)));
4125 else
4126 temp = memory_address (BLKmode,
4127 plus_constant (Pmode,
4128 gen_rtx_PLUS (Pmode,
4129 args_addr,
4130 args_so_far),
4131 skip));
4133 if (!ACCUMULATE_OUTGOING_ARGS)
4135 /* If the source is referenced relative to the stack pointer,
4136 copy it to another register to stabilize it. We do not need
4137 to do this if we know that we won't be changing sp. */
4139 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4140 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4141 temp = copy_to_reg (temp);
4144 target = gen_rtx_MEM (BLKmode, temp);
4146 /* We do *not* set_mem_attributes here, because incoming arguments
4147 may overlap with sibling call outgoing arguments and we cannot
4148 allow reordering of reads from function arguments with stores
4149 to outgoing arguments of sibling calls. We do, however, want
4150 to record the alignment of the stack slot. */
4151 /* ALIGN may well be better aligned than TYPE, e.g. due to
4152 PARM_BOUNDARY. Assume the caller isn't lying. */
4153 set_mem_align (target, align);
4155 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4158 else if (partial > 0)
4160 /* Scalar partly in registers. */
4162 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4163 int i;
4164 int not_stack;
4165 /* # bytes of start of argument
4166 that we must make space for but need not store. */
4167 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4168 int args_offset = INTVAL (args_so_far);
4169 int skip;
4171 /* Push padding now if padding above and stack grows down,
4172 or if padding below and stack grows up.
4173 But if space already allocated, this has already been done. */
4174 if (extra && args_addr == 0
4175 && where_pad != none && where_pad != stack_direction)
4176 anti_adjust_stack (GEN_INT (extra));
4178 /* If we make space by pushing it, we might as well push
4179 the real data. Otherwise, we can leave OFFSET nonzero
4180 and leave the space uninitialized. */
4181 if (args_addr == 0)
4182 offset = 0;
4184 /* Now NOT_STACK gets the number of words that we don't need to
4185 allocate on the stack. Convert OFFSET to words too. */
4186 not_stack = (partial - offset) / UNITS_PER_WORD;
4187 offset /= UNITS_PER_WORD;
4189 /* If the partial register-part of the arg counts in its stack size,
4190 skip the part of stack space corresponding to the registers.
4191 Otherwise, start copying to the beginning of the stack space,
4192 by setting SKIP to 0. */
4193 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4195 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4196 x = validize_mem (force_const_mem (mode, x));
4198 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4199 SUBREGs of such registers are not allowed. */
4200 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4201 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4202 x = copy_to_reg (x);
4204 /* Loop over all the words allocated on the stack for this arg. */
4205 /* We can do it by words, because any scalar bigger than a word
4206 has a size a multiple of a word. */
4207 #ifndef PUSH_ARGS_REVERSED
4208 for (i = not_stack; i < size; i++)
4209 #else
4210 for (i = size - 1; i >= not_stack; i--)
4211 #endif
4212 if (i >= not_stack + offset)
4213 emit_push_insn (operand_subword_force (x, i, mode),
4214 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4215 0, args_addr,
4216 GEN_INT (args_offset + ((i - not_stack + skip)
4217 * UNITS_PER_WORD)),
4218 reg_parm_stack_space, alignment_pad);
4220 else
4222 rtx addr;
4223 rtx dest;
4225 /* Push padding now if padding above and stack grows down,
4226 or if padding below and stack grows up.
4227 But if space already allocated, this has already been done. */
4228 if (extra && args_addr == 0
4229 && where_pad != none && where_pad != stack_direction)
4230 anti_adjust_stack (GEN_INT (extra));
4232 #ifdef PUSH_ROUNDING
4233 if (args_addr == 0 && PUSH_ARGS)
4234 emit_single_push_insn (mode, x, type);
4235 else
4236 #endif
4238 if (CONST_INT_P (args_so_far))
4239 addr
4240 = memory_address (mode,
4241 plus_constant (Pmode, args_addr,
4242 INTVAL (args_so_far)));
4243 else
4244 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4245 args_so_far));
4246 dest = gen_rtx_MEM (mode, addr);
4248 /* We do *not* set_mem_attributes here, because incoming arguments
4249 may overlap with sibling call outgoing arguments and we cannot
4250 allow reordering of reads from function arguments with stores
4251 to outgoing arguments of sibling calls. We do, however, want
4252 to record the alignment of the stack slot. */
4253 /* ALIGN may well be better aligned than TYPE, e.g. due to
4254 PARM_BOUNDARY. Assume the caller isn't lying. */
4255 set_mem_align (dest, align);
4257 emit_move_insn (dest, x);
4261 /* If part should go in registers, copy that part
4262 into the appropriate registers. Do this now, at the end,
4263 since mem-to-mem copies above may do function calls. */
4264 if (partial > 0 && reg != 0)
4266 /* Handle calls that pass values in multiple non-contiguous locations.
4267 The Irix 6 ABI has examples of this. */
4268 if (GET_CODE (reg) == PARALLEL)
4269 emit_group_load (reg, x, type, -1);
4270 else
4272 gcc_assert (partial % UNITS_PER_WORD == 0);
4273 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4277 if (extra && args_addr == 0 && where_pad == stack_direction)
4278 anti_adjust_stack (GEN_INT (extra));
4280 if (alignment_pad && args_addr == 0)
4281 anti_adjust_stack (alignment_pad);
4284 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4285 operations. */
4287 static rtx
4288 get_subtarget (rtx x)
4290 return (optimize
4291 || x == 0
4292 /* Only registers can be subtargets. */
4293 || !REG_P (x)
4294 /* Don't use hard regs to avoid extending their life. */
4295 || REGNO (x) < FIRST_PSEUDO_REGISTER
4296 ? 0 : x);
4299 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4300 FIELD is a bitfield. Returns true if the optimization was successful,
4301 and there's nothing else to do. */
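/* Illustrative cases handled below (a sketch, not an exhaustive list):
   for a bitfield occupying the topmost bits of its word, an update such
   as  s.f += 3  can be done by adding (3 << bitpos) directly to the
   containing word, since no masking is needed; for a 1-bit field,
   s.f += 1  reduces to xor-ing that single bit.  IOR and XOR of an
   integer constant are handled by masking and shifting the constant
   into place.  */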
4303 static bool
4304 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4305 unsigned HOST_WIDE_INT bitpos,
4306 unsigned HOST_WIDE_INT bitregion_start,
4307 unsigned HOST_WIDE_INT bitregion_end,
4308 enum machine_mode mode1, rtx str_rtx,
4309 tree to, tree src)
4311 enum machine_mode str_mode = GET_MODE (str_rtx);
4312 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4313 tree op0, op1;
4314 rtx value, result;
4315 optab binop;
4316 gimple srcstmt;
4317 enum tree_code code;
4319 if (mode1 != VOIDmode
4320 || bitsize >= BITS_PER_WORD
4321 || str_bitsize > BITS_PER_WORD
4322 || TREE_SIDE_EFFECTS (to)
4323 || TREE_THIS_VOLATILE (to))
4324 return false;
4326 STRIP_NOPS (src);
4327 if (TREE_CODE (src) != SSA_NAME)
4328 return false;
4329 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4330 return false;
4332 srcstmt = get_gimple_for_ssa_name (src);
4333 if (!srcstmt
4334 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4335 return false;
4337 code = gimple_assign_rhs_code (srcstmt);
4339 op0 = gimple_assign_rhs1 (srcstmt);
4341 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4342 to find its initialization. Hopefully the initialization will
4343 be from a bitfield load. */
4344 if (TREE_CODE (op0) == SSA_NAME)
4346 gimple op0stmt = get_gimple_for_ssa_name (op0);
4348 /* We want to eventually have OP0 be the same as TO, which
4349 should be a bitfield. */
4350 if (!op0stmt
4351 || !is_gimple_assign (op0stmt)
4352 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4353 return false;
4354 op0 = gimple_assign_rhs1 (op0stmt);
4357 op1 = gimple_assign_rhs2 (srcstmt);
4359 if (!operand_equal_p (to, op0, 0))
4360 return false;
4362 if (MEM_P (str_rtx))
4364 unsigned HOST_WIDE_INT offset1;
4366 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4367 str_mode = word_mode;
4368 str_mode = get_best_mode (bitsize, bitpos,
4369 bitregion_start, bitregion_end,
4370 MEM_ALIGN (str_rtx), str_mode, 0);
4371 if (str_mode == VOIDmode)
4372 return false;
4373 str_bitsize = GET_MODE_BITSIZE (str_mode);
4375 offset1 = bitpos;
4376 bitpos %= str_bitsize;
4377 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4378 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4380 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4381 return false;
4383 /* If the bit field covers the whole REG/MEM, store_field
4384 will likely generate better code. */
4385 if (bitsize >= str_bitsize)
4386 return false;
4388 /* We can't handle fields split across multiple entities. */
4389 if (bitpos + bitsize > str_bitsize)
4390 return false;
4392 if (BYTES_BIG_ENDIAN)
4393 bitpos = str_bitsize - bitpos - bitsize;
4395 switch (code)
4397 case PLUS_EXPR:
4398 case MINUS_EXPR:
4399 /* For now, just optimize the case of the topmost bitfield
4400 where we don't need to do any masking and also
4401 1-bit bitfields where xor can be used.
4402 We might win by one instruction for the other bitfields
4403 too if insv/extv instructions aren't used, so that
4404 can be added later. */
4405 if (bitpos + bitsize != str_bitsize
4406 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4407 break;
4409 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4410 value = convert_modes (str_mode,
4411 TYPE_MODE (TREE_TYPE (op1)), value,
4412 TYPE_UNSIGNED (TREE_TYPE (op1)));
4414 /* We may be accessing data outside the field, which means
4415 we can alias adjacent data. */
4416 if (MEM_P (str_rtx))
4418 str_rtx = shallow_copy_rtx (str_rtx);
4419 set_mem_alias_set (str_rtx, 0);
4420 set_mem_expr (str_rtx, 0);
4423 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4424 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4426 value = expand_and (str_mode, value, const1_rtx, NULL);
4427 binop = xor_optab;
4429 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4430 result = expand_binop (str_mode, binop, str_rtx,
4431 value, str_rtx, 1, OPTAB_WIDEN);
4432 if (result != str_rtx)
4433 emit_move_insn (str_rtx, result);
4434 return true;
4436 case BIT_IOR_EXPR:
4437 case BIT_XOR_EXPR:
4438 if (TREE_CODE (op1) != INTEGER_CST)
4439 break;
4440 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4441 value = convert_modes (str_mode,
4442 TYPE_MODE (TREE_TYPE (op1)), value,
4443 TYPE_UNSIGNED (TREE_TYPE (op1)));
4445 /* We may be accessing data outside the field, which means
4446 we can alias adjacent data. */
4447 if (MEM_P (str_rtx))
4449 str_rtx = shallow_copy_rtx (str_rtx);
4450 set_mem_alias_set (str_rtx, 0);
4451 set_mem_expr (str_rtx, 0);
4454 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4455 if (bitpos + bitsize != str_bitsize)
4457 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1);
4458 value = expand_and (str_mode, value, mask, NULL_RTX);
4460 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4461 result = expand_binop (str_mode, binop, str_rtx,
4462 value, str_rtx, 1, OPTAB_WIDEN);
4463 if (result != str_rtx)
4464 emit_move_insn (str_rtx, result);
4465 return true;
4467 default:
4468 break;
4471 return false;
4474 /* In the C++ memory model, consecutive bit fields in a structure are
4475 considered one memory location.
4477 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4478 returns the bit range of consecutive bits in which this COMPONENT_REF
4479 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4480 and *OFFSET may be adjusted in the process.
4482 If the access does not need to be restricted, 0 is returned in both
4483 *BITSTART and *BITEND. */
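/* Example (illustrative): in  struct S { int a : 3; int b : 7; char c; }
   the adjacent bit fields A and B typically share one
   DECL_BIT_FIELD_REPRESENTATIVE, so a store to B may read-modify-write
   the representative covering A and B, but the returned range must not
   include C, which is a separate memory location under the C++11 model.  */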
4485 static void
4486 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4487 unsigned HOST_WIDE_INT *bitend,
4488 tree exp,
4489 HOST_WIDE_INT *bitpos,
4490 tree *offset)
4492 HOST_WIDE_INT bitoffset;
4493 tree field, repr;
4495 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4497 field = TREE_OPERAND (exp, 1);
4498 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4499 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4500 need to limit the range we can access. */
4501 if (!repr)
4503 *bitstart = *bitend = 0;
4504 return;
4507 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4508 part of a larger bit field, then the representative does not serve any
4509 useful purpose. This can occur in Ada. */
4510 if (handled_component_p (TREE_OPERAND (exp, 0)))
4512 enum machine_mode rmode;
4513 HOST_WIDE_INT rbitsize, rbitpos;
4514 tree roffset;
4515 int unsignedp;
4516 int volatilep = 0;
4517 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4518 &roffset, &rmode, &unsignedp, &volatilep, false);
4519 if ((rbitpos % BITS_PER_UNIT) != 0)
4521 *bitstart = *bitend = 0;
4522 return;
4526 /* Compute the adjustment to bitpos from the offset of the field
4527 relative to the representative. DECL_FIELD_OFFSET of field and
4528 repr are the same by construction if they are not constants,
4529 see finish_bitfield_layout. */
4530 if (host_integerp (DECL_FIELD_OFFSET (field), 1)
4531 && host_integerp (DECL_FIELD_OFFSET (repr), 1))
4532 bitoffset = (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
4533 - tree_low_cst (DECL_FIELD_OFFSET (repr), 1)) * BITS_PER_UNIT;
4534 else
4535 bitoffset = 0;
4536 bitoffset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
4537 - tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
4539 /* If the adjustment is larger than bitpos, we would have a negative bit
4540 position for the lower bound and this may wreak havoc later. This can
4541 occur only if we have a non-null offset, so adjust offset and bitpos
4542 to make the lower bound non-negative. */
4543 if (bitoffset > *bitpos)
4545 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4547 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4548 gcc_assert (*offset != NULL_TREE);
4550 *bitpos += adjust;
4551 *offset
4552 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4553 *bitstart = 0;
4555 else
4556 *bitstart = *bitpos - bitoffset;
4558 *bitend = *bitstart + tree_low_cst (DECL_SIZE (repr), 1) - 1;
4561 /* Returns true if the MEM_REF REF refers to an object that does not
4562 reside in memory and has non-BLKmode. */
4564 static bool
4565 mem_ref_refers_to_non_mem_p (tree ref)
4567 tree base = TREE_OPERAND (ref, 0);
4568 if (TREE_CODE (base) != ADDR_EXPR)
4569 return false;
4570 base = TREE_OPERAND (base, 0);
4571 return (DECL_P (base)
4572 && !TREE_ADDRESSABLE (base)
4573 && DECL_MODE (base) != BLKmode
4574 && DECL_RTL_SET_P (base)
4575 && !MEM_P (DECL_RTL (base)));
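/* Example (illustrative): for a MEM_REF whose address is &X, where X is
   a local, non-addressable scalar whose DECL_RTL is a pseudo register,
   this returns true; if X is addressable, has BLKmode, or actually lives
   in memory, it returns false.  */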
4578 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4579 is true, try generating a nontemporal store. */
4581 void
4582 expand_assignment (tree to, tree from, bool nontemporal)
4584 rtx to_rtx = 0;
4585 rtx result;
4586 enum machine_mode mode;
4587 unsigned int align;
4588 enum insn_code icode;
4590 /* Don't crash if the lhs of the assignment was erroneous. */
4591 if (TREE_CODE (to) == ERROR_MARK)
4593 expand_normal (from);
4594 return;
4597 /* Optimize away no-op moves without side-effects. */
4598 if (operand_equal_p (to, from, 0))
4599 return;
4601 /* Handle misaligned stores. */
4602 mode = TYPE_MODE (TREE_TYPE (to));
4603 if ((TREE_CODE (to) == MEM_REF
4604 || TREE_CODE (to) == TARGET_MEM_REF)
4605 && mode != BLKmode
4606 && !mem_ref_refers_to_non_mem_p (to)
4607 && ((align = get_object_alignment (to))
4608 < GET_MODE_ALIGNMENT (mode))
4609 && (((icode = optab_handler (movmisalign_optab, mode))
4610 != CODE_FOR_nothing)
4611 || SLOW_UNALIGNED_ACCESS (mode, align)))
4613 rtx reg, mem;
4615 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4616 reg = force_not_mem (reg);
4617 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4619 if (icode != CODE_FOR_nothing)
4621 struct expand_operand ops[2];
4623 create_fixed_operand (&ops[0], mem);
4624 create_input_operand (&ops[1], reg, mode);
4625 /* The movmisalign<mode> pattern cannot fail, else the assignment
4626 would silently be omitted. */
4627 expand_insn (icode, 2, ops);
4629 else
4630 store_bit_field (mem, GET_MODE_BITSIZE (mode),
4631 0, 0, 0, mode, reg);
4632 return;
4635 /* Assignment of a structure component needs special treatment
4636 if the structure component's rtx is not simply a MEM.
4637 Assignment of an array element at a constant index, and assignment of
4638 an array element in an unaligned packed structure field, have the same
4639 problem. Same for (partially) storing into a non-memory object. */
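/* For example, stores such as  x.a[i].b = v  (a handled component), or a
   store through  MEM[&y]  where Y lives in a register rather than in
   memory, are decomposed below via get_inner_reference and completed
   with store_field or a bit-field store instead of a plain move.  */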
4640 if (handled_component_p (to)
4641 || (TREE_CODE (to) == MEM_REF
4642 && mem_ref_refers_to_non_mem_p (to))
4643 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4645 enum machine_mode mode1;
4646 HOST_WIDE_INT bitsize, bitpos;
4647 unsigned HOST_WIDE_INT bitregion_start = 0;
4648 unsigned HOST_WIDE_INT bitregion_end = 0;
4649 tree offset;
4650 int unsignedp;
4651 int volatilep = 0;
4652 tree tem;
4653 bool misalignp;
4654 rtx mem = NULL_RTX;
4656 push_temp_slots ();
4657 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4658 &unsignedp, &volatilep, true);
4660 if (TREE_CODE (to) == COMPONENT_REF
4661 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4662 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4664 /* If we are going to use store_bit_field and extract_bit_field,
4665 make sure to_rtx will be safe for multiple use. */
4666 mode = TYPE_MODE (TREE_TYPE (tem));
4667 if (TREE_CODE (tem) == MEM_REF
4668 && mode != BLKmode
4669 && ((align = get_object_alignment (tem))
4670 < GET_MODE_ALIGNMENT (mode))
4671 && ((icode = optab_handler (movmisalign_optab, mode))
4672 != CODE_FOR_nothing))
4674 struct expand_operand ops[2];
4676 misalignp = true;
4677 to_rtx = gen_reg_rtx (mode);
4678 mem = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4680 /* If the misaligned store doesn't overwrite all bits, perform
4681 rmw cycle on MEM. */
4682 if (bitsize != GET_MODE_BITSIZE (mode))
4684 create_input_operand (&ops[0], to_rtx, mode);
4685 create_fixed_operand (&ops[1], mem);
4686 /* The movmisalign<mode> pattern cannot fail, else the assignment
4687 would silently be omitted. */
4688 expand_insn (icode, 2, ops);
4690 mem = copy_rtx (mem);
4693 else
4695 misalignp = false;
4696 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4699 /* If the bitfield is volatile, we want to access it in the
4700 field's mode, not the computed mode.
4701 If a MEM has VOIDmode (external with incomplete type),
4702 use BLKmode for it instead. */
4703 if (MEM_P (to_rtx))
4705 if (volatilep && flag_strict_volatile_bitfields > 0)
4706 to_rtx = adjust_address (to_rtx, mode1, 0);
4707 else if (GET_MODE (to_rtx) == VOIDmode)
4708 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4711 if (offset != 0)
4713 enum machine_mode address_mode;
4714 rtx offset_rtx;
4716 if (!MEM_P (to_rtx))
4718 /* We can get constant negative offsets into arrays with broken
4719 user code. Translate this to a trap instead of ICEing. */
4720 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4721 expand_builtin_trap ();
4722 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4725 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4726 address_mode = get_address_mode (to_rtx);
4727 if (GET_MODE (offset_rtx) != address_mode)
4728 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4730 /* A constant address in TO_RTX can have VOIDmode; we must not try
4731 to call force_reg in that case. Avoid that case. */
4732 if (MEM_P (to_rtx)
4733 && GET_MODE (to_rtx) == BLKmode
4734 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4735 && bitsize > 0
4736 && (bitpos % bitsize) == 0
4737 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4738 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4740 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4741 bitpos = 0;
4744 to_rtx = offset_address (to_rtx, offset_rtx,
4745 highest_pow2_factor_for_target (to,
4746 offset));
4749 /* No action is needed if the target is not a memory and the field
4750 lies completely outside that target. This can occur if the source
4751 code contains an out-of-bounds access to a small array. */
4752 if (!MEM_P (to_rtx)
4753 && GET_MODE (to_rtx) != BLKmode
4754 && (unsigned HOST_WIDE_INT) bitpos
4755 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4757 expand_normal (from);
4758 result = NULL;
4760 /* Handle expand_expr of a complex value returning a CONCAT. */
4761 else if (GET_CODE (to_rtx) == CONCAT)
4763 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4764 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4765 && bitpos == 0
4766 && bitsize == mode_bitsize)
4767 result = store_expr (from, to_rtx, false, nontemporal);
4768 else if (bitsize == mode_bitsize / 2
4769 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4770 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4771 nontemporal);
4772 else if (bitpos + bitsize <= mode_bitsize / 2)
4773 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4774 bitregion_start, bitregion_end,
4775 mode1, from, TREE_TYPE (tem),
4776 get_alias_set (to), nontemporal);
4777 else if (bitpos >= mode_bitsize / 2)
4778 result = store_field (XEXP (to_rtx, 1), bitsize,
4779 bitpos - mode_bitsize / 2,
4780 bitregion_start, bitregion_end,
4781 mode1, from,
4782 TREE_TYPE (tem), get_alias_set (to),
4783 nontemporal);
4784 else if (bitpos == 0 && bitsize == mode_bitsize)
4786 rtx from_rtx;
4787 result = expand_normal (from);
4788 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4789 TYPE_MODE (TREE_TYPE (from)), 0);
4790 emit_move_insn (XEXP (to_rtx, 0),
4791 read_complex_part (from_rtx, false));
4792 emit_move_insn (XEXP (to_rtx, 1),
4793 read_complex_part (from_rtx, true));
4795 else
4797 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4798 GET_MODE_SIZE (GET_MODE (to_rtx)));
4799 write_complex_part (temp, XEXP (to_rtx, 0), false);
4800 write_complex_part (temp, XEXP (to_rtx, 1), true);
4801 result = store_field (temp, bitsize, bitpos,
4802 bitregion_start, bitregion_end,
4803 mode1, from,
4804 TREE_TYPE (tem), get_alias_set (to),
4805 nontemporal);
4806 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4807 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4810 else
4812 if (MEM_P (to_rtx))
4814 /* If the field is at offset zero, we could have been given the
4815 DECL_RTX of the parent struct. Don't munge it. */
4816 to_rtx = shallow_copy_rtx (to_rtx);
4818 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4820 /* Deal with volatile and readonly fields. The former is only
4821 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4822 if (volatilep)
4823 MEM_VOLATILE_P (to_rtx) = 1;
4824 if (component_uses_parent_alias_set (to))
4825 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4828 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4829 bitregion_start, bitregion_end,
4830 mode1,
4831 to_rtx, to, from))
4832 result = NULL;
4833 else
4834 result = store_field (to_rtx, bitsize, bitpos,
4835 bitregion_start, bitregion_end,
4836 mode1, from,
4837 TREE_TYPE (tem), get_alias_set (to),
4838 nontemporal);
4841 if (misalignp)
4843 struct expand_operand ops[2];
4845 create_fixed_operand (&ops[0], mem);
4846 create_input_operand (&ops[1], to_rtx, mode);
4847 /* The movmisalign<mode> pattern cannot fail, else the assignment
4848 would silently be omitted. */
4849 expand_insn (icode, 2, ops);
4852 if (result)
4853 preserve_temp_slots (result);
4854 pop_temp_slots ();
4855 return;
4858 /* If the rhs is a function call and its value is not an aggregate,
4859 call the function before we start to compute the lhs.
4860 This is needed for correct code for cases such as
4861 val = setjmp (buf) on machines where reference to val
4862 requires loading up part of an address in a separate insn.
4864 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4865 since it might be a promoted variable where the zero- or sign- extension
4866 needs to be done. Handling this in the normal way is safe because no
4867 computation is done before the call. The same is true for SSA names. */
4868 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4869 && COMPLETE_TYPE_P (TREE_TYPE (from))
4870 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4871 && ! (((TREE_CODE (to) == VAR_DECL
4872 || TREE_CODE (to) == PARM_DECL
4873 || TREE_CODE (to) == RESULT_DECL)
4874 && REG_P (DECL_RTL (to)))
4875 || TREE_CODE (to) == SSA_NAME))
4877 rtx value;
4879 push_temp_slots ();
4880 value = expand_normal (from);
4881 if (to_rtx == 0)
4882 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4884 /* Handle calls that return values in multiple non-contiguous locations.
4885 The Irix 6 ABI has examples of this. */
4886 if (GET_CODE (to_rtx) == PARALLEL)
4888 if (GET_CODE (value) == PARALLEL)
4889 emit_group_move (to_rtx, value);
4890 else
4891 emit_group_load (to_rtx, value, TREE_TYPE (from),
4892 int_size_in_bytes (TREE_TYPE (from)));
4894 else if (GET_CODE (value) == PARALLEL)
4895 emit_group_store (to_rtx, value, TREE_TYPE (from),
4896 int_size_in_bytes (TREE_TYPE (from)));
4897 else if (GET_MODE (to_rtx) == BLKmode)
4899 if (REG_P (value))
4900 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
4901 else
4902 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4904 else
4906 if (POINTER_TYPE_P (TREE_TYPE (to)))
4907 value = convert_memory_address_addr_space
4908 (GET_MODE (to_rtx), value,
4909 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4911 emit_move_insn (to_rtx, value);
4913 preserve_temp_slots (to_rtx);
4914 pop_temp_slots ();
4915 return;
4918 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
4919 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4921 /* Don't move directly into a return register. */
4922 if (TREE_CODE (to) == RESULT_DECL
4923 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4925 rtx temp;
4927 push_temp_slots ();
4928 if (REG_P (to_rtx) && TYPE_MODE (TREE_TYPE (from)) == BLKmode)
4929 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
4930 else
4931 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4933 /* Handle calls that return values in multiple non-contiguous locations.
4934 The Irix 6 ABI has examples of this. */
4935 if (GET_CODE (to_rtx) == PARALLEL)
4937 if (GET_CODE (temp) == PARALLEL)
4938 emit_group_move (to_rtx, temp);
4939 else
4940 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4941 int_size_in_bytes (TREE_TYPE (from)));
4943 else if (temp)
4944 emit_move_insn (to_rtx, temp);
4946 preserve_temp_slots (to_rtx);
4947 pop_temp_slots ();
4948 return;
4951 /* In case we are returning the contents of an object which overlaps
4952 the place the value is being stored, use a safe function when copying
4953 a value through a pointer into a structure value return block. */
4954 if (TREE_CODE (to) == RESULT_DECL
4955 && TREE_CODE (from) == INDIRECT_REF
4956 && ADDR_SPACE_GENERIC_P
4957 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4958 && refs_may_alias_p (to, from)
4959 && cfun->returns_struct
4960 && !cfun->returns_pcc_struct)
4962 rtx from_rtx, size;
4964 push_temp_slots ();
4965 size = expr_size (from);
4966 from_rtx = expand_normal (from);
4968 emit_library_call (memmove_libfunc, LCT_NORMAL,
4969 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4970 XEXP (from_rtx, 0), Pmode,
4971 convert_to_mode (TYPE_MODE (sizetype),
4972 size, TYPE_UNSIGNED (sizetype)),
4973 TYPE_MODE (sizetype));
4975 preserve_temp_slots (to_rtx);
4976 pop_temp_slots ();
4977 return;
4980 /* Compute FROM and store the value in the rtx we got. */
4982 push_temp_slots ();
4983 result = store_expr (from, to_rtx, 0, nontemporal);
4984 preserve_temp_slots (result);
4985 pop_temp_slots ();
4986 return;
4989 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4990 succeeded, false otherwise. */
4992 bool
4993 emit_storent_insn (rtx to, rtx from)
4995 struct expand_operand ops[2];
4996 enum machine_mode mode = GET_MODE (to);
4997 enum insn_code code = optab_handler (storent_optab, mode);
4999 if (code == CODE_FOR_nothing)
5000 return false;
5002 create_fixed_operand (&ops[0], to);
5003 create_input_operand (&ops[1], from, mode);
5004 return maybe_expand_insn (code, 2, ops);
5007 /* Generate code for computing expression EXP,
5008 and storing the value into TARGET.
5010 If the mode is BLKmode then we may return TARGET itself.
5011 It turns out that in BLKmode it doesn't cause a problem,
5012 because C has no operators that could combine two different
5013 assignments into the same BLKmode object with different values
5014 with no sequence point. Will other languages need this to
5015 be more thorough?
5017 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5018 stack, and block moves may need to be treated specially.
5020 If NONTEMPORAL is true, try using a nontemporal store instruction. */
5022 rtx
5023 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5025 rtx temp;
5026 rtx alt_rtl = NULL_RTX;
5027 location_t loc = curr_insn_location ();
5029 if (VOID_TYPE_P (TREE_TYPE (exp)))
5031 /* C++ can generate ?: expressions with a throw expression in one
5032 branch and an rvalue in the other. Here, we resolve attempts to
5033 store the throw expression's nonexistent result. */
5034 gcc_assert (!call_param_p);
5035 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5036 return NULL_RTX;
5038 if (TREE_CODE (exp) == COMPOUND_EXPR)
5040 /* Perform first part of compound expression, then assign from second
5041 part. */
5042 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5043 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5044 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5045 nontemporal);
5047 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5049 /* For a conditional expression, get a safe form of the target. Then
5050 test the condition, doing the appropriate assignment on either
5051 side. This avoids the creation of unnecessary temporaries.
5052 For non-BLKmode, it is more efficient not to do this. */
5054 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
5056 do_pending_stack_adjust ();
5057 NO_DEFER_POP;
5058 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5059 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5060 nontemporal);
5061 emit_jump_insn (gen_jump (lab2));
5062 emit_barrier ();
5063 emit_label (lab1);
5064 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5065 nontemporal);
5066 emit_label (lab2);
5067 OK_DEFER_POP;
5069 return NULL_RTX;
5071 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5072 /* If this is a scalar in a register that is stored in a wider mode
5073 than the declared mode, compute the result into its declared mode
5074 and then convert to the wider mode. Our value is the computed
5075 expression. */
5077 rtx inner_target = 0;
5079 /* We can do the conversion inside EXP, which will often result
5080 in some optimizations. Do the conversion in two steps: first
5081 change the signedness, if needed, then the extend. But don't
5082 do this if the type of EXP is a subtype of something else
5083 since then the conversion might involve more than just
5084 converting modes. */
5085 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5086 && TREE_TYPE (TREE_TYPE (exp)) == 0
5087 && GET_MODE_PRECISION (GET_MODE (target))
5088 == TYPE_PRECISION (TREE_TYPE (exp)))
5090 if (TYPE_UNSIGNED (TREE_TYPE (exp))
5091 != SUBREG_PROMOTED_UNSIGNED_P (target))
5093 /* Some types, e.g. Fortran's logical*4, won't have a signed
5094 version, so use the mode instead. */
5095 tree ntype
5096 = (signed_or_unsigned_type_for
5097 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
5098 if (ntype == NULL)
5099 ntype = lang_hooks.types.type_for_mode
5100 (TYPE_MODE (TREE_TYPE (exp)),
5101 SUBREG_PROMOTED_UNSIGNED_P (target));
5103 exp = fold_convert_loc (loc, ntype, exp);
5106 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5107 (GET_MODE (SUBREG_REG (target)),
5108 SUBREG_PROMOTED_UNSIGNED_P (target)),
5109 exp);
5111 inner_target = SUBREG_REG (target);
5114 temp = expand_expr (exp, inner_target, VOIDmode,
5115 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5117 /* If TEMP is a VOIDmode constant, use convert_modes to make
5118 sure that we properly convert it. */
5119 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5121 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5122 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
5123 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5124 GET_MODE (target), temp,
5125 SUBREG_PROMOTED_UNSIGNED_P (target));
5128 convert_move (SUBREG_REG (target), temp,
5129 SUBREG_PROMOTED_UNSIGNED_P (target));
5131 return NULL_RTX;
5133 else if ((TREE_CODE (exp) == STRING_CST
5134 || (TREE_CODE (exp) == MEM_REF
5135 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5136 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5137 == STRING_CST
5138 && integer_zerop (TREE_OPERAND (exp, 1))))
5139 && !nontemporal && !call_param_p
5140 && MEM_P (target))
5142 /* Optimize initialization of an array with a STRING_CST. */
5143 HOST_WIDE_INT exp_len, str_copy_len;
5144 rtx dest_mem;
5145 tree str = TREE_CODE (exp) == STRING_CST
5146 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5148 exp_len = int_expr_size (exp);
5149 if (exp_len <= 0)
5150 goto normal_expr;
5152 if (TREE_STRING_LENGTH (str) <= 0)
5153 goto normal_expr;
5155 str_copy_len = strlen (TREE_STRING_POINTER (str));
5156 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5157 goto normal_expr;
5159 str_copy_len = TREE_STRING_LENGTH (str);
5160 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5161 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5163 str_copy_len += STORE_MAX_PIECES - 1;
5164 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5166 str_copy_len = MIN (str_copy_len, exp_len);
5167 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5168 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5169 MEM_ALIGN (target), false))
5170 goto normal_expr;
5172 dest_mem = target;
5174 dest_mem = store_by_pieces (dest_mem,
5175 str_copy_len, builtin_strncpy_read_str,
5176 CONST_CAST (char *,
5177 TREE_STRING_POINTER (str)),
5178 MEM_ALIGN (target), false,
5179 exp_len > str_copy_len ? 1 : 0);
5180 if (exp_len > str_copy_len)
5181 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5182 GEN_INT (exp_len - str_copy_len),
5183 BLOCK_OP_NORMAL);
5184 return NULL_RTX;
5186 else
5188 rtx tmp_target;
5190 normal_expr:
5191 /* If we want to use a nontemporal store, force the value to
5192 register first. */
5193 tmp_target = nontemporal ? NULL_RTX : target;
5194 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5195 (call_param_p
5196 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5197 &alt_rtl);
5200 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5201 the same as that of TARGET, adjust the constant. This is needed, for
5202 example, in case it is a CONST_DOUBLE and we want only a word-sized
5203 value. */
5204 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5205 && TREE_CODE (exp) != ERROR_MARK
5206 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5207 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5208 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5210 /* If value was not generated in the target, store it there.
5211 Convert the value to TARGET's type first if necessary and emit the
5212 pending incrementations that have been queued when expanding EXP.
5213 Note that we cannot emit the whole queue blindly because this will
5214 effectively disable the POST_INC optimization later.
5216 If TEMP and TARGET compare equal according to rtx_equal_p, but
5217 one or both of them are volatile memory refs, we have to distinguish
5218 two cases:
5219 - expand_expr has used TARGET. In this case, we must not generate
5220 another copy. This can be detected by TARGET being equal according
5221 to == .
5222 - expand_expr has not used TARGET - that means that the source just
5223 happens to have the same RTX form. Since temp will have been created
5224 by expand_expr, it will compare unequal according to == .
5225 We must generate a copy in this case, to reach the correct number
5226 of volatile memory references. */
5228 if ((! rtx_equal_p (temp, target)
5229 || (temp != target && (side_effects_p (temp)
5230 || side_effects_p (target))))
5231 && TREE_CODE (exp) != ERROR_MARK
5232 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5233 but TARGET is not valid memory reference, TEMP will differ
5234 from TARGET although it is really the same location. */
5235 && !(alt_rtl
5236 && rtx_equal_p (alt_rtl, target)
5237 && !side_effects_p (alt_rtl)
5238 && !side_effects_p (target))
5239 /* If there's nothing to copy, don't bother. Don't call
5240 expr_size unless necessary, because some front-ends' (e.g. C++)
5241 expr_size hook must not be given objects that are not
5242 supposed to be bit-copied or bit-initialized. */
5243 && expr_size (exp) != const0_rtx)
5245 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5247 if (GET_MODE (target) == BLKmode)
5249 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5250 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5251 else
5252 store_bit_field (target,
5253 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5254 0, 0, 0, GET_MODE (temp), temp);
5256 else
5257 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5260 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5262 /* Handle copying a string constant into an array. The string
5263 constant may be shorter than the array. So copy just the string's
5264 actual length, and clear the rest. First get the size of the data
5265 type of the string, which is actually the size of the target. */
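/* Worked example: for  char buf[8]  initialized from the 4-byte string
   "abc" (TREE_STRING_LENGTH counts the terminating nul), the block move
   below copies min (8, 4) = 4 bytes and clear_storage then zeroes the
   remaining 4 bytes of the target.  */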
5266 rtx size = expr_size (exp);
5268 if (CONST_INT_P (size)
5269 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5270 emit_block_move (target, temp, size,
5271 (call_param_p
5272 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5273 else
5275 enum machine_mode pointer_mode
5276 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5277 enum machine_mode address_mode = get_address_mode (target);
5279 /* Compute the size of the data to copy from the string. */
5280 tree copy_size
5281 = size_binop_loc (loc, MIN_EXPR,
5282 make_tree (sizetype, size),
5283 size_int (TREE_STRING_LENGTH (exp)));
5284 rtx copy_size_rtx
5285 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5286 (call_param_p
5287 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5288 rtx label = 0;
5290 /* Copy that much. */
5291 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5292 TYPE_UNSIGNED (sizetype));
5293 emit_block_move (target, temp, copy_size_rtx,
5294 (call_param_p
5295 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5297 /* Figure out how much is left in TARGET that we have to clear.
5298 Do all calculations in pointer_mode. */
5299 if (CONST_INT_P (copy_size_rtx))
5301 size = plus_constant (address_mode, size,
5302 -INTVAL (copy_size_rtx));
5303 target = adjust_address (target, BLKmode,
5304 INTVAL (copy_size_rtx));
5306 else
5308 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5309 copy_size_rtx, NULL_RTX, 0,
5310 OPTAB_LIB_WIDEN);
5312 if (GET_MODE (copy_size_rtx) != address_mode)
5313 copy_size_rtx = convert_to_mode (address_mode,
5314 copy_size_rtx,
5315 TYPE_UNSIGNED (sizetype));
5317 target = offset_address (target, copy_size_rtx,
5318 highest_pow2_factor (copy_size));
5319 label = gen_label_rtx ();
5320 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5321 GET_MODE (size), 0, label);
5324 if (size != const0_rtx)
5325 clear_storage (target, size, BLOCK_OP_NORMAL);
5327 if (label)
5328 emit_label (label);
5331 /* Handle calls that return values in multiple non-contiguous locations.
5332 The Irix 6 ABI has examples of this. */
5333 else if (GET_CODE (target) == PARALLEL)
5335 if (GET_CODE (temp) == PARALLEL)
5336 emit_group_move (target, temp);
5337 else
5338 emit_group_load (target, temp, TREE_TYPE (exp),
5339 int_size_in_bytes (TREE_TYPE (exp)));
5341 else if (GET_CODE (temp) == PARALLEL)
5342 emit_group_store (target, temp, TREE_TYPE (exp),
5343 int_size_in_bytes (TREE_TYPE (exp)));
5344 else if (GET_MODE (temp) == BLKmode)
5345 emit_block_move (target, temp, expr_size (exp),
5346 (call_param_p
5347 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5348 /* If we emit a nontemporal store, there is nothing else to do. */
5349 else if (nontemporal && emit_storent_insn (target, temp))
5351 else
5353 temp = force_operand (temp, target);
5354 if (temp != target)
5355 emit_move_insn (target, temp);
5359 return NULL_RTX;
5362 /* Return true if field F of structure TYPE is a flexible array. */
5364 static bool
5365 flexible_array_member_p (const_tree f, const_tree type)
5367 const_tree tf;
5369 tf = TREE_TYPE (f);
5370 return (DECL_CHAIN (f) == NULL
5371 && TREE_CODE (tf) == ARRAY_TYPE
5372 && TYPE_DOMAIN (tf)
5373 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5374 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5375 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5376 && int_size_in_bytes (type) >= 0);
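/* Example: in  struct msg { int len; char data[]; }  the field DATA is a
   flexible array member: it is the last field, its domain has a zero
   minimum and no maximum, and the enclosing struct still has a known
   constant size.  */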
5379 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5380 must have in order for it to completely initialize a value of type TYPE.
5381 Return -1 if the number isn't known.
5383 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
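/* Illustrative values: for  int[4]  both forms of the query yield 4; for
   struct { int x; int y[3]; }  FOR_CTOR_P yields 2 (two top-level
   fields) while !FOR_CTOR_P yields 4 (an estimate of the scalars
   contained); for a union, !FOR_CTOR_P takes the maximum over the
   fields.  */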
5385 static HOST_WIDE_INT
5386 count_type_elements (const_tree type, bool for_ctor_p)
5388 switch (TREE_CODE (type))
5390 case ARRAY_TYPE:
5392 tree nelts;
5394 nelts = array_type_nelts (type);
5395 if (nelts && host_integerp (nelts, 1))
5397 unsigned HOST_WIDE_INT n;
5399 n = tree_low_cst (nelts, 1) + 1;
5400 if (n == 0 || for_ctor_p)
5401 return n;
5402 else
5403 return n * count_type_elements (TREE_TYPE (type), false);
5405 return for_ctor_p ? -1 : 1;
5408 case RECORD_TYPE:
5410 unsigned HOST_WIDE_INT n;
5411 tree f;
5413 n = 0;
5414 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5415 if (TREE_CODE (f) == FIELD_DECL)
5417 if (!for_ctor_p)
5418 n += count_type_elements (TREE_TYPE (f), false);
5419 else if (!flexible_array_member_p (f, type))
5420 /* Don't count flexible arrays, which are not supposed
5421 to be initialized. */
5422 n += 1;
5425 return n;
5428 case UNION_TYPE:
5429 case QUAL_UNION_TYPE:
5431 tree f;
5432 HOST_WIDE_INT n, m;
5434 gcc_assert (!for_ctor_p);
5435 /* Estimate the number of scalars in each field and pick the
5436 maximum. Other estimates would do instead; the idea is simply
5437 to make sure that the estimate is not sensitive to the ordering
5438 of the fields. */
5439 n = 1;
5440 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5441 if (TREE_CODE (f) == FIELD_DECL)
5443 m = count_type_elements (TREE_TYPE (f), false);
5444 /* If the field doesn't span the whole union, add an extra
5445 scalar for the rest. */
5446 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5447 TYPE_SIZE (type)) != 1)
5448 m++;
5449 if (n < m)
5450 n = m;
5452 return n;
5455 case COMPLEX_TYPE:
5456 return 2;
5458 case VECTOR_TYPE:
5459 return TYPE_VECTOR_SUBPARTS (type);
5461 case INTEGER_TYPE:
5462 case REAL_TYPE:
5463 case FIXED_POINT_TYPE:
5464 case ENUMERAL_TYPE:
5465 case BOOLEAN_TYPE:
5466 case POINTER_TYPE:
5467 case OFFSET_TYPE:
5468 case REFERENCE_TYPE:
5469 case NULLPTR_TYPE:
5470 return 1;
5472 case ERROR_MARK:
5473 return 0;
5475 case VOID_TYPE:
5476 case METHOD_TYPE:
5477 case FUNCTION_TYPE:
5478 case LANG_TYPE:
5479 default:
5480 gcc_unreachable ();
5484 /* Helper for categorize_ctor_elements. Identical interface. */
5486 static bool
5487 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5488 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5490 unsigned HOST_WIDE_INT idx;
5491 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5492 tree value, purpose, elt_type;
5494 /* Whether CTOR is a valid constant initializer, in accordance with what
5495 initializer_constant_valid_p does. If inferred from the constructor
5496 elements, true until proven otherwise. */
5497 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5498 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5500 nz_elts = 0;
5501 init_elts = 0;
5502 num_fields = 0;
5503 elt_type = NULL_TREE;
5505 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5507 HOST_WIDE_INT mult = 1;
5509 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5511 tree lo_index = TREE_OPERAND (purpose, 0);
5512 tree hi_index = TREE_OPERAND (purpose, 1);
5514 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
5515 mult = (tree_low_cst (hi_index, 1)
5516 - tree_low_cst (lo_index, 1) + 1);
5518 num_fields += mult;
5519 elt_type = TREE_TYPE (value);
5521 switch (TREE_CODE (value))
5523 case CONSTRUCTOR:
5525 HOST_WIDE_INT nz = 0, ic = 0;
5527 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5528 p_complete);
5530 nz_elts += mult * nz;
5531 init_elts += mult * ic;
5533 if (const_from_elts_p && const_p)
5534 const_p = const_elt_p;
5536 break;
5538 case INTEGER_CST:
5539 case REAL_CST:
5540 case FIXED_CST:
5541 if (!initializer_zerop (value))
5542 nz_elts += mult;
5543 init_elts += mult;
5544 break;
5546 case STRING_CST:
5547 nz_elts += mult * TREE_STRING_LENGTH (value);
5548 init_elts += mult * TREE_STRING_LENGTH (value);
5549 break;
5551 case COMPLEX_CST:
5552 if (!initializer_zerop (TREE_REALPART (value)))
5553 nz_elts += mult;
5554 if (!initializer_zerop (TREE_IMAGPART (value)))
5555 nz_elts += mult;
5556 init_elts += mult;
5557 break;
5559 case VECTOR_CST:
5561 unsigned i;
5562 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5564 tree v = VECTOR_CST_ELT (value, i);
5565 if (!initializer_zerop (v))
5566 nz_elts += mult;
5567 init_elts += mult;
5570 break;
5572 default:
5574 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5575 nz_elts += mult * tc;
5576 init_elts += mult * tc;
5578 if (const_from_elts_p && const_p)
5579 const_p = initializer_constant_valid_p (value, elt_type)
5580 != NULL_TREE;
5582 break;
5586 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5587 num_fields, elt_type))
5588 *p_complete = false;
5590 *p_nz_elts += nz_elts;
5591 *p_init_elts += init_elts;
5593 return const_p;
5596 /* Examine CTOR to discover:
5597 * how many scalar fields are set to nonzero values,
5598 and place it in *P_NZ_ELTS;
5599 * how many scalar fields in total are in CTOR,
5600 and place it in *P_INIT_ELTS.
5601 * whether the constructor is complete -- in the sense that every
5602 meaningful byte is explicitly given a value --
5603 and place it in *P_COMPLETE.
5605 Return whether or not CTOR is a valid static constant initializer, the same
5606 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5608 bool
5609 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5610 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5612 *p_nz_elts = 0;
5613 *p_init_elts = 0;
5614 *p_complete = true;
5616 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
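/* Illustrative sketch, not part of the original file: a typical caller
   pattern for categorize_ctor_elements, mirroring mostly_zeros_p and
   all_zeros_p below.  The helper name is an assumption added for
   exposition only.  */

static bool
example_ctor_is_nonzero_constant_p (const_tree ctor)
{
  HOST_WIDE_INT nz_elts, init_elts;
  bool complete_p;

  /* The return value says whether CTOR is a valid static constant
     initializer; the out-parameters carry the element counts and the
     completeness flag.  */
  bool const_p = categorize_ctor_elements (ctor, &nz_elts, &init_elts,
					   &complete_p);
  return const_p && complete_p && nz_elts > 0;
}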
5619 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5620 of which had type LAST_TYPE. Each element was itself a complete
5621 initializer, in the sense that every meaningful byte was explicitly
5622 given a value. Return true if the same is true for the constructor
5623 as a whole. */
5625 bool
5626 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5627 const_tree last_type)
5629 if (TREE_CODE (type) == UNION_TYPE
5630 || TREE_CODE (type) == QUAL_UNION_TYPE)
5632 if (num_elts == 0)
5633 return false;
5635 gcc_assert (num_elts == 1 && last_type);
5637 /* ??? We could look at each element of the union, and find the
5638 largest element. Which would avoid comparing the size of the
5639 initialized element against any tail padding in the union.
5640 Doesn't seem worth the effort... */
5641 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5644 return count_type_elements (type, true) == num_elts;
5647 /* Return 1 if EXP contains mostly (i.e. at least 3/4) zeros. */
5649 static int
5650 mostly_zeros_p (const_tree exp)
5652 if (TREE_CODE (exp) == CONSTRUCTOR)
5654 HOST_WIDE_INT nz_elts, init_elts;
5655 bool complete_p;
5657 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5658 return !complete_p || nz_elts < init_elts / 4;
5661 return initializer_zerop (exp);
5664 /* Return 1 if EXP contains all zeros. */
5666 static int
5667 all_zeros_p (const_tree exp)
5669 if (TREE_CODE (exp) == CONSTRUCTOR)
5671 HOST_WIDE_INT nz_elts, init_elts;
5672 bool complete_p;
5674 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5675 return nz_elts == 0;
5678 return initializer_zerop (exp);
5681 /* Helper function for store_constructor.
5682 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5683 TYPE is the type of the CONSTRUCTOR, not the element type.
5684 CLEARED is as for store_constructor.
5685 ALIAS_SET is the alias set to use for any stores.
5687 This provides a recursive shortcut back to store_constructor when it isn't
5688 necessary to go through store_field. This is so that we can pass through
5689 the cleared field to let store_constructor know that we may not have to
5690 clear a substructure if the outer structure has already been cleared. */
5692 static void
5693 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5694 HOST_WIDE_INT bitpos, enum machine_mode mode,
5695 tree exp, tree type, int cleared,
5696 alias_set_type alias_set)
5698 if (TREE_CODE (exp) == CONSTRUCTOR
5699 /* We can only call store_constructor recursively if the size and
5700 bit position are on a byte boundary. */
5701 && bitpos % BITS_PER_UNIT == 0
5702 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5703 /* If we have a nonzero bitpos for a register target, then we just
5704 let store_field do the bitfield handling. This is unlikely to
5705 generate unnecessary clear instructions anyway. */
5706 && (bitpos == 0 || MEM_P (target)))
5708 if (MEM_P (target))
5709 target
5710 = adjust_address (target,
5711 GET_MODE (target) == BLKmode
5712 || 0 != (bitpos
5713 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5714 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5717 /* Update the alias set, if required. */
5718 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5719 && MEM_ALIAS_SET (target) != 0)
5721 target = copy_rtx (target);
5722 set_mem_alias_set (target, alias_set);
5725 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5727 else
5728 store_field (target, bitsize, bitpos, 0, 0, mode, exp, type, alias_set,
5729 false);
5732 /* Store the value of constructor EXP into the rtx TARGET.
5733 TARGET is either a REG or a MEM; we know it cannot conflict, since
5734 safe_from_p has been called.
5735 CLEARED is true if TARGET is known to have been zero'd.
5736 SIZE is the number of bytes of TARGET we are allowed to modify: this
5737 may not be the same as the size of EXP if we are assigning to a field
5738 which has been packed to exclude padding bits. */
5740 static void
5741 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5743 tree type = TREE_TYPE (exp);
5744 #ifdef WORD_REGISTER_OPERATIONS
5745 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5746 #endif
5748 switch (TREE_CODE (type))
5750 case RECORD_TYPE:
5751 case UNION_TYPE:
5752 case QUAL_UNION_TYPE:
5754 unsigned HOST_WIDE_INT idx;
5755 tree field, value;
5757 /* If size is zero or the target is already cleared, do nothing. */
5758 if (size == 0 || cleared)
5759 cleared = 1;
5760 /* We either clear the aggregate or indicate the value is dead. */
5761 else if ((TREE_CODE (type) == UNION_TYPE
5762 || TREE_CODE (type) == QUAL_UNION_TYPE)
5763 && ! CONSTRUCTOR_ELTS (exp))
5764 /* If the constructor is empty, clear the union. */
5766 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5767 cleared = 1;
5770 /* If we are building a static constructor into a register,
5771 set the initial value as zero so we can fold the value into
5772 a constant. But if more than one register is involved,
5773 this probably loses. */
5774 else if (REG_P (target) && TREE_STATIC (exp)
5775 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5777 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5778 cleared = 1;
5781 /* If the constructor has fewer fields than the structure or
5782 if we are initializing the structure to mostly zeros, clear
5783 the whole structure first. Don't do this if TARGET is a
5784 register whose mode size isn't equal to SIZE since
5785 clear_storage can't handle this case. */
5786 else if (size > 0
5787 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5788 != fields_length (type))
5789 || mostly_zeros_p (exp))
5790 && (!REG_P (target)
5791 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5792 == size)))
5794 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5795 cleared = 1;
5798 if (REG_P (target) && !cleared)
5799 emit_clobber (target);
5801 /* Store each element of the constructor into the
5802 corresponding field of TARGET. */
5803 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5805 enum machine_mode mode;
5806 HOST_WIDE_INT bitsize;
5807 HOST_WIDE_INT bitpos = 0;
5808 tree offset;
5809 rtx to_rtx = target;
5811 /* Just ignore missing fields. We cleared the whole
5812 structure, above, if any fields are missing. */
5813 if (field == 0)
5814 continue;
5816 if (cleared && initializer_zerop (value))
5817 continue;
5819 if (host_integerp (DECL_SIZE (field), 1))
5820 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5821 else
5822 bitsize = -1;
5824 mode = DECL_MODE (field);
5825 if (DECL_BIT_FIELD (field))
5826 mode = VOIDmode;
5828 offset = DECL_FIELD_OFFSET (field);
5829 if (host_integerp (offset, 0)
5830 && host_integerp (bit_position (field), 0))
5832 bitpos = int_bit_position (field);
5833 offset = 0;
5835 else
5836 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5838 if (offset)
5840 enum machine_mode address_mode;
5841 rtx offset_rtx;
5843 offset
5844 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5845 make_tree (TREE_TYPE (exp),
5846 target));
5848 offset_rtx = expand_normal (offset);
5849 gcc_assert (MEM_P (to_rtx));
5851 address_mode = get_address_mode (to_rtx);
5852 if (GET_MODE (offset_rtx) != address_mode)
5853 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5855 to_rtx = offset_address (to_rtx, offset_rtx,
5856 highest_pow2_factor (offset));
5859 #ifdef WORD_REGISTER_OPERATIONS
5860 /* If this initializes a field that is smaller than a
5861 word, at the start of a word, try to widen it to a full
5862 word. This special case allows us to output C++ member
5863 function initializations in a form that the optimizers
5864 can understand. */
5865 if (REG_P (target)
5866 && bitsize < BITS_PER_WORD
5867 && bitpos % BITS_PER_WORD == 0
5868 && GET_MODE_CLASS (mode) == MODE_INT
5869 && TREE_CODE (value) == INTEGER_CST
5870 && exp_size >= 0
5871 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5873 tree type = TREE_TYPE (value);
5875 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5877 type = lang_hooks.types.type_for_mode
5878 (word_mode, TYPE_UNSIGNED (type));
5879 value = fold_convert (type, value);
5882 if (BYTES_BIG_ENDIAN)
5883 value
5884 = fold_build2 (LSHIFT_EXPR, type, value,
5885 build_int_cst (type,
5886 BITS_PER_WORD - bitsize));
5887 bitsize = BITS_PER_WORD;
5888 mode = word_mode;
5890 #endif
5892 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5893 && DECL_NONADDRESSABLE_P (field))
5895 to_rtx = copy_rtx (to_rtx);
5896 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5899 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5900 value, type, cleared,
5901 get_alias_set (TREE_TYPE (field)));
5903 break;
5905 case ARRAY_TYPE:
5907 tree value, index;
5908 unsigned HOST_WIDE_INT i;
5909 int need_to_clear;
5910 tree domain;
5911 tree elttype = TREE_TYPE (type);
5912 int const_bounds_p;
5913 HOST_WIDE_INT minelt = 0;
5914 HOST_WIDE_INT maxelt = 0;
5916 domain = TYPE_DOMAIN (type);
5917 const_bounds_p = (TYPE_MIN_VALUE (domain)
5918 && TYPE_MAX_VALUE (domain)
5919 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5920 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5922 /* If we have constant bounds for the range of the type, get them. */
5923 if (const_bounds_p)
5925 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5926 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5929 /* If the constructor has fewer elements than the array, clear
5930 the whole array first. Similarly if this is a static
5931 constructor of a non-BLKmode object. */
5932 if (cleared)
5933 need_to_clear = 0;
5934 else if (REG_P (target) && TREE_STATIC (exp))
5935 need_to_clear = 1;
5936 else
5938 unsigned HOST_WIDE_INT idx;
5939 tree index, value;
5940 HOST_WIDE_INT count = 0, zero_count = 0;
5941 need_to_clear = ! const_bounds_p;
5943 /* This loop is a more accurate version of the loop in
5944 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5945 is also needed to check for missing elements. */
5946 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5948 HOST_WIDE_INT this_node_count;
5950 if (need_to_clear)
5951 break;
5953 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5955 tree lo_index = TREE_OPERAND (index, 0);
5956 tree hi_index = TREE_OPERAND (index, 1);
5958 if (! host_integerp (lo_index, 1)
5959 || ! host_integerp (hi_index, 1))
5961 need_to_clear = 1;
5962 break;
5965 this_node_count = (tree_low_cst (hi_index, 1)
5966 - tree_low_cst (lo_index, 1) + 1);
5968 else
5969 this_node_count = 1;
5971 count += this_node_count;
5972 if (mostly_zeros_p (value))
5973 zero_count += this_node_count;
5976 /* Clear the entire array first if there are any missing
5977 elements, or if the incidence of zero elements is >=
5978 75%. */
5979 if (! need_to_clear
5980 && (count < maxelt - minelt + 1
5981 || 4 * zero_count >= 3 * count))
5982 need_to_clear = 1;
5985 if (need_to_clear && size > 0)
5987 if (REG_P (target))
5988 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5989 else
5990 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5991 cleared = 1;
5994 if (!cleared && REG_P (target))
5995 /* Inform later passes that the old value is dead. */
5996 emit_clobber (target);
5998 /* Store each element of the constructor into the
5999 corresponding element of TARGET, determined by counting the
6000 elements. */
6001 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6003 enum machine_mode mode;
6004 HOST_WIDE_INT bitsize;
6005 HOST_WIDE_INT bitpos;
6006 rtx xtarget = target;
6008 if (cleared && initializer_zerop (value))
6009 continue;
6011 mode = TYPE_MODE (elttype);
6012 if (mode == BLKmode)
6013 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
6014 ? tree_low_cst (TYPE_SIZE (elttype), 1)
6015 : -1);
6016 else
6017 bitsize = GET_MODE_BITSIZE (mode);
6019 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6021 tree lo_index = TREE_OPERAND (index, 0);
6022 tree hi_index = TREE_OPERAND (index, 1);
6023 rtx index_r, pos_rtx;
6024 HOST_WIDE_INT lo, hi, count;
6025 tree position;
6027 /* If the range is constant and "small", unroll the loop. */
6028 if (const_bounds_p
6029 && host_integerp (lo_index, 0)
6030 && host_integerp (hi_index, 0)
6031 && (lo = tree_low_cst (lo_index, 0),
6032 hi = tree_low_cst (hi_index, 0),
6033 count = hi - lo + 1,
6034 (!MEM_P (target)
6035 || count <= 2
6036 || (host_integerp (TYPE_SIZE (elttype), 1)
6037 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
6038 <= 40 * 8)))))
6040 lo -= minelt; hi -= minelt;
6041 for (; lo <= hi; lo++)
6043 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
6045 if (MEM_P (target)
6046 && !MEM_KEEP_ALIAS_SET_P (target)
6047 && TREE_CODE (type) == ARRAY_TYPE
6048 && TYPE_NONALIASED_COMPONENT (type))
6050 target = copy_rtx (target);
6051 MEM_KEEP_ALIAS_SET_P (target) = 1;
6054 store_constructor_field
6055 (target, bitsize, bitpos, mode, value, type, cleared,
6056 get_alias_set (elttype));
6059 else
6061 rtx loop_start = gen_label_rtx ();
6062 rtx loop_end = gen_label_rtx ();
6063 tree exit_cond;
6065 expand_normal (hi_index);
6067 index = build_decl (EXPR_LOCATION (exp),
6068 VAR_DECL, NULL_TREE, domain);
6069 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6070 SET_DECL_RTL (index, index_r);
6071 store_expr (lo_index, index_r, 0, false);
6073 /* Build the head of the loop. */
6074 do_pending_stack_adjust ();
6075 emit_label (loop_start);
6077 /* Assign value to element index. */
6078 position =
6079 fold_convert (ssizetype,
6080 fold_build2 (MINUS_EXPR,
6081 TREE_TYPE (index),
6082 index,
6083 TYPE_MIN_VALUE (domain)));
6085 position =
6086 size_binop (MULT_EXPR, position,
6087 fold_convert (ssizetype,
6088 TYPE_SIZE_UNIT (elttype)));
6090 pos_rtx = expand_normal (position);
6091 xtarget = offset_address (target, pos_rtx,
6092 highest_pow2_factor (position));
6093 xtarget = adjust_address (xtarget, mode, 0);
6094 if (TREE_CODE (value) == CONSTRUCTOR)
6095 store_constructor (value, xtarget, cleared,
6096 bitsize / BITS_PER_UNIT);
6097 else
6098 store_expr (value, xtarget, 0, false);
6100 /* Generate a conditional jump to exit the loop. */
6101 exit_cond = build2 (LT_EXPR, integer_type_node,
6102 index, hi_index);
6103 jumpif (exit_cond, loop_end, -1);
6105 /* Update the loop counter, and jump to the head of
6106 the loop. */
6107 expand_assignment (index,
6108 build2 (PLUS_EXPR, TREE_TYPE (index),
6109 index, integer_one_node),
6110 false);
6112 emit_jump (loop_start);
6114 /* Build the end of the loop. */
6115 emit_label (loop_end);
6118 else if ((index != 0 && ! host_integerp (index, 0))
6119 || ! host_integerp (TYPE_SIZE (elttype), 1))
6121 tree position;
6123 if (index == 0)
6124 index = ssize_int (1);
6126 if (minelt)
6127 index = fold_convert (ssizetype,
6128 fold_build2 (MINUS_EXPR,
6129 TREE_TYPE (index),
6130 index,
6131 TYPE_MIN_VALUE (domain)));
6133 position =
6134 size_binop (MULT_EXPR, index,
6135 fold_convert (ssizetype,
6136 TYPE_SIZE_UNIT (elttype)));
6137 xtarget = offset_address (target,
6138 expand_normal (position),
6139 highest_pow2_factor (position));
6140 xtarget = adjust_address (xtarget, mode, 0);
6141 store_expr (value, xtarget, 0, false);
6143 else
6145 if (index != 0)
6146 bitpos = ((tree_low_cst (index, 0) - minelt)
6147 * tree_low_cst (TYPE_SIZE (elttype), 1));
6148 else
6149 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
6151 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6152 && TREE_CODE (type) == ARRAY_TYPE
6153 && TYPE_NONALIASED_COMPONENT (type))
6155 target = copy_rtx (target);
6156 MEM_KEEP_ALIAS_SET_P (target) = 1;
6158 store_constructor_field (target, bitsize, bitpos, mode, value,
6159 type, cleared, get_alias_set (elttype));
6162 break;
6165 case VECTOR_TYPE:
6167 unsigned HOST_WIDE_INT idx;
6168 constructor_elt *ce;
6169 int i;
6170 int need_to_clear;
6171 int icode = CODE_FOR_nothing;
6172 tree elttype = TREE_TYPE (type);
6173 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
6174 enum machine_mode eltmode = TYPE_MODE (elttype);
6175 HOST_WIDE_INT bitsize;
6176 HOST_WIDE_INT bitpos;
6177 rtvec vector = NULL;
6178 unsigned n_elts;
6179 alias_set_type alias;
6181 gcc_assert (eltmode != BLKmode);
6183 n_elts = TYPE_VECTOR_SUBPARTS (type);
6184 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6186 enum machine_mode mode = GET_MODE (target);
6188 icode = (int) optab_handler (vec_init_optab, mode);
6189 if (icode != CODE_FOR_nothing)
6191 unsigned int i;
6193 vector = rtvec_alloc (n_elts);
6194 for (i = 0; i < n_elts; i++)
6195 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6199 /* If the constructor has fewer elements than the vector,
6200 clear the whole vector first. Similarly if this is a static
6201 constructor of a non-BLKmode object. */
6202 if (cleared)
6203 need_to_clear = 0;
6204 else if (REG_P (target) && TREE_STATIC (exp))
6205 need_to_clear = 1;
6206 else
6208 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6209 tree value;
6211 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6213 int n_elts_here = tree_low_cst
6214 (int_const_binop (TRUNC_DIV_EXPR,
6215 TYPE_SIZE (TREE_TYPE (value)),
6216 TYPE_SIZE (elttype)), 1);
6218 count += n_elts_here;
6219 if (mostly_zeros_p (value))
6220 zero_count += n_elts_here;
6223 /* Clear the entire vector first if there are any missing elements,
6224 or if the incidence of zero elements is >= 75%. */
6225 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6228 if (need_to_clear && size > 0 && !vector)
6230 if (REG_P (target))
6231 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6232 else
6233 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6234 cleared = 1;
6237 /* Inform later passes that the old value is dead. */
6238 if (!cleared && !vector && REG_P (target))
6239 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6241 if (MEM_P (target))
6242 alias = MEM_ALIAS_SET (target);
6243 else
6244 alias = get_alias_set (elttype);
6246 /* Store each element of the constructor into the corresponding
6247 element of TARGET, determined by counting the elements. */
6248 for (idx = 0, i = 0;
6249 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6250 idx++, i += bitsize / elt_size)
6252 HOST_WIDE_INT eltpos;
6253 tree value = ce->value;
6255 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
6256 if (cleared && initializer_zerop (value))
6257 continue;
6259 if (ce->index)
6260 eltpos = tree_low_cst (ce->index, 1);
6261 else
6262 eltpos = i;
6264 if (vector)
6266 /* Vector CONSTRUCTORs should only be built from smaller
6267 vectors in the case of BLKmode vectors. */
6268 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6269 RTVEC_ELT (vector, eltpos)
6270 = expand_normal (value);
6272 else
6274 enum machine_mode value_mode =
6275 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6276 ? TYPE_MODE (TREE_TYPE (value))
6277 : eltmode;
6278 bitpos = eltpos * elt_size;
6279 store_constructor_field (target, bitsize, bitpos,
6280 value_mode, value, type,
6281 cleared, alias);
6285 if (vector)
6286 emit_insn (GEN_FCN (icode)
6287 (target,
6288 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6289 break;
6292 default:
6293 gcc_unreachable ();
6297 /* Store the value of EXP (an expression tree)
6298 into a subfield of TARGET which has mode MODE and occupies
6299 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6300 If MODE is VOIDmode, it means that we are storing into a bit-field.
6302 BITREGION_START is bitpos of the first bitfield in this region.
6303 BITREGION_END is the bitpos of the ending bitfield in this region.
6304 These two fields are 0, if the C++ memory model does not apply,
6305 or we are not interested in keeping track of bitfield regions.
6307 Always return const0_rtx unless we have something particular to
6308 return.
6310 TYPE is the type of the underlying object,
6312 ALIAS_SET is the alias set for the destination. This value will
6313 (in general) be different from that for TARGET, since TARGET is a
6314 reference to the containing structure.
6316 If NONTEMPORAL is true, try generating a nontemporal store. */
6318 static rtx
6319 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6320 unsigned HOST_WIDE_INT bitregion_start,
6321 unsigned HOST_WIDE_INT bitregion_end,
6322 enum machine_mode mode, tree exp, tree type,
6323 alias_set_type alias_set, bool nontemporal)
6325 if (TREE_CODE (exp) == ERROR_MARK)
6326 return const0_rtx;
6328 /* If we have nothing to store, do nothing unless the expression has
6329 side-effects. */
6330 if (bitsize == 0)
6331 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6333 /* If we are storing into an unaligned field of an aligned union that is
6334 in a register, we may have the mode of TARGET being an integer mode but
6335 MODE == BLKmode. In that case, get an aligned object whose size and
6336 alignment are the same as TARGET and store TARGET into it (we can avoid
6337 the store if the field being stored is the entire width of TARGET). Then
6338 call ourselves recursively to store the field into a BLKmode version of
6339 that object. Finally, load from the object into TARGET. This is not
6340 very efficient in general, but should only be slightly more expensive
6341 than the otherwise-required unaligned accesses. Perhaps this can be
6342 cleaned up later. It's tempting to make OBJECT readonly, but it's set
6343 twice, once with emit_move_insn and once via store_field. */
6345 if (mode == BLKmode
6346 && (REG_P (target) || GET_CODE (target) == SUBREG)
6347 && TREE_CODE (exp) != CALL_EXPR)
6349 rtx object = assign_temp (type, 1, 1);
6350 rtx blk_object = adjust_address (object, BLKmode, 0);
6352 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
6353 emit_move_insn (object, target);
6355 store_field (blk_object, bitsize, bitpos,
6356 bitregion_start, bitregion_end,
6357 mode, exp, type, MEM_ALIAS_SET (blk_object), nontemporal);
6359 emit_move_insn (target, object);
6361 /* We want to return the BLKmode version of the data. */
6362 return blk_object;
6365 if (GET_CODE (target) == CONCAT)
6367 /* We're storing into a struct containing a single __complex. */
6369 gcc_assert (!bitpos);
6370 return store_expr (exp, target, 0, nontemporal);
6373 /* If the structure is in a register or if the component
6374 is a bit field, we cannot use addressing to access it.
6375 Use bit-field techniques or SUBREG to store in it. */
6377 if (mode == VOIDmode
6378 || (mode != BLKmode && ! direct_store[(int) mode]
6379 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6380 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6381 || REG_P (target)
6382 || GET_CODE (target) == SUBREG
6383 /* If the field isn't aligned enough to store as an ordinary memref,
6384 store it as a bit field. */
6385 || (mode != BLKmode
6386 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6387 || bitpos % GET_MODE_ALIGNMENT (mode))
6388 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6389 || (bitpos % BITS_PER_UNIT != 0)))
6390 || (bitsize >= 0 && mode != BLKmode
6391 && GET_MODE_BITSIZE (mode) > bitsize)
6392 /* If the RHS and field are a constant size and the size of the
6393 RHS isn't the same size as the bitfield, we must use bitfield
6394 operations. */
6395 || (bitsize >= 0
6396 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6397 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6398 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6399 decl we must use bitfield operations. */
6400 || (bitsize >= 0
6401 && TREE_CODE (exp) == MEM_REF
6402 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6403 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6404 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6405 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6407 rtx temp;
6408 gimple nop_def;
6410 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6411 implies a mask operation. If the precision is the same size as
6412 the field we're storing into, that mask is redundant. This is
6413 particularly common with bit field assignments generated by the
6414 C front end. */
6415 nop_def = get_def_for_expr (exp, NOP_EXPR);
6416 if (nop_def)
6418 tree type = TREE_TYPE (exp);
6419 if (INTEGRAL_TYPE_P (type)
6420 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6421 && bitsize == TYPE_PRECISION (type))
6423 tree op = gimple_assign_rhs1 (nop_def);
6424 type = TREE_TYPE (op);
6425 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6426 exp = op;
6430 temp = expand_normal (exp);
6432 /* If BITSIZE is narrower than the size of the type of EXP
6433 we will be narrowing TEMP. Normally, what's wanted are the
6434 low-order bits. However, if EXP's type is a record and this is a
6435 big-endian machine, we want the upper BITSIZE bits. */
6436 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6437 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6438 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6439 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6440 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6441 NULL_RTX, 1);
6443 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6444 if (mode != VOIDmode && mode != BLKmode
6445 && mode != TYPE_MODE (TREE_TYPE (exp)))
6446 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6448 /* If the modes of TEMP and TARGET are both BLKmode, both
6449 must be in memory and BITPOS must be aligned on a byte
6450 boundary. If so, we simply do a block copy. Likewise
6451 for a BLKmode-like TARGET. */
6452 if (GET_MODE (temp) == BLKmode
6453 && (GET_MODE (target) == BLKmode
6454 || (MEM_P (target)
6455 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6456 && (bitpos % BITS_PER_UNIT) == 0
6457 && (bitsize % BITS_PER_UNIT) == 0)))
6459 gcc_assert (MEM_P (target) && MEM_P (temp)
6460 && (bitpos % BITS_PER_UNIT) == 0);
6462 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6463 emit_block_move (target, temp,
6464 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6465 / BITS_PER_UNIT),
6466 BLOCK_OP_NORMAL);
6468 return const0_rtx;
6471 /* Handle calls that return values in multiple non-contiguous locations.
6472 The Irix 6 ABI has examples of this. */
6473 if (GET_CODE (temp) == PARALLEL)
6475 rtx temp_target;
6477 /* We are not supposed to have a true bitfield in this case. */
6478 gcc_assert (bitsize == GET_MODE_BITSIZE (mode));
6480 /* If we don't store at bit 0, we need an intermediate pseudo
6481 since emit_group_store only stores at bit 0. */
6482 if (bitpos != 0)
6483 temp_target = gen_reg_rtx (mode);
6484 else
6485 temp_target = target;
6487 emit_group_store (temp_target, temp, TREE_TYPE (exp),
6488 int_size_in_bytes (TREE_TYPE (exp)));
6490 if (temp_target == target)
6491 return const0_rtx;
6493 temp = temp_target;
6496 /* Handle calls that return BLKmode values in registers. */
6497 else if (mode == BLKmode
6498 && REG_P (temp)
6499 && TREE_CODE (exp) == CALL_EXPR)
6501 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6502 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6503 temp = temp_target;
6506 /* Store the value in the bitfield. */
6507 store_bit_field (target, bitsize, bitpos,
6508 bitregion_start, bitregion_end,
6509 mode, temp);
6511 return const0_rtx;
6513 else
6515 /* Now build a reference to just the desired component. */
6516 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6518 if (to_rtx == target)
6519 to_rtx = copy_rtx (to_rtx);
6521 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6522 set_mem_alias_set (to_rtx, alias_set);
6524 return store_expr (exp, to_rtx, 0, nontemporal);
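/* Illustrative sketch, not part of the original file: invoking
   store_field for a plain, non-bit-field store where no C++ memory
   model bit region needs to be tracked, so both region bounds are
   passed as 0 as described in the comment above (the same pattern
   store_constructor_field uses).  The helper name is an assumption
   added for exposition only.  */

static rtx
example_store_plain_field (rtx target, HOST_WIDE_INT bitsize,
			   HOST_WIDE_INT bitpos, enum machine_mode mode,
			   tree exp)
{
  return store_field (target, bitsize, bitpos, 0, 0, mode, exp,
		      TREE_TYPE (exp), get_alias_set (TREE_TYPE (exp)),
		      false);
}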
6528 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6529 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6530 codes and find the ultimate containing object, which we return.
6532 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6533 bit position, and *PUNSIGNEDP to the signedness of the field.
6534 If the position of the field is variable, we store a tree
6535 giving the variable offset (in units) in *POFFSET.
6536 This offset is in addition to the bit position.
6537 If the position is not variable, we store 0 in *POFFSET.
6539 If any of the extraction expressions is volatile,
6540 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6542 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6543 Otherwise, it is a mode that can be used to access the field.
6545 If the field describes a variable-sized object, *PMODE is set to
6546 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6547 this case, but the address of the object can be found.
6549 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6550 look through nodes that serve as markers of a greater alignment than
6551 the one that can be deduced from the expression. These nodes make it
6552 possible for front-ends to prevent temporaries from being created by
6553 the middle-end on alignment considerations. For that purpose, the
6554 normal operating mode at high-level is to always pass FALSE so that
6555 the ultimate containing object is really returned; moreover, the
6556 associated predicate handled_component_p will always return TRUE
6557 on these nodes, thus indicating that they are essentially handled
6558 by get_inner_reference. TRUE should only be passed when the caller
6559 is scanning the expression in order to build another representation
6560 and specifically knows how to handle these nodes; as such, this is
6561 the normal operating mode in the RTL expanders. */
6563 tree
6564 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6565 HOST_WIDE_INT *pbitpos, tree *poffset,
6566 enum machine_mode *pmode, int *punsignedp,
6567 int *pvolatilep, bool keep_aligning)
6569 tree size_tree = 0;
6570 enum machine_mode mode = VOIDmode;
6571 bool blkmode_bitfield = false;
6572 tree offset = size_zero_node;
6573 double_int bit_offset = double_int_zero;
6575 /* First get the mode, signedness, and size. We do this from just the
6576 outermost expression. */
6577 *pbitsize = -1;
6578 if (TREE_CODE (exp) == COMPONENT_REF)
6580 tree field = TREE_OPERAND (exp, 1);
6581 size_tree = DECL_SIZE (field);
6582 if (!DECL_BIT_FIELD (field))
6583 mode = DECL_MODE (field);
6584 else if (DECL_MODE (field) == BLKmode)
6585 blkmode_bitfield = true;
6586 else if (TREE_THIS_VOLATILE (exp)
6587 && flag_strict_volatile_bitfields > 0)
6588 /* Volatile bitfields should be accessed in the mode of the
6589 field's type, not the mode computed based on the bit
6590 size. */
6591 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6593 *punsignedp = DECL_UNSIGNED (field);
6595 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6597 size_tree = TREE_OPERAND (exp, 1);
6598 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6599 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6601 /* For vector types, with the correct size of access, use the mode of
6602 inner type. */
6603 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6604 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6605 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6606 mode = TYPE_MODE (TREE_TYPE (exp));
6608 else
6610 mode = TYPE_MODE (TREE_TYPE (exp));
6611 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6613 if (mode == BLKmode)
6614 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6615 else
6616 *pbitsize = GET_MODE_BITSIZE (mode);
6619 if (size_tree != 0)
6621 if (! host_integerp (size_tree, 1))
6622 mode = BLKmode, *pbitsize = -1;
6623 else
6624 *pbitsize = tree_low_cst (size_tree, 1);
6627 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6628 and find the ultimate containing object. */
6629 while (1)
6631 switch (TREE_CODE (exp))
6633 case BIT_FIELD_REF:
6634 bit_offset += tree_to_double_int (TREE_OPERAND (exp, 2));
6635 break;
6637 case COMPONENT_REF:
6639 tree field = TREE_OPERAND (exp, 1);
6640 tree this_offset = component_ref_field_offset (exp);
6642 /* If this field hasn't been filled in yet, don't go past it.
6643 This should only happen when folding expressions made during
6644 type construction. */
6645 if (this_offset == 0)
6646 break;
6648 offset = size_binop (PLUS_EXPR, offset, this_offset);
6649 bit_offset += tree_to_double_int (DECL_FIELD_BIT_OFFSET (field));
6651 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6653 break;
6655 case ARRAY_REF:
6656 case ARRAY_RANGE_REF:
6658 tree index = TREE_OPERAND (exp, 1);
6659 tree low_bound = array_ref_low_bound (exp);
6660 tree unit_size = array_ref_element_size (exp);
6662 /* We assume all arrays have sizes that are a multiple of a byte.
6663 First subtract the lower bound, if any, in the type of the
6664 index, then convert to sizetype and multiply by the size of
6665 the array element. */
6666 if (! integer_zerop (low_bound))
6667 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6668 index, low_bound);
6670 offset = size_binop (PLUS_EXPR, offset,
6671 size_binop (MULT_EXPR,
6672 fold_convert (sizetype, index),
6673 unit_size));
6675 break;
6677 case REALPART_EXPR:
6678 break;
6680 case IMAGPART_EXPR:
6681 bit_offset += double_int::from_uhwi (*pbitsize);
6682 break;
6684 case VIEW_CONVERT_EXPR:
6685 if (keep_aligning && STRICT_ALIGNMENT
6686 && (TYPE_ALIGN (TREE_TYPE (exp))
6687 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6688 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6689 < BIGGEST_ALIGNMENT)
6690 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6691 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6692 goto done;
6693 break;
6695 case MEM_REF:
6696 /* Hand back the decl for MEM[&decl, off]. */
6697 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6699 tree off = TREE_OPERAND (exp, 1);
6700 if (!integer_zerop (off))
6702 double_int boff, coff = mem_ref_offset (exp);
6703 boff = coff.alshift (BITS_PER_UNIT == 8
6704 ? 3 : exact_log2 (BITS_PER_UNIT),
6705 HOST_BITS_PER_DOUBLE_INT);
6706 bit_offset += boff;
6708 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6710 goto done;
6712 default:
6713 goto done;
6716 /* If any reference in the chain is volatile, the effect is volatile. */
6717 if (TREE_THIS_VOLATILE (exp))
6718 *pvolatilep = 1;
6720 exp = TREE_OPERAND (exp, 0);
6722 done:
6724 /* If OFFSET is constant, see if we can return the whole thing as a
6725 constant bit position. Make sure to handle overflow during
6726 this conversion. */
6727 if (TREE_CODE (offset) == INTEGER_CST)
6729 double_int tem = tree_to_double_int (offset);
6730 tem = tem.sext (TYPE_PRECISION (sizetype));
6731 tem = tem.alshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT),
6732 HOST_BITS_PER_DOUBLE_INT);
6733 tem += bit_offset;
6734 if (tem.fits_shwi ())
6736 *pbitpos = tem.to_shwi ();
6737 *poffset = offset = NULL_TREE;
6741 /* Otherwise, split it up. */
6742 if (offset)
6744 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6745 if (bit_offset.is_negative ())
6747 double_int mask
6748 = double_int::mask (BITS_PER_UNIT == 8
6749 ? 3 : exact_log2 (BITS_PER_UNIT));
6750 double_int tem = bit_offset.and_not (mask);
6751 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6752 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6753 bit_offset -= tem;
6754 tem = tem.arshift (BITS_PER_UNIT == 8
6755 ? 3 : exact_log2 (BITS_PER_UNIT),
6756 HOST_BITS_PER_DOUBLE_INT);
6757 offset = size_binop (PLUS_EXPR, offset,
6758 double_int_to_tree (sizetype, tem));
6761 *pbitpos = bit_offset.to_shwi ();
6762 *poffset = offset;
6765 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6766 if (mode == VOIDmode
6767 && blkmode_bitfield
6768 && (*pbitpos % BITS_PER_UNIT) == 0
6769 && (*pbitsize % BITS_PER_UNIT) == 0)
6770 *pmode = BLKmode;
6771 else
6772 *pmode = mode;
6774 return exp;
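/* Illustrative sketch, not part of the original file: decomposing a
   reference with get_inner_reference and keeping only the base object
   and the constant bit position.  The helper name is an assumption
   added for exposition only.  */

static tree
example_reference_base (tree exp, HOST_WIDE_INT *pbitpos)
{
  HOST_WIDE_INT bitsize;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep = 0;

  /* VOLATILEP must be initialized by the caller; it is only ever set
     to 1.  FALSE for KEEP_ALIGNING returns the ultimate containing
     object, as the comment above recommends outside the RTL
     expanders.  */
  return get_inner_reference (exp, &bitsize, pbitpos, &offset,
			      &mode, &unsignedp, &volatilep, false);
}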
6777 /* Return a tree of sizetype representing the size, in bytes, of the element
6778 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6780 tree
6781 array_ref_element_size (tree exp)
6783 tree aligned_size = TREE_OPERAND (exp, 3);
6784 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6785 location_t loc = EXPR_LOCATION (exp);
6787 /* If a size was specified in the ARRAY_REF, it's the size measured
6788 in alignment units of the element type. So multiply by that value. */
6789 if (aligned_size)
6791 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6792 sizetype from another type of the same width and signedness. */
6793 if (TREE_TYPE (aligned_size) != sizetype)
6794 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6795 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6796 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6799 /* Otherwise, take the size from that of the element type. Substitute
6800 any PLACEHOLDER_EXPR that we have. */
6801 else
6802 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6805 /* Return a tree representing the lower bound of the array mentioned in
6806 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6808 tree
6809 array_ref_low_bound (tree exp)
6811 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6813 /* If a lower bound is specified in EXP, use it. */
6814 if (TREE_OPERAND (exp, 2))
6815 return TREE_OPERAND (exp, 2);
6817 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6818 substituting for a PLACEHOLDER_EXPR as needed. */
6819 if (domain_type && TYPE_MIN_VALUE (domain_type))
6820 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6822 /* Otherwise, return a zero of the appropriate type. */
6823 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
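/* Illustrative sketch, not part of the original file: combining
   array_ref_low_bound and array_ref_element_size to compute the byte
   offset of an ARRAY_REF element, the same computation performed for
   ARRAY_REF in get_inner_reference above.  The helper name is an
   assumption added for exposition only.  */

static tree
example_array_ref_byte_offset (tree exp)
{
  tree index = TREE_OPERAND (exp, 1);
  tree low_bound = array_ref_low_bound (exp);
  tree unit_size = array_ref_element_size (exp);

  /* Subtract the lower bound in the type of the index, then convert
     to sizetype and scale by the element size.  */
  if (! integer_zerop (low_bound))
    index = fold_build2 (MINUS_EXPR, TREE_TYPE (index), index, low_bound);
  return size_binop (MULT_EXPR, fold_convert (sizetype, index), unit_size);
}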
6826 /* Returns true if REF is an array reference to an array at the end of
6827 a structure. If this is the case, the array may be allocated larger
6828 than its upper bound implies. */
6830 bool
6831 array_at_struct_end_p (tree ref)
6833 if (TREE_CODE (ref) != ARRAY_REF
6834 && TREE_CODE (ref) != ARRAY_RANGE_REF)
6835 return false;
6837 while (handled_component_p (ref))
6839 /* If the reference chain contains a component reference to a
6840 non-union type and there follows another field the reference
6841 is not at the end of a structure. */
6842 if (TREE_CODE (ref) == COMPONENT_REF
6843 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6845 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6846 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6847 nextf = DECL_CHAIN (nextf);
6848 if (nextf)
6849 return false;
6852 ref = TREE_OPERAND (ref, 0);
6855 /* If the reference is based on a declared entity, the size of the array
6856 is constrained by its given domain. */
6857 if (DECL_P (ref))
6858 return false;
6860 return true;
6863 /* Return a tree representing the upper bound of the array mentioned in
6864 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6866 tree
6867 array_ref_up_bound (tree exp)
6869 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6871 /* If there is a domain type and it has an upper bound, use it, substituting
6872 for a PLACEHOLDER_EXPR as needed. */
6873 if (domain_type && TYPE_MAX_VALUE (domain_type))
6874 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6876 /* Otherwise fail. */
6877 return NULL_TREE;
6880 /* Return a tree representing the offset, in bytes, of the field referenced
6881 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6883 tree
6884 component_ref_field_offset (tree exp)
6886 tree aligned_offset = TREE_OPERAND (exp, 2);
6887 tree field = TREE_OPERAND (exp, 1);
6888 location_t loc = EXPR_LOCATION (exp);
6890 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6891 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6892 value. */
6893 if (aligned_offset)
6895 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6896 sizetype from another type of the same width and signedness. */
6897 if (TREE_TYPE (aligned_offset) != sizetype)
6898 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6899 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6900 size_int (DECL_OFFSET_ALIGN (field)
6901 / BITS_PER_UNIT));
6904 /* Otherwise, take the offset from that of the field. Substitute
6905 any PLACEHOLDER_EXPR that we have. */
6906 else
6907 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6910 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6912 static unsigned HOST_WIDE_INT
6913 target_align (const_tree target)
6915 /* We might have a chain of nested references with intermediate misaligning
6916 bitfield components, so we need to recurse to find out. */
6918 unsigned HOST_WIDE_INT this_align, outer_align;
6920 switch (TREE_CODE (target))
6922 case BIT_FIELD_REF:
6923 return 1;
6925 case COMPONENT_REF:
6926 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6927 outer_align = target_align (TREE_OPERAND (target, 0));
6928 return MIN (this_align, outer_align);
6930 case ARRAY_REF:
6931 case ARRAY_RANGE_REF:
6932 this_align = TYPE_ALIGN (TREE_TYPE (target));
6933 outer_align = target_align (TREE_OPERAND (target, 0));
6934 return MIN (this_align, outer_align);
6936 CASE_CONVERT:
6937 case NON_LVALUE_EXPR:
6938 case VIEW_CONVERT_EXPR:
6939 this_align = TYPE_ALIGN (TREE_TYPE (target));
6940 outer_align = target_align (TREE_OPERAND (target, 0));
6941 return MAX (this_align, outer_align);
6943 default:
6944 return TYPE_ALIGN (TREE_TYPE (target));
6949 /* Given an rtx VALUE that may contain additions and multiplications, return
6950 an equivalent value that just refers to a register, memory, or constant.
6951 This is done by generating instructions to perform the arithmetic and
6952 returning a pseudo-register containing the value.
6954 The returned value may be a REG, SUBREG, MEM or constant. */
6957 force_operand (rtx value, rtx target)
6959 rtx op1, op2;
6960 /* Use subtarget as the target for operand 0 of a binary operation. */
6961 rtx subtarget = get_subtarget (target);
6962 enum rtx_code code = GET_CODE (value);
6964 /* Check for a subreg applied to an expression produced by the loop optimizer. */
6965 if (code == SUBREG
6966 && !REG_P (SUBREG_REG (value))
6967 && !MEM_P (SUBREG_REG (value)))
6969 value
6970 = simplify_gen_subreg (GET_MODE (value),
6971 force_reg (GET_MODE (SUBREG_REG (value)),
6972 force_operand (SUBREG_REG (value),
6973 NULL_RTX)),
6974 GET_MODE (SUBREG_REG (value)),
6975 SUBREG_BYTE (value));
6976 code = GET_CODE (value);
6979 /* Check for a PIC address load. */
6980 if ((code == PLUS || code == MINUS)
6981 && XEXP (value, 0) == pic_offset_table_rtx
6982 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6983 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6984 || GET_CODE (XEXP (value, 1)) == CONST))
6986 if (!subtarget)
6987 subtarget = gen_reg_rtx (GET_MODE (value));
6988 emit_move_insn (subtarget, value);
6989 return subtarget;
6992 if (ARITHMETIC_P (value))
6994 op2 = XEXP (value, 1);
6995 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6996 subtarget = 0;
6997 if (code == MINUS && CONST_INT_P (op2))
6999 code = PLUS;
7000 op2 = negate_rtx (GET_MODE (value), op2);
7003 /* Check for an addition with OP2 a constant integer and our first
7004 operand a PLUS of a virtual register and something else. In that
7005 case, we want to emit the sum of the virtual register and the
7006 constant first and then add the other value. This allows virtual
7007 register instantiation to simply modify the constant rather than
7008 creating another one around this addition. */
7009 if (code == PLUS && CONST_INT_P (op2)
7010 && GET_CODE (XEXP (value, 0)) == PLUS
7011 && REG_P (XEXP (XEXP (value, 0), 0))
7012 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7013 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7015 rtx temp = expand_simple_binop (GET_MODE (value), code,
7016 XEXP (XEXP (value, 0), 0), op2,
7017 subtarget, 0, OPTAB_LIB_WIDEN);
7018 return expand_simple_binop (GET_MODE (value), code, temp,
7019 force_operand (XEXP (XEXP (value,
7020 0), 1), 0),
7021 target, 0, OPTAB_LIB_WIDEN);
7024 op1 = force_operand (XEXP (value, 0), subtarget);
7025 op2 = force_operand (op2, NULL_RTX);
7026 switch (code)
7028 case MULT:
7029 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7030 case DIV:
7031 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7032 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7033 target, 1, OPTAB_LIB_WIDEN);
7034 else
7035 return expand_divmod (0,
7036 FLOAT_MODE_P (GET_MODE (value))
7037 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7038 GET_MODE (value), op1, op2, target, 0);
7039 case MOD:
7040 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7041 target, 0);
7042 case UDIV:
7043 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7044 target, 1);
7045 case UMOD:
7046 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7047 target, 1);
7048 case ASHIFTRT:
7049 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7050 target, 0, OPTAB_LIB_WIDEN);
7051 default:
7052 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7053 target, 1, OPTAB_LIB_WIDEN);
7056 if (UNARY_P (value))
7058 if (!target)
7059 target = gen_reg_rtx (GET_MODE (value));
7060 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7061 switch (code)
7063 case ZERO_EXTEND:
7064 case SIGN_EXTEND:
7065 case TRUNCATE:
7066 case FLOAT_EXTEND:
7067 case FLOAT_TRUNCATE:
7068 convert_move (target, op1, code == ZERO_EXTEND);
7069 return target;
7071 case FIX:
7072 case UNSIGNED_FIX:
7073 expand_fix (target, op1, code == UNSIGNED_FIX);
7074 return target;
7076 case FLOAT:
7077 case UNSIGNED_FLOAT:
7078 expand_float (target, op1, code == UNSIGNED_FLOAT);
7079 return target;
7081 default:
7082 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7086 #ifdef INSN_SCHEDULING
7087 /* On machines that have insn scheduling, we want all memory references to be
7088 explicit, so we need to deal with such paradoxical SUBREGs. */
7089 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7090 value
7091 = simplify_gen_subreg (GET_MODE (value),
7092 force_reg (GET_MODE (SUBREG_REG (value)),
7093 force_operand (SUBREG_REG (value),
7094 NULL_RTX)),
7095 GET_MODE (SUBREG_REG (value)),
7096 SUBREG_BYTE (value));
7097 #endif
7099 return value;
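/* Illustrative sketch, not part of the original file: using
   force_operand to reduce address arithmetic to a register, memory or
   constant operand.  It assumes the usual rtl constructors
   gen_rtx_PLUS and Pmode; the helper name is an assumption added for
   exposition only.  */

static rtx
example_force_sum (rtx base, rtx offset)
{
  /* The PLUS is handled by the ARITHMETIC_P path above; the result is
     suitable as an operand of further instructions.  */
  return force_operand (gen_rtx_PLUS (Pmode, base, offset), NULL_RTX);
}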
7102 /* Subroutine of expand_expr: return nonzero iff there is no way that
7103 EXP can reference X, which is being modified. TOP_P is nonzero if this
7104 call is going to be used to determine whether we need a temporary
7105 for EXP, as opposed to a recursive call to this function.
7107 It is always safe for this routine to return zero since it merely
7108 searches for optimization opportunities. */
7111 safe_from_p (const_rtx x, tree exp, int top_p)
7113 rtx exp_rtl = 0;
7114 int i, nops;
7116 if (x == 0
7117 /* If EXP has varying size, we MUST use a target since we currently
7118 have no way of allocating temporaries of variable size
7119 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7120 So we assume here that something at a higher level has prevented a
7121 clash. This is somewhat bogus, but the best we can do. Only
7122 do this when X is BLKmode and when we are at the top level. */
7123 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7124 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7125 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7126 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7127 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7128 != INTEGER_CST)
7129 && GET_MODE (x) == BLKmode)
7130 /* If X is in the outgoing argument area, it is always safe. */
7131 || (MEM_P (x)
7132 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7133 || (GET_CODE (XEXP (x, 0)) == PLUS
7134 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7135 return 1;
7137 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7138 find the underlying pseudo. */
7139 if (GET_CODE (x) == SUBREG)
7141 x = SUBREG_REG (x);
7142 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7143 return 0;
7146 /* Now look at our tree code and possibly recurse. */
7147 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7149 case tcc_declaration:
7150 exp_rtl = DECL_RTL_IF_SET (exp);
7151 break;
7153 case tcc_constant:
7154 return 1;
7156 case tcc_exceptional:
7157 if (TREE_CODE (exp) == TREE_LIST)
7159 while (1)
7161 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7162 return 0;
7163 exp = TREE_CHAIN (exp);
7164 if (!exp)
7165 return 1;
7166 if (TREE_CODE (exp) != TREE_LIST)
7167 return safe_from_p (x, exp, 0);
7170 else if (TREE_CODE (exp) == CONSTRUCTOR)
7172 constructor_elt *ce;
7173 unsigned HOST_WIDE_INT idx;
7175 FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce)
7176 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7177 || !safe_from_p (x, ce->value, 0))
7178 return 0;
7179 return 1;
7181 else if (TREE_CODE (exp) == ERROR_MARK)
7182 return 1; /* An already-visited SAVE_EXPR? */
7183 else
7184 return 0;
7186 case tcc_statement:
7187 /* The only case we look at here is the DECL_INITIAL inside a
7188 DECL_EXPR. */
7189 return (TREE_CODE (exp) != DECL_EXPR
7190 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7191 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7192 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7194 case tcc_binary:
7195 case tcc_comparison:
7196 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7197 return 0;
7198 /* Fall through. */
7200 case tcc_unary:
7201 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7203 case tcc_expression:
7204 case tcc_reference:
7205 case tcc_vl_exp:
7206 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7207 the expression. If it is set, we conflict iff we are that rtx or
7208 both are in memory. Otherwise, we check all operands of the
7209 expression recursively. */
7211 switch (TREE_CODE (exp))
7213 case ADDR_EXPR:
7214 /* If the operand is static or we are static, we can't conflict.
7215 Likewise if we don't conflict with the operand at all. */
7216 if (staticp (TREE_OPERAND (exp, 0))
7217 || TREE_STATIC (exp)
7218 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7219 return 1;
7221 /* Otherwise, the only way this can conflict is if we are taking
7222 the address of a DECL whose address is part of X, which is
7223 very rare. */
7224 exp = TREE_OPERAND (exp, 0);
7225 if (DECL_P (exp))
7227 if (!DECL_RTL_SET_P (exp)
7228 || !MEM_P (DECL_RTL (exp)))
7229 return 0;
7230 else
7231 exp_rtl = XEXP (DECL_RTL (exp), 0);
7233 break;
7235 case MEM_REF:
7236 if (MEM_P (x)
7237 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7238 get_alias_set (exp)))
7239 return 0;
7240 break;
7242 case CALL_EXPR:
7243 /* Assume that the call will clobber all hard registers and
7244 all of memory. */
7245 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7246 || MEM_P (x))
7247 return 0;
7248 break;
7250 case WITH_CLEANUP_EXPR:
7251 case CLEANUP_POINT_EXPR:
7252 /* Lowered by gimplify.c. */
7253 gcc_unreachable ();
7255 case SAVE_EXPR:
7256 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7258 default:
7259 break;
7262 /* If we have an rtx, we do not need to scan our operands. */
7263 if (exp_rtl)
7264 break;
7266 nops = TREE_OPERAND_LENGTH (exp);
7267 for (i = 0; i < nops; i++)
7268 if (TREE_OPERAND (exp, i) != 0
7269 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7270 return 0;
7272 break;
7274 case tcc_type:
7275 /* Should never get a type here. */
7276 gcc_unreachable ();
7279 /* If we have an rtl, find any enclosed object. Then see if we conflict
7280 with it. */
7281 if (exp_rtl)
7283 if (GET_CODE (exp_rtl) == SUBREG)
7285 exp_rtl = SUBREG_REG (exp_rtl);
7286 if (REG_P (exp_rtl)
7287 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7288 return 0;
7291 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7292 are memory and they conflict. */
7293 return ! (rtx_equal_p (x, exp_rtl)
7294 || (MEM_P (x) && MEM_P (exp_rtl)
7295 && true_dependence (exp_rtl, VOIDmode, x)));
7298 /* If we reach here, it is safe. */
7299 return 1;
7303 /* Return the highest power of two that EXP is known to be a multiple of.
7304 This is used in updating alignment of MEMs in array references. */
7306 unsigned HOST_WIDE_INT
7307 highest_pow2_factor (const_tree exp)
7309 unsigned HOST_WIDE_INT c0, c1;
7311 switch (TREE_CODE (exp))
7313 case INTEGER_CST:
7314 /* We can find the lowest bit that's a one. If the low
7315 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
7316 We need to handle this case since we can find it in a COND_EXPR,
7317 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
7318 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
7319 later ICE. */
7320 if (TREE_OVERFLOW (exp))
7321 return BIGGEST_ALIGNMENT;
7322 else
7324 /* Note: tree_low_cst is intentionally not used here;
7325 we don't care about the upper bits. */
7326 c0 = TREE_INT_CST_LOW (exp);
7327 c0 &= -c0;
7328 return c0 ? c0 : BIGGEST_ALIGNMENT;
7330 break;
7332 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
7333 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7334 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7335 return MIN (c0, c1);
7337 case MULT_EXPR:
7338 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7339 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7340 return c0 * c1;
7342 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
7343 case CEIL_DIV_EXPR:
7344 if (integer_pow2p (TREE_OPERAND (exp, 1))
7345 && host_integerp (TREE_OPERAND (exp, 1), 1))
7347 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7348 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
7349 return MAX (1, c0 / c1);
7351 break;
7353 case BIT_AND_EXPR:
7354 /* The highest power of two of a bit-and expression is at least the maximum
7355 of those of its operands. We typically get here for a complex LHS and
7356 a constant negative power of two on the RHS to force an explicit
7357 alignment, so don't bother looking at the LHS. */
7358 return highest_pow2_factor (TREE_OPERAND (exp, 1));
7360 CASE_CONVERT:
7361 case SAVE_EXPR:
7362 return highest_pow2_factor (TREE_OPERAND (exp, 0));
7364 case COMPOUND_EXPR:
7365 return highest_pow2_factor (TREE_OPERAND (exp, 1));
7367 case COND_EXPR:
7368 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7369 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
7370 return MIN (c0, c1);
7372 default:
7373 break;
7376 return 1;
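/* Illustrative sketch, not part of the original file: the typical use
   of highest_pow2_factor, as seen in store_constructor above, letting
   offset_address keep whatever alignment the offset expression
   provably preserves.  The helper name is an assumption added for
   exposition only.  */

static rtx
example_offset_mem (rtx target, tree position)
{
  rtx pos_rtx = expand_normal (position);
  return offset_address (target, pos_rtx, highest_pow2_factor (position));
}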
7379 /* Similar, except that the alignment requirements of TARGET are
7380 taken into account. Assume it is at least as aligned as its
7381 type, unless it is a COMPONENT_REF in which case the layout of
7382 the structure gives the alignment. */
7384 static unsigned HOST_WIDE_INT
7385 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7387 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7388 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7390 return MAX (factor, talign);
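/* E.g. if TARGET is a decl the target guarantees to be 16-byte aligned
   while EXP above only yields a factor of 4, the result is 16; the MAX
   never weakens what highest_pow2_factor already proved about EXP.  */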
7393 #ifdef HAVE_conditional_move
7394 /* Convert the tree comparison code TCODE to the rtl one where the
7395 signedness is UNSIGNEDP. */
7397 static enum rtx_code
7398 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7400 enum rtx_code code;
7401 switch (tcode)
7403 case EQ_EXPR:
7404 code = EQ;
7405 break;
7406 case NE_EXPR:
7407 code = NE;
7408 break;
7409 case LT_EXPR:
7410 code = unsignedp ? LTU : LT;
7411 break;
7412 case LE_EXPR:
7413 code = unsignedp ? LEU : LE;
7414 break;
7415 case GT_EXPR:
7416 code = unsignedp ? GTU : GT;
7417 break;
7418 case GE_EXPR:
7419 code = unsignedp ? GEU : GE;
7420 break;
7421 case UNORDERED_EXPR:
7422 code = UNORDERED;
7423 break;
7424 case ORDERED_EXPR:
7425 code = ORDERED;
7426 break;
7427 case UNLT_EXPR:
7428 code = UNLT;
7429 break;
7430 case UNLE_EXPR:
7431 code = UNLE;
7432 break;
7433 case UNGT_EXPR:
7434 code = UNGT;
7435 break;
7436 case UNGE_EXPR:
7437 code = UNGE;
7438 break;
7439 case UNEQ_EXPR:
7440 code = UNEQ;
7441 break;
7442 case LTGT_EXPR:
7443 code = LTGT;
7444 break;
7446 default:
7447 gcc_unreachable ();
7449 return code;
7451 #endif
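/* Informal examples of the mapping above: LT_EXPR becomes LT for signed
   operands and LTU when UNSIGNEDP is set, while the IEEE "unordered"
   variants (UNLT_EXPR, UNGE_EXPR, ...) map to their RTL counterparts
   regardless of signedness.  */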
7453 /* Subroutine of expand_expr. Expand the two operands of a binary
7454 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7455 The value may be stored in TARGET if TARGET is nonzero. The
7456 MODIFIER argument is as documented by expand_expr. */
7458 static void
7459 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7460 enum expand_modifier modifier)
7462 if (! safe_from_p (target, exp1, 1))
7463 target = 0;
7464 if (operand_equal_p (exp0, exp1, 0))
7466 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7467 *op1 = copy_rtx (*op0);
7469 else
7471 /* If we need to preserve evaluation order, copy exp0 into its own
7472 temporary variable so that it can't be clobbered by exp1. */
7473 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7474 exp0 = save_expr (exp0);
7475 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7476 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
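/* Note (informal): when EXP0 and EXP1 are operand_equal_p, as in `x + x',
   the tree is expanded only once and *OP1 is a copy_rtx of *OP0; and if
   evaluating EXP1 could clobber TARGET, TARGET is simply dropped (set to 0)
   rather than risking the partially computed first operand.  */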
7481 /* Return a MEM that contains constant EXP. DEFER is as for
7482 output_constant_def and MODIFIER is as for expand_expr. */
7484 static rtx
7485 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7487 rtx mem;
7489 mem = output_constant_def (exp, defer);
7490 if (modifier != EXPAND_INITIALIZER)
7491 mem = use_anchored_address (mem);
7492 return mem;
7495 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7496 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7498 static rtx
7499 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7500 enum expand_modifier modifier, addr_space_t as)
7502 rtx result, subtarget;
7503 tree inner, offset;
7504 HOST_WIDE_INT bitsize, bitpos;
7505 int volatilep, unsignedp;
7506 enum machine_mode mode1;
7508 /* If we are taking the address of a constant and are at the top level,
7509 we have to use output_constant_def since we can't call force_const_mem
7510 at top level. */
7511 /* ??? This should be considered a front-end bug. We should not be
7512 generating ADDR_EXPR of something that isn't an LVALUE. The only
7513 exception here is STRING_CST. */
7514 if (CONSTANT_CLASS_P (exp))
7516 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7517 if (modifier < EXPAND_SUM)
7518 result = force_operand (result, target);
7519 return result;
7522 /* Everything must be something allowed by is_gimple_addressable. */
7523 switch (TREE_CODE (exp))
7525 case INDIRECT_REF:
7526 /* This case will happen via recursion for &a->b. */
7527 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7529 case MEM_REF:
7531 tree tem = TREE_OPERAND (exp, 0);
7532 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7533 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7534 return expand_expr (tem, target, tmode, modifier);
7537 case CONST_DECL:
7538 /* Expand the initializer like constants above. */
7539 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7540 0, modifier), 0);
7541 if (modifier < EXPAND_SUM)
7542 result = force_operand (result, target);
7543 return result;
7545 case REALPART_EXPR:
7546 /* The real part of the complex number is always first, therefore
7547 the address is the same as the address of the parent object. */
7548 offset = 0;
7549 bitpos = 0;
7550 inner = TREE_OPERAND (exp, 0);
7551 break;
7553 case IMAGPART_EXPR:
7554 /* The imaginary part of the complex number is always second.
7555 The expression is therefore always offset by the size of the
7556 scalar type. */
7557 offset = 0;
7558 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7559 inner = TREE_OPERAND (exp, 0);
7560 break;
7562 default:
7563 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7564 expand_expr, as that can have various side effects; LABEL_DECLs for
7565 example, may not have their DECL_RTL set yet. Expand the rtl of
7566 CONSTRUCTORs too, which should yield a memory reference for the
7567 constructor's contents. Assume language specific tree nodes can
7568 be expanded in some interesting way. */
7569 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7570 if (DECL_P (exp)
7571 || TREE_CODE (exp) == CONSTRUCTOR
7572 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7574 result = expand_expr (exp, target, tmode,
7575 modifier == EXPAND_INITIALIZER
7576 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7578 /* If the DECL isn't in memory, then the DECL wasn't properly
7579 marked TREE_ADDRESSABLE, which will be either a front-end
7580 or a tree optimizer bug. */
7582 if (TREE_ADDRESSABLE (exp)
7583 && ! MEM_P (result)
7584 && ! targetm.calls.allocate_stack_slots_for_args())
7586 error ("local frame unavailable (naked function?)");
7587 return result;
7589 else
7590 gcc_assert (MEM_P (result));
7591 result = XEXP (result, 0);
7593 /* ??? Is this needed anymore? */
7594 if (DECL_P (exp))
7595 TREE_USED (exp) = 1;
7597 if (modifier != EXPAND_INITIALIZER
7598 && modifier != EXPAND_CONST_ADDRESS
7599 && modifier != EXPAND_SUM)
7600 result = force_operand (result, target);
7601 return result;
7604 /* Pass FALSE as the last argument to get_inner_reference although
7605 we are expanding to RTL. The rationale is that we know how to
7606 handle "aligning nodes" here: we can just bypass them because
7607 they won't change the final object whose address will be returned
7608 (they actually exist only for that purpose). */
7609 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7610 &mode1, &unsignedp, &volatilep, false);
7611 break;
7614 /* We must have made progress. */
7615 gcc_assert (inner != exp);
7617 subtarget = offset || bitpos ? NULL_RTX : target;
7618 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7619 inner alignment, force the inner to be sufficiently aligned. */
7620 if (CONSTANT_CLASS_P (inner)
7621 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7623 inner = copy_node (inner);
7624 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7625 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7626 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7628 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7630 if (offset)
7632 rtx tmp;
7634 if (modifier != EXPAND_NORMAL)
7635 result = force_operand (result, NULL);
7636 tmp = expand_expr (offset, NULL_RTX, tmode,
7637 modifier == EXPAND_INITIALIZER
7638 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7640 result = convert_memory_address_addr_space (tmode, result, as);
7641 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7643 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7644 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7645 else
7647 subtarget = bitpos ? NULL_RTX : target;
7648 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7649 1, OPTAB_LIB_WIDEN);
7653 if (bitpos)
7655 /* Someone beforehand should have rejected taking the address
7656 of such an object. */
7657 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7659 result = convert_memory_address_addr_space (tmode, result, as);
7660 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7661 if (modifier < EXPAND_SUM)
7662 result = force_operand (result, target);
7665 return result;
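/* Rough example: for `&a.b' where field B sits at byte offset 4 (bit
   position 32) with no variable offset, get_inner_reference returns
   INNER == `a', BITPOS == 32 and OFFSET == NULL; the address of `a' is
   expanded recursively and plus_constant then adds BITPOS / BITS_PER_UNIT
   == 4 to form the final address.  */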
7668 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7669 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7671 static rtx
7672 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7673 enum expand_modifier modifier)
7675 addr_space_t as = ADDR_SPACE_GENERIC;
7676 enum machine_mode address_mode = Pmode;
7677 enum machine_mode pointer_mode = ptr_mode;
7678 enum machine_mode rmode;
7679 rtx result;
7681 /* Target mode of VOIDmode says "whatever's natural". */
7682 if (tmode == VOIDmode)
7683 tmode = TYPE_MODE (TREE_TYPE (exp));
7685 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7687 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7688 address_mode = targetm.addr_space.address_mode (as);
7689 pointer_mode = targetm.addr_space.pointer_mode (as);
7692 /* We can get called with some Weird Things if the user does silliness
7693 like "(short) &a". In that case, convert_memory_address won't do
7694 the right thing, so ignore the given target mode. */
7695 if (tmode != address_mode && tmode != pointer_mode)
7696 tmode = address_mode;
7698 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7699 tmode, modifier, as);
7701 /* Despite expand_expr claims concerning ignoring TMODE when not
7702 strictly convenient, stuff breaks if we don't honor it. Note
7703 that combined with the above, we only do this for pointer modes. */
7704 rmode = GET_MODE (result);
7705 if (rmode == VOIDmode)
7706 rmode = tmode;
7707 if (rmode != tmode)
7708 result = convert_memory_address_addr_space (tmode, result, as);
7710 return result;
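/* Sketch of the "(short) &a" case mentioned above, assuming a target
   where short is HImode: TMODE arrives as HImode, which is neither the
   address mode nor the pointer mode, so it is replaced by ADDRESS_MODE
   before recursing; any remaining mode mismatch is fixed up afterwards
   by convert_memory_address_addr_space.  */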
7713 /* Generate code for computing CONSTRUCTOR EXP.
7714 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7715 is TRUE, instead of creating a temporary variable in memory,
7716 NULL is returned and the caller needs to handle it differently. */
7718 static rtx
7719 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7720 bool avoid_temp_mem)
7722 tree type = TREE_TYPE (exp);
7723 enum machine_mode mode = TYPE_MODE (type);
7725 /* Try to avoid creating a temporary at all. This is possible
7726 if all of the initializer is zero.
7727 FIXME: try to handle all [0..255] initializers we can handle
7728 with memset. */
7729 if (TREE_STATIC (exp)
7730 && !TREE_ADDRESSABLE (exp)
7731 && target != 0 && mode == BLKmode
7732 && all_zeros_p (exp))
7734 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7735 return target;
7738 /* All elts simple constants => refer to a constant in memory. But
7739 if this is a non-BLKmode mode, let it store a field at a time
7740 since that should make a CONST_INT or CONST_DOUBLE when we
7741 fold. Likewise, if we have a target we can use, it is best to
7742 store directly into the target unless the type is large enough
7743 that memcpy will be used. If we are making an initializer and
7744 all operands are constant, put it in memory as well.
7746 FIXME: Avoid trying to fill vector constructors piece-meal.
7747 Output them with output_constant_def below unless we're sure
7748 they're zeros. This should go away when vector initializers
7749 are treated like VECTOR_CST instead of arrays. */
7750 if ((TREE_STATIC (exp)
7751 && ((mode == BLKmode
7752 && ! (target != 0 && safe_from_p (target, exp, 1)))
7753 || TREE_ADDRESSABLE (exp)
7754 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7755 && (! MOVE_BY_PIECES_P
7756 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7757 TYPE_ALIGN (type)))
7758 && ! mostly_zeros_p (exp))))
7759 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7760 && TREE_CONSTANT (exp)))
7762 rtx constructor;
7764 if (avoid_temp_mem)
7765 return NULL_RTX;
7767 constructor = expand_expr_constant (exp, 1, modifier);
7769 if (modifier != EXPAND_CONST_ADDRESS
7770 && modifier != EXPAND_INITIALIZER
7771 && modifier != EXPAND_SUM)
7772 constructor = validize_mem (constructor);
7774 return constructor;
7777 /* Handle calls that pass values in multiple non-contiguous
7778 locations. The Irix 6 ABI has examples of this. */
7779 if (target == 0 || ! safe_from_p (target, exp, 1)
7780 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7782 if (avoid_temp_mem)
7783 return NULL_RTX;
7785 target
7786 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7787 | (TREE_READONLY (exp)
7788 * TYPE_QUAL_CONST))),
7789 TREE_ADDRESSABLE (exp), 1);
7792 store_constructor (exp, target, 0, int_expr_size (exp));
7793 return target;
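/* Informal note: a TREE_STATIC, non-addressable CONSTRUCTOR that is all
   zeros and has a BLKmode TARGET never reaches the constant pool at all;
   the first test above turns it into a single clear_storage block clear.  */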
7797 /* expand_expr: generate code for computing expression EXP.
7798 An rtx for the computed value is returned. The value is never null.
7799 In the case of a void EXP, const0_rtx is returned.
7801 The value may be stored in TARGET if TARGET is nonzero.
7802 TARGET is just a suggestion; callers must assume that
7803 the rtx returned may not be the same as TARGET.
7805 If TARGET is CONST0_RTX, it means that the value will be ignored.
7807 If TMODE is not VOIDmode, it suggests generating the
7808 result in mode TMODE. But this is done only when convenient.
7809 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7810 TMODE is just a suggestion; callers must assume that
7811 the rtx returned may not have mode TMODE.
7813 Note that TARGET may have neither TMODE nor MODE. In that case, it
7814 probably will not be used.
7816 If MODIFIER is EXPAND_SUM then when EXP is an addition
7817 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7818 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7819 products as above, or REG or MEM, or constant.
7820 Ordinarily in such cases we would output mul or add instructions
7821 and then return a pseudo reg containing the sum.
7823 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7824 it also marks a label as absolutely required (it can't be dead).
7825 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7826 This is used for outputting expressions used in initializers.
7828 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7829 with a constant address even if that address is not normally legitimate.
7830 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7832 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7833 a call parameter. Such targets require special care as we haven't yet
7834 marked TARGET so that it's safe from being trashed by libcalls. We
7835 don't want to use TARGET for anything but the final result;
7836 intermediate values must go elsewhere. Additionally, calls to
7837 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7839 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7840 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7841 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7842 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7843 recursively. */
7846 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7847 enum expand_modifier modifier, rtx *alt_rtl)
7849 rtx ret;
7851 /* Handle ERROR_MARK before anybody tries to access its type. */
7852 if (TREE_CODE (exp) == ERROR_MARK
7853 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7855 ret = CONST0_RTX (tmode);
7856 return ret ? ret : const0_rtx;
7859 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7860 return ret;
7863 /* Try to expand the conditional expression which is represented by
7864 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7865 return the rtl reg which represents the result. Otherwise return
7866 NULL_RTX. */
7868 static rtx
7869 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7870 tree treeop1 ATTRIBUTE_UNUSED,
7871 tree treeop2 ATTRIBUTE_UNUSED)
7873 #ifdef HAVE_conditional_move
7874 rtx insn;
7875 rtx op00, op01, op1, op2;
7876 enum rtx_code comparison_code;
7877 enum machine_mode comparison_mode;
7878 gimple srcstmt;
7879 rtx temp;
7880 tree type = TREE_TYPE (treeop1);
7881 int unsignedp = TYPE_UNSIGNED (type);
7882 enum machine_mode mode = TYPE_MODE (type);
7884 temp = assign_temp (type, 0, 1);
7886 /* If we cannot do a conditional move on the mode, try doing it
7887 with the promoted mode. */
7888 if (!can_conditionally_move_p (mode))
7889 mode = promote_mode (type, mode, &unsignedp);
7891 if (!can_conditionally_move_p (mode))
7892 return NULL_RTX;
7894 start_sequence ();
7895 expand_operands (treeop1, treeop2,
7896 temp, &op1, &op2, EXPAND_NORMAL);
7898 if (TREE_CODE (treeop0) == SSA_NAME
7899 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7901 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7902 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7903 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7904 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7905 comparison_mode = TYPE_MODE (type);
7906 unsignedp = TYPE_UNSIGNED (type);
7907 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7909 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
7911 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7912 enum tree_code cmpcode = TREE_CODE (treeop0);
7913 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7914 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7915 unsignedp = TYPE_UNSIGNED (type);
7916 comparison_mode = TYPE_MODE (type);
7917 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7919 else
7921 op00 = expand_normal (treeop0);
7922 op01 = const0_rtx;
7923 comparison_code = NE;
7924 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7927 if (GET_MODE (op1) != mode)
7928 op1 = gen_lowpart (mode, op1);
7930 if (GET_MODE (op2) != mode)
7931 op2 = gen_lowpart (mode, op2);
7933 /* Try to emit the conditional move. */
7934 insn = emit_conditional_move (temp, comparison_code,
7935 op00, op01, comparison_mode,
7936 op1, op2, mode,
7937 unsignedp);
7939 /* If we could do the conditional move, emit the sequence,
7940 and return. */
7941 if (insn)
7943 rtx seq = get_insns ();
7944 end_sequence ();
7945 emit_insn (seq);
7946 return temp;
7949 /* Otherwise discard the sequence and fall back to code with
7950 branches. */
7951 end_sequence ();
7952 #endif
7953 return NULL_RTX;
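/* Minimal sketch, assuming the target provides a conditional-move pattern
   for the mode: for `x = a < b ? c : d' where TREEOP0 is the SSA name of
   the comparison, the function expands A and B, converts LT_EXPR via
   convert_tree_comp_to_rtx, and asks emit_conditional_move for a
   branch-free sequence; if no pattern exists even after promote_mode,
   NULL_RTX is returned and the caller falls back to jumps.  */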
7957 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7958 enum expand_modifier modifier)
7960 rtx op0, op1, op2, temp;
7961 tree type;
7962 int unsignedp;
7963 enum machine_mode mode;
7964 enum tree_code code = ops->code;
7965 optab this_optab;
7966 rtx subtarget, original_target;
7967 int ignore;
7968 bool reduce_bit_field;
7969 location_t loc = ops->location;
7970 tree treeop0, treeop1, treeop2;
7971 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7972 ? reduce_to_bit_field_precision ((expr), \
7973 target, \
7974 type) \
7975 : (expr))
7977 type = ops->type;
7978 mode = TYPE_MODE (type);
7979 unsignedp = TYPE_UNSIGNED (type);
7981 treeop0 = ops->op0;
7982 treeop1 = ops->op1;
7983 treeop2 = ops->op2;
7985 /* We should be called only on simple (binary or unary) expressions,
7986 exactly those that are valid in gimple expressions that aren't
7987 GIMPLE_SINGLE_RHS (or invalid). */
7988 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7989 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7990 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
7992 ignore = (target == const0_rtx
7993 || ((CONVERT_EXPR_CODE_P (code)
7994 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7995 && TREE_CODE (type) == VOID_TYPE));
7997 /* We should be called only if we need the result. */
7998 gcc_assert (!ignore);
8000 /* An operation in what may be a bit-field type needs the
8001 result to be reduced to the precision of the bit-field type,
8002 which is narrower than that of the type's mode. */
8003 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8004 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8006 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8007 target = 0;
8009 /* Use subtarget as the target for operand 0 of a binary operation. */
8010 subtarget = get_subtarget (target);
8011 original_target = target;
8013 switch (code)
8015 case NON_LVALUE_EXPR:
8016 case PAREN_EXPR:
8017 CASE_CONVERT:
8018 if (treeop0 == error_mark_node)
8019 return const0_rtx;
8021 if (TREE_CODE (type) == UNION_TYPE)
8023 tree valtype = TREE_TYPE (treeop0);
8025 /* If both input and output are BLKmode, this conversion isn't doing
8026 anything except possibly changing memory attribute. */
8027 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8029 rtx result = expand_expr (treeop0, target, tmode,
8030 modifier);
8032 result = copy_rtx (result);
8033 set_mem_attributes (result, type, 0);
8034 return result;
8037 if (target == 0)
8039 if (TYPE_MODE (type) != BLKmode)
8040 target = gen_reg_rtx (TYPE_MODE (type));
8041 else
8042 target = assign_temp (type, 1, 1);
8045 if (MEM_P (target))
8046 /* Store data into beginning of memory target. */
8047 store_expr (treeop0,
8048 adjust_address (target, TYPE_MODE (valtype), 0),
8049 modifier == EXPAND_STACK_PARM,
8050 false);
8052 else
8054 gcc_assert (REG_P (target));
8056 /* Store this field into a union of the proper type. */
8057 store_field (target,
8058 MIN ((int_size_in_bytes (TREE_TYPE
8059 (treeop0))
8060 * BITS_PER_UNIT),
8061 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8062 0, 0, 0, TYPE_MODE (valtype), treeop0,
8063 type, 0, false);
8066 /* Return the entire union. */
8067 return target;
8070 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8072 op0 = expand_expr (treeop0, target, VOIDmode,
8073 modifier);
8075 /* If the signedness of the conversion differs and OP0 is
8076 a promoted SUBREG, clear that indication since we now
8077 have to do the proper extension. */
8078 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8079 && GET_CODE (op0) == SUBREG)
8080 SUBREG_PROMOTED_VAR_P (op0) = 0;
8082 return REDUCE_BIT_FIELD (op0);
8085 op0 = expand_expr (treeop0, NULL_RTX, mode,
8086 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8087 if (GET_MODE (op0) == mode)
8090 /* If OP0 is a constant, just convert it into the proper mode. */
8091 else if (CONSTANT_P (op0))
8093 tree inner_type = TREE_TYPE (treeop0);
8094 enum machine_mode inner_mode = GET_MODE (op0);
8096 if (inner_mode == VOIDmode)
8097 inner_mode = TYPE_MODE (inner_type);
8099 if (modifier == EXPAND_INITIALIZER)
8100 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8101 subreg_lowpart_offset (mode,
8102 inner_mode));
8103 else
8104 op0 = convert_modes (mode, inner_mode, op0,
8105 TYPE_UNSIGNED (inner_type));
8108 else if (modifier == EXPAND_INITIALIZER)
8109 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8111 else if (target == 0)
8112 op0 = convert_to_mode (mode, op0,
8113 TYPE_UNSIGNED (TREE_TYPE
8114 (treeop0)));
8115 else
8117 convert_move (target, op0,
8118 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8119 op0 = target;
8122 return REDUCE_BIT_FIELD (op0);
8124 case ADDR_SPACE_CONVERT_EXPR:
8126 tree treeop0_type = TREE_TYPE (treeop0);
8127 addr_space_t as_to;
8128 addr_space_t as_from;
8130 gcc_assert (POINTER_TYPE_P (type));
8131 gcc_assert (POINTER_TYPE_P (treeop0_type));
8133 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8134 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8136 /* Conversions between pointers to the same address space should
8137 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8138 gcc_assert (as_to != as_from);
8140 /* Ask target code to handle conversion between pointers
8141 to overlapping address spaces. */
8142 if (targetm.addr_space.subset_p (as_to, as_from)
8143 || targetm.addr_space.subset_p (as_from, as_to))
8145 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8146 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8147 gcc_assert (op0);
8148 return op0;
8151 /* For disjoint address spaces, converting anything but
8152 a null pointer invokes undefined behaviour. We simply
8153 always return a null pointer here. */
8154 return CONST0_RTX (mode);
8157 case POINTER_PLUS_EXPR:
8158 /* Even though the sizetype mode and the pointer's mode can be different,
8159 expand is able to handle this correctly and get the correct result out
8160 of the PLUS_EXPR code. */
8161 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8162 if sizetype precision is smaller than pointer precision. */
8163 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8164 treeop1 = fold_convert_loc (loc, type,
8165 fold_convert_loc (loc, ssizetype,
8166 treeop1));
8167 /* If sizetype precision is larger than pointer precision, truncate the
8168 offset to have matching modes. */
8169 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8170 treeop1 = fold_convert_loc (loc, type, treeop1);
8172 case PLUS_EXPR:
8173 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8174 something else, make sure we add the register to the constant and
8175 then to the other thing. This case can occur during strength
8176 reduction and doing it this way will produce better code if the
8177 frame pointer or argument pointer is eliminated.
8179 fold-const.c will ensure that the constant is always in the inner
8180 PLUS_EXPR, so the only case we need to do anything about is if
8181 sp, ap, or fp is our second argument, in which case we must swap
8182 the innermost first argument and our second argument. */
8184 if (TREE_CODE (treeop0) == PLUS_EXPR
8185 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8186 && TREE_CODE (treeop1) == VAR_DECL
8187 && (DECL_RTL (treeop1) == frame_pointer_rtx
8188 || DECL_RTL (treeop1) == stack_pointer_rtx
8189 || DECL_RTL (treeop1) == arg_pointer_rtx))
8191 gcc_unreachable ();
8194 /* If the result is to be ptr_mode and we are adding an integer to
8195 something, we might be forming a constant. So try to use
8196 plus_constant. If it produces a sum and we can't accept it,
8197 use force_operand. This allows P = &ARR[const] to generate
8198 efficient code on machines where a SYMBOL_REF is not a valid
8199 address.
8201 If this is an EXPAND_SUM call, always return the sum. */
8202 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8203 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8205 if (modifier == EXPAND_STACK_PARM)
8206 target = 0;
8207 if (TREE_CODE (treeop0) == INTEGER_CST
8208 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8209 && TREE_CONSTANT (treeop1))
8211 rtx constant_part;
8213 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8214 EXPAND_SUM);
8215 /* Use immed_double_const to ensure that the constant is
8216 truncated according to the mode of OP1, then sign extended
8217 to a HOST_WIDE_INT. Using the constant directly can result
8218 in non-canonical RTL in a 64x32 cross compile. */
8219 constant_part
8220 = immed_double_const (TREE_INT_CST_LOW (treeop0),
8221 (HOST_WIDE_INT) 0,
8222 TYPE_MODE (TREE_TYPE (treeop1)));
8223 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8224 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8225 op1 = force_operand (op1, target);
8226 return REDUCE_BIT_FIELD (op1);
8229 else if (TREE_CODE (treeop1) == INTEGER_CST
8230 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8231 && TREE_CONSTANT (treeop0))
8233 rtx constant_part;
8235 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8236 (modifier == EXPAND_INITIALIZER
8237 ? EXPAND_INITIALIZER : EXPAND_SUM));
8238 if (! CONSTANT_P (op0))
8240 op1 = expand_expr (treeop1, NULL_RTX,
8241 VOIDmode, modifier);
8242 /* Return a PLUS if modifier says it's OK. */
8243 if (modifier == EXPAND_SUM
8244 || modifier == EXPAND_INITIALIZER)
8245 return simplify_gen_binary (PLUS, mode, op0, op1);
8246 goto binop2;
8248 /* Use immed_double_const to ensure that the constant is
8249 truncated according to the mode of OP1, then sign extended
8250 to a HOST_WIDE_INT. Using the constant directly can result
8251 in non-canonical RTL in a 64x32 cross compile. */
8252 constant_part
8253 = immed_double_const (TREE_INT_CST_LOW (treeop1),
8254 (HOST_WIDE_INT) 0,
8255 TYPE_MODE (TREE_TYPE (treeop0)));
8256 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8257 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8258 op0 = force_operand (op0, target);
8259 return REDUCE_BIT_FIELD (op0);
8263 /* Use TER to expand pointer addition of a negated value
8264 as pointer subtraction. */
8265 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8266 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8267 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8268 && TREE_CODE (treeop1) == SSA_NAME
8269 && TYPE_MODE (TREE_TYPE (treeop0))
8270 == TYPE_MODE (TREE_TYPE (treeop1)))
8272 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8273 if (def)
8275 treeop1 = gimple_assign_rhs1 (def);
8276 code = MINUS_EXPR;
8277 goto do_minus;
8281 /* No sense saving up arithmetic to be done
8282 if it's all in the wrong mode to form part of an address.
8283 And force_operand won't know whether to sign-extend or
8284 zero-extend. */
8285 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8286 || mode != ptr_mode)
8288 expand_operands (treeop0, treeop1,
8289 subtarget, &op0, &op1, EXPAND_NORMAL);
8290 if (op0 == const0_rtx)
8291 return op1;
8292 if (op1 == const0_rtx)
8293 return op0;
8294 goto binop2;
8297 expand_operands (treeop0, treeop1,
8298 subtarget, &op0, &op1, modifier);
8299 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
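/* Informal note on the path above: under EXPAND_SUM or EXPAND_INITIALIZER
   with pointer mode, the sum can come back as a bare (plus ...) rtx --
   e.g. (plus (symbol_ref) (const_int)) -- instead of being forced into a
   pseudo, so the caller can fold it into an addressing mode; other cases
   fall through to binop2 and materialize the addition.  */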
8301 case MINUS_EXPR:
8302 do_minus:
8303 /* For initializers, we are allowed to return a MINUS of two
8304 symbolic constants. Here we handle all cases when both operands
8305 are constant. */
8306 /* Handle difference of two symbolic constants,
8307 for the sake of an initializer. */
8308 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8309 && really_constant_p (treeop0)
8310 && really_constant_p (treeop1))
8312 expand_operands (treeop0, treeop1,
8313 NULL_RTX, &op0, &op1, modifier);
8315 /* If the last operand is a CONST_INT, use plus_constant of
8316 the negated constant. Else make the MINUS. */
8317 if (CONST_INT_P (op1))
8318 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8319 -INTVAL (op1)));
8320 else
8321 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8324 /* No sense saving up arithmetic to be done
8325 if it's all in the wrong mode to form part of an address.
8326 And force_operand won't know whether to sign-extend or
8327 zero-extend. */
8328 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8329 || mode != ptr_mode)
8330 goto binop;
8332 expand_operands (treeop0, treeop1,
8333 subtarget, &op0, &op1, modifier);
8335 /* Convert A - const to A + (-const). */
8336 if (CONST_INT_P (op1))
8338 op1 = negate_rtx (mode, op1);
8339 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8342 goto binop2;
8344 case WIDEN_MULT_PLUS_EXPR:
8345 case WIDEN_MULT_MINUS_EXPR:
8346 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8347 op2 = expand_normal (treeop2);
8348 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8349 target, unsignedp);
8350 return target;
8352 case WIDEN_MULT_EXPR:
8353 /* If first operand is constant, swap them.
8354 Thus the following special case checks need only
8355 check the second operand. */
8356 if (TREE_CODE (treeop0) == INTEGER_CST)
8358 tree t1 = treeop0;
8359 treeop0 = treeop1;
8360 treeop1 = t1;
8363 /* First, check if we have a multiplication of one signed and one
8364 unsigned operand. */
8365 if (TREE_CODE (treeop1) != INTEGER_CST
8366 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8367 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8369 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8370 this_optab = usmul_widen_optab;
8371 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8372 != CODE_FOR_nothing)
8374 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8375 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8376 EXPAND_NORMAL);
8377 else
8378 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8379 EXPAND_NORMAL);
8380 goto binop3;
8383 /* Check for a multiplication with matching signedness. */
8384 else if ((TREE_CODE (treeop1) == INTEGER_CST
8385 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8386 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8387 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8389 tree op0type = TREE_TYPE (treeop0);
8390 enum machine_mode innermode = TYPE_MODE (op0type);
8391 bool zextend_p = TYPE_UNSIGNED (op0type);
8392 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8393 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8395 if (TREE_CODE (treeop0) != INTEGER_CST)
8397 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8398 != CODE_FOR_nothing)
8400 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8401 EXPAND_NORMAL);
8402 temp = expand_widening_mult (mode, op0, op1, target,
8403 unsignedp, this_optab);
8404 return REDUCE_BIT_FIELD (temp);
8406 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8407 != CODE_FOR_nothing
8408 && innermode == word_mode)
8410 rtx htem, hipart;
8411 op0 = expand_normal (treeop0);
8412 if (TREE_CODE (treeop1) == INTEGER_CST)
8413 op1 = convert_modes (innermode, mode,
8414 expand_normal (treeop1), unsignedp);
8415 else
8416 op1 = expand_normal (treeop1);
8417 temp = expand_binop (mode, other_optab, op0, op1, target,
8418 unsignedp, OPTAB_LIB_WIDEN);
8419 hipart = gen_highpart (innermode, temp);
8420 htem = expand_mult_highpart_adjust (innermode, hipart,
8421 op0, op1, hipart,
8422 zextend_p);
8423 if (htem != hipart)
8424 emit_move_insn (hipart, htem);
8425 return REDUCE_BIT_FIELD (temp);
8429 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8430 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8431 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8432 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8434 case FMA_EXPR:
8436 optab opt = fma_optab;
8437 gimple def0, def2;
8439 /* If there is no insn for FMA, emit it as a __builtin_fma{,f,l}
8440 call. */
8441 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8443 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8444 tree call_expr;
8446 gcc_assert (fn != NULL_TREE);
8447 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8448 return expand_builtin (call_expr, target, subtarget, mode, false);
8451 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8452 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8454 op0 = op2 = NULL;
8456 if (def0 && def2
8457 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8459 opt = fnms_optab;
8460 op0 = expand_normal (gimple_assign_rhs1 (def0));
8461 op2 = expand_normal (gimple_assign_rhs1 (def2));
8463 else if (def0
8464 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8466 opt = fnma_optab;
8467 op0 = expand_normal (gimple_assign_rhs1 (def0));
8469 else if (def2
8470 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8472 opt = fms_optab;
8473 op2 = expand_normal (gimple_assign_rhs1 (def2));
8476 if (op0 == NULL)
8477 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8478 if (op2 == NULL)
8479 op2 = expand_normal (treeop2);
8480 op1 = expand_normal (treeop1);
8482 return expand_ternary_op (TYPE_MODE (type), opt,
8483 op0, op1, op2, target, 0);
8486 case MULT_EXPR:
8487 /* If this is a fixed-point operation, then we cannot use the code
8488 below because "expand_mult" doesn't support sat/no-sat fixed-point
8489 multiplications. */
8490 if (ALL_FIXED_POINT_MODE_P (mode))
8491 goto binop;
8493 /* If first operand is constant, swap them.
8494 Thus the following special case checks need only
8495 check the second operand. */
8496 if (TREE_CODE (treeop0) == INTEGER_CST)
8498 tree t1 = treeop0;
8499 treeop0 = treeop1;
8500 treeop1 = t1;
8503 /* Attempt to return something suitable for generating an
8504 indexed address, for machines that support that. */
8506 if (modifier == EXPAND_SUM && mode == ptr_mode
8507 && host_integerp (treeop1, 0))
8509 tree exp1 = treeop1;
8511 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8512 EXPAND_SUM);
8514 if (!REG_P (op0))
8515 op0 = force_operand (op0, NULL_RTX);
8516 if (!REG_P (op0))
8517 op0 = copy_to_mode_reg (mode, op0);
8519 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8520 gen_int_mode (tree_low_cst (exp1, 0),
8521 TYPE_MODE (TREE_TYPE (exp1)))));
8524 if (modifier == EXPAND_STACK_PARM)
8525 target = 0;
8527 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8528 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
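/* Informal note: the EXPAND_SUM special case above deliberately returns a
   raw (mult (reg) (const_int)) -- e.g. an index scaled by 4 -- so that the
   caller can combine it into an indexed address on targets that support
   one, instead of computing the product into a pseudo.  */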
8530 case TRUNC_DIV_EXPR:
8531 case FLOOR_DIV_EXPR:
8532 case CEIL_DIV_EXPR:
8533 case ROUND_DIV_EXPR:
8534 case EXACT_DIV_EXPR:
8535 /* If this is a fixed-point operation, then we cannot use the code
8536 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8537 divisions. */
8538 if (ALL_FIXED_POINT_MODE_P (mode))
8539 goto binop;
8541 if (modifier == EXPAND_STACK_PARM)
8542 target = 0;
8543 /* Possible optimization: compute the dividend with EXPAND_SUM;
8544 then, if the divisor is constant, we can optimize the case
8545 where some terms of the dividend have coeffs divisible by it. */
8546 expand_operands (treeop0, treeop1,
8547 subtarget, &op0, &op1, EXPAND_NORMAL);
8548 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8550 case RDIV_EXPR:
8551 goto binop;
8553 case MULT_HIGHPART_EXPR:
8554 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8555 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8556 gcc_assert (temp);
8557 return temp;
8559 case TRUNC_MOD_EXPR:
8560 case FLOOR_MOD_EXPR:
8561 case CEIL_MOD_EXPR:
8562 case ROUND_MOD_EXPR:
8563 if (modifier == EXPAND_STACK_PARM)
8564 target = 0;
8565 expand_operands (treeop0, treeop1,
8566 subtarget, &op0, &op1, EXPAND_NORMAL);
8567 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8569 case FIXED_CONVERT_EXPR:
8570 op0 = expand_normal (treeop0);
8571 if (target == 0 || modifier == EXPAND_STACK_PARM)
8572 target = gen_reg_rtx (mode);
8574 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8575 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8576 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8577 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8578 else
8579 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8580 return target;
8582 case FIX_TRUNC_EXPR:
8583 op0 = expand_normal (treeop0);
8584 if (target == 0 || modifier == EXPAND_STACK_PARM)
8585 target = gen_reg_rtx (mode);
8586 expand_fix (target, op0, unsignedp);
8587 return target;
8589 case FLOAT_EXPR:
8590 op0 = expand_normal (treeop0);
8591 if (target == 0 || modifier == EXPAND_STACK_PARM)
8592 target = gen_reg_rtx (mode);
8593 /* expand_float can't figure out what to do if FROM has VOIDmode.
8594 So give it the correct mode. With -O, cse will optimize this. */
8595 if (GET_MODE (op0) == VOIDmode)
8596 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8597 op0);
8598 expand_float (target, op0,
8599 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8600 return target;
8602 case NEGATE_EXPR:
8603 op0 = expand_expr (treeop0, subtarget,
8604 VOIDmode, EXPAND_NORMAL);
8605 if (modifier == EXPAND_STACK_PARM)
8606 target = 0;
8607 temp = expand_unop (mode,
8608 optab_for_tree_code (NEGATE_EXPR, type,
8609 optab_default),
8610 op0, target, 0);
8611 gcc_assert (temp);
8612 return REDUCE_BIT_FIELD (temp);
8614 case ABS_EXPR:
8615 op0 = expand_expr (treeop0, subtarget,
8616 VOIDmode, EXPAND_NORMAL);
8617 if (modifier == EXPAND_STACK_PARM)
8618 target = 0;
8620 /* ABS_EXPR is not valid for complex arguments. */
8621 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8622 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8624 /* Unsigned abs is simply the operand. Testing here means we don't
8625 risk generating incorrect code below. */
8626 if (TYPE_UNSIGNED (type))
8627 return op0;
8629 return expand_abs (mode, op0, target, unsignedp,
8630 safe_from_p (target, treeop0, 1));
8632 case MAX_EXPR:
8633 case MIN_EXPR:
8634 target = original_target;
8635 if (target == 0
8636 || modifier == EXPAND_STACK_PARM
8637 || (MEM_P (target) && MEM_VOLATILE_P (target))
8638 || GET_MODE (target) != mode
8639 || (REG_P (target)
8640 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8641 target = gen_reg_rtx (mode);
8642 expand_operands (treeop0, treeop1,
8643 target, &op0, &op1, EXPAND_NORMAL);
8645 /* First try to do it with a special MIN or MAX instruction.
8646 If that does not win, use a conditional jump to select the proper
8647 value. */
8648 this_optab = optab_for_tree_code (code, type, optab_default);
8649 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8650 OPTAB_WIDEN);
8651 if (temp != 0)
8652 return temp;
8654 /* At this point, a MEM target is no longer useful; we will get better
8655 code without it. */
8657 if (! REG_P (target))
8658 target = gen_reg_rtx (mode);
8660 /* If op1 was placed in target, swap op0 and op1. */
8661 if (target != op0 && target == op1)
8663 temp = op0;
8664 op0 = op1;
8665 op1 = temp;
8668 /* We generate better code and avoid problems with op1 mentioning
8669 target by forcing op1 into a pseudo if it isn't a constant. */
8670 if (! CONSTANT_P (op1))
8671 op1 = force_reg (mode, op1);
8674 enum rtx_code comparison_code;
8675 rtx cmpop1 = op1;
8677 if (code == MAX_EXPR)
8678 comparison_code = unsignedp ? GEU : GE;
8679 else
8680 comparison_code = unsignedp ? LEU : LE;
8682 /* Canonicalize to comparisons against 0. */
8683 if (op1 == const1_rtx)
8685 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8686 or (a != 0 ? a : 1) for unsigned.
8687 For MIN we are safe converting (a <= 1 ? a : 1)
8688 into (a <= 0 ? a : 1) */
8689 cmpop1 = const0_rtx;
8690 if (code == MAX_EXPR)
8691 comparison_code = unsignedp ? NE : GT;
8693 if (op1 == constm1_rtx && !unsignedp)
8695 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8696 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8697 cmpop1 = const0_rtx;
8698 if (code == MIN_EXPR)
8699 comparison_code = LT;
8701 #ifdef HAVE_conditional_move
8702 /* Use a conditional move if possible. */
8703 if (can_conditionally_move_p (mode))
8705 rtx insn;
8707 /* ??? Same problem as in expmed.c: emit_conditional_move
8708 forces a stack adjustment via compare_from_rtx, and we
8709 lose the stack adjustment if the sequence we are about
8710 to create is discarded. */
8711 do_pending_stack_adjust ();
8713 start_sequence ();
8715 /* Try to emit the conditional move. */
8716 insn = emit_conditional_move (target, comparison_code,
8717 op0, cmpop1, mode,
8718 op0, op1, mode,
8719 unsignedp);
8721 /* If we could do the conditional move, emit the sequence,
8722 and return. */
8723 if (insn)
8725 rtx seq = get_insns ();
8726 end_sequence ();
8727 emit_insn (seq);
8728 return target;
8731 /* Otherwise discard the sequence and fall back to code with
8732 branches. */
8733 end_sequence ();
8735 #endif
8736 if (target != op0)
8737 emit_move_insn (target, op0);
8739 temp = gen_label_rtx ();
8740 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8741 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8742 -1);
8744 emit_move_insn (target, op1);
8745 emit_label (temp);
8746 return target;
8748 case BIT_NOT_EXPR:
8749 op0 = expand_expr (treeop0, subtarget,
8750 VOIDmode, EXPAND_NORMAL);
8751 if (modifier == EXPAND_STACK_PARM)
8752 target = 0;
8753 /* In case we have to reduce the result to bitfield precision
8754 for unsigned bitfield expand this as XOR with a proper constant
8755 instead. */
8756 if (reduce_bit_field && TYPE_UNSIGNED (type))
8757 temp = expand_binop (mode, xor_optab, op0,
8758 immed_double_int_const
8759 (double_int::mask (TYPE_PRECISION (type)), mode),
8760 target, 1, OPTAB_LIB_WIDEN);
8761 else
8762 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8763 gcc_assert (temp);
8764 return temp;
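/* E.g. for an unsigned bit-field type of precision 3, the XOR path above
   expands ~x as x ^ 7: the result is already confined to the low 3 bits,
   so no separate REDUCE_BIT_FIELD step is needed for it.  */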
8766 /* ??? Can optimize bitwise operations with one arg constant.
8767 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8768 and (a bitwise1 b) bitwise2 b (etc)
8769 but that is probably not worth while. */
8771 case BIT_AND_EXPR:
8772 case BIT_IOR_EXPR:
8773 case BIT_XOR_EXPR:
8774 goto binop;
8776 case LROTATE_EXPR:
8777 case RROTATE_EXPR:
8778 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8779 || (GET_MODE_PRECISION (TYPE_MODE (type))
8780 == TYPE_PRECISION (type)));
8781 /* fall through */
8783 case LSHIFT_EXPR:
8784 case RSHIFT_EXPR:
8785 /* If this is a fixed-point operation, then we cannot use the code
8786 below because "expand_shift" doesn't support sat/no-sat fixed-point
8787 shifts. */
8788 if (ALL_FIXED_POINT_MODE_P (mode))
8789 goto binop;
8791 if (! safe_from_p (subtarget, treeop1, 1))
8792 subtarget = 0;
8793 if (modifier == EXPAND_STACK_PARM)
8794 target = 0;
8795 op0 = expand_expr (treeop0, subtarget,
8796 VOIDmode, EXPAND_NORMAL);
8797 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8798 unsignedp);
8799 if (code == LSHIFT_EXPR)
8800 temp = REDUCE_BIT_FIELD (temp);
8801 return temp;
8803 /* Could determine the answer when only additive constants differ. Also,
8804 the addition of one can be handled by changing the condition. */
8805 case LT_EXPR:
8806 case LE_EXPR:
8807 case GT_EXPR:
8808 case GE_EXPR:
8809 case EQ_EXPR:
8810 case NE_EXPR:
8811 case UNORDERED_EXPR:
8812 case ORDERED_EXPR:
8813 case UNLT_EXPR:
8814 case UNLE_EXPR:
8815 case UNGT_EXPR:
8816 case UNGE_EXPR:
8817 case UNEQ_EXPR:
8818 case LTGT_EXPR:
8819 temp = do_store_flag (ops,
8820 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8821 tmode != VOIDmode ? tmode : mode);
8822 if (temp)
8823 return temp;
8825 /* Use a compare and a jump for BLKmode comparisons, or for function
8826 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8828 if ((target == 0
8829 || modifier == EXPAND_STACK_PARM
8830 || ! safe_from_p (target, treeop0, 1)
8831 || ! safe_from_p (target, treeop1, 1)
8832 /* Make sure we don't have a hard reg (such as function's return
8833 value) live across basic blocks, if not optimizing. */
8834 || (!optimize && REG_P (target)
8835 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8836 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8838 emit_move_insn (target, const0_rtx);
8840 op1 = gen_label_rtx ();
8841 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8843 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8844 emit_move_insn (target, constm1_rtx);
8845 else
8846 emit_move_insn (target, const1_rtx);
8848 emit_label (op1);
8849 return target;
8851 case COMPLEX_EXPR:
8852 /* Get the rtx code of the operands. */
8853 op0 = expand_normal (treeop0);
8854 op1 = expand_normal (treeop1);
8856 if (!target)
8857 target = gen_reg_rtx (TYPE_MODE (type));
8859 /* Move the real (op0) and imaginary (op1) parts to their location. */
8860 write_complex_part (target, op0, false);
8861 write_complex_part (target, op1, true);
8863 return target;
8865 case WIDEN_SUM_EXPR:
8867 tree oprnd0 = treeop0;
8868 tree oprnd1 = treeop1;
8870 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8871 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8872 target, unsignedp);
8873 return target;
8876 case REDUC_MAX_EXPR:
8877 case REDUC_MIN_EXPR:
8878 case REDUC_PLUS_EXPR:
8880 op0 = expand_normal (treeop0);
8881 this_optab = optab_for_tree_code (code, type, optab_default);
8882 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8883 gcc_assert (temp);
8884 return temp;
8887 case VEC_LSHIFT_EXPR:
8888 case VEC_RSHIFT_EXPR:
8890 target = expand_vec_shift_expr (ops, target);
8891 return target;
8894 case VEC_UNPACK_HI_EXPR:
8895 case VEC_UNPACK_LO_EXPR:
8897 op0 = expand_normal (treeop0);
8898 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8899 target, unsignedp);
8900 gcc_assert (temp);
8901 return temp;
8904 case VEC_UNPACK_FLOAT_HI_EXPR:
8905 case VEC_UNPACK_FLOAT_LO_EXPR:
8907 op0 = expand_normal (treeop0);
8908 /* The signedness is determined from input operand. */
8909 temp = expand_widen_pattern_expr
8910 (ops, op0, NULL_RTX, NULL_RTX,
8911 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8913 gcc_assert (temp);
8914 return temp;
8917 case VEC_WIDEN_MULT_HI_EXPR:
8918 case VEC_WIDEN_MULT_LO_EXPR:
8919 case VEC_WIDEN_MULT_EVEN_EXPR:
8920 case VEC_WIDEN_MULT_ODD_EXPR:
8921 case VEC_WIDEN_LSHIFT_HI_EXPR:
8922 case VEC_WIDEN_LSHIFT_LO_EXPR:
8923 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8924 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8925 target, unsignedp);
8926 gcc_assert (target);
8927 return target;
8929 case VEC_PACK_TRUNC_EXPR:
8930 case VEC_PACK_SAT_EXPR:
8931 case VEC_PACK_FIX_TRUNC_EXPR:
8932 mode = TYPE_MODE (TREE_TYPE (treeop0));
8933 goto binop;
8935 case VEC_PERM_EXPR:
8936 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
8937 op2 = expand_normal (treeop2);
8939 /* Careful here: if the target doesn't support integral vector modes,
8940 a constant selection vector could wind up smooshed into a normal
8941 integral constant. */
8942 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
8944 tree sel_type = TREE_TYPE (treeop2);
8945 enum machine_mode vmode
8946 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
8947 TYPE_VECTOR_SUBPARTS (sel_type));
8948 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
8949 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
8950 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
8952 else
8953 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
8955 temp = expand_vec_perm (mode, op0, op1, op2, target);
8956 gcc_assert (temp);
8957 return temp;
8959 case DOT_PROD_EXPR:
8961 tree oprnd0 = treeop0;
8962 tree oprnd1 = treeop1;
8963 tree oprnd2 = treeop2;
8964 rtx op2;
8966 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8967 op2 = expand_normal (oprnd2);
8968 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8969 target, unsignedp);
8970 return target;
8973 case REALIGN_LOAD_EXPR:
8975 tree oprnd0 = treeop0;
8976 tree oprnd1 = treeop1;
8977 tree oprnd2 = treeop2;
8978 rtx op2;
8980 this_optab = optab_for_tree_code (code, type, optab_default);
8981 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8982 op2 = expand_normal (oprnd2);
8983 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8984 target, unsignedp);
8985 gcc_assert (temp);
8986 return temp;
8989 case COND_EXPR:
8990 /* A COND_EXPR with its type being VOID_TYPE represents a
8991 conditional jump and is handled in
8992 expand_gimple_cond_expr. */
8993 gcc_assert (!VOID_TYPE_P (type));
8995 /* Note that COND_EXPRs whose type is a structure or union
8996 are required to be constructed to contain assignments of
8997 a temporary variable, so that we can evaluate them here
8998 for side effect only. If type is void, we must do likewise. */
9000 gcc_assert (!TREE_ADDRESSABLE (type)
9001 && !ignore
9002 && TREE_TYPE (treeop1) != void_type_node
9003 && TREE_TYPE (treeop2) != void_type_node);
9005 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9006 if (temp)
9007 return temp;
9009 /* If we are not to produce a result, we have no target. Otherwise,
9010 if a target was specified use it; it will not be used as an
9011 intermediate target unless it is safe. If no target, use a
9012 temporary. */
9014 if (modifier != EXPAND_STACK_PARM
9015 && original_target
9016 && safe_from_p (original_target, treeop0, 1)
9017 && GET_MODE (original_target) == mode
9018 && !MEM_P (original_target))
9019 temp = original_target;
9020 else
9021 temp = assign_temp (type, 0, 1);
9023 do_pending_stack_adjust ();
9024 NO_DEFER_POP;
9025 op0 = gen_label_rtx ();
9026 op1 = gen_label_rtx ();
9027 jumpifnot (treeop0, op0, -1);
9028 store_expr (treeop1, temp,
9029 modifier == EXPAND_STACK_PARM,
9030 false);
9032 emit_jump_insn (gen_jump (op1));
9033 emit_barrier ();
9034 emit_label (op0);
9035 store_expr (treeop2, temp,
9036 modifier == EXPAND_STACK_PARM,
9037 false);
9039 emit_label (op1);
9040 OK_DEFER_POP;
9041 return temp;
9043 case VEC_COND_EXPR:
9044 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9045 return target;
9047 default:
9048 gcc_unreachable ();
9051 /* Here to do an ordinary binary operator. */
9052 binop:
9053 expand_operands (treeop0, treeop1,
9054 subtarget, &op0, &op1, EXPAND_NORMAL);
9055 binop2:
9056 this_optab = optab_for_tree_code (code, type, optab_default);
9057 binop3:
9058 if (modifier == EXPAND_STACK_PARM)
9059 target = 0;
9060 temp = expand_binop (mode, this_optab, op0, op1, target,
9061 unsignedp, OPTAB_LIB_WIDEN);
9062 gcc_assert (temp);
9063 /* Bitwise operations do not need bitfield reduction as we expect their
9064 operands being properly truncated. */
9065 if (code == BIT_XOR_EXPR
9066 || code == BIT_AND_EXPR
9067 || code == BIT_IOR_EXPR)
9068 return temp;
9069 return REDUCE_BIT_FIELD (temp);
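/* Worked example of REDUCE_BIT_FIELD (informal): for an unsigned type of
   precision 3 whose mode is wider (say SImode), 7 + 1 first yields the
   full-mode value 8; reduce_to_bit_field_precision masks it back to 0 so
   the result wraps exactly as a 3-bit field must.  */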
9071 #undef REDUCE_BIT_FIELD
9074 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
9075 enum expand_modifier modifier, rtx *alt_rtl)
9077 rtx op0, op1, temp, decl_rtl;
9078 tree type;
9079 int unsignedp;
9080 enum machine_mode mode;
9081 enum tree_code code = TREE_CODE (exp);
9082 rtx subtarget, original_target;
9083 int ignore;
9084 tree context;
9085 bool reduce_bit_field;
9086 location_t loc = EXPR_LOCATION (exp);
9087 struct separate_ops ops;
9088 tree treeop0, treeop1, treeop2;
9089 tree ssa_name = NULL_TREE;
9090 gimple g;
9092 type = TREE_TYPE (exp);
9093 mode = TYPE_MODE (type);
9094 unsignedp = TYPE_UNSIGNED (type);
9096 treeop0 = treeop1 = treeop2 = NULL_TREE;
9097 if (!VL_EXP_CLASS_P (exp))
9098 switch (TREE_CODE_LENGTH (code))
9100 default:
9101 case 3: treeop2 = TREE_OPERAND (exp, 2);
9102 case 2: treeop1 = TREE_OPERAND (exp, 1);
9103 case 1: treeop0 = TREE_OPERAND (exp, 0);
9104 case 0: break;
9106 ops.code = code;
9107 ops.type = type;
9108 ops.op0 = treeop0;
9109 ops.op1 = treeop1;
9110 ops.op2 = treeop2;
9111 ops.location = loc;
9113 ignore = (target == const0_rtx
9114 || ((CONVERT_EXPR_CODE_P (code)
9115 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9116 && TREE_CODE (type) == VOID_TYPE));
9118 /* An operation in what may be a bit-field type needs the
9119 result to be reduced to the precision of the bit-field type,
9120 which is narrower than that of the type's mode. */
9121 reduce_bit_field = (!ignore
9122 && INTEGRAL_TYPE_P (type)
9123 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9125 /* If we are going to ignore this result, we need only do something
9126 if there is a side-effect somewhere in the expression. If there
9127 is, short-circuit the most common cases here. Note that we must
9128 not call expand_expr with anything but const0_rtx in case this
9129 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9131 if (ignore)
9133 if (! TREE_SIDE_EFFECTS (exp))
9134 return const0_rtx;
9136 /* Ensure we reference a volatile object even if value is ignored, but
9137 don't do this if all we are doing is taking its address. */
9138 if (TREE_THIS_VOLATILE (exp)
9139 && TREE_CODE (exp) != FUNCTION_DECL
9140 && mode != VOIDmode && mode != BLKmode
9141 && modifier != EXPAND_CONST_ADDRESS)
9143 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9144 if (MEM_P (temp))
9145 copy_to_reg (temp);
9146 return const0_rtx;
9149 if (TREE_CODE_CLASS (code) == tcc_unary
9150 || code == BIT_FIELD_REF
9151 || code == COMPONENT_REF
9152 || code == INDIRECT_REF)
9153 return expand_expr (treeop0, const0_rtx, VOIDmode,
9154 modifier);
9156 else if (TREE_CODE_CLASS (code) == tcc_binary
9157 || TREE_CODE_CLASS (code) == tcc_comparison
9158 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9160 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9161 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9162 return const0_rtx;
9165 target = 0;
9168 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9169 target = 0;
9171 /* Use subtarget as the target for operand 0 of a binary operation. */
9172 subtarget = get_subtarget (target);
9173 original_target = target;
9175 switch (code)
9177 case LABEL_DECL:
9179 tree function = decl_function_context (exp);
9181 temp = label_rtx (exp);
9182 temp = gen_rtx_LABEL_REF (Pmode, temp);
9184 if (function != current_function_decl
9185 && function != 0)
9186 LABEL_REF_NONLOCAL_P (temp) = 1;
9188 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9189 return temp;
9192 case SSA_NAME:
9193 /* ??? ivopts calls the expander without any preparation from
9194 out-of-ssa. So fake instructions as if this were an access to the
9195 base variable. This unnecessarily allocates a pseudo; see how we can
9196 reuse it if partition base vars have it set already. */
9197 if (!currently_expanding_to_rtl)
9199 tree var = SSA_NAME_VAR (exp);
9200 if (var && DECL_RTL_SET_P (var))
9201 return DECL_RTL (var);
9202 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9203 LAST_VIRTUAL_REGISTER + 1);
9206 g = get_gimple_for_ssa_name (exp);
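/* G, if non-NULL, is the single-use defining statement recorded by
   TER (temporary expression replacement); expanding its right-hand
   side here lets e.g. "_5 = a_1 + b_2" be expanded directly at the
   point where _5 is used, instead of through a separate pseudo.  */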
9207 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9208 if (g == NULL
9209 && modifier == EXPAND_INITIALIZER
9210 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9211 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9212 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9213 g = SSA_NAME_DEF_STMT (exp);
9214 if (g)
9216 rtx r;
9217 location_t saved_loc = curr_insn_location ();
9219 set_curr_insn_location (gimple_location (g));
9220 r = expand_expr_real (gimple_assign_rhs_to_tree (g), target,
9221 tmode, modifier, NULL);
9222 set_curr_insn_location (saved_loc);
9223 if (REG_P (r) && !REG_EXPR (r))
9224 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9225 return r;
9228 ssa_name = exp;
9229 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9230 exp = SSA_NAME_VAR (ssa_name);
9231 goto expand_decl_rtl;
9233 case PARM_DECL:
9234 case VAR_DECL:
9235 /* If a static var's type was incomplete when the decl was written,
9236 but the type is complete now, lay out the decl now. */
9237 if (DECL_SIZE (exp) == 0
9238 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9239 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9240 layout_decl (exp, 0);
9242 /* ... fall through ... */
9244 case FUNCTION_DECL:
9245 case RESULT_DECL:
9246 decl_rtl = DECL_RTL (exp);
9247 expand_decl_rtl:
9248 gcc_assert (decl_rtl);
9249 decl_rtl = copy_rtx (decl_rtl);
9250 /* Record writes to register variables. */
9251 if (modifier == EXPAND_WRITE
9252 && REG_P (decl_rtl)
9253 && HARD_REGISTER_P (decl_rtl))
9254 add_to_hard_reg_set (&crtl->asm_clobbers,
9255 GET_MODE (decl_rtl), REGNO (decl_rtl));
9257 /* Ensure the variable is marked as used even if it doesn't go
9258 through a parser. If it hasn't been used yet, write out an external
9259 definition. */
9260 TREE_USED (exp) = 1;
9262 /* Show we haven't gotten RTL for this yet. */
9263 temp = 0;
9265 /* Variables inherited from containing functions should have
9266 been lowered by this point. */
9267 context = decl_function_context (exp);
9268 gcc_assert (!context
9269 || context == current_function_decl
9270 || TREE_STATIC (exp)
9271 || DECL_EXTERNAL (exp)
9272 /* ??? C++ creates functions that are not TREE_STATIC. */
9273 || TREE_CODE (exp) == FUNCTION_DECL);
9275 /* This is the case of an array whose size is to be determined
9276 from its initializer, while the initializer is still being parsed.
9277 ??? We aren't parsing while expanding anymore. */
9279 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9280 temp = validize_mem (decl_rtl);
9282 /* If DECL_RTL is memory, we are in the normal case; if the
9283 address is not valid, get the address into a register. */
9285 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9287 if (alt_rtl)
9288 *alt_rtl = decl_rtl;
9289 decl_rtl = use_anchored_address (decl_rtl);
9290 if (modifier != EXPAND_CONST_ADDRESS
9291 && modifier != EXPAND_SUM
9292 && !memory_address_addr_space_p (DECL_MODE (exp),
9293 XEXP (decl_rtl, 0),
9294 MEM_ADDR_SPACE (decl_rtl)))
9295 temp = replace_equiv_address (decl_rtl,
9296 copy_rtx (XEXP (decl_rtl, 0)));
9299 /* If we got something, return it. But first, set the alignment
9300 if the address is a register. */
9301 if (temp != 0)
9303 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9304 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9306 return temp;
9309 /* If the mode of DECL_RTL does not match that of the decl,
9310 there are two cases: we are dealing with a BLKmode value
9311 that is returned in a register, or we are dealing with
9312 a promoted value. In the latter case, return a SUBREG
9313 of the wanted mode, but mark it so that we know that it
9314 was already extended. */
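/* For instance, on a target whose calling convention promotes
   subword arguments to full registers, a "short" PARM_DECL can have
   an SImode DECL_RTL while DECL_MODE is HImode; the
   (subreg:HI (reg:SI ...)) returned below is marked promoted so
   that redundant extensions can be elided later.  */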
9315 if (REG_P (decl_rtl)
9316 && DECL_MODE (exp) != BLKmode
9317 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9319 enum machine_mode pmode;
9321 /* Get the signedness to be used for this variable. Ensure we get
9322 the same mode we got when the variable was declared. */
9323 if (code == SSA_NAME
9324 && (g = SSA_NAME_DEF_STMT (ssa_name))
9325 && gimple_code (g) == GIMPLE_CALL)
9327 gcc_assert (!gimple_call_internal_p (g));
9328 pmode = promote_function_mode (type, mode, &unsignedp,
9329 gimple_call_fntype (g),
9332 else
9333 pmode = promote_decl_mode (exp, &unsignedp);
9334 gcc_assert (GET_MODE (decl_rtl) == pmode);
9336 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9337 SUBREG_PROMOTED_VAR_P (temp) = 1;
9338 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
9339 return temp;
9342 return decl_rtl;
9344 case INTEGER_CST:
9345 temp = immed_double_const (TREE_INT_CST_LOW (exp),
9346 TREE_INT_CST_HIGH (exp), mode);
9348 return temp;
9350 case VECTOR_CST:
9352 tree tmp = NULL_TREE;
9353 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9354 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9355 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9356 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9357 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9358 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9359 return const_vector_from_tree (exp);
9360 if (GET_MODE_CLASS (mode) == MODE_INT)
9362 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9363 if (type_for_mode)
9364 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9366 if (!tmp)
9368 VEC(constructor_elt,gc) *v;
9369 unsigned i;
9370 v = VEC_alloc (constructor_elt, gc, VECTOR_CST_NELTS (exp));
9371 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9372 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9373 tmp = build_constructor (type, v);
9375 return expand_expr (tmp, ignore ? const0_rtx : target,
9376 tmode, modifier);
9379 case CONST_DECL:
9380 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9382 case REAL_CST:
9383 /* If optimized, generate immediate CONST_DOUBLE
9384 which will be turned into memory by reload if necessary.
9386 We used to force a register so that loop.c could see it. But
9387 this does not allow gen_* patterns to perform optimizations with
9388 the constants. It also produces two insns in cases like "x = 1.0;".
9389 On most machines, floating-point constants are not permitted in
9390 many insns, so we'd end up copying it to a register in any case.
9392 Now, we do the copying in expand_binop, if appropriate. */
9393 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9394 TYPE_MODE (TREE_TYPE (exp)));
9396 case FIXED_CST:
9397 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9398 TYPE_MODE (TREE_TYPE (exp)));
9400 case COMPLEX_CST:
9401 /* Handle evaluating a complex constant in a CONCAT target. */
9402 if (original_target && GET_CODE (original_target) == CONCAT)
9404 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9405 rtx rtarg, itarg;
9407 rtarg = XEXP (original_target, 0);
9408 itarg = XEXP (original_target, 1);
9410 /* Move the real and imaginary parts separately. */
9411 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9412 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9414 if (op0 != rtarg)
9415 emit_move_insn (rtarg, op0);
9416 if (op1 != itarg)
9417 emit_move_insn (itarg, op1);
9419 return original_target;
9422 /* ... fall through ... */
9424 case STRING_CST:
9425 temp = expand_expr_constant (exp, 1, modifier);
9427 /* temp contains a constant address.
9428 On RISC machines where a constant address isn't valid,
9429 make some insns to get that address into a register. */
9430 if (modifier != EXPAND_CONST_ADDRESS
9431 && modifier != EXPAND_INITIALIZER
9432 && modifier != EXPAND_SUM
9433 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9434 MEM_ADDR_SPACE (temp)))
9435 return replace_equiv_address (temp,
9436 copy_rtx (XEXP (temp, 0)));
9437 return temp;
9439 case SAVE_EXPR:
9441 tree val = treeop0;
9442 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
9444 if (!SAVE_EXPR_RESOLVED_P (exp))
9446 /* We can indeed still hit this case, typically via builtin
9447 expanders calling save_expr immediately before expanding
9448 something. Assume this means that we only have to deal
9449 with non-BLKmode values. */
9450 gcc_assert (GET_MODE (ret) != BLKmode);
9452 val = build_decl (curr_insn_location (),
9453 VAR_DECL, NULL, TREE_TYPE (exp));
9454 DECL_ARTIFICIAL (val) = 1;
9455 DECL_IGNORED_P (val) = 1;
9456 treeop0 = val;
9457 TREE_OPERAND (exp, 0) = treeop0;
9458 SAVE_EXPR_RESOLVED_P (exp) = 1;
9460 if (!CONSTANT_P (ret))
9461 ret = copy_to_reg (ret);
9462 SET_DECL_RTL (val, ret);
9465 return ret;
9469 case CONSTRUCTOR:
9470 /* If we don't need the result, just ensure we evaluate any
9471 subexpressions. */
9472 if (ignore)
9474 unsigned HOST_WIDE_INT idx;
9475 tree value;
9477 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9478 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9480 return const0_rtx;
9483 return expand_constructor (exp, target, modifier, false);
9485 case TARGET_MEM_REF:
9487 addr_space_t as
9488 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9489 struct mem_address addr;
9490 enum insn_code icode;
9491 unsigned int align;
9493 get_address_description (exp, &addr);
9494 op0 = addr_for_mem_ref (&addr, as, true);
9495 op0 = memory_address_addr_space (mode, op0, as);
9496 temp = gen_rtx_MEM (mode, op0);
9497 set_mem_attributes (temp, exp, 0);
9498 set_mem_addr_space (temp, as);
9499 align = get_object_alignment (exp);
9500 if (modifier != EXPAND_WRITE
9501 && mode != BLKmode
9502 && align < GET_MODE_ALIGNMENT (mode)
9503 /* If the target does not have special handling for unaligned
9504 loads of this mode, then it can use regular moves for them. */
9505 && ((icode = optab_handler (movmisalign_optab, mode))
9506 != CODE_FOR_nothing))
9508 struct expand_operand ops[2];
9510 /* We've already validated the memory, and we're creating a
9511 new pseudo destination. The predicates really can't fail,
9512 nor can the generator. */
9513 create_output_operand (&ops[0], NULL_RTX, mode);
9514 create_fixed_operand (&ops[1], temp);
9515 expand_insn (icode, 2, ops);
9516 return ops[0].value;
9518 return temp;
9521 case MEM_REF:
9523 addr_space_t as
9524 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9525 enum machine_mode address_mode;
9526 tree base = TREE_OPERAND (exp, 0);
9527 gimple def_stmt;
9528 enum insn_code icode;
9529 unsigned align;
9530 /* Handle expansion of non-aliased memory with non-BLKmode. That
9531 might end up in a register. */
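/* For instance, a MEM_REF whose base is the address of a local that
   was never marked addressable (so it lives in a pseudo, not in
   memory) is rewritten below as a VIEW_CONVERT_EXPR (whole-object
   access) or a BIT_FIELD_REF of that variable, or, for a BLKmode
   piece, copied through a stack temporary.  */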
9532 if (mem_ref_refers_to_non_mem_p (exp))
9534 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
9535 tree bit_offset;
9536 tree bftype;
9537 base = TREE_OPERAND (base, 0);
9538 if (offset == 0
9539 && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
9540 && (GET_MODE_BITSIZE (DECL_MODE (base))
9541 == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
9542 return expand_expr (build1 (VIEW_CONVERT_EXPR,
9543 TREE_TYPE (exp), base),
9544 target, tmode, modifier);
9545 bit_offset = bitsize_int (offset * BITS_PER_UNIT);
9546 bftype = TREE_TYPE (base);
9547 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
9548 bftype = TREE_TYPE (exp);
9549 else
9551 temp = assign_stack_temp (DECL_MODE (base),
9552 GET_MODE_SIZE (DECL_MODE (base)));
9553 store_expr (base, temp, 0, false);
9554 temp = adjust_address (temp, BLKmode, offset);
9555 set_mem_size (temp, int_size_in_bytes (TREE_TYPE (exp)));
9556 return temp;
9558 return expand_expr (build3 (BIT_FIELD_REF, bftype,
9559 base,
9560 TYPE_SIZE (TREE_TYPE (exp)),
9561 bit_offset),
9562 target, tmode, modifier);
9564 address_mode = targetm.addr_space.address_mode (as);
9565 base = TREE_OPERAND (exp, 0);
9566 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9568 tree mask = gimple_assign_rhs2 (def_stmt);
9569 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9570 gimple_assign_rhs1 (def_stmt), mask);
9571 TREE_OPERAND (exp, 0) = base;
9573 align = get_object_alignment (exp);
9574 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9575 op0 = memory_address_addr_space (address_mode, op0, as);
9576 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9578 rtx off
9579 = immed_double_int_const (mem_ref_offset (exp), address_mode);
9580 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9582 op0 = memory_address_addr_space (mode, op0, as);
9583 temp = gen_rtx_MEM (mode, op0);
9584 set_mem_attributes (temp, exp, 0);
9585 set_mem_addr_space (temp, as);
9586 if (TREE_THIS_VOLATILE (exp))
9587 MEM_VOLATILE_P (temp) = 1;
9588 if (modifier != EXPAND_WRITE
9589 && mode != BLKmode
9590 && align < GET_MODE_ALIGNMENT (mode))
9592 if ((icode = optab_handler (movmisalign_optab, mode))
9593 != CODE_FOR_nothing)
9595 struct expand_operand ops[2];
9597 /* We've already validated the memory, and we're creating a
9598 new pseudo destination. The predicates really can't fail,
9599 nor can the generator. */
9600 create_output_operand (&ops[0], NULL_RTX, mode);
9601 create_fixed_operand (&ops[1], temp);
9602 expand_insn (icode, 2, ops);
9603 return ops[0].value;
9605 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9606 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9607 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9608 true, (modifier == EXPAND_STACK_PARM
9609 ? NULL_RTX : target),
9610 mode, mode);
9612 return temp;
9615 case ARRAY_REF:
9618 tree array = treeop0;
9619 tree index = treeop1;
9621 /* Fold an expression like: "foo"[2].
9622 This is not done in fold so it won't happen inside &.
9623 Don't fold if this is for wide characters since it's too
9624 difficult to do correctly and this is a very rare case. */
9626 if (modifier != EXPAND_CONST_ADDRESS
9627 && modifier != EXPAND_INITIALIZER
9628 && modifier != EXPAND_MEMORY)
9630 tree t = fold_read_from_constant_string (exp);
9632 if (t)
9633 return expand_expr (t, target, tmode, modifier);
9636 /* If this is a constant index into a constant array,
9637 just get the value from the array. Handle both the cases when
9638 we have an explicit constructor and when our operand is a variable
9639 that was declared const. */
9641 if (modifier != EXPAND_CONST_ADDRESS
9642 && modifier != EXPAND_INITIALIZER
9643 && modifier != EXPAND_MEMORY
9644 && TREE_CODE (array) == CONSTRUCTOR
9645 && ! TREE_SIDE_EFFECTS (array)
9646 && TREE_CODE (index) == INTEGER_CST)
9648 unsigned HOST_WIDE_INT ix;
9649 tree field, value;
9651 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9652 field, value)
9653 if (tree_int_cst_equal (field, index))
9655 if (!TREE_SIDE_EFFECTS (value))
9656 return expand_expr (fold (value), target, tmode, modifier);
9657 break;
9661 else if (optimize >= 1
9662 && modifier != EXPAND_CONST_ADDRESS
9663 && modifier != EXPAND_INITIALIZER
9664 && modifier != EXPAND_MEMORY
9665 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9666 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
9667 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
9668 && const_value_known_p (array))
9670 if (TREE_CODE (index) == INTEGER_CST)
9672 tree init = DECL_INITIAL (array);
9674 if (TREE_CODE (init) == CONSTRUCTOR)
9676 unsigned HOST_WIDE_INT ix;
9677 tree field, value;
9679 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9680 field, value)
9681 if (tree_int_cst_equal (field, index))
9683 if (TREE_SIDE_EFFECTS (value))
9684 break;
9686 if (TREE_CODE (value) == CONSTRUCTOR)
9688 /* If VALUE is a CONSTRUCTOR, this
9689 optimization is only useful if
9690 this doesn't store the CONSTRUCTOR
9691 into memory. If it does, it is more
9692 efficient to just load the data from
9693 the array directly. */
9694 rtx ret = expand_constructor (value, target,
9695 modifier, true);
9696 if (ret == NULL_RTX)
9697 break;
9700 return expand_expr (fold (value), target, tmode,
9701 modifier);
9704 else if (TREE_CODE (init) == STRING_CST)
9706 tree index1 = index;
9707 tree low_bound = array_ref_low_bound (exp);
9708 index1 = fold_convert_loc (loc, sizetype,
9709 treeop1);
9711 /* Optimize the special-case of a zero lower bound.
9713 We convert the low_bound to sizetype to avoid some problems
9714 with constant folding. (E.g. suppose the lower bound is 1,
9715 and its mode is QI. Without the conversion, (ARRAY
9716 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
9717 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
9719 if (! integer_zerop (low_bound))
9720 index1 = size_diffop_loc (loc, index1,
9721 fold_convert_loc (loc, sizetype,
9722 low_bound));
9724 if (0 > compare_tree_int (index1,
9725 TREE_STRING_LENGTH (init)))
9727 tree type = TREE_TYPE (TREE_TYPE (init));
9728 enum machine_mode mode = TYPE_MODE (type);
9730 if (GET_MODE_CLASS (mode) == MODE_INT
9731 && GET_MODE_SIZE (mode) == 1)
9732 return gen_int_mode (TREE_STRING_POINTER (init)
9733 [TREE_INT_CST_LOW (index1)],
9734 mode);
9740 goto normal_inner_ref;
9742 case COMPONENT_REF:
9743 /* If the operand is a CONSTRUCTOR, we can just extract the
9744 appropriate field if it is present. */
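/* E.g. given a CONSTRUCTOR {.f = 5} where F is a 3-bit bit-field,
   the code below expands the element value and then either masks it
   with (1 << 3) - 1 for an unsigned field, or shifts it left and
   then arithmetically right to sign-extend within the field width
   for a signed one.  */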
9745 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9747 unsigned HOST_WIDE_INT idx;
9748 tree field, value;
9750 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9751 idx, field, value)
9752 if (field == treeop1
9753 /* We can normally use the value of the field in the
9754 CONSTRUCTOR. However, if this is a bitfield in
9755 an integral mode that we can fit in a HOST_WIDE_INT,
9756 we must mask only the number of bits in the bitfield,
9757 since this is done implicitly by the constructor. If
9758 the bitfield does not meet either of those conditions,
9759 we can't do this optimization. */
9760 && (! DECL_BIT_FIELD (field)
9761 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9762 && (GET_MODE_PRECISION (DECL_MODE (field))
9763 <= HOST_BITS_PER_WIDE_INT))))
9765 if (DECL_BIT_FIELD (field)
9766 && modifier == EXPAND_STACK_PARM)
9767 target = 0;
9768 op0 = expand_expr (value, target, tmode, modifier);
9769 if (DECL_BIT_FIELD (field))
9771 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9772 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9774 if (TYPE_UNSIGNED (TREE_TYPE (field)))
9776 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
9777 op0 = expand_and (imode, op0, op1, target);
9779 else
9781 int count = GET_MODE_PRECISION (imode) - bitsize;
9783 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9784 target, 0);
9785 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9786 target, 0);
9790 return op0;
9793 goto normal_inner_ref;
9795 case BIT_FIELD_REF:
9796 case ARRAY_RANGE_REF:
9797 normal_inner_ref:
9799 enum machine_mode mode1, mode2;
9800 HOST_WIDE_INT bitsize, bitpos;
9801 tree offset;
9802 int volatilep = 0, must_force_mem;
9803 bool packedp = false;
9804 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9805 &mode1, &unsignedp, &volatilep, true);
9806 rtx orig_op0, memloc;
9807 bool mem_attrs_from_type = false;
9809 /* If we got back the original object, something is wrong. Perhaps
9810 we are evaluating an expression too early. In any event, don't
9811 infinitely recurse. */
9812 gcc_assert (tem != exp);
9814 if (TYPE_PACKED (TREE_TYPE (TREE_OPERAND (exp, 0)))
9815 || (TREE_CODE (TREE_OPERAND (exp, 1)) == FIELD_DECL
9816 && DECL_PACKED (TREE_OPERAND (exp, 1))))
9817 packedp = true;
9819 /* If TEM's type is a union of variable size, pass TARGET to the inner
9820 computation, since it will need a temporary and TARGET is known
9821 to be usable as one. This occurs in unchecked conversion in Ada. */
9822 orig_op0 = op0
9823 = expand_expr (tem,
9824 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9825 && COMPLETE_TYPE_P (TREE_TYPE (tem))
9826 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9827 != INTEGER_CST)
9828 && modifier != EXPAND_STACK_PARM
9829 ? target : NULL_RTX),
9830 VOIDmode,
9831 (modifier == EXPAND_INITIALIZER
9832 || modifier == EXPAND_CONST_ADDRESS
9833 || modifier == EXPAND_STACK_PARM)
9834 ? modifier : EXPAND_NORMAL);
9837 /* If the bitfield is volatile, we want to access it in the
9838 field's mode, not the computed mode.
9839 If a MEM has VOIDmode (external with incomplete type),
9840 use BLKmode for it instead. */
9841 if (MEM_P (op0))
9843 if (volatilep && flag_strict_volatile_bitfields > 0)
9844 op0 = adjust_address (op0, mode1, 0);
9845 else if (GET_MODE (op0) == VOIDmode)
9846 op0 = adjust_address (op0, BLKmode, 0);
9849 mode2
9850 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9852 /* If we have either an offset, a BLKmode result, or a reference
9853 outside the underlying object, we must force it to memory.
9854 Such a case can occur in Ada if we have unchecked conversion
9855 of an expression from a scalar type to an aggregate type or
9856 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9857 passed a partially uninitialized object or a view-conversion
9858 to a larger size. */
9859 must_force_mem = (offset
9860 || mode1 == BLKmode
9861 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9863 /* Handle CONCAT first. */
9864 if (GET_CODE (op0) == CONCAT && !must_force_mem)
9866 if (bitpos == 0
9867 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9868 return op0;
9869 if (bitpos == 0
9870 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9871 && bitsize)
9873 op0 = XEXP (op0, 0);
9874 mode2 = GET_MODE (op0);
9876 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9877 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9878 && bitpos
9879 && bitsize)
9881 op0 = XEXP (op0, 1);
9882 bitpos = 0;
9883 mode2 = GET_MODE (op0);
9885 else
9886 /* Otherwise force into memory. */
9887 must_force_mem = 1;
9890 /* If this is a constant, put it in a register if it is a legitimate
9891 constant and we don't need a memory reference. */
9892 if (CONSTANT_P (op0)
9893 && mode2 != BLKmode
9894 && targetm.legitimate_constant_p (mode2, op0)
9895 && !must_force_mem)
9896 op0 = force_reg (mode2, op0);
9898 /* Otherwise, if this is a constant, try to force it to the constant
9899 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9900 is a legitimate constant. */
9901 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9902 op0 = validize_mem (memloc);
9904 /* Otherwise, if this is a constant, or if the object is not in
9905 memory but needs to be, put it there. */
9906 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9908 tree nt = build_qualified_type (TREE_TYPE (tem),
9909 (TYPE_QUALS (TREE_TYPE (tem))
9910 | TYPE_QUAL_CONST));
9911 memloc = assign_temp (nt, 1, 1);
9912 emit_move_insn (memloc, op0);
9913 op0 = memloc;
9914 mem_attrs_from_type = true;
9917 if (offset)
9919 enum machine_mode address_mode;
9920 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9921 EXPAND_SUM);
9923 gcc_assert (MEM_P (op0));
9925 address_mode = get_address_mode (op0);
9926 if (GET_MODE (offset_rtx) != address_mode)
9927 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9929 if (GET_MODE (op0) == BLKmode
9930 /* A constant address in OP0 can have VOIDmode; we must
9931 not try to call force_reg in that case. */
9932 && GET_MODE (XEXP (op0, 0)) != VOIDmode
9933 && bitsize != 0
9934 && (bitpos % bitsize) == 0
9935 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9936 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9938 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9939 bitpos = 0;
9942 op0 = offset_address (op0, offset_rtx,
9943 highest_pow2_factor (offset));
9946 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9947 record its alignment as BIGGEST_ALIGNMENT. */
9948 if (MEM_P (op0) && bitpos == 0 && offset != 0
9949 && is_aligning_offset (offset, tem))
9950 set_mem_align (op0, BIGGEST_ALIGNMENT);
9952 /* Don't forget about volatility even if this is a bitfield. */
9953 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9955 if (op0 == orig_op0)
9956 op0 = copy_rtx (op0);
9958 MEM_VOLATILE_P (op0) = 1;
9961 /* In cases where an aligned union has an unaligned object
9962 as a field, we might be extracting a BLKmode value from
9963 an integer-mode (e.g., SImode) object. Handle this case
9964 by doing the extract into an object as wide as the field
9965 (which we know to be the width of a basic mode), then
9966 storing into memory, and changing the mode to BLKmode. */
9967 if (mode1 == VOIDmode
9968 || REG_P (op0) || GET_CODE (op0) == SUBREG
9969 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9970 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9971 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9972 && modifier != EXPAND_CONST_ADDRESS
9973 && modifier != EXPAND_INITIALIZER)
9974 /* If the field is volatile, we always want an aligned
9975 access. Do this in the following two situations:
9976 1. The access is not already naturally
9977 aligned; otherwise "normal" (non-bitfield) volatile fields
9978 would become non-addressable.
9979 2. The bitsize is narrower than the access size, so we need
9980 to extract bitfields from the access. */
9981 || (volatilep && flag_strict_volatile_bitfields > 0
9982 && (bitpos % GET_MODE_ALIGNMENT (mode) != 0
9983 || (mode1 != BLKmode
9984 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)))
9985 /* If the field isn't aligned enough to fetch as a memref,
9986 fetch it as a bit field. */
9987 || (mode1 != BLKmode
9988 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9989 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9990 || (MEM_P (op0)
9991 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9992 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9993 && ((modifier == EXPAND_CONST_ADDRESS
9994 || modifier == EXPAND_INITIALIZER)
9995 ? STRICT_ALIGNMENT
9996 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9997 || (bitpos % BITS_PER_UNIT != 0)))
9998 /* If the type and the field are a constant size and the
9999 size of the type isn't the same size as the bitfield,
10000 we must use bitfield operations. */
10001 || (bitsize >= 0
10002 && TYPE_SIZE (TREE_TYPE (exp))
10003 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10004 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10005 bitsize)))
10007 enum machine_mode ext_mode = mode;
10009 if (ext_mode == BLKmode
10010 && ! (target != 0 && MEM_P (op0)
10011 && MEM_P (target)
10012 && bitpos % BITS_PER_UNIT == 0))
10013 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10015 if (ext_mode == BLKmode)
10017 if (target == 0)
10018 target = assign_temp (type, 1, 1);
10020 if (bitsize == 0)
10021 return target;
10023 /* In this case, BITPOS must start at a byte boundary and
10024 TARGET, if specified, must be a MEM. */
10025 gcc_assert (MEM_P (op0)
10026 && (!target || MEM_P (target))
10027 && !(bitpos % BITS_PER_UNIT));
10029 emit_block_move (target,
10030 adjust_address (op0, VOIDmode,
10031 bitpos / BITS_PER_UNIT),
10032 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10033 / BITS_PER_UNIT),
10034 (modifier == EXPAND_STACK_PARM
10035 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10037 return target;
10040 op0 = validize_mem (op0);
10042 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10043 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10045 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, packedp,
10046 (modifier == EXPAND_STACK_PARM
10047 ? NULL_RTX : target),
10048 ext_mode, ext_mode);
10050 /* If the result is a record type and BITSIZE is narrower than
10051 the mode of OP0, an integral mode, and this is a big endian
10052 machine, we must put the field into the high-order bits. */
10053 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10054 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10055 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10056 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10057 GET_MODE_BITSIZE (GET_MODE (op0))
10058 - bitsize, op0, 1);
10060 /* If the result type is BLKmode, store the data into a temporary
10061 of the appropriate type, but with the mode corresponding to the
10062 mode for the data we have (op0's mode). It's tempting to make
10063 this a constant type, since we know it's only being stored once,
10064 but that can cause problems if we are taking the address of this
10065 COMPONENT_REF because the MEM of any reference via that address
10066 will have flags corresponding to the type, which will not
10067 necessarily be constant. */
10068 if (mode == BLKmode)
10070 rtx new_rtx;
10072 new_rtx = assign_stack_temp_for_type (ext_mode,
10073 GET_MODE_BITSIZE (ext_mode),
10074 type);
10075 emit_move_insn (new_rtx, op0);
10076 op0 = copy_rtx (new_rtx);
10077 PUT_MODE (op0, BLKmode);
10080 return op0;
10083 /* If the result is BLKmode, use that to access the object
10084 now as well. */
10085 if (mode == BLKmode)
10086 mode1 = BLKmode;
10088 /* Get a reference to just this component. */
10089 if (modifier == EXPAND_CONST_ADDRESS
10090 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10091 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10092 else
10093 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10095 if (op0 == orig_op0)
10096 op0 = copy_rtx (op0);
10098 /* If op0 is a temporary because of forcing to memory, pass only the
10099 type to set_mem_attributes so that the original expression is never
10100 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10101 if (mem_attrs_from_type)
10102 set_mem_attributes (op0, type, 0);
10103 else
10104 set_mem_attributes (op0, exp, 0);
10106 if (REG_P (XEXP (op0, 0)))
10107 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10109 MEM_VOLATILE_P (op0) |= volatilep;
10110 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10111 || modifier == EXPAND_CONST_ADDRESS
10112 || modifier == EXPAND_INITIALIZER)
10113 return op0;
10114 else if (target == 0)
10115 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10117 convert_move (target, op0, unsignedp);
10118 return target;
10121 case OBJ_TYPE_REF:
10122 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10124 case CALL_EXPR:
10125 /* All valid uses of __builtin_va_arg_pack () are removed during
10126 inlining. */
10127 if (CALL_EXPR_VA_ARG_PACK (exp))
10128 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10130 tree fndecl = get_callee_fndecl (exp), attr;
10132 if (fndecl
10133 && (attr = lookup_attribute ("error",
10134 DECL_ATTRIBUTES (fndecl))) != NULL)
10135 error ("%Kcall to %qs declared with attribute error: %s",
10136 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10137 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10138 if (fndecl
10139 && (attr = lookup_attribute ("warning",
10140 DECL_ATTRIBUTES (fndecl))) != NULL)
10141 warning_at (tree_nonartificial_location (exp),
10142 0, "%Kcall to %qs declared with attribute warning: %s",
10143 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10144 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10146 /* Check for a built-in function. */
10147 if (fndecl && DECL_BUILT_IN (fndecl))
10149 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10150 return expand_builtin (exp, target, subtarget, tmode, ignore);
10153 return expand_call (exp, target, ignore);
10155 case VIEW_CONVERT_EXPR:
10156 op0 = NULL_RTX;
10158 /* If we are converting to BLKmode, try to avoid an intermediate
10159 temporary by fetching an inner memory reference. */
10160 if (mode == BLKmode
10161 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10162 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10163 && handled_component_p (treeop0))
10165 enum machine_mode mode1;
10166 HOST_WIDE_INT bitsize, bitpos;
10167 tree offset;
10168 int unsignedp;
10169 int volatilep = 0;
10170 tree tem
10171 = get_inner_reference (treeop0, &bitsize, &bitpos,
10172 &offset, &mode1, &unsignedp, &volatilep,
10173 true);
10174 rtx orig_op0;
10176 /* ??? We should work harder and deal with non-zero offsets. */
10177 if (!offset
10178 && (bitpos % BITS_PER_UNIT) == 0
10179 && bitsize >= 0
10180 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
10182 /* See the normal_inner_ref case for the rationale. */
10183 orig_op0
10184 = expand_expr (tem,
10185 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10186 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10187 != INTEGER_CST)
10188 && modifier != EXPAND_STACK_PARM
10189 ? target : NULL_RTX),
10190 VOIDmode,
10191 (modifier == EXPAND_INITIALIZER
10192 || modifier == EXPAND_CONST_ADDRESS
10193 || modifier == EXPAND_STACK_PARM)
10194 ? modifier : EXPAND_NORMAL);
10196 if (MEM_P (orig_op0))
10198 op0 = orig_op0;
10200 /* Get a reference to just this component. */
10201 if (modifier == EXPAND_CONST_ADDRESS
10202 || modifier == EXPAND_SUM
10203 || modifier == EXPAND_INITIALIZER)
10204 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10205 else
10206 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10208 if (op0 == orig_op0)
10209 op0 = copy_rtx (op0);
10211 set_mem_attributes (op0, treeop0, 0);
10212 if (REG_P (XEXP (op0, 0)))
10213 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10215 MEM_VOLATILE_P (op0) |= volatilep;
10220 if (!op0)
10221 op0 = expand_expr (treeop0,
10222 NULL_RTX, VOIDmode, modifier);
10224 /* If the input and output modes are both the same, we are done. */
10225 if (mode == GET_MODE (op0))
10227 /* If neither mode is BLKmode and both modes are the same size,
10228 then we can use gen_lowpart. */
10229 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10230 && (GET_MODE_PRECISION (mode)
10231 == GET_MODE_PRECISION (GET_MODE (op0)))
10232 && !COMPLEX_MODE_P (GET_MODE (op0)))
10234 if (GET_CODE (op0) == SUBREG)
10235 op0 = force_reg (GET_MODE (op0), op0);
10236 temp = gen_lowpart_common (mode, op0);
10237 if (temp)
10238 op0 = temp;
10239 else
10241 if (!REG_P (op0) && !MEM_P (op0))
10242 op0 = force_reg (GET_MODE (op0), op0);
10243 op0 = gen_lowpart (mode, op0);
10246 /* If both types are integral, convert from one mode to the other. */
10247 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10248 op0 = convert_modes (mode, GET_MODE (op0), op0,
10249 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10250 /* As a last resort, spill op0 to memory, and reload it in a
10251 different mode. */
10252 else if (!MEM_P (op0))
10254 /* If the operand is not a MEM, force it into memory. Since we
10255 are going to be changing the mode of the MEM, don't call
10256 force_const_mem for constants because we don't allow pool
10257 constants to change mode. */
10258 tree inner_type = TREE_TYPE (treeop0);
10260 gcc_assert (!TREE_ADDRESSABLE (exp));
10262 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10263 target
10264 = assign_stack_temp_for_type
10265 (TYPE_MODE (inner_type),
10266 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10268 emit_move_insn (target, op0);
10269 op0 = target;
10272 /* At this point, OP0 is in the correct mode. If the output type is
10273 such that the operand is known to be aligned, indicate that it is.
10274 Otherwise, we need only be concerned about alignment for non-BLKmode
10275 results. */
10276 if (MEM_P (op0))
10278 enum insn_code icode;
10280 if (TYPE_ALIGN_OK (type))
10282 /* ??? Copying the MEM without substantially changing it might
10283 run afoul of the code handling volatile memory references in
10284 store_expr, which assumes that TARGET is returned unmodified
10285 if it has been used. */
10286 op0 = copy_rtx (op0);
10287 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10289 else if (mode != BLKmode
10290 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode)
10291 /* If the target does have special handling for unaligned
10292 loads of this mode, then use them. */
10293 && ((icode = optab_handler (movmisalign_optab, mode))
10294 != CODE_FOR_nothing))
10296 rtx reg, insn;
10298 op0 = adjust_address (op0, mode, 0);
10299 /* We've already validated the memory, and we're creating a
10300 new pseudo destination. The predicates really can't
10301 fail. */
10302 reg = gen_reg_rtx (mode);
10304 /* Nor can the insn generator. */
10305 insn = GEN_FCN (icode) (reg, op0);
10306 emit_insn (insn);
10307 return reg;
10309 else if (STRICT_ALIGNMENT
10310 && mode != BLKmode
10311 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10313 tree inner_type = TREE_TYPE (treeop0);
10314 HOST_WIDE_INT temp_size
10315 = MAX (int_size_in_bytes (inner_type),
10316 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10317 rtx new_rtx
10318 = assign_stack_temp_for_type (mode, temp_size, type);
10319 rtx new_with_op0_mode
10320 = adjust_address (new_rtx, GET_MODE (op0), 0);
10322 gcc_assert (!TREE_ADDRESSABLE (exp));
10324 if (GET_MODE (op0) == BLKmode)
10325 emit_block_move (new_with_op0_mode, op0,
10326 GEN_INT (GET_MODE_SIZE (mode)),
10327 (modifier == EXPAND_STACK_PARM
10328 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10329 else
10330 emit_move_insn (new_with_op0_mode, op0);
10332 op0 = new_rtx;
10335 op0 = adjust_address (op0, mode, 0);
10338 return op0;
10340 case MODIFY_EXPR:
10342 tree lhs = treeop0;
10343 tree rhs = treeop1;
10344 gcc_assert (ignore);
10346 /* Check for |= or &= of a bitfield of size 1 into another bitfield
10347 of size 1. In this case, (unless we need the result of the
10348 assignment) we can do this more efficiently with a
10349 test followed by an assignment, if necessary.
10351 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10352 things change so we do, this code should be enhanced to
10353 support it. */
10354 if (TREE_CODE (lhs) == COMPONENT_REF
10355 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10356 || TREE_CODE (rhs) == BIT_AND_EXPR)
10357 && TREE_OPERAND (rhs, 0) == lhs
10358 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10359 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10360 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10362 rtx label = gen_label_rtx ();
10363 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10364 do_jump (TREE_OPERAND (rhs, 1),
10365 value ? label : 0,
10366 value ? 0 : label, -1);
10367 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10368 false);
10369 do_pending_stack_adjust ();
10370 emit_label (label);
10371 return const0_rtx;
10374 expand_assignment (lhs, rhs, false);
10375 return const0_rtx;
10378 case ADDR_EXPR:
10379 return expand_expr_addr_expr (exp, target, tmode, modifier);
10381 case REALPART_EXPR:
10382 op0 = expand_normal (treeop0);
10383 return read_complex_part (op0, false);
10385 case IMAGPART_EXPR:
10386 op0 = expand_normal (treeop0);
10387 return read_complex_part (op0, true);
10389 case RETURN_EXPR:
10390 case LABEL_EXPR:
10391 case GOTO_EXPR:
10392 case SWITCH_EXPR:
10393 case ASM_EXPR:
10394 /* Expanded in cfgexpand.c. */
10395 gcc_unreachable ();
10397 case TRY_CATCH_EXPR:
10398 case CATCH_EXPR:
10399 case EH_FILTER_EXPR:
10400 case TRY_FINALLY_EXPR:
10401 /* Lowered by tree-eh.c. */
10402 gcc_unreachable ();
10404 case WITH_CLEANUP_EXPR:
10405 case CLEANUP_POINT_EXPR:
10406 case TARGET_EXPR:
10407 case CASE_LABEL_EXPR:
10408 case VA_ARG_EXPR:
10409 case BIND_EXPR:
10410 case INIT_EXPR:
10411 case CONJ_EXPR:
10412 case COMPOUND_EXPR:
10413 case PREINCREMENT_EXPR:
10414 case PREDECREMENT_EXPR:
10415 case POSTINCREMENT_EXPR:
10416 case POSTDECREMENT_EXPR:
10417 case LOOP_EXPR:
10418 case EXIT_EXPR:
10419 case COMPOUND_LITERAL_EXPR:
10420 /* Lowered by gimplify.c. */
10421 gcc_unreachable ();
10423 case FDESC_EXPR:
10424 /* Function descriptors are not valid except as
10425 initialization constants, and should not be expanded. */
10426 gcc_unreachable ();
10428 case WITH_SIZE_EXPR:
10429 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10430 have pulled out the size to use in whatever context it needed. */
10431 return expand_expr_real (treeop0, original_target, tmode,
10432 modifier, alt_rtl);
10434 default:
10435 return expand_expr_real_2 (&ops, target, tmode, modifier);
10439 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10440 signedness of TYPE), possibly returning the result in TARGET. */
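/* E.g. for a 3-bit unsigned TYPE whose value sits in SImode this
   ANDs EXP with 7; for a 3-bit signed TYPE it shifts left by 29 and
   then arithmetic-shifts right by 29 (with a 32-bit SImode) so the
   value is sign-extended from bit 2.  */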
10441 static rtx
10442 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10444 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10445 if (target && GET_MODE (target) != GET_MODE (exp))
10446 target = 0;
10447 /* For constant values, reduce using build_int_cst_type. */
10448 if (CONST_INT_P (exp))
10450 HOST_WIDE_INT value = INTVAL (exp);
10451 tree t = build_int_cst_type (type, value);
10452 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10454 else if (TYPE_UNSIGNED (type))
10456 rtx mask = immed_double_int_const (double_int::mask (prec),
10457 GET_MODE (exp));
10458 return expand_and (GET_MODE (exp), exp, mask, target);
10460 else
10462 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10463 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10464 exp, count, target, 0);
10465 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10466 exp, count, target, 0);
10470 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10471 when applied to the address of EXP produces an address known to be
10472 aligned more than BIGGEST_ALIGNMENT. */
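/* The recognized pattern is, modulo conversions,
   (-(ADDR_EXPR <EXP>)) & (2**k - 1) with the mask constant larger
   than BIGGEST_ALIGNMENT in bytes, i.e. the number of bytes needed
   to round the address of EXP up to a 2**k boundary.  */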
10474 static int
10475 is_aligning_offset (const_tree offset, const_tree exp)
10477 /* Strip off any conversions. */
10478 while (CONVERT_EXPR_P (offset))
10479 offset = TREE_OPERAND (offset, 0);
10481 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10482 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10483 if (TREE_CODE (offset) != BIT_AND_EXPR
10484 || !host_integerp (TREE_OPERAND (offset, 1), 1)
10485 || compare_tree_int (TREE_OPERAND (offset, 1),
10486 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10487 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
10488 return 0;
10490 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10491 It must be NEGATE_EXPR. Then strip any more conversions. */
10492 offset = TREE_OPERAND (offset, 0);
10493 while (CONVERT_EXPR_P (offset))
10494 offset = TREE_OPERAND (offset, 0);
10496 if (TREE_CODE (offset) != NEGATE_EXPR)
10497 return 0;
10499 offset = TREE_OPERAND (offset, 0);
10500 while (CONVERT_EXPR_P (offset))
10501 offset = TREE_OPERAND (offset, 0);
10503 /* This must now be the address of EXP. */
10504 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10507 /* Return the tree node if ARG corresponds to a string constant, or zero
10508 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10509 in bytes within the string that ARG is accessing. The type of the
10510 offset will be `sizetype'. */
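/* Typical inputs are the address of a STRING_CST element such as
   &"hello"[2], pointer arithmetic such as "hello" + i, or the
   address of a VAR_DECL whose DECL_INITIAL is a string literal; in
   each case the STRING_CST and the byte offset are returned
   separately.  */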
10512 tree
10513 string_constant (tree arg, tree *ptr_offset)
10515 tree array, offset, lower_bound;
10516 STRIP_NOPS (arg);
10518 if (TREE_CODE (arg) == ADDR_EXPR)
10520 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10522 *ptr_offset = size_zero_node;
10523 return TREE_OPERAND (arg, 0);
10525 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10527 array = TREE_OPERAND (arg, 0);
10528 offset = size_zero_node;
10530 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10532 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10533 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10534 if (TREE_CODE (array) != STRING_CST
10535 && TREE_CODE (array) != VAR_DECL)
10536 return 0;
10538 /* Check if the array has a nonzero lower bound. */
10539 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10540 if (!integer_zerop (lower_bound))
10542 /* If the offset and base aren't both constants, return 0. */
10543 if (TREE_CODE (lower_bound) != INTEGER_CST)
10544 return 0;
10545 if (TREE_CODE (offset) != INTEGER_CST)
10546 return 0;
10547 /* Adjust offset by the lower bound. */
10548 offset = size_diffop (fold_convert (sizetype, offset),
10549 fold_convert (sizetype, lower_bound));
10552 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10554 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10555 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10556 if (TREE_CODE (array) != ADDR_EXPR)
10557 return 0;
10558 array = TREE_OPERAND (array, 0);
10559 if (TREE_CODE (array) != STRING_CST
10560 && TREE_CODE (array) != VAR_DECL)
10561 return 0;
10563 else
10564 return 0;
10566 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10568 tree arg0 = TREE_OPERAND (arg, 0);
10569 tree arg1 = TREE_OPERAND (arg, 1);
10571 STRIP_NOPS (arg0);
10572 STRIP_NOPS (arg1);
10574 if (TREE_CODE (arg0) == ADDR_EXPR
10575 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10576 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10578 array = TREE_OPERAND (arg0, 0);
10579 offset = arg1;
10581 else if (TREE_CODE (arg1) == ADDR_EXPR
10582 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10583 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10585 array = TREE_OPERAND (arg1, 0);
10586 offset = arg0;
10588 else
10589 return 0;
10591 else
10592 return 0;
10594 if (TREE_CODE (array) == STRING_CST)
10596 *ptr_offset = fold_convert (sizetype, offset);
10597 return array;
10599 else if (TREE_CODE (array) == VAR_DECL
10600 || TREE_CODE (array) == CONST_DECL)
10602 int length;
10604 /* Variables initialized to string literals can be handled too. */
10605 if (!const_value_known_p (array)
10606 || !DECL_INITIAL (array)
10607 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
10608 return 0;
10610 /* Avoid const char foo[4] = "abcde"; */
10611 if (DECL_SIZE_UNIT (array) == NULL_TREE
10612 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10613 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
10614 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10615 return 0;
10617 /* If the variable is bigger than the string literal, OFFSET must be
10618 constant and inside the bounds of the string literal. */
10619 offset = fold_convert (sizetype, offset);
10620 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10621 && (! host_integerp (offset, 1)
10622 || compare_tree_int (offset, length) >= 0))
10623 return 0;
10625 *ptr_offset = offset;
10626 return DECL_INITIAL (array);
10629 return 0;
10632 /* Generate code to calculate OPS, an exploded expression,
10633 using a store-flag instruction and return an rtx for the result.
10634 OPS reflects a comparison.
10636 If TARGET is nonzero, store the result there if convenient.
10638 Return zero if there is no suitable set-flag instruction
10639 available on this machine.
10641 Once expand_expr has been called on the arguments of the comparison,
10642 we are committed to doing the store flag, since it is not safe to
10643 re-evaluate the expression. We emit the store-flag insn by calling
10644 emit_store_flag, but only expand the arguments if we have a reason
10645 to believe that emit_store_flag will be successful. If we think that
10646 it will, but it isn't, we have to simulate the store-flag with a
10647 set/jump/set sequence. */
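/* E.g. for "x == y" with integral operands this typically becomes a
   single cstore-style instruction via emit_store_flag_force, storing
   0 or 1 into TARGET (or 0/-1 for signed one-bit result types).  */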
10649 static rtx
10650 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
10652 enum rtx_code code;
10653 tree arg0, arg1, type;
10654 tree tem;
10655 enum machine_mode operand_mode;
10656 int unsignedp;
10657 rtx op0, op1;
10658 rtx subtarget = target;
10659 location_t loc = ops->location;
10661 arg0 = ops->op0;
10662 arg1 = ops->op1;
10664 /* Don't crash if the comparison was erroneous. */
10665 if (arg0 == error_mark_node || arg1 == error_mark_node)
10666 return const0_rtx;
10668 type = TREE_TYPE (arg0);
10669 operand_mode = TYPE_MODE (type);
10670 unsignedp = TYPE_UNSIGNED (type);
10672 /* We won't bother with BLKmode store-flag operations because it would mean
10673 passing a lot of information to emit_store_flag. */
10674 if (operand_mode == BLKmode)
10675 return 0;
10677 /* We won't bother with store-flag operations involving function pointers
10678 when function pointers must be canonicalized before comparisons. */
10679 #ifdef HAVE_canonicalize_funcptr_for_compare
10680 if (HAVE_canonicalize_funcptr_for_compare
10681 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10682 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10683 == FUNCTION_TYPE))
10684 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10685 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10686 == FUNCTION_TYPE))))
10687 return 0;
10688 #endif
10690 STRIP_NOPS (arg0);
10691 STRIP_NOPS (arg1);
10693 /* For vector typed comparisons emit code to generate the desired
10694 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10695 expander for this. */
10696 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10698 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10699 tree if_true = constant_boolean_node (true, ops->type);
10700 tree if_false = constant_boolean_node (false, ops->type);
10701 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10704 /* Get the rtx comparison code to use. We know that EXP is a comparison
10705 operation of some type. Some comparisons against 1 and -1 can be
10706 converted to comparisons with zero. Do so here so that the tests
10707 below will be aware that we have a comparison with zero. These
10708 tests will not catch constants in the first operand, but constants
10709 are rarely passed as the first operand. */
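/* E.g. "x < 1" is handled below as "x <= 0" (LE/LEU), and a signed
   "x <= -1" as "x < 0", so that the single-bit and zero-comparison
   shortcuts further down can still apply.  */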
10711 switch (ops->code)
10713 case EQ_EXPR:
10714 code = EQ;
10715 break;
10716 case NE_EXPR:
10717 code = NE;
10718 break;
10719 case LT_EXPR:
10720 if (integer_onep (arg1))
10721 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10722 else
10723 code = unsignedp ? LTU : LT;
10724 break;
10725 case LE_EXPR:
10726 if (! unsignedp && integer_all_onesp (arg1))
10727 arg1 = integer_zero_node, code = LT;
10728 else
10729 code = unsignedp ? LEU : LE;
10730 break;
10731 case GT_EXPR:
10732 if (! unsignedp && integer_all_onesp (arg1))
10733 arg1 = integer_zero_node, code = GE;
10734 else
10735 code = unsignedp ? GTU : GT;
10736 break;
10737 case GE_EXPR:
10738 if (integer_onep (arg1))
10739 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10740 else
10741 code = unsignedp ? GEU : GE;
10742 break;
10744 case UNORDERED_EXPR:
10745 code = UNORDERED;
10746 break;
10747 case ORDERED_EXPR:
10748 code = ORDERED;
10749 break;
10750 case UNLT_EXPR:
10751 code = UNLT;
10752 break;
10753 case UNLE_EXPR:
10754 code = UNLE;
10755 break;
10756 case UNGT_EXPR:
10757 code = UNGT;
10758 break;
10759 case UNGE_EXPR:
10760 code = UNGE;
10761 break;
10762 case UNEQ_EXPR:
10763 code = UNEQ;
10764 break;
10765 case LTGT_EXPR:
10766 code = LTGT;
10767 break;
10769 default:
10770 gcc_unreachable ();
10773 /* Put a constant second. */
10774 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10775 || TREE_CODE (arg0) == FIXED_CST)
10777 tem = arg0; arg0 = arg1; arg1 = tem;
10778 code = swap_condition (code);
10781 /* If this is an equality or inequality test of a single bit, we can
10782 do this by shifting the bit being tested to the low-order bit and
10783 masking the result with the constant 1. If the condition was EQ,
10784 we xor it with 1. This does not require an scc insn and is faster
10785 than an scc insn even if we have it.
10787 The code to make this transformation was moved into fold_single_bit_test,
10788 so we just call into the folder and expand its result. */
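/* E.g. "(x & 8) != 0" becomes "(x >> 3) & 1", and the EQ form gets
   a final XOR with 1, avoiding a conditional-set instruction.  */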
10790 if ((code == NE || code == EQ)
10791 && integer_zerop (arg1)
10792 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
10794 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
10795 if (srcstmt
10796 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
10798 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
10799 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10800 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
10801 gimple_assign_rhs1 (srcstmt),
10802 gimple_assign_rhs2 (srcstmt));
10803 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
10804 if (temp)
10805 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
10809 if (! get_subtarget (target)
10810 || GET_MODE (subtarget) != operand_mode)
10811 subtarget = 0;
10813 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10815 if (target == 0)
10816 target = gen_reg_rtx (mode);
10818 /* Try a cstore if possible. */
10819 return emit_store_flag_force (target, code, op0, op1,
10820 operand_mode, unsignedp,
10821 (TYPE_PRECISION (ops->type) == 1
10822 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
10826 /* Stubs in case we haven't got a casesi insn. */
10827 #ifndef HAVE_casesi
10828 # define HAVE_casesi 0
10829 # define gen_casesi(a, b, c, d, e) (0)
10830 # define CODE_FOR_casesi CODE_FOR_nothing
10831 #endif
10833 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10834 0 otherwise (i.e. if there is no casesi instruction).
10836 DEFAULT_PROBABILITY is the probability of jumping to the default
10837 label. */
10839 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10840 rtx table_label, rtx default_label, rtx fallback_label,
10841 int default_probability)
10843 struct expand_operand ops[5];
10844 enum machine_mode index_mode = SImode;
10845 rtx op1, op2, index;
10847 if (! HAVE_casesi)
10848 return 0;
10850 /* Convert the index to SImode. */
10851 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10853 enum machine_mode omode = TYPE_MODE (index_type);
10854 rtx rangertx = expand_normal (range);
10856 /* We must handle the endpoints in the original mode. */
10857 index_expr = build2 (MINUS_EXPR, index_type,
10858 index_expr, minval);
10859 minval = integer_zero_node;
10860 index = expand_normal (index_expr);
10861 if (default_label)
10862 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10863 omode, 1, default_label,
10864 default_probability);
10865 /* Now we can safely truncate. */
10866 index = convert_to_mode (index_mode, index, 0);
10868 else
10870 if (TYPE_MODE (index_type) != index_mode)
10872 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
10873 index_expr = fold_convert (index_type, index_expr);
10876 index = expand_normal (index_expr);
10879 do_pending_stack_adjust ();
10881 op1 = expand_normal (minval);
10882 op2 = expand_normal (range);
10884 create_input_operand (&ops[0], index, index_mode);
10885 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
10886 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
10887 create_fixed_operand (&ops[3], table_label);
10888 create_fixed_operand (&ops[4], (default_label
10889 ? default_label
10890 : fallback_label));
10891 expand_jump_insn (CODE_FOR_casesi, 5, ops);
10892 return 1;
10895 /* Attempt to generate a tablejump instruction; same concept. */
10896 #ifndef HAVE_tablejump
10897 #define HAVE_tablejump 0
10898 #define gen_tablejump(x, y) (0)
10899 #endif
10901 /* Subroutine of the next function.
10903 INDEX is the value being switched on, with the lowest value
10904 in the table already subtracted.
10905 MODE is its expected mode (needed if INDEX is constant).
10906 RANGE is the length of the jump table.
10907 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10909 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10910 index value is out of range.
10911 DEFAULT_PROBABILITY is the probability of jumping to
10912 the default label. */
10914 static void
10915 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10916 rtx default_label, int default_probability)
10918 rtx temp, vector;
10920 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10921 cfun->cfg->max_jumptable_ents = INTVAL (range);
10923 /* Do an unsigned comparison (in the proper mode) between the index
10924 expression and the value which represents the length of the range.
10925 Since we just finished subtracting the lower bound of the range
10926 from the index expression, this comparison allows us to simultaneously
10927 check that the original index expression value is both greater than
10928 or equal to the minimum value of the range and less than or equal to
10929 the maximum value of the range. */
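/* I.e. after "index -= minval", the unsigned test "index > range"
   also catches original values below MINVAL, because they wrap
   around to very large unsigned numbers.  */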
10931 if (default_label)
10932 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10933 default_label, default_probability);
10936 /* If index is in range, it must fit in Pmode.
10937 Convert to Pmode so we can index with it. */
10938 if (mode != Pmode)
10939 index = convert_to_mode (Pmode, index, 1);
10941 /* Don't let a MEM slip through, because then INDEX that comes
10942 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10943 and break_out_memory_refs will go to work on it and mess it up. */
10944 #ifdef PIC_CASE_VECTOR_ADDRESS
10945 if (flag_pic && !REG_P (index))
10946 index = copy_to_mode_reg (Pmode, index);
10947 #endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
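  /* In other words, entry N of the table lives at
     table_label + N * GET_MODE_SIZE (CASE_VECTOR_MODE); for a typical
     32-bit CASE_VECTOR_MODE that is a stride of 4 bytes per entry,
     before any PIC adjustment below.  */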
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

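/* Expand a switch whose index is INDEX_EXPR of type INDEX_TYPE as a
   tablejump: subtract MINVAL, bounds-check against RANGE, and dispatch
   through the table at TABLE_LABEL, falling back to DEFAULT_LABEL.
   Return 1 on success, or 0 if the target has no tablejump pattern.  */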
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label, int default_probability)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
                            fold_convert (index_type, index_expr),
                            fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_normal (range),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label, default_probability);
  return 1;
}

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
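/* For instance, a V4SF VECTOR_CST such as { 1.0f, 2.0f, 3.0f, 4.0f }
   becomes a CONST_VECTOR in V4SFmode whose four elements are the
   corresponding CONST_DOUBLE constants; an all-zero constructor is
   short-circuited to CONST0_RTX of the vector mode.  */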
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  unsigned i;
  int units;
  tree elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else if (TREE_CODE (elt) == FIXED_CST)
        RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
                                                   inner);
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}

/* Build a decl for a personality function given a language prefix.  */
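/* For example, with LANG "gxx" and DWARF-2 unwind info this yields a
   declaration named __gxx_personality_v0, the usual C++ personality
   routine; SJLJ and SEH unwinding select the _sj0 and _seh0 suffixes
   instead.  */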
tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
                                   long_long_unsigned_type_node,
                                   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
                     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}

/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */
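/* Continuing the example above, for a C++ function compiled with
   DWARF-2 unwinding the rtx returned here is typically the SYMBOL_REF
   for __gxx_personality_v0.  */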
rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}

#include "gt-expr.h"