gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "reload.h"
42 #include "output.h"
43 #include "typeclass.h"
44 #include "toplev.h"
45 #include "langhooks.h"
46 #include "intl.h"
47 #include "tm_p.h"
48 #include "tree-iterator.h"
49 #include "tree-pass.h"
50 #include "tree-flow.h"
51 #include "target.h"
52 #include "common/common-target.h"
53 #include "timevar.h"
54 #include "df.h"
55 #include "diagnostic.h"
56 #include "ssaexpand.h"
57 #include "target-globals.h"
58 #include "params.h"
60 /* Decide whether a function's arguments should be processed
61 from first to last or from last to first.
63 They should if the stack and args grow in opposite directions, but
64 only if we have push insns. */
66 #ifdef PUSH_ROUNDING
68 #ifndef PUSH_ARGS_REVERSED
69 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
70 #define PUSH_ARGS_REVERSED /* If it's last to first. */
71 #endif
72 #endif
74 #endif
76 #ifndef STACK_PUSH_CODE
77 #ifdef STACK_GROWS_DOWNWARD
78 #define STACK_PUSH_CODE PRE_DEC
79 #else
80 #define STACK_PUSH_CODE PRE_INC
81 #endif
82 #endif
85 /* If this is nonzero, we do not bother generating VOLATILE
86 around volatile memory references, and we are willing to
87 output indirect addresses. If cse is to follow, we reject
88 indirect addresses so a useful potential cse is generated;
89 if it is used only once, instruction combination will produce
90 the same indirect address eventually. */
91 int cse_not_expected;
93 /* This structure is used by move_by_pieces to describe the move to
94 be performed. */
95 struct move_by_pieces_d
96 {
97 rtx to;
98 rtx to_addr;
99 int autinc_to;
100 int explicit_inc_to;
101 rtx from;
102 rtx from_addr;
103 int autinc_from;
104 int explicit_inc_from;
105 unsigned HOST_WIDE_INT len;
106 HOST_WIDE_INT offset;
107 int reverse;
108 };
110 /* This structure is used by store_by_pieces to describe the clear to
111 be performed. */
113 struct store_by_pieces_d
114 {
115 rtx to;
116 rtx to_addr;
117 int autinc_to;
118 int explicit_inc_to;
119 unsigned HOST_WIDE_INT len;
120 HOST_WIDE_INT offset;
121 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
122 void *constfundata;
123 int reverse;
124 };
126 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces_d *);
128 static bool block_move_libcall_safe_for_call_parm (void);
129 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces_d *);
137 static tree clear_storage_libcall_fn (int);
138 static rtx compress_float_constant (rtx, rtx);
139 static rtx get_subtarget (rtx);
140 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
141 HOST_WIDE_INT, enum machine_mode,
142 tree, tree, int, alias_set_type);
143 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
144 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
145 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
146 enum machine_mode,
147 tree, tree, alias_set_type, bool);
149 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
151 static int is_aligning_offset (const_tree, const_tree);
152 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
153 enum expand_modifier);
154 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
155 static rtx do_store_flag (sepops, rtx, enum machine_mode);
156 #ifdef PUSH_ROUNDING
157 static void emit_single_push_insn (enum machine_mode, rtx, tree);
158 #endif
159 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
160 static rtx const_vector_from_tree (tree);
161 static void write_complex_part (rtx, rtx, bool);
163 /* This macro is used to determine whether move_by_pieces should be called
164 to perform a structure copy. */
165 #ifndef MOVE_BY_PIECES_P
166 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
167 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
168 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
169 #endif
171 /* This macro is used to determine whether clear_by_pieces should be
172 called to clear storage. */
173 #ifndef CLEAR_BY_PIECES_P
174 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
175 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
176 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
177 #endif
179 /* This macro is used to determine whether store_by_pieces should be
180 called to "memset" storage with byte values other than zero. */
181 #ifndef SET_BY_PIECES_P
182 #define SET_BY_PIECES_P(SIZE, ALIGN) \
183 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
184 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
185 #endif
187 /* This macro is used to determine whether store_by_pieces should be
188 called to "memcpy" storage when the source is a constant string. */
189 #ifndef STORE_BY_PIECES_P
190 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
191 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
192 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
193 #endif
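/* Editor's note (not part of the original source): these ratio checks are
   how callers choose between an inline piecewise copy and a call.  A
   minimal sketch of the usual pattern, mirroring emit_block_move_hints
   later in this file:

     if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);

   The cutoff is expressed as a count of move insns, computed by
   move_by_pieces_ninsns, compared against the target's MOVE_RATIO.  */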
195 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
197 #ifndef SLOW_UNALIGNED_ACCESS
198 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
199 #endif
201 /* This is run to set up which modes can be used
202 directly in memory and to initialize the block move optab. It is run
203 at the beginning of compilation and when the target is reinitialized. */
205 void
206 init_expr_target (void)
208 rtx insn, pat;
209 enum machine_mode mode;
210 int num_clobbers;
211 rtx mem, mem1;
212 rtx reg;
214 /* Try indexing by frame ptr and try by stack ptr.
215 It is known that on the Convex the stack ptr isn't a valid index.
216 With luck, one or the other is valid on any machine. */
217 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
218 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
220 /* A scratch register we can modify in-place below to avoid
221 useless RTL allocations. */
222 reg = gen_rtx_REG (VOIDmode, -1);
224 insn = rtx_alloc (INSN);
225 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
226 PATTERN (insn) = pat;
228 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
229 mode = (enum machine_mode) ((int) mode + 1))
231 int regno;
233 direct_load[(int) mode] = direct_store[(int) mode] = 0;
234 PUT_MODE (mem, mode);
235 PUT_MODE (mem1, mode);
236 PUT_MODE (reg, mode);
238 /* See if there is some register that can be used in this mode and
239 directly loaded or stored from memory. */
241 if (mode != VOIDmode && mode != BLKmode)
242 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
243 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
244 regno++)
246 if (! HARD_REGNO_MODE_OK (regno, mode))
247 continue;
249 SET_REGNO (reg, regno);
251 SET_SRC (pat) = mem;
252 SET_DEST (pat) = reg;
253 if (recog (pat, insn, &num_clobbers) >= 0)
254 direct_load[(int) mode] = 1;
256 SET_SRC (pat) = mem1;
257 SET_DEST (pat) = reg;
258 if (recog (pat, insn, &num_clobbers) >= 0)
259 direct_load[(int) mode] = 1;
261 SET_SRC (pat) = reg;
262 SET_DEST (pat) = mem;
263 if (recog (pat, insn, &num_clobbers) >= 0)
264 direct_store[(int) mode] = 1;
266 SET_SRC (pat) = reg;
267 SET_DEST (pat) = mem1;
268 if (recog (pat, insn, &num_clobbers) >= 0)
269 direct_store[(int) mode] = 1;
273 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
275 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
276 mode = GET_MODE_WIDER_MODE (mode))
278 enum machine_mode srcmode;
279 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
280 srcmode = GET_MODE_WIDER_MODE (srcmode))
282 enum insn_code ic;
284 ic = can_extend_p (mode, srcmode, 0);
285 if (ic == CODE_FOR_nothing)
286 continue;
288 PUT_MODE (mem, srcmode);
290 if (insn_operand_matches (ic, 1, mem))
291 float_extend_from_mem[mode][srcmode] = true;
296 /* This is run at the start of compiling a function. */
298 void
299 init_expr (void)
300 {
301 memset (&crtl->expr, 0, sizeof (crtl->expr));
302 }
304 /* Copy data from FROM to TO, where the machine modes are not the same.
305 Both modes may be integer, or both may be floating, or both may be
306 fixed-point.
307 UNSIGNEDP should be nonzero if FROM is an unsigned type.
308 This causes zero-extension instead of sign-extension. */
310 void
311 convert_move (rtx to, rtx from, int unsignedp)
313 enum machine_mode to_mode = GET_MODE (to);
314 enum machine_mode from_mode = GET_MODE (from);
315 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
316 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
317 enum insn_code code;
318 rtx libcall;
320 /* rtx code for making an equivalent value. */
321 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
322 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
325 gcc_assert (to_real == from_real);
326 gcc_assert (to_mode != BLKmode);
327 gcc_assert (from_mode != BLKmode);
329 /* If the source and destination are already the same, then there's
330 nothing to do. */
331 if (to == from)
332 return;
334 /* If FROM is a SUBREG that indicates that we have already done at least
335 the required extension, strip it. We don't handle such SUBREGs as
336 TO here. */
338 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
339 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
340 >= GET_MODE_PRECISION (to_mode))
341 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
342 from = gen_lowpart (to_mode, from), from_mode = to_mode;
344 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
346 if (to_mode == from_mode
347 || (from_mode == VOIDmode && CONSTANT_P (from)))
349 emit_move_insn (to, from);
350 return;
353 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
355 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
357 if (VECTOR_MODE_P (to_mode))
358 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
359 else
360 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
362 emit_move_insn (to, from);
363 return;
366 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
368 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
369 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
370 return;
373 if (to_real)
375 rtx value, insns;
376 convert_optab tab;
378 gcc_assert ((GET_MODE_PRECISION (from_mode)
379 != GET_MODE_PRECISION (to_mode))
380 || (DECIMAL_FLOAT_MODE_P (from_mode)
381 != DECIMAL_FLOAT_MODE_P (to_mode)));
383 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
384 /* Conversion between decimal float and binary float, same size. */
385 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
386 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
387 tab = sext_optab;
388 else
389 tab = trunc_optab;
391 /* Try converting directly if the insn is supported. */
393 code = convert_optab_handler (tab, to_mode, from_mode);
394 if (code != CODE_FOR_nothing)
396 emit_unop_insn (code, to, from,
397 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
398 return;
401 /* Otherwise use a libcall. */
402 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
404 /* Is this conversion implemented yet? */
405 gcc_assert (libcall);
407 start_sequence ();
408 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
409 1, from, from_mode);
410 insns = get_insns ();
411 end_sequence ();
412 emit_libcall_block (insns, to, value,
413 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
414 from)
415 : gen_rtx_FLOAT_EXTEND (to_mode, from));
416 return;
419 /* Handle pointer conversion. */ /* SPEE 900220. */
420 /* Targets are expected to provide conversion insns between PxImode and
421 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
422 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
424 enum machine_mode full_mode
425 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
427 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
428 != CODE_FOR_nothing);
430 if (full_mode != from_mode)
431 from = convert_to_mode (full_mode, from, unsignedp);
432 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
433 to, from, UNKNOWN);
434 return;
436 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
438 rtx new_from;
439 enum machine_mode full_mode
440 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
441 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
442 enum insn_code icode;
444 icode = convert_optab_handler (ctab, full_mode, from_mode);
445 gcc_assert (icode != CODE_FOR_nothing);
447 if (to_mode == full_mode)
449 emit_unop_insn (icode, to, from, UNKNOWN);
450 return;
453 new_from = gen_reg_rtx (full_mode);
454 emit_unop_insn (icode, new_from, from, UNKNOWN);
456 /* else proceed to integer conversions below. */
457 from_mode = full_mode;
458 from = new_from;
461 /* Make sure both are fixed-point modes or both are not. */
462 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
463 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
464 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
466 /* If we widen from_mode to to_mode and they are in the same class,
467 we won't saturate the result.
468 Otherwise, always saturate the result to play safe. */
469 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
470 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
471 expand_fixed_convert (to, from, 0, 0);
472 else
473 expand_fixed_convert (to, from, 0, 1);
474 return;
477 /* Now both modes are integers. */
479 /* Handle expanding beyond a word. */
480 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
481 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
483 rtx insns;
484 rtx lowpart;
485 rtx fill_value;
486 rtx lowfrom;
487 int i;
488 enum machine_mode lowpart_mode;
489 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
491 /* Try converting directly if the insn is supported. */
492 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
493 != CODE_FOR_nothing)
495 /* If FROM is a SUBREG, put it into a register. Do this
496 so that we always generate the same set of insns for
497 better cse'ing; if an intermediate assignment occurred,
498 we won't be doing the operation directly on the SUBREG. */
499 if (optimize > 0 && GET_CODE (from) == SUBREG)
500 from = force_reg (from_mode, from);
501 emit_unop_insn (code, to, from, equiv_code);
502 return;
504 /* Next, try converting via full word. */
505 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
506 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
507 != CODE_FOR_nothing))
509 rtx word_to = gen_reg_rtx (word_mode);
510 if (REG_P (to))
512 if (reg_overlap_mentioned_p (to, from))
513 from = force_reg (from_mode, from);
514 emit_clobber (to);
516 convert_move (word_to, from, unsignedp);
517 emit_unop_insn (code, to, word_to, equiv_code);
518 return;
521 /* No special multiword conversion insn; do it by hand. */
522 start_sequence ();
524 /* Since we will turn this into a no conflict block, we must ensure
525 that the source does not overlap the target. */
527 if (reg_overlap_mentioned_p (to, from))
528 from = force_reg (from_mode, from);
530 /* Get a copy of FROM widened to a word, if necessary. */
531 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
532 lowpart_mode = word_mode;
533 else
534 lowpart_mode = from_mode;
536 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
538 lowpart = gen_lowpart (lowpart_mode, to);
539 emit_move_insn (lowpart, lowfrom);
541 /* Compute the value to put in each remaining word. */
542 if (unsignedp)
543 fill_value = const0_rtx;
544 else
545 fill_value = emit_store_flag (gen_reg_rtx (word_mode),
546 LT, lowfrom, const0_rtx,
547 VOIDmode, 0, -1);
549 /* Fill the remaining words. */
550 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
552 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
553 rtx subword = operand_subword (to, index, 1, to_mode);
555 gcc_assert (subword);
557 if (fill_value != subword)
558 emit_move_insn (subword, fill_value);
561 insns = get_insns ();
562 end_sequence ();
564 emit_insn (insns);
565 return;
568 /* Truncating multi-word to a word or less. */
569 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
570 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
572 if (!((MEM_P (from)
573 && ! MEM_VOLATILE_P (from)
574 && direct_load[(int) to_mode]
575 && ! mode_dependent_address_p (XEXP (from, 0)))
576 || REG_P (from)
577 || GET_CODE (from) == SUBREG))
578 from = force_reg (from_mode, from);
579 convert_move (to, gen_lowpart (word_mode, from), 0);
580 return;
583 /* Now follow all the conversions between integers
584 no more than a word long. */
586 /* For truncation, usually we can just refer to FROM in a narrower mode. */
587 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
588 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
590 if (!((MEM_P (from)
591 && ! MEM_VOLATILE_P (from)
592 && direct_load[(int) to_mode]
593 && ! mode_dependent_address_p (XEXP (from, 0)))
594 || REG_P (from)
595 || GET_CODE (from) == SUBREG))
596 from = force_reg (from_mode, from);
597 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
598 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
599 from = copy_to_reg (from);
600 emit_move_insn (to, gen_lowpart (to_mode, from));
601 return;
604 /* Handle extension. */
605 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
607 /* Convert directly if that works. */
608 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
609 != CODE_FOR_nothing)
611 emit_unop_insn (code, to, from, equiv_code);
612 return;
614 else
616 enum machine_mode intermediate;
617 rtx tmp;
618 int shift_amount;
620 /* Search for a mode to convert via. */
621 for (intermediate = from_mode; intermediate != VOIDmode;
622 intermediate = GET_MODE_WIDER_MODE (intermediate))
623 if (((can_extend_p (to_mode, intermediate, unsignedp)
624 != CODE_FOR_nothing)
625 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
626 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
627 && (can_extend_p (intermediate, from_mode, unsignedp)
628 != CODE_FOR_nothing))
630 convert_move (to, convert_to_mode (intermediate, from,
631 unsignedp), unsignedp);
632 return;
635 /* No suitable intermediate mode.
636 Generate what we need with shifts. */
637 shift_amount = (GET_MODE_PRECISION (to_mode)
638 - GET_MODE_PRECISION (from_mode));
639 from = gen_lowpart (to_mode, force_reg (from_mode, from));
640 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
641 to, unsignedp);
642 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
643 to, unsignedp);
644 if (tmp != to)
645 emit_move_insn (to, tmp);
646 return;
650 /* Support special truncate insns for certain modes. */
651 if (convert_optab_handler (trunc_optab, to_mode,
652 from_mode) != CODE_FOR_nothing)
654 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
655 to, from, UNKNOWN);
656 return;
659 /* Handle truncation of volatile memrefs, and so on;
660 the things that couldn't be truncated directly,
661 and for which there was no special instruction.
663 ??? Code above formerly short-circuited this, for most integer
664 mode pairs, with a force_reg in from_mode followed by a recursive
665 call to this routine. Appears always to have been wrong. */
666 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
668 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
669 emit_move_insn (to, temp);
670 return;
673 /* Mode combination is not recognized. */
674 gcc_unreachable ();
675 }
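/* Editor's sketch (not part of the original file): a typical caller widens
   a value into a fresh pseudo, for example

     rtx dst = gen_reg_rtx (SImode);
     convert_move (dst, qi_value, 1);

   with unsignedp == 1, which uses a direct zero-extend insn when the
   target provides one and otherwise falls back to the intermediate-mode
   or shift sequences above.  QI_VALUE stands for any QImode rtx; the name
   is purely illustrative.  */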
677 /* Return an rtx for a value that would result
678 from converting X to mode MODE.
679 Both X and MODE may be floating, or both integer.
680 UNSIGNEDP is nonzero if X is an unsigned value.
681 This can be done by referring to a part of X in place
682 or by copying to a new temporary with conversion. */
684 rtx
685 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
686 {
687 return convert_modes (mode, VOIDmode, x, unsignedp);
688 }
690 /* Return an rtx for a value that would result
691 from converting X from mode OLDMODE to mode MODE.
692 Both modes may be floating, or both integer.
693 UNSIGNEDP is nonzero if X is an unsigned value.
695 This can be done by referring to a part of X in place
696 or by copying to a new temporary with conversion.
698 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
700 rtx
701 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
703 rtx temp;
705 /* If FROM is a SUBREG that indicates that we have already done at least
706 the required extension, strip it. */
708 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
709 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
710 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
711 x = gen_lowpart (mode, x);
713 if (GET_MODE (x) != VOIDmode)
714 oldmode = GET_MODE (x);
716 if (mode == oldmode)
717 return x;
719 /* There is one case that we must handle specially: If we are converting
720 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
721 we are to interpret the constant as unsigned, gen_lowpart will do
722 the wrong thing if the constant appears negative. What we want to do is
723 make the high-order word of the constant zero, not all ones. */
725 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
726 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
727 && CONST_INT_P (x) && INTVAL (x) < 0)
729 double_int val = uhwi_to_double_int (INTVAL (x));
731 /* We need to zero extend VAL. */
732 if (oldmode != VOIDmode)
733 val = double_int_zext (val, GET_MODE_BITSIZE (oldmode));
735 return immed_double_int_const (val, mode);
738 /* We can do this with a gen_lowpart if both desired and current modes
739 are integer, and this is either a constant integer, a register, or a
740 non-volatile MEM. Except for the constant case where MODE is no
741 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
743 if ((CONST_INT_P (x)
744 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT)
745 || (GET_MODE_CLASS (mode) == MODE_INT
746 && GET_MODE_CLASS (oldmode) == MODE_INT
747 && (GET_CODE (x) == CONST_DOUBLE
748 || (GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
749 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
750 && direct_load[(int) mode])
751 || (REG_P (x)
752 && (! HARD_REGISTER_P (x)
753 || HARD_REGNO_MODE_OK (REGNO (x), mode))
754 && TRULY_NOOP_TRUNCATION_MODES_P (mode,
755 GET_MODE (x))))))))
757 /* ?? If we don't know OLDMODE, we have to assume here that
758 X does not need sign- or zero-extension. This may not be
759 the case, but it's the best we can do. */
760 if (CONST_INT_P (x) && oldmode != VOIDmode
761 && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (oldmode))
763 HOST_WIDE_INT val = INTVAL (x);
765 /* We must sign or zero-extend in this case. Start by
766 zero-extending, then sign extend if we need to. */
767 val &= GET_MODE_MASK (oldmode);
768 if (! unsignedp
769 && val_signbit_known_set_p (oldmode, val))
770 val |= ~GET_MODE_MASK (oldmode);
772 return gen_int_mode (val, mode);
775 return gen_lowpart (mode, x);
778 /* Converting from integer constant into mode is always equivalent to an
779 subreg operation. */
780 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
782 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
783 return simplify_gen_subreg (mode, x, oldmode, 0);
786 temp = gen_reg_rtx (mode);
787 convert_move (temp, x, unsignedp);
788 return temp;
789 }
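/* Editor's sketch (illustrative, not original): convert_modes is the
   value-returning counterpart of convert_move, e.g.

     rtx wide = convert_modes (SImode, QImode, x, 1);

   It may hand back X itself, or a constant recomputed in SImode, when no
   real conversion is needed; otherwise it allocates a new pseudo and lets
   convert_move do the work.  */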
791 /* Return the largest alignment we can use for doing a move (or store)
792 of MAX_PIECES. ALIGN is the largest alignment we could use. */
794 static unsigned int
795 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
797 enum machine_mode tmode;
799 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
800 if (align >= GET_MODE_ALIGNMENT (tmode))
801 align = GET_MODE_ALIGNMENT (tmode);
802 else
804 enum machine_mode tmode, xmode;
806 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
807 tmode != VOIDmode;
808 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
809 if (GET_MODE_SIZE (tmode) > max_pieces
810 || SLOW_UNALIGNED_ACCESS (tmode, align))
811 break;
813 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
816 return align;
819 /* Return the widest integer mode no wider than SIZE. If no such mode
820 can be found, return VOIDmode. */
822 static enum machine_mode
823 widest_int_mode_for_size (unsigned int size)
825 enum machine_mode tmode, mode = VOIDmode;
827 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
828 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
829 if (GET_MODE_SIZE (tmode) < size)
830 mode = tmode;
832 return mode;
833 }
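/* Editor's note (not original): the size comparison above is strict, so
   callers pass a bound one byte larger than the widest piece they will
   accept, e.g. MOVE_MAX_PIECES + 1 in move_by_pieces.  As a worked case,
   widest_int_mode_for_size (9) yields DImode on targets that have DImode
   moves (8 < 9), while widest_int_mode_for_size (3) yields HImode, since
   2 < 3 but 4 is not.  */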
835 /* STORE_MAX_PIECES is the number of bytes at a time that we can
836 store efficiently. Due to internal GCC limitations, this is
837 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
838 for an immediate constant. */
840 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
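/* Editor's note (not original): on a typical LP64 host HOST_WIDE_INT is
   8 bytes, so the cap above is 16; a target whose MOVE_MAX_PIECES is 8
   therefore stores at most 8 bytes per piece, while one advertising 32
   would still be limited to 16-byte immediates here.  */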
842 /* Determine whether the LEN bytes can be moved by using several move
843 instructions. Return nonzero if a call to move_by_pieces should
844 succeed. */
846 int
847 can_move_by_pieces (unsigned HOST_WIDE_INT len,
848 unsigned int align ATTRIBUTE_UNUSED)
849 {
850 return MOVE_BY_PIECES_P (len, align);
851 }
853 /* Generate several move instructions to copy LEN bytes from block FROM to
854 block TO. (These are MEM rtx's with BLKmode).
856 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
857 used to push FROM to the stack.
859 ALIGN is maximum stack alignment we can assume.
861 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
862 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
863 stpcpy. */
865 rtx
866 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
867 unsigned int align, int endp)
869 struct move_by_pieces_d data;
870 enum machine_mode to_addr_mode;
871 enum machine_mode from_addr_mode = get_address_mode (from);
872 rtx to_addr, from_addr = XEXP (from, 0);
873 unsigned int max_size = MOVE_MAX_PIECES + 1;
874 enum insn_code icode;
876 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
878 data.offset = 0;
879 data.from_addr = from_addr;
880 if (to)
882 to_addr_mode = get_address_mode (to);
883 to_addr = XEXP (to, 0);
884 data.to = to;
885 data.autinc_to
886 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
887 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
888 data.reverse
889 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
891 else
893 to_addr_mode = VOIDmode;
894 to_addr = NULL_RTX;
895 data.to = NULL_RTX;
896 data.autinc_to = 1;
897 #ifdef STACK_GROWS_DOWNWARD
898 data.reverse = 1;
899 #else
900 data.reverse = 0;
901 #endif
903 data.to_addr = to_addr;
904 data.from = from;
905 data.autinc_from
906 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
907 || GET_CODE (from_addr) == POST_INC
908 || GET_CODE (from_addr) == POST_DEC);
910 data.explicit_inc_from = 0;
911 data.explicit_inc_to = 0;
912 if (data.reverse) data.offset = len;
913 data.len = len;
915 /* If copying requires more than two move insns,
916 copy addresses to registers (to make displacements shorter)
917 and use post-increment if available. */
918 if (!(data.autinc_from && data.autinc_to)
919 && move_by_pieces_ninsns (len, align, max_size) > 2)
921 /* Find the mode of the largest move...
922 MODE might not be used depending on the definitions of the
923 USE_* macros below. */
924 enum machine_mode mode ATTRIBUTE_UNUSED
925 = widest_int_mode_for_size (max_size);
927 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
929 data.from_addr = copy_to_mode_reg (from_addr_mode,
930 plus_constant (from_addr_mode,
931 from_addr, len));
932 data.autinc_from = 1;
933 data.explicit_inc_from = -1;
935 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
937 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
938 data.autinc_from = 1;
939 data.explicit_inc_from = 1;
941 if (!data.autinc_from && CONSTANT_P (from_addr))
942 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
943 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
945 data.to_addr = copy_to_mode_reg (to_addr_mode,
946 plus_constant (to_addr_mode,
947 to_addr, len));
948 data.autinc_to = 1;
949 data.explicit_inc_to = -1;
951 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
953 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
954 data.autinc_to = 1;
955 data.explicit_inc_to = 1;
957 if (!data.autinc_to && CONSTANT_P (to_addr))
958 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
961 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
963 /* First move what we can in the largest integer mode, then go to
964 successively smaller modes. */
966 while (max_size > 1)
968 enum machine_mode mode = widest_int_mode_for_size (max_size);
970 if (mode == VOIDmode)
971 break;
973 icode = optab_handler (mov_optab, mode);
974 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
975 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
977 max_size = GET_MODE_SIZE (mode);
980 /* The code above should have handled everything. */
981 gcc_assert (!data.len);
983 if (endp)
985 rtx to1;
987 gcc_assert (!data.reverse);
988 if (data.autinc_to)
990 if (endp == 2)
992 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
993 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
994 else
995 data.to_addr = copy_to_mode_reg (to_addr_mode,
996 plus_constant (to_addr_mode,
997 data.to_addr,
998 -1));
1000 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1001 data.offset);
1003 else
1005 if (endp == 2)
1006 --data.offset;
1007 to1 = adjust_address (data.to, QImode, data.offset);
1009 return to1;
1011 else
1012 return data.to;
1013 }
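/* Editor's sketch (not part of the original file): the ENDP argument
   mirrors the memcpy family.  For instance

     rtx end = move_by_pieces (dst_mem, src_mem, len, align, 1);

   returns memory just past the copied block, mempcpy-style, while an ENDP
   of 2 returns memory one byte before that end (stpcpy-style) and 0 simply
   returns the destination.  DST_MEM, SRC_MEM, LEN and ALIGN are
   placeholders for the caller's BLKmode MEMs, byte count and alignment.  */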
1015 /* Return number of insns required to move L bytes by pieces.
1016 ALIGN (in bits) is maximum alignment we can assume. */
1018 unsigned HOST_WIDE_INT
1019 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1020 unsigned int max_size)
1022 unsigned HOST_WIDE_INT n_insns = 0;
1024 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1026 while (max_size > 1)
1028 enum machine_mode mode;
1029 enum insn_code icode;
1031 mode = widest_int_mode_for_size (max_size);
1033 if (mode == VOIDmode)
1034 break;
1036 icode = optab_handler (mov_optab, mode);
1037 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1038 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1040 max_size = GET_MODE_SIZE (mode);
1043 gcc_assert (!l);
1044 return n_insns;
1047 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1048 with move instructions for mode MODE. GENFUN is the gen_... function
1049 to make a move insn for that mode. DATA has all the other info. */
1051 static void
1052 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1053 struct move_by_pieces_d *data)
1055 unsigned int size = GET_MODE_SIZE (mode);
1056 rtx to1 = NULL_RTX, from1;
1058 while (data->len >= size)
1060 if (data->reverse)
1061 data->offset -= size;
1063 if (data->to)
1065 if (data->autinc_to)
1066 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1067 data->offset);
1068 else
1069 to1 = adjust_address (data->to, mode, data->offset);
1072 if (data->autinc_from)
1073 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1074 data->offset);
1075 else
1076 from1 = adjust_address (data->from, mode, data->offset);
1078 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1079 emit_insn (gen_add2_insn (data->to_addr,
1080 GEN_INT (-(HOST_WIDE_INT)size)));
1081 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1082 emit_insn (gen_add2_insn (data->from_addr,
1083 GEN_INT (-(HOST_WIDE_INT)size)));
1085 if (data->to)
1086 emit_insn ((*genfun) (to1, from1));
1087 else
1089 #ifdef PUSH_ROUNDING
1090 emit_single_push_insn (mode, from1, NULL);
1091 #else
1092 gcc_unreachable ();
1093 #endif
1096 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1097 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1098 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1099 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1101 if (! data->reverse)
1102 data->offset += size;
1104 data->len -= size;
1108 /* Emit code to move a block Y to a block X. This may be done with
1109 string-move instructions, with multiple scalar move instructions,
1110 or with a library call.
1112 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1113 SIZE is an rtx that says how long they are.
1114 ALIGN is the maximum alignment we can assume they have.
1115 METHOD describes what kind of copy this is, and what mechanisms may be used.
1117 Return the address of the new block, if memcpy is called and returns it,
1118 0 otherwise. */
1120 rtx
1121 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1122 unsigned int expected_align, HOST_WIDE_INT expected_size)
1124 bool may_use_call;
1125 rtx retval = 0;
1126 unsigned int align;
1128 gcc_assert (size);
1129 if (CONST_INT_P (size)
1130 && INTVAL (size) == 0)
1131 return 0;
1133 switch (method)
1135 case BLOCK_OP_NORMAL:
1136 case BLOCK_OP_TAILCALL:
1137 may_use_call = true;
1138 break;
1140 case BLOCK_OP_CALL_PARM:
1141 may_use_call = block_move_libcall_safe_for_call_parm ();
1143 /* Make inhibit_defer_pop nonzero around the library call
1144 to force it to pop the arguments right away. */
1145 NO_DEFER_POP;
1146 break;
1148 case BLOCK_OP_NO_LIBCALL:
1149 may_use_call = false;
1150 break;
1152 default:
1153 gcc_unreachable ();
1156 gcc_assert (MEM_P (x) && MEM_P (y));
1157 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1158 gcc_assert (align >= BITS_PER_UNIT);
1160 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1161 block copy is more efficient for other large modes, e.g. DCmode. */
1162 x = adjust_address (x, BLKmode, 0);
1163 y = adjust_address (y, BLKmode, 0);
1165 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1166 can be incorrect is coming from __builtin_memcpy. */
1167 if (CONST_INT_P (size))
1169 x = shallow_copy_rtx (x);
1170 y = shallow_copy_rtx (y);
1171 set_mem_size (x, INTVAL (size));
1172 set_mem_size (y, INTVAL (size));
1175 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1176 move_by_pieces (x, y, INTVAL (size), align, 0);
1177 else if (emit_block_move_via_movmem (x, y, size, align,
1178 expected_align, expected_size))
1179 ;
1180 else if (may_use_call
1181 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1182 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1184 /* Since x and y are passed to a libcall, mark the corresponding
1185 tree EXPR as addressable. */
1186 tree y_expr = MEM_EXPR (y);
1187 tree x_expr = MEM_EXPR (x);
1188 if (y_expr)
1189 mark_addressable (y_expr);
1190 if (x_expr)
1191 mark_addressable (x_expr);
1192 retval = emit_block_move_via_libcall (x, y, size,
1193 method == BLOCK_OP_TAILCALL);
1196 else
1197 emit_block_move_via_loop (x, y, size, align);
1199 if (method == BLOCK_OP_CALL_PARM)
1200 OK_DEFER_POP;
1202 return retval;
1203 }
1205 rtx
1206 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1207 {
1208 return emit_block_move_hints (x, y, size, method, 0, -1);
1209 }
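/* Editor's sketch (illustrative only): the common entry point for a plain
   structure copy is

     emit_block_move (dst, src, GEN_INT (32), BLOCK_OP_NORMAL);

   where DST and SRC are BLKmode MEMs and 32 is the byte count; the hints
   variant above additionally takes an expected alignment and size to pass
   on to the movmem expander.  */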
1211 /* A subroutine of emit_block_move. Returns true if calling the
1212 block move libcall will not clobber any parameters which may have
1213 already been placed on the stack. */
1215 static bool
1216 block_move_libcall_safe_for_call_parm (void)
1218 #if defined (REG_PARM_STACK_SPACE)
1219 tree fn;
1220 #endif
1222 /* If arguments are pushed on the stack, then they're safe. */
1223 if (PUSH_ARGS)
1224 return true;
1226 /* If registers go on the stack anyway, any argument is sure to clobber
1227 an outgoing argument. */
1228 #if defined (REG_PARM_STACK_SPACE)
1229 fn = emit_block_move_libcall_fn (false);
1230 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1231 depend on its argument. */
1232 (void) fn;
1233 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1234 && REG_PARM_STACK_SPACE (fn) != 0)
1235 return false;
1236 #endif
1238 /* If any argument goes in memory, then it might clobber an outgoing
1239 argument. */
1241 CUMULATIVE_ARGS args_so_far_v;
1242 cumulative_args_t args_so_far;
1243 tree fn, arg;
1245 fn = emit_block_move_libcall_fn (false);
1246 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1247 args_so_far = pack_cumulative_args (&args_so_far_v);
1249 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1250 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1252 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1253 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1254 NULL_TREE, true);
1255 if (!tmp || !REG_P (tmp))
1256 return false;
1257 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1258 return false;
1259 targetm.calls.function_arg_advance (args_so_far, mode,
1260 NULL_TREE, true);
1263 return true;
1266 /* A subroutine of emit_block_move. Expand a movmem pattern;
1267 return true if successful. */
1269 static bool
1270 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1271 unsigned int expected_align, HOST_WIDE_INT expected_size)
1273 int save_volatile_ok = volatile_ok;
1274 enum machine_mode mode;
1276 if (expected_align < align)
1277 expected_align = align;
1279 /* Since this is a move insn, we don't care about volatility. */
1280 volatile_ok = 1;
1282 /* Try the most limited insn first, because there's no point
1283 including more than one in the machine description unless
1284 the more limited one has some advantage. */
1286 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1287 mode = GET_MODE_WIDER_MODE (mode))
1289 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1291 if (code != CODE_FOR_nothing
1292 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1293 here because if SIZE is less than the mode mask, as it is
1294 returned by the macro, it will definitely be less than the
1295 actual mode mask. */
1296 && ((CONST_INT_P (size)
1297 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1298 <= (GET_MODE_MASK (mode) >> 1)))
1299 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
1301 struct expand_operand ops[6];
1302 unsigned int nops;
1304 /* ??? When called via emit_block_move_for_call, it'd be
1305 nice if there were some way to inform the backend, so
1306 that it doesn't fail the expansion because it thinks
1307 emitting the libcall would be more efficient. */
1308 nops = insn_data[(int) code].n_generator_args;
1309 gcc_assert (nops == 4 || nops == 6);
1311 create_fixed_operand (&ops[0], x);
1312 create_fixed_operand (&ops[1], y);
1313 /* The check above guarantees that this size conversion is valid. */
1314 create_convert_operand_to (&ops[2], size, mode, true);
1315 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1316 if (nops == 6)
1318 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1319 create_integer_operand (&ops[5], expected_size);
1321 if (maybe_expand_insn (code, nops, ops))
1323 volatile_ok = save_volatile_ok;
1324 return true;
1329 volatile_ok = save_volatile_ok;
1330 return false;
1333 /* A subroutine of emit_block_move. Expand a call to memcpy.
1334 Return the return value from memcpy, 0 otherwise. */
1336 rtx
1337 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1339 rtx dst_addr, src_addr;
1340 tree call_expr, fn, src_tree, dst_tree, size_tree;
1341 enum machine_mode size_mode;
1342 rtx retval;
1344 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1345 pseudos. We can then place those new pseudos into a VAR_DECL and
1346 use them later. */
1348 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1349 src_addr = copy_addr_to_reg (XEXP (src, 0));
1351 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1352 src_addr = convert_memory_address (ptr_mode, src_addr);
1354 dst_tree = make_tree (ptr_type_node, dst_addr);
1355 src_tree = make_tree (ptr_type_node, src_addr);
1357 size_mode = TYPE_MODE (sizetype);
1359 size = convert_to_mode (size_mode, size, 1);
1360 size = copy_to_mode_reg (size_mode, size);
1362 /* It is incorrect to use the libcall calling conventions to call
1363 memcpy in this context. This could be a user call to memcpy and
1364 the user may wish to examine the return value from memcpy. For
1365 targets where libcalls and normal calls have different conventions
1366 for returning pointers, we could end up generating incorrect code. */
1368 size_tree = make_tree (sizetype, size);
1370 fn = emit_block_move_libcall_fn (true);
1371 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1372 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1374 retval = expand_normal (call_expr);
1376 return retval;
1379 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1380 for the function we use for block copies. */
1382 static GTY(()) tree block_move_fn;
1384 void
1385 init_block_move_fn (const char *asmspec)
1387 if (!block_move_fn)
1389 tree args, fn, attrs, attr_args;
1391 fn = get_identifier ("memcpy");
1392 args = build_function_type_list (ptr_type_node, ptr_type_node,
1393 const_ptr_type_node, sizetype,
1394 NULL_TREE);
1396 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1397 DECL_EXTERNAL (fn) = 1;
1398 TREE_PUBLIC (fn) = 1;
1399 DECL_ARTIFICIAL (fn) = 1;
1400 TREE_NOTHROW (fn) = 1;
1401 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1402 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1404 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1405 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1407 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1409 block_move_fn = fn;
1412 if (asmspec)
1413 set_user_assembler_name (block_move_fn, asmspec);
1416 static tree
1417 emit_block_move_libcall_fn (int for_call)
1419 static bool emitted_extern;
1421 if (!block_move_fn)
1422 init_block_move_fn (NULL);
1424 if (for_call && !emitted_extern)
1426 emitted_extern = true;
1427 make_decl_rtl (block_move_fn);
1430 return block_move_fn;
1433 /* A subroutine of emit_block_move. Copy the data via an explicit
1434 loop. This is used only when libcalls are forbidden. */
1435 /* ??? It'd be nice to copy in hunks larger than QImode. */
1437 static void
1438 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1439 unsigned int align ATTRIBUTE_UNUSED)
1441 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1442 enum machine_mode x_addr_mode = get_address_mode (x);
1443 enum machine_mode y_addr_mode = get_address_mode (y);
1444 enum machine_mode iter_mode;
1446 iter_mode = GET_MODE (size);
1447 if (iter_mode == VOIDmode)
1448 iter_mode = word_mode;
1450 top_label = gen_label_rtx ();
1451 cmp_label = gen_label_rtx ();
1452 iter = gen_reg_rtx (iter_mode);
1454 emit_move_insn (iter, const0_rtx);
1456 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1457 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1458 do_pending_stack_adjust ();
1460 emit_jump (cmp_label);
1461 emit_label (top_label);
1463 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1464 x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);
1466 if (x_addr_mode != y_addr_mode)
1467 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1468 y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);
1470 x = change_address (x, QImode, x_addr);
1471 y = change_address (y, QImode, y_addr);
1473 emit_move_insn (x, y);
1475 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1476 true, OPTAB_LIB_WIDEN);
1477 if (tmp != iter)
1478 emit_move_insn (iter, tmp);
1480 emit_label (cmp_label);
1482 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1483 true, top_label);
1484 }
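/* Editor's note (not original): the RTL emitted above amounts to the byte
   loop

     for (i = 0; i < size; i++)
       ((char *) x)[i] = ((char *) y)[i];

   with the comparison placed at the bottom (the initial jump goes to
   cmp_label), so no separate zero-trip test is needed.  */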
1486 /* Copy all or part of a value X into registers starting at REGNO.
1487 The number of registers to be filled is NREGS. */
1489 void
1490 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1492 int i;
1493 #ifdef HAVE_load_multiple
1494 rtx pat;
1495 rtx last;
1496 #endif
1498 if (nregs == 0)
1499 return;
1501 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1502 x = validize_mem (force_const_mem (mode, x));
1504 /* See if the machine can do this with a load multiple insn. */
1505 #ifdef HAVE_load_multiple
1506 if (HAVE_load_multiple)
1508 last = get_last_insn ();
1509 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1510 GEN_INT (nregs));
1511 if (pat)
1513 emit_insn (pat);
1514 return;
1516 else
1517 delete_insns_since (last);
1519 #endif
1521 for (i = 0; i < nregs; i++)
1522 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1523 operand_subword_force (x, i, mode));
1524 }
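/* Editor's sketch (illustrative): spreading a two-word argument across hard
   registers starting at hard register 3 might look like

     move_block_to_reg (3, arg_mem, 2, BLKmode);

   which tries a load_multiple pattern first and otherwise emits one
   word_mode move per register.  ARG_MEM is a placeholder for the caller's
   BLKmode MEM.  */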
1526 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1527 The number of registers to be filled is NREGS. */
1529 void
1530 move_block_from_reg (int regno, rtx x, int nregs)
1532 int i;
1534 if (nregs == 0)
1535 return;
1537 /* See if the machine can do this with a store multiple insn. */
1538 #ifdef HAVE_store_multiple
1539 if (HAVE_store_multiple)
1541 rtx last = get_last_insn ();
1542 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1543 GEN_INT (nregs));
1544 if (pat)
1546 emit_insn (pat);
1547 return;
1549 else
1550 delete_insns_since (last);
1552 #endif
1554 for (i = 0; i < nregs; i++)
1556 rtx tem = operand_subword (x, i, 1, BLKmode);
1558 gcc_assert (tem);
1560 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1564 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1565 ORIG, where ORIG is a non-consecutive group of registers represented by
1566 a PARALLEL. The clone is identical to the original except in that the
1567 original set of registers is replaced by a new set of pseudo registers.
1568 The new set has the same modes as the original set. */
1570 rtx
1571 gen_group_rtx (rtx orig)
1573 int i, length;
1574 rtx *tmps;
1576 gcc_assert (GET_CODE (orig) == PARALLEL);
1578 length = XVECLEN (orig, 0);
1579 tmps = XALLOCAVEC (rtx, length);
1581 /* Skip a NULL entry in first slot. */
1582 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1584 if (i)
1585 tmps[0] = 0;
1587 for (; i < length; i++)
1589 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1590 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1592 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1595 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1596 }
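/* Editor's illustration (not original text): the register groups handled
   here and below typically look like

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   i.e. each element pairs a register with its byte offset within the
   block, with an optional leading NULL entry when part of the value is
   also passed on the stack.  */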
1598 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1599 except that values are placed in TMPS[i], and must later be moved
1600 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1602 static void
1603 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1605 rtx src;
1606 int start, i;
1607 enum machine_mode m = GET_MODE (orig_src);
1609 gcc_assert (GET_CODE (dst) == PARALLEL);
1611 if (m != VOIDmode
1612 && !SCALAR_INT_MODE_P (m)
1613 && !MEM_P (orig_src)
1614 && GET_CODE (orig_src) != CONCAT)
1616 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1617 if (imode == BLKmode)
1618 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1619 else
1620 src = gen_reg_rtx (imode);
1621 if (imode != BLKmode)
1622 src = gen_lowpart (GET_MODE (orig_src), src);
1623 emit_move_insn (src, orig_src);
1624 /* ...and back again. */
1625 if (imode != BLKmode)
1626 src = gen_lowpart (imode, src);
1627 emit_group_load_1 (tmps, dst, src, type, ssize);
1628 return;
1631 /* Check for a NULL entry, used to indicate that the parameter goes
1632 both on the stack and in registers. */
1633 if (XEXP (XVECEXP (dst, 0, 0), 0))
1634 start = 0;
1635 else
1636 start = 1;
1638 /* Process the pieces. */
1639 for (i = start; i < XVECLEN (dst, 0); i++)
1641 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1642 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1643 unsigned int bytelen = GET_MODE_SIZE (mode);
1644 int shift = 0;
1646 /* Handle trailing fragments that run over the size of the struct. */
1647 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1649 /* Arrange to shift the fragment to where it belongs.
1650 extract_bit_field loads to the lsb of the reg. */
1651 if (
1652 #ifdef BLOCK_REG_PADDING
1653 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1654 == (BYTES_BIG_ENDIAN ? upward : downward)
1655 #else
1656 BYTES_BIG_ENDIAN
1657 #endif
1658 )
1659 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1660 bytelen = ssize - bytepos;
1661 gcc_assert (bytelen > 0);
1664 /* If we won't be loading directly from memory, protect the real source
1665 from strange tricks we might play; but make sure that the source can
1666 be loaded directly into the destination. */
1667 src = orig_src;
1668 if (!MEM_P (orig_src)
1669 && (!CONSTANT_P (orig_src)
1670 || (GET_MODE (orig_src) != mode
1671 && GET_MODE (orig_src) != VOIDmode)))
1673 if (GET_MODE (orig_src) == VOIDmode)
1674 src = gen_reg_rtx (mode);
1675 else
1676 src = gen_reg_rtx (GET_MODE (orig_src));
1678 emit_move_insn (src, orig_src);
1681 /* Optimize the access just a bit. */
1682 if (MEM_P (src)
1683 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1684 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1685 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1686 && bytelen == GET_MODE_SIZE (mode))
1688 tmps[i] = gen_reg_rtx (mode);
1689 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1691 else if (COMPLEX_MODE_P (mode)
1692 && GET_MODE (src) == mode
1693 && bytelen == GET_MODE_SIZE (mode))
1694 /* Let emit_move_complex do the bulk of the work. */
1695 tmps[i] = src;
1696 else if (GET_CODE (src) == CONCAT)
1698 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1699 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1701 if ((bytepos == 0 && bytelen == slen0)
1702 || (bytepos != 0 && bytepos + bytelen <= slen))
1704 /* The following assumes that the concatenated objects all
1705 have the same size. In this case, a simple calculation
1706 can be used to determine the object and the bit field
1707 to be extracted. */
1708 tmps[i] = XEXP (src, bytepos / slen0);
1709 if (! CONSTANT_P (tmps[i])
1710 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1711 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1712 (bytepos % slen0) * BITS_PER_UNIT,
1713 1, false, NULL_RTX, mode, mode);
1715 else
1717 rtx mem;
1719 gcc_assert (!bytepos);
1720 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1721 emit_move_insn (mem, src);
1722 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1723 0, 1, false, NULL_RTX, mode, mode);
1726 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1727 SIMD register, which is currently broken. While we get GCC
1728 to emit proper RTL for these cases, let's dump to memory. */
1729 else if (VECTOR_MODE_P (GET_MODE (dst))
1730 && REG_P (src))
1732 int slen = GET_MODE_SIZE (GET_MODE (src));
1733 rtx mem;
1735 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1736 emit_move_insn (mem, src);
1737 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1739 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1740 && XVECLEN (dst, 0) > 1)
1741 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1742 else if (CONSTANT_P (src))
1744 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1746 if (len == ssize)
1747 tmps[i] = src;
1748 else
1750 rtx first, second;
1752 gcc_assert (2 * len == ssize);
1753 split_double (src, &first, &second);
1754 if (i)
1755 tmps[i] = second;
1756 else
1757 tmps[i] = first;
1760 else if (REG_P (src) && GET_MODE (src) == mode)
1761 tmps[i] = src;
1762 else
1763 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1764 bytepos * BITS_PER_UNIT, 1, false, NULL_RTX,
1765 mode, mode);
1767 if (shift)
1768 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1769 shift, tmps[i], 0);
1773 /* Emit code to move a block SRC of type TYPE to a block DST,
1774 where DST is non-consecutive registers represented by a PARALLEL.
1775 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1776 if not known. */
1778 void
1779 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1781 rtx *tmps;
1782 int i;
1784 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1785 emit_group_load_1 (tmps, dst, src, type, ssize);
1787 /* Copy the extracted pieces into the proper (probable) hard regs. */
1788 for (i = 0; i < XVECLEN (dst, 0); i++)
1790 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1791 if (d == NULL)
1792 continue;
1793 emit_move_insn (d, tmps[i]);
1797 /* Similar, but load SRC into new pseudos in a format that looks like
1798 PARALLEL. This can later be fed to emit_group_move to get things
1799 in the right place. */
1801 rtx
1802 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1804 rtvec vec;
1805 int i;
1807 vec = rtvec_alloc (XVECLEN (parallel, 0));
1808 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1810 /* Convert the vector to look just like the original PARALLEL, except
1811 with the computed values. */
1812 for (i = 0; i < XVECLEN (parallel, 0); i++)
1814 rtx e = XVECEXP (parallel, 0, i);
1815 rtx d = XEXP (e, 0);
1817 if (d)
1819 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1820 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1822 RTVEC_ELT (vec, i) = e;
1825 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1828 /* Emit code to move a block SRC to block DST, where SRC and DST are
1829 non-consecutive groups of registers, each represented by a PARALLEL. */
1831 void
1832 emit_group_move (rtx dst, rtx src)
1834 int i;
1836 gcc_assert (GET_CODE (src) == PARALLEL
1837 && GET_CODE (dst) == PARALLEL
1838 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1840 /* Skip first entry if NULL. */
1841 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1842 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1843 XEXP (XVECEXP (src, 0, i), 0));
1846 /* Move a group of registers represented by a PARALLEL into pseudos. */
1848 rtx
1849 emit_group_move_into_temps (rtx src)
1851 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1852 int i;
1854 for (i = 0; i < XVECLEN (src, 0); i++)
1856 rtx e = XVECEXP (src, 0, i);
1857 rtx d = XEXP (e, 0);
1859 if (d)
1860 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1861 RTVEC_ELT (vec, i) = e;
1864 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1867 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1868 where SRC is non-consecutive registers represented by a PARALLEL.
1869 SSIZE represents the total size of block ORIG_DST, or -1 if not
1870 known. */
1872 void
1873 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1875 rtx *tmps, dst;
1876 int start, finish, i;
1877 enum machine_mode m = GET_MODE (orig_dst);
1879 gcc_assert (GET_CODE (src) == PARALLEL);
1881 if (!SCALAR_INT_MODE_P (m)
1882 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1884 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1885 if (imode == BLKmode)
1886 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1887 else
1888 dst = gen_reg_rtx (imode);
1889 emit_group_store (dst, src, type, ssize);
1890 if (imode != BLKmode)
1891 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1892 emit_move_insn (orig_dst, dst);
1893 return;
1896 /* Check for a NULL entry, used to indicate that the parameter goes
1897 both on the stack and in registers. */
1898 if (XEXP (XVECEXP (src, 0, 0), 0))
1899 start = 0;
1900 else
1901 start = 1;
1902 finish = XVECLEN (src, 0);
1904 tmps = XALLOCAVEC (rtx, finish);
1906 /* Copy the (probable) hard regs into pseudos. */
1907 for (i = start; i < finish; i++)
1909 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1910 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1912 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1913 emit_move_insn (tmps[i], reg);
1915 else
1916 tmps[i] = reg;
1919 /* If we won't be storing directly into memory, protect the real destination
1920 from strange tricks we might play. */
1921 dst = orig_dst;
1922 if (GET_CODE (dst) == PARALLEL)
1924 rtx temp;
1926 /* We can get a PARALLEL dst if there is a conditional expression in
1927 a return statement. In that case, the dst and src are the same,
1928 so no action is necessary. */
1929 if (rtx_equal_p (dst, src))
1930 return;
1932 /* It is unclear if we can ever reach here, but we may as well handle
1933 it. Allocate a temporary, and split this into a store/load to/from
1934 the temporary. */
1936 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1937 emit_group_store (temp, src, type, ssize);
1938 emit_group_load (dst, temp, type, ssize);
1939 return;
1941 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1943 enum machine_mode outer = GET_MODE (dst);
1944 enum machine_mode inner;
1945 HOST_WIDE_INT bytepos;
1946 bool done = false;
1947 rtx temp;
1949 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1950 dst = gen_reg_rtx (outer);
1952 /* Make life a bit easier for combine. */
1953 /* If the first element of the vector is the low part
1954 of the destination mode, use a paradoxical subreg to
1955 initialize the destination. */
1956 if (start < finish)
1958 inner = GET_MODE (tmps[start]);
1959 bytepos = subreg_lowpart_offset (inner, outer);
1960 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1962 temp = simplify_gen_subreg (outer, tmps[start],
1963 inner, 0);
1964 if (temp)
1966 emit_move_insn (dst, temp);
1967 done = true;
1968 start++;
1973 /* If the first element wasn't the low part, try the last. */
1974 if (!done
1975 && start < finish - 1)
1977 inner = GET_MODE (tmps[finish - 1]);
1978 bytepos = subreg_lowpart_offset (inner, outer);
1979 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1981 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1982 inner, 0);
1983 if (temp)
1985 emit_move_insn (dst, temp);
1986 done = true;
1987 finish--;
1992 /* Otherwise, simply initialize the result to zero. */
1993 if (!done)
1994 emit_move_insn (dst, CONST0_RTX (outer));
1997 /* Process the pieces. */
1998 for (i = start; i < finish; i++)
2000 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2001 enum machine_mode mode = GET_MODE (tmps[i]);
2002 unsigned int bytelen = GET_MODE_SIZE (mode);
2003 unsigned int adj_bytelen = bytelen;
2004 rtx dest = dst;
2006 /* Handle trailing fragments that run over the size of the struct. */
2007 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2008 adj_bytelen = ssize - bytepos;
2010 if (GET_CODE (dst) == CONCAT)
2012 if (bytepos + adj_bytelen
2013 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2014 dest = XEXP (dst, 0);
2015 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2017 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2018 dest = XEXP (dst, 1);
2020 else
2022 enum machine_mode dest_mode = GET_MODE (dest);
2023 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2025 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2027 if (GET_MODE_ALIGNMENT (dest_mode)
2028 >= GET_MODE_ALIGNMENT (tmp_mode))
2030 dest = assign_stack_temp (dest_mode,
2031 GET_MODE_SIZE (dest_mode),
2033 emit_move_insn (adjust_address (dest,
2034 tmp_mode,
2035 bytepos),
2036 tmps[i]);
2037 dst = dest;
2039 else
2041 dest = assign_stack_temp (tmp_mode,
2042 GET_MODE_SIZE (tmp_mode),
2044 emit_move_insn (dest, tmps[i]);
2045 dst = adjust_address (dest, dest_mode, bytepos);
2047 break;
2051 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2053 /* store_bit_field always takes its value from the lsb.
2054 Move the fragment to the lsb if it's not already there. */
2055 if (
2056 #ifdef BLOCK_REG_PADDING
2057 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2058 == (BYTES_BIG_ENDIAN ? upward : downward)
2059 #else
2060 BYTES_BIG_ENDIAN
2061 #endif
2064 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2065 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2066 shift, tmps[i], 0);
2068 bytelen = adj_bytelen;
2071 /* Optimize the access just a bit. */
2072 if (MEM_P (dest)
2073 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2074 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2075 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2076 && bytelen == GET_MODE_SIZE (mode))
2077 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2078 else
2079 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2080 0, 0, mode, tmps[i]);
2083 /* Copy from the pseudo into the (probable) hard reg. */
2084 if (orig_dst != dst)
2085 emit_move_insn (orig_dst, dst);
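/* Illustrative sketch, not part of the original file: spilling a value that
   lives in a register group (described by the PARALLEL PAR) into a fresh
   BLKmode stack temporary of SIZE bytes.  TYPE is the value's tree type as
   the caller knows it.  */

static rtx ATTRIBUTE_UNUSED
sketch_group_store_to_temp (rtx par, tree type, int size)
{
  rtx slot = assign_stack_temp (BLKmode, size, 0);

  emit_group_store (slot, par, type, size);
  return slot;
}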
2088 /* Generate code to copy a BLKmode object of TYPE out of a
2089 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2090 is null, a stack temporary is created. TGTBLK is returned.
2092 The purpose of this routine is to handle functions that return
2093 BLKmode structures in registers. Some machines (the PA for example)
2094 want to return all small structures in registers regardless of the
2095 structure's alignment. */
2098 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2100 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2101 rtx src = NULL, dst = NULL;
2102 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2103 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2104 enum machine_mode copy_mode;
2106 if (tgtblk == 0)
2108 tgtblk = assign_temp (build_qualified_type (type,
2109 (TYPE_QUALS (type)
2110 | TYPE_QUAL_CONST)),
2111 0, 1, 1);
2112 preserve_temp_slots (tgtblk);
2115 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2116 into a new pseudo which is a full word. */
2118 if (GET_MODE (srcreg) != BLKmode
2119 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2120 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2122 /* If the structure doesn't take up a whole number of words, see whether
2123 SRCREG is padded on the left or on the right. If it's on the left,
2124 set PADDING_CORRECTION to the number of bits to skip.
2126 In most ABIs, the structure will be returned at the least significant end of
2127 the register, which translates to right padding on little-endian
2128 targets and left padding on big-endian targets. The opposite
2129 holds if the structure is returned at the most significant
2130 end of the register. */
2131 if (bytes % UNITS_PER_WORD != 0
2132 && (targetm.calls.return_in_msb (type)
2133 ? !BYTES_BIG_ENDIAN
2134 : BYTES_BIG_ENDIAN))
2135 padding_correction
2136 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2138 /* Copy the structure BITSIZE bits at a time. If the target lives in
2139 memory, take care of not reading/writing past its end by selecting
2140 a copy mode suited to BITSIZE. This should always be possible given
2141 how it is computed.
2143 We could probably emit more efficient code for machines which do not use
2144 strict alignment, but it doesn't seem worth the effort at the current
2145 time. */
2147 copy_mode = word_mode;
2148 if (MEM_P (tgtblk))
2150 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2151 if (mem_mode != BLKmode)
2152 copy_mode = mem_mode;
2155 for (bitpos = 0, xbitpos = padding_correction;
2156 bitpos < bytes * BITS_PER_UNIT;
2157 bitpos += bitsize, xbitpos += bitsize)
2159 /* We need a new source operand each time xbitpos is on a
2160 word boundary and when xbitpos == padding_correction
2161 (the first time through). */
2162 if (xbitpos % BITS_PER_WORD == 0
2163 || xbitpos == padding_correction)
2164 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2165 GET_MODE (srcreg));
2167 /* We need a new destination operand each time bitpos is on
2168 a word boundary. */
2169 if (bitpos % BITS_PER_WORD == 0)
2170 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2172 /* Use xbitpos for the source extraction (right justified) and
2173 bitpos for the destination store (left justified). */
2174 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2175 extract_bit_field (src, bitsize,
2176 xbitpos % BITS_PER_WORD, 1, false,
2177 NULL_RTX, copy_mode, copy_mode));
2180 return tgtblk;
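/* Illustrative sketch, not part of the original file: the typical use from
   call expansion.  SRCREG is the (possibly multi-word) register the callee
   used to return a BLKmode aggregate of type RETTYPE; passing a null target
   lets copy_blkmode_from_reg allocate the stack temporary itself.  */

static rtx ATTRIBUTE_UNUSED
sketch_copy_blkmode_return (rtx srcreg, tree rettype)
{
  return copy_blkmode_from_reg (NULL_RTX, srcreg, rettype);
}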
2183 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2184 register if it contains any data, otherwise return null.
2186 This is used on targets that return BLKmode values in registers. */
2189 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2191 int i, n_regs;
2192 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2193 unsigned int bitsize;
2194 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2195 enum machine_mode dst_mode;
2197 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2199 x = expand_normal (src);
2201 bytes = int_size_in_bytes (TREE_TYPE (src));
2202 if (bytes == 0)
2203 return NULL_RTX;
2205 /* If the structure doesn't take up a whole number of words, see
2206 whether the register value should be padded on the left or on
2207 the right. Set PADDING_CORRECTION to the number of padding
2208 bits needed on the left side.
2210 In most ABIs, the structure will be returned at the least significant end of
2211 the register, which translates to right padding on little-endian
2212 targets and left padding on big-endian targets. The opposite
2213 holds if the structure is returned at the most significant
2214 end of the register. */
2215 if (bytes % UNITS_PER_WORD != 0
2216 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2217 ? !BYTES_BIG_ENDIAN
2218 : BYTES_BIG_ENDIAN))
2219 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2220 * BITS_PER_UNIT));
2222 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2223 dst_words = XALLOCAVEC (rtx, n_regs);
2224 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2226 /* Copy the structure BITSIZE bits at a time. */
2227 for (bitpos = 0, xbitpos = padding_correction;
2228 bitpos < bytes * BITS_PER_UNIT;
2229 bitpos += bitsize, xbitpos += bitsize)
2231 /* We need a new destination pseudo each time xbitpos is
2232 on a word boundary and when xbitpos == padding_correction
2233 (the first time through). */
2234 if (xbitpos % BITS_PER_WORD == 0
2235 || xbitpos == padding_correction)
2237 /* Generate an appropriate register. */
2238 dst_word = gen_reg_rtx (word_mode);
2239 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2241 /* Clear the destination before we move anything into it. */
2242 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2245 /* We need a new source operand each time bitpos is on a word
2246 boundary. */
2247 if (bitpos % BITS_PER_WORD == 0)
2248 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2250 /* Use bitpos for the source extraction (left justified) and
2251 xbitpos for the destination store (right justified). */
2252 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2253 0, 0, word_mode,
2254 extract_bit_field (src_word, bitsize,
2255 bitpos % BITS_PER_WORD, 1, false,
2256 NULL_RTX, word_mode, word_mode));
2259 if (mode == BLKmode)
2261 /* Find the smallest integer mode large enough to hold the
2262 entire structure. */
2263 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2264 mode != VOIDmode;
2265 mode = GET_MODE_WIDER_MODE (mode))
2266 /* Have we found a large enough mode? */
2267 if (GET_MODE_SIZE (mode) >= bytes)
2268 break;
2270 /* A suitable mode should have been found. */
2271 gcc_assert (mode != VOIDmode);
2274 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2275 dst_mode = word_mode;
2276 else
2277 dst_mode = mode;
2278 dst = gen_reg_rtx (dst_mode);
2280 for (i = 0; i < n_regs; i++)
2281 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2283 if (mode != dst_mode)
2284 dst = gen_lowpart (mode, dst);
2286 return dst;
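/* Illustrative sketch, not part of the original file: expanding "return X"
   where X has BLKmode but the ABI returns it in a register.  MODE is assumed
   to be the non-BLKmode mode of the return register RESULT_REG, both taken
   from the target's return-value hook; RETVAL is the expression returned.  */

static void ATTRIBUTE_UNUSED
sketch_return_blkmode_in_reg (enum machine_mode mode, tree retval,
                              rtx result_reg)
{
  rtx val = copy_blkmode_to_reg (mode, retval);

  if (val != NULL_RTX)
    emit_move_insn (result_reg, val);
}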
2289 /* Add a USE expression for REG to the (possibly empty) list pointed
2290 to by CALL_FUSAGE. REG must denote a hard register. */
2292 void
2293 use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2295 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2297 *call_fusage
2298 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2301 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2302 starting at REGNO. All of these registers must be hard registers. */
2304 void
2305 use_regs (rtx *call_fusage, int regno, int nregs)
2307 int i;
2309 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2311 for (i = 0; i < nregs; i++)
2312 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2315 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2316 PARALLEL REGS. This is for calls that pass values in multiple
2317 non-contiguous locations. The Irix 6 ABI has examples of this. */
2319 void
2320 use_group_regs (rtx *call_fusage, rtx regs)
2322 int i;
2324 for (i = 0; i < XVECLEN (regs, 0); i++)
2326 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2328 /* A NULL entry means the parameter goes both on the stack and in
2329 registers. This can also be a MEM for targets that pass values
2330 partially on the stack and partially in registers. */
2331 if (reg != 0 && REG_P (reg))
2332 use_reg (call_fusage, reg);
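/* Illustrative sketch, not part of the original file: how call expansion
   records the registers a call reads.  REG is assumed to be a single hard
   register carrying one argument, PAR a PARALLEL for an argument passed in
   several non-contiguous registers.  */

static rtx ATTRIBUTE_UNUSED
sketch_record_call_uses (rtx reg, rtx par)
{
  rtx call_fusage = NULL_RTX;

  use_reg (&call_fusage, reg);         /* One hard register.  */
  use_group_regs (&call_fusage, par);  /* A whole register group.  */

  /* The resulting list would be attached to the call insn as its
     CALL_INSN_FUNCTION_USAGE.  */
  return call_fusage;
}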
2336 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2337 assignment and the code of the expression on the RHS is CODE. Return
2338 NULL otherwise. */
2340 static gimple
2341 get_def_for_expr (tree name, enum tree_code code)
2343 gimple def_stmt;
2345 if (TREE_CODE (name) != SSA_NAME)
2346 return NULL;
2348 def_stmt = get_gimple_for_ssa_name (name);
2349 if (!def_stmt
2350 || gimple_assign_rhs_code (def_stmt) != code)
2351 return NULL;
2353 return def_stmt;
2356 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2357 assignment and the class of the expression on the RHS is CLASS. Return
2358 NULL otherwise. */
2360 static gimple
2361 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2363 gimple def_stmt;
2365 if (TREE_CODE (name) != SSA_NAME)
2366 return NULL;
2368 def_stmt = get_gimple_for_ssa_name (name);
2369 if (!def_stmt
2370 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2371 return NULL;
2373 return def_stmt;
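/* Illustrative sketch, not part of the original file: the usual way these
   helpers are used during expansion, peeking through an SSA name to see
   whether it was defined by a multiplication and fetching the operands if
   so (e.g. as a first step towards emitting a multiply-accumulate).  */

static bool ATTRIBUTE_UNUSED
sketch_defined_by_mult_p (tree name, tree *op0, tree *op1)
{
  gimple def = get_def_for_expr (name, MULT_EXPR);

  if (!def)
    return false;

  *op0 = gimple_assign_rhs1 (def);
  *op1 = gimple_assign_rhs2 (def);
  return true;
}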
2377 /* Determine whether the LEN bytes generated by CONSTFUN can be
2378 stored to memory using several move instructions. CONSTFUNDATA is
2379 a pointer which will be passed as argument in every CONSTFUN call.
2380 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2381 a memset operation and false if it's a copy of a constant string.
2382 Return nonzero if a call to store_by_pieces should succeed. */
2385 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2386 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2387 void *constfundata, unsigned int align, bool memsetp)
2389 unsigned HOST_WIDE_INT l;
2390 unsigned int max_size;
2391 HOST_WIDE_INT offset = 0;
2392 enum machine_mode mode;
2393 enum insn_code icode;
2394 int reverse;
2395 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2396 rtx cst ATTRIBUTE_UNUSED;
2398 if (len == 0)
2399 return 1;
2401 if (! (memsetp
2402 ? SET_BY_PIECES_P (len, align)
2403 : STORE_BY_PIECES_P (len, align)))
2404 return 0;
2406 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2408 /* We would first store what we can in the largest integer mode, then go to
2409 successively smaller modes. */
2411 for (reverse = 0;
2412 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2413 reverse++)
2415 l = len;
2416 max_size = STORE_MAX_PIECES + 1;
2417 while (max_size > 1)
2419 mode = widest_int_mode_for_size (max_size);
2421 if (mode == VOIDmode)
2422 break;
2424 icode = optab_handler (mov_optab, mode);
2425 if (icode != CODE_FOR_nothing
2426 && align >= GET_MODE_ALIGNMENT (mode))
2428 unsigned int size = GET_MODE_SIZE (mode);
2430 while (l >= size)
2432 if (reverse)
2433 offset -= size;
2435 cst = (*constfun) (constfundata, offset, mode);
2436 if (!targetm.legitimate_constant_p (mode, cst))
2437 return 0;
2439 if (!reverse)
2440 offset += size;
2442 l -= size;
2446 max_size = GET_MODE_SIZE (mode);
2449 /* The code above should have handled everything. */
2450 gcc_assert (!l);
2453 return 1;
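/* Illustrative sketch, not part of the original file: a minimal CONSTFUN
   that produces zero for every piece, plus the feasibility check built on
   it.  A real memset or strcpy expander would instead return the actual
   byte pattern for the piece at OFFSET in MODE.  */

static rtx
sketch_zero_constfun (void *data ATTRIBUTE_UNUSED,
                      HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                      enum machine_mode mode)
{
  return CONST0_RTX (mode);
}

static bool ATTRIBUTE_UNUSED
sketch_can_zero_by_pieces_p (unsigned HOST_WIDE_INT len, unsigned int align)
{
  return can_store_by_pieces (len, sketch_zero_constfun, NULL, align, true) != 0;
}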
2456 /* Generate several move instructions to store LEN bytes generated by
2457 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2458 pointer which will be passed as argument in every CONSTFUN call.
2459 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2460 a memset operation and false if it's a copy of a constant string.
2461 If ENDP is 0, return TO; if ENDP is 1, return the memory at the end,
2462 a la mempcpy; and if ENDP is 2, return the memory at the end minus one
2463 byte, a la stpcpy. */
2466 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2467 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2468 void *constfundata, unsigned int align, bool memsetp, int endp)
2470 enum machine_mode to_addr_mode = get_address_mode (to);
2471 struct store_by_pieces_d data;
2473 if (len == 0)
2475 gcc_assert (endp != 2);
2476 return to;
2479 gcc_assert (memsetp
2480 ? SET_BY_PIECES_P (len, align)
2481 : STORE_BY_PIECES_P (len, align));
2482 data.constfun = constfun;
2483 data.constfundata = constfundata;
2484 data.len = len;
2485 data.to = to;
2486 store_by_pieces_1 (&data, align);
2487 if (endp)
2489 rtx to1;
2491 gcc_assert (!data.reverse);
2492 if (data.autinc_to)
2494 if (endp == 2)
2496 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2497 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2498 else
2499 data.to_addr = copy_to_mode_reg (to_addr_mode,
2500 plus_constant (to_addr_mode,
2501 data.to_addr,
2502 -1));
2504 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2505 data.offset);
2507 else
2509 if (endp == 2)
2510 --data.offset;
2511 to1 = adjust_address (data.to, QImode, data.offset);
2513 return to1;
2515 else
2516 return data.to;
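/* Illustrative sketch, not part of the original file: pairing the check
   sketched after can_store_by_pieces with the expansion itself, clearing
   LEN bytes of the BLKmode MEM rtx TO by pieces when the target allows it.
   ENDP == 0 because the caller does not need a pointer past the data.  */

static void ATTRIBUTE_UNUSED
sketch_zero_by_pieces (rtx to, unsigned HOST_WIDE_INT len)
{
  unsigned int align = MEM_ALIGN (to);

  if (can_store_by_pieces (len, sketch_zero_constfun, NULL, align, true))
    store_by_pieces (to, len, sketch_zero_constfun, NULL, align, true, 0);
}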
2519 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2520 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2522 static void
2523 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2525 struct store_by_pieces_d data;
2527 if (len == 0)
2528 return;
2530 data.constfun = clear_by_pieces_1;
2531 data.constfundata = NULL;
2532 data.len = len;
2533 data.to = to;
2534 store_by_pieces_1 (&data, align);
2537 /* Callback routine for clear_by_pieces.
2538 Return const0_rtx unconditionally. */
2540 static rtx
2541 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2542 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2543 enum machine_mode mode ATTRIBUTE_UNUSED)
2545 return const0_rtx;
2548 /* Subroutine of clear_by_pieces and store_by_pieces.
2549 Generate several move instructions to store LEN bytes of block TO. (A MEM
2550 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2552 static void
2553 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2554 unsigned int align ATTRIBUTE_UNUSED)
2556 enum machine_mode to_addr_mode = get_address_mode (data->to);
2557 rtx to_addr = XEXP (data->to, 0);
2558 unsigned int max_size = STORE_MAX_PIECES + 1;
2559 enum insn_code icode;
2561 data->offset = 0;
2562 data->to_addr = to_addr;
2563 data->autinc_to
2564 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2565 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2567 data->explicit_inc_to = 0;
2568 data->reverse
2569 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2570 if (data->reverse)
2571 data->offset = data->len;
2573 /* If storing requires more than two move insns,
2574 copy addresses to registers (to make displacements shorter)
2575 and use post-increment if available. */
2576 if (!data->autinc_to
2577 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2579 /* Determine the main mode we'll be using.
2580 MODE might not be used depending on the definitions of the
2581 USE_* macros below. */
2582 enum machine_mode mode ATTRIBUTE_UNUSED
2583 = widest_int_mode_for_size (max_size);
2585 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2587 data->to_addr = copy_to_mode_reg (to_addr_mode,
2588 plus_constant (to_addr_mode,
2589 to_addr,
2590 data->len));
2591 data->autinc_to = 1;
2592 data->explicit_inc_to = -1;
2595 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2596 && ! data->autinc_to)
2598 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2599 data->autinc_to = 1;
2600 data->explicit_inc_to = 1;
2603 if ( !data->autinc_to && CONSTANT_P (to_addr))
2604 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2607 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2609 /* First store what we can in the largest integer mode, then go to
2610 successively smaller modes. */
2612 while (max_size > 1)
2614 enum machine_mode mode = widest_int_mode_for_size (max_size);
2616 if (mode == VOIDmode)
2617 break;
2619 icode = optab_handler (mov_optab, mode);
2620 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2621 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2623 max_size = GET_MODE_SIZE (mode);
2626 /* The code above should have handled everything. */
2627 gcc_assert (!data->len);
2630 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2631 with move instructions for mode MODE. GENFUN is the gen_... function
2632 to make a move insn for that mode. DATA has all the other info. */
2634 static void
2635 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2636 struct store_by_pieces_d *data)
2638 unsigned int size = GET_MODE_SIZE (mode);
2639 rtx to1, cst;
2641 while (data->len >= size)
2643 if (data->reverse)
2644 data->offset -= size;
2646 if (data->autinc_to)
2647 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2648 data->offset);
2649 else
2650 to1 = adjust_address (data->to, mode, data->offset);
2652 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2653 emit_insn (gen_add2_insn (data->to_addr,
2654 GEN_INT (-(HOST_WIDE_INT) size)));
2656 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2657 emit_insn ((*genfun) (to1, cst));
2659 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2660 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2662 if (! data->reverse)
2663 data->offset += size;
2665 data->len -= size;
2669 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2670 its length in bytes. */
2673 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2674 unsigned int expected_align, HOST_WIDE_INT expected_size)
2676 enum machine_mode mode = GET_MODE (object);
2677 unsigned int align;
2679 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2681 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2682 just move a zero. Otherwise, do this a piece at a time. */
2683 if (mode != BLKmode
2684 && CONST_INT_P (size)
2685 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2687 rtx zero = CONST0_RTX (mode);
2688 if (zero != NULL)
2690 emit_move_insn (object, zero);
2691 return NULL;
2694 if (COMPLEX_MODE_P (mode))
2696 zero = CONST0_RTX (GET_MODE_INNER (mode));
2697 if (zero != NULL)
2699 write_complex_part (object, zero, 0);
2700 write_complex_part (object, zero, 1);
2701 return NULL;
2706 if (size == const0_rtx)
2707 return NULL;
2709 align = MEM_ALIGN (object);
2711 if (CONST_INT_P (size)
2712 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2713 clear_by_pieces (object, INTVAL (size), align);
2714 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2715 expected_align, expected_size))
2717 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2718 return set_storage_via_libcall (object, size, const0_rtx,
2719 method == BLOCK_OP_TAILCALL);
2720 else
2721 gcc_unreachable ();
2723 return NULL;
2727 clear_storage (rtx object, rtx size, enum block_op_methods method)
2729 return clear_storage_hints (object, size, method, 0, -1);
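/* Illustrative sketch, not part of the original file: the most common way
   clear_storage is reached, zeroing a freshly allocated BLKmode stack
   temporary of SIZE bytes.  */

static rtx ATTRIBUTE_UNUSED
sketch_zero_stack_temp (HOST_WIDE_INT size)
{
  rtx slot = assign_stack_temp (BLKmode, size, 0);

  clear_storage (slot, GEN_INT (size), BLOCK_OP_NORMAL);
  return slot;
}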
2733 /* A subroutine of clear_storage. Expand a call to memset.
2734 Return the return value of memset, 0 otherwise. */
2737 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2739 tree call_expr, fn, object_tree, size_tree, val_tree;
2740 enum machine_mode size_mode;
2741 rtx retval;
2743 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2744 place those new pseudos into a VAR_DECL and use them later. */
2746 object = copy_addr_to_reg (XEXP (object, 0));
2748 size_mode = TYPE_MODE (sizetype);
2749 size = convert_to_mode (size_mode, size, 1);
2750 size = copy_to_mode_reg (size_mode, size);
2752 /* It is incorrect to use the libcall calling conventions to call
2753 memset in this context. This could be a user call to memset and
2754 the user may wish to examine the return value from memset. For
2755 targets where libcalls and normal calls have different conventions
2756 for returning pointers, we could end up generating incorrect code. */
2758 object_tree = make_tree (ptr_type_node, object);
2759 if (!CONST_INT_P (val))
2760 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2761 size_tree = make_tree (sizetype, size);
2762 val_tree = make_tree (integer_type_node, val);
2764 fn = clear_storage_libcall_fn (true);
2765 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2766 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2768 retval = expand_normal (call_expr);
2770 return retval;
2773 /* A subroutine of set_storage_via_libcall. Create the tree node
2774 for the function we use for block clears. */
2776 tree block_clear_fn;
2778 void
2779 init_block_clear_fn (const char *asmspec)
2781 if (!block_clear_fn)
2783 tree fn, args;
2785 fn = get_identifier ("memset");
2786 args = build_function_type_list (ptr_type_node, ptr_type_node,
2787 integer_type_node, sizetype,
2788 NULL_TREE);
2790 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2791 DECL_EXTERNAL (fn) = 1;
2792 TREE_PUBLIC (fn) = 1;
2793 DECL_ARTIFICIAL (fn) = 1;
2794 TREE_NOTHROW (fn) = 1;
2795 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2796 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2798 block_clear_fn = fn;
2801 if (asmspec)
2802 set_user_assembler_name (block_clear_fn, asmspec);
2805 static tree
2806 clear_storage_libcall_fn (int for_call)
2808 static bool emitted_extern;
2810 if (!block_clear_fn)
2811 init_block_clear_fn (NULL);
2813 if (for_call && !emitted_extern)
2815 emitted_extern = true;
2816 make_decl_rtl (block_clear_fn);
2819 return block_clear_fn;
2822 /* Expand a setmem pattern; return true if successful. */
2824 bool
2825 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2826 unsigned int expected_align, HOST_WIDE_INT expected_size)
2828 /* Try the most limited insn first, because there's no point
2829 including more than one in the machine description unless
2830 the more limited one has some advantage. */
2832 enum machine_mode mode;
2834 if (expected_align < align)
2835 expected_align = align;
2837 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2838 mode = GET_MODE_WIDER_MODE (mode))
2840 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2842 if (code != CODE_FOR_nothing
2843 /* We don't need MODE to be narrower than
2844 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2845 the mode mask, as it is returned by the macro, it will
2846 definitely be less than the actual mode mask. */
2847 && ((CONST_INT_P (size)
2848 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2849 <= (GET_MODE_MASK (mode) >> 1)))
2850 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
2852 struct expand_operand ops[6];
2853 unsigned int nops;
2855 nops = insn_data[(int) code].n_generator_args;
2856 gcc_assert (nops == 4 || nops == 6);
2858 create_fixed_operand (&ops[0], object);
2859 /* The check above guarantees that this size conversion is valid. */
2860 create_convert_operand_to (&ops[1], size, mode, true);
2861 create_convert_operand_from (&ops[2], val, byte_mode, true);
2862 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2863 if (nops == 6)
2865 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2866 create_integer_operand (&ops[5], expected_size);
2868 if (maybe_expand_insn (code, nops, ops))
2869 return true;
2873 return false;
2877 /* Write to one of the components of the complex value CPLX. Write VAL to
2878 the real part if IMAG_P is false, and the imaginary part if it's true. */
2880 static void
2881 write_complex_part (rtx cplx, rtx val, bool imag_p)
2883 enum machine_mode cmode;
2884 enum machine_mode imode;
2885 unsigned ibitsize;
2887 if (GET_CODE (cplx) == CONCAT)
2889 emit_move_insn (XEXP (cplx, imag_p), val);
2890 return;
2893 cmode = GET_MODE (cplx);
2894 imode = GET_MODE_INNER (cmode);
2895 ibitsize = GET_MODE_BITSIZE (imode);
2897 /* For MEMs simplify_gen_subreg may generate an invalid new address
2898 because, e.g., the original address is considered mode-dependent
2899 by the target, which restricts simplify_subreg from invoking
2900 adjust_address_nv. Instead of preparing fallback support for an
2901 invalid address, we call adjust_address_nv directly. */
2902 if (MEM_P (cplx))
2904 emit_move_insn (adjust_address_nv (cplx, imode,
2905 imag_p ? GET_MODE_SIZE (imode) : 0),
2906 val);
2907 return;
2910 /* If the sub-object is at least word sized, then we know that subregging
2911 will work. This special case is important, since store_bit_field
2912 wants to operate on integer modes, and there's rarely an OImode to
2913 correspond to TCmode. */
2914 if (ibitsize >= BITS_PER_WORD
2915 /* For hard regs we have exact predicates. Assume we can split
2916 the original object if it spans an even number of hard regs.
2917 This special case is important for SCmode on 64-bit platforms
2918 where the natural size of floating-point regs is 32-bit. */
2919 || (REG_P (cplx)
2920 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2921 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2923 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2924 imag_p ? GET_MODE_SIZE (imode) : 0);
2925 if (part)
2927 emit_move_insn (part, val);
2928 return;
2930 else
2931 /* simplify_gen_subreg may fail for sub-word MEMs. */
2932 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2935 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
2938 /* Extract one of the components of the complex value CPLX. Extract the
2939 real part if IMAG_P is false, and the imaginary part if it's true. */
2941 static rtx
2942 read_complex_part (rtx cplx, bool imag_p)
2944 enum machine_mode cmode, imode;
2945 unsigned ibitsize;
2947 if (GET_CODE (cplx) == CONCAT)
2948 return XEXP (cplx, imag_p);
2950 cmode = GET_MODE (cplx);
2951 imode = GET_MODE_INNER (cmode);
2952 ibitsize = GET_MODE_BITSIZE (imode);
2954 /* Special case reads from complex constants that got spilled to memory. */
2955 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2957 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2958 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2960 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2961 if (CONSTANT_CLASS_P (part))
2962 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2966 /* For MEMs simplify_gen_subreg may generate an invalid new address
2967 because, e.g., the original address is considered mode-dependent
2968 by the target, which restricts simplify_subreg from invoking
2969 adjust_address_nv. Instead of preparing fallback support for an
2970 invalid address, we call adjust_address_nv directly. */
2971 if (MEM_P (cplx))
2972 return adjust_address_nv (cplx, imode,
2973 imag_p ? GET_MODE_SIZE (imode) : 0);
2975 /* If the sub-object is at least word sized, then we know that subregging
2976 will work. This special case is important, since extract_bit_field
2977 wants to operate on integer modes, and there's rarely an OImode to
2978 correspond to TCmode. */
2979 if (ibitsize >= BITS_PER_WORD
2980 /* For hard regs we have exact predicates. Assume we can split
2981 the original object if it spans an even number of hard regs.
2982 This special case is important for SCmode on 64-bit platforms
2983 where the natural size of floating-point regs is 32-bit. */
2984 || (REG_P (cplx)
2985 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2986 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2988 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2989 imag_p ? GET_MODE_SIZE (imode) : 0);
2990 if (ret)
2991 return ret;
2992 else
2993 /* simplify_gen_subreg may fail for sub-word MEMs. */
2994 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2997 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2998 true, false, NULL_RTX, imode, imode);
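/* Illustrative sketch, not part of the original file: swapping the real and
   imaginary parts of CPLX in place using the two accessors above.  The
   saved copy in a pseudo is needed because the second write would otherwise
   clobber a part that is still to be read.  CPLX is assumed to be a
   non-constant complex value (REG, MEM or CONCAT).  */

static void ATTRIBUTE_UNUSED
sketch_swap_complex_parts (rtx cplx)
{
  enum machine_mode imode = GET_MODE_INNER (GET_MODE (cplx));
  rtx saved_real = force_reg (imode, read_complex_part (cplx, false));

  write_complex_part (cplx, read_complex_part (cplx, true), false);
  write_complex_part (cplx, saved_real, true);
}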
3001 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3002 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3003 represented in NEW_MODE. If FORCE is true, this will never happen, as
3004 we'll force-create a SUBREG if needed. */
3006 static rtx
3007 emit_move_change_mode (enum machine_mode new_mode,
3008 enum machine_mode old_mode, rtx x, bool force)
3010 rtx ret;
3012 if (push_operand (x, GET_MODE (x)))
3014 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3015 MEM_COPY_ATTRIBUTES (ret, x);
3017 else if (MEM_P (x))
3019 /* We don't have to worry about changing the address since the
3020 size in bytes is supposed to be the same. */
3021 if (reload_in_progress)
3023 /* Copy the MEM to change the mode and move any
3024 substitutions from the old MEM to the new one. */
3025 ret = adjust_address_nv (x, new_mode, 0);
3026 copy_replacements (x, ret);
3028 else
3029 ret = adjust_address (x, new_mode, 0);
3031 else
3033 /* Note that we do want simplify_subreg's behavior of validating
3034 that the new mode is ok for a hard register. If we were to use
3035 simplify_gen_subreg, we would create the subreg, but would
3036 probably run into the target not being able to implement it. */
3037 /* Except, of course, when FORCE is true, when this is exactly what
3038 we want. Which is needed for CCmodes on some targets. */
3039 if (force)
3040 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3041 else
3042 ret = simplify_subreg (new_mode, x, old_mode, 0);
3045 return ret;
3048 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3049 an integer mode of the same size as MODE. Returns the instruction
3050 emitted, or NULL if such a move could not be generated. */
3052 static rtx
3053 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3055 enum machine_mode imode;
3056 enum insn_code code;
3058 /* There must exist a mode of the exact size we require. */
3059 imode = int_mode_for_mode (mode);
3060 if (imode == BLKmode)
3061 return NULL_RTX;
3063 /* The target must support moves in this mode. */
3064 code = optab_handler (mov_optab, imode);
3065 if (code == CODE_FOR_nothing)
3066 return NULL_RTX;
3068 x = emit_move_change_mode (imode, mode, x, force);
3069 if (x == NULL_RTX)
3070 return NULL_RTX;
3071 y = emit_move_change_mode (imode, mode, y, force);
3072 if (y == NULL_RTX)
3073 return NULL_RTX;
3074 return emit_insn (GEN_FCN (code) (x, y));
3077 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3078 Return an equivalent MEM that does not use an auto-increment. */
3080 static rtx
3081 emit_move_resolve_push (enum machine_mode mode, rtx x)
3083 enum rtx_code code = GET_CODE (XEXP (x, 0));
3084 HOST_WIDE_INT adjust;
3085 rtx temp;
3087 adjust = GET_MODE_SIZE (mode);
3088 #ifdef PUSH_ROUNDING
3089 adjust = PUSH_ROUNDING (adjust);
3090 #endif
3091 if (code == PRE_DEC || code == POST_DEC)
3092 adjust = -adjust;
3093 else if (code == PRE_MODIFY || code == POST_MODIFY)
3095 rtx expr = XEXP (XEXP (x, 0), 1);
3096 HOST_WIDE_INT val;
3098 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3099 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3100 val = INTVAL (XEXP (expr, 1));
3101 if (GET_CODE (expr) == MINUS)
3102 val = -val;
3103 gcc_assert (adjust == val || adjust == -val);
3104 adjust = val;
3107 /* Do not use anti_adjust_stack, since we don't want to update
3108 stack_pointer_delta. */
3109 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3110 GEN_INT (adjust), stack_pointer_rtx,
3111 0, OPTAB_LIB_WIDEN);
3112 if (temp != stack_pointer_rtx)
3113 emit_move_insn (stack_pointer_rtx, temp);
3115 switch (code)
3117 case PRE_INC:
3118 case PRE_DEC:
3119 case PRE_MODIFY:
3120 temp = stack_pointer_rtx;
3121 break;
3122 case POST_INC:
3123 case POST_DEC:
3124 case POST_MODIFY:
3125 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3126 break;
3127 default:
3128 gcc_unreachable ();
3131 return replace_equiv_address (x, temp);
3134 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3135 X is known to satisfy push_operand, and MODE is known to be complex.
3136 Returns the last instruction emitted. */
3139 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3141 enum machine_mode submode = GET_MODE_INNER (mode);
3142 bool imag_first;
3144 #ifdef PUSH_ROUNDING
3145 unsigned int submodesize = GET_MODE_SIZE (submode);
3147 /* If we are pushing to the stack but the size is smaller than what the
3148 machine can push exactly, we must fall back to move instructions. */
3149 if (PUSH_ROUNDING (submodesize) != submodesize)
3151 x = emit_move_resolve_push (mode, x);
3152 return emit_move_insn (x, y);
3154 #endif
3156 /* Note that the real part always precedes the imag part in memory
3157 regardless of machine's endianness. */
3158 switch (GET_CODE (XEXP (x, 0)))
3160 case PRE_DEC:
3161 case POST_DEC:
3162 imag_first = true;
3163 break;
3164 case PRE_INC:
3165 case POST_INC:
3166 imag_first = false;
3167 break;
3168 default:
3169 gcc_unreachable ();
3172 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3173 read_complex_part (y, imag_first));
3174 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3175 read_complex_part (y, !imag_first));
3178 /* A subroutine of emit_move_complex. Perform the move from Y to X
3179 via two moves of the parts. Returns the last instruction emitted. */
3182 emit_move_complex_parts (rtx x, rtx y)
3184 /* Show the output dies here. This is necessary for SUBREGs
3185 of pseudos since we cannot track their lifetimes correctly;
3186 hard regs shouldn't appear here except as return values. */
3187 if (!reload_completed && !reload_in_progress
3188 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3189 emit_clobber (x);
3191 write_complex_part (x, read_complex_part (y, false), false);
3192 write_complex_part (x, read_complex_part (y, true), true);
3194 return get_last_insn ();
3197 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3198 MODE is known to be complex. Returns the last instruction emitted. */
3200 static rtx
3201 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3203 bool try_int;
3205 /* Pushes need special care to maintain the proper ordering of the
3206 data, and possibly to account for extra padding. */
3207 if (push_operand (x, mode))
3208 return emit_move_complex_push (mode, x, y);
3210 /* See if we can coerce the target into moving both values at once. */
3212 /* Move floating point as parts. */
3213 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3214 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing)
3215 try_int = false;
3216 /* Not possible if the values are inherently not adjacent. */
3217 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3218 try_int = false;
3219 /* Is possible if both are registers (or subregs of registers). */
3220 else if (register_operand (x, mode) && register_operand (y, mode))
3221 try_int = true;
3222 /* If one of the operands is a memory, and alignment constraints
3223 are friendly enough, we may be able to do combined memory operations.
3224 We do not attempt this if Y is a constant because that combination is
3225 usually handled better by the by-parts copy below. */
3226 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3227 && (!STRICT_ALIGNMENT
3228 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3229 try_int = true;
3230 else
3231 try_int = false;
3233 if (try_int)
3235 rtx ret;
3237 /* For memory to memory moves, optimal behavior can be had with the
3238 existing block move logic. */
3239 if (MEM_P (x) && MEM_P (y))
3241 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3242 BLOCK_OP_NO_LIBCALL);
3243 return get_last_insn ();
3246 ret = emit_move_via_integer (mode, x, y, true);
3247 if (ret)
3248 return ret;
3251 return emit_move_complex_parts (x, y);
3254 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3255 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3257 static rtx
3258 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3260 rtx ret;
3262 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3263 if (mode != CCmode)
3265 enum insn_code code = optab_handler (mov_optab, CCmode);
3266 if (code != CODE_FOR_nothing)
3268 x = emit_move_change_mode (CCmode, mode, x, true);
3269 y = emit_move_change_mode (CCmode, mode, y, true);
3270 return emit_insn (GEN_FCN (code) (x, y));
3274 /* Otherwise, find the MODE_INT mode of the same width. */
3275 ret = emit_move_via_integer (mode, x, y, false);
3276 gcc_assert (ret != NULL);
3277 return ret;
3280 /* Return true if word I of OP lies entirely in the
3281 undefined bits of a paradoxical subreg. */
3283 static bool
3284 undefined_operand_subword_p (const_rtx op, int i)
3286 enum machine_mode innermode, innermostmode;
3287 int offset;
3288 if (GET_CODE (op) != SUBREG)
3289 return false;
3290 innermode = GET_MODE (op);
3291 innermostmode = GET_MODE (SUBREG_REG (op));
3292 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3293 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3294 memory, except for a paradoxical subreg where we define
3295 SUBREG_BYTE to be 0; undo this exception as in
3296 simplify_subreg. */
3297 if (SUBREG_BYTE (op) == 0
3298 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3300 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3301 if (WORDS_BIG_ENDIAN)
3302 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3303 if (BYTES_BIG_ENDIAN)
3304 offset += difference % UNITS_PER_WORD;
3306 if (offset >= GET_MODE_SIZE (innermostmode)
3307 || offset <= -GET_MODE_SIZE (word_mode))
3308 return true;
3309 return false;
3312 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3313 MODE is any multi-word or full-word mode that lacks a move_insn
3314 pattern. Note that you will get better code if you define such
3315 patterns, even if they must turn into multiple assembler instructions. */
3317 static rtx
3318 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3320 rtx last_insn = 0;
3321 rtx seq, inner;
3322 bool need_clobber;
3323 int i;
3325 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3327 /* If X is a push on the stack, do the push now and replace
3328 X with a reference to the stack pointer. */
3329 if (push_operand (x, mode))
3330 x = emit_move_resolve_push (mode, x);
3332 /* If we are in reload, see if either operand is a MEM whose address
3333 is scheduled for replacement. */
3334 if (reload_in_progress && MEM_P (x)
3335 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3336 x = replace_equiv_address_nv (x, inner);
3337 if (reload_in_progress && MEM_P (y)
3338 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3339 y = replace_equiv_address_nv (y, inner);
3341 start_sequence ();
3343 need_clobber = false;
3344 for (i = 0;
3345 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3346 i++)
3348 rtx xpart = operand_subword (x, i, 1, mode);
3349 rtx ypart;
3351 /* Do not generate code for a move if it would come entirely
3352 from the undefined bits of a paradoxical subreg. */
3353 if (undefined_operand_subword_p (y, i))
3354 continue;
3356 ypart = operand_subword (y, i, 1, mode);
3358 /* If we can't get a part of Y, put Y into memory if it is a
3359 constant. Otherwise, force it into a register. Then we must
3360 be able to get a part of Y. */
3361 if (ypart == 0 && CONSTANT_P (y))
3363 y = use_anchored_address (force_const_mem (mode, y));
3364 ypart = operand_subword (y, i, 1, mode);
3366 else if (ypart == 0)
3367 ypart = operand_subword_force (y, i, mode);
3369 gcc_assert (xpart && ypart);
3371 need_clobber |= (GET_CODE (xpart) == SUBREG);
3373 last_insn = emit_move_insn (xpart, ypart);
3376 seq = get_insns ();
3377 end_sequence ();
3379 /* Show the output dies here. This is necessary for SUBREGs
3380 of pseudos since we cannot track their lifetimes correctly;
3381 hard regs shouldn't appear here except as return values.
3382 We never want to emit such a clobber after reload. */
3383 if (x != y
3384 && ! (reload_in_progress || reload_completed)
3385 && need_clobber != 0)
3386 emit_clobber (x);
3388 emit_insn (seq);
3390 return last_insn;
3393 /* Low level part of emit_move_insn.
3394 Called just like emit_move_insn, but assumes X and Y
3395 are basically valid. */
3398 emit_move_insn_1 (rtx x, rtx y)
3400 enum machine_mode mode = GET_MODE (x);
3401 enum insn_code code;
3403 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3405 code = optab_handler (mov_optab, mode);
3406 if (code != CODE_FOR_nothing)
3407 return emit_insn (GEN_FCN (code) (x, y));
3409 /* Expand complex moves by moving real part and imag part. */
3410 if (COMPLEX_MODE_P (mode))
3411 return emit_move_complex (mode, x, y);
3413 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3414 || ALL_FIXED_POINT_MODE_P (mode))
3416 rtx result = emit_move_via_integer (mode, x, y, true);
3418 /* If we can't find an integer mode, use multi words. */
3419 if (result)
3420 return result;
3421 else
3422 return emit_move_multi_word (mode, x, y);
3425 if (GET_MODE_CLASS (mode) == MODE_CC)
3426 return emit_move_ccmode (mode, x, y);
3428 /* Try using a move pattern for the corresponding integer mode. This is
3429 only safe when simplify_subreg can convert MODE constants into integer
3430 constants. At present, it can only do this reliably if the value
3431 fits within a HOST_WIDE_INT. */
3432 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3434 rtx ret = emit_move_via_integer (mode, x, y, false);
3435 if (ret)
3436 return ret;
3439 return emit_move_multi_word (mode, x, y);
3442 /* Generate code to copy Y into X.
3443 Both Y and X must have the same mode, except that
3444 Y can be a constant with VOIDmode.
3445 This mode cannot be BLKmode; use emit_block_move for that.
3447 Return the last instruction emitted. */
3450 emit_move_insn (rtx x, rtx y)
3452 enum machine_mode mode = GET_MODE (x);
3453 rtx y_cst = NULL_RTX;
3454 rtx last_insn, set;
3456 gcc_assert (mode != BLKmode
3457 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3459 if (CONSTANT_P (y))
3461 if (optimize
3462 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3463 && (last_insn = compress_float_constant (x, y)))
3464 return last_insn;
3466 y_cst = y;
3468 if (!targetm.legitimate_constant_p (mode, y))
3470 y = force_const_mem (mode, y);
3472 /* If the target's cannot_force_const_mem prevented the spill,
3473 assume that the target's move expanders will also take care
3474 of the non-legitimate constant. */
3475 if (!y)
3476 y = y_cst;
3477 else
3478 y = use_anchored_address (y);
3482 /* If X or Y are memory references, verify that their addresses are valid
3483 for the machine. */
3484 if (MEM_P (x)
3485 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3486 MEM_ADDR_SPACE (x))
3487 && ! push_operand (x, GET_MODE (x))))
3488 x = validize_mem (x);
3490 if (MEM_P (y)
3491 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3492 MEM_ADDR_SPACE (y)))
3493 y = validize_mem (y);
3495 gcc_assert (mode != BLKmode);
3497 last_insn = emit_move_insn_1 (x, y);
3499 if (y_cst && REG_P (x)
3500 && (set = single_set (last_insn)) != NULL_RTX
3501 && SET_DEST (set) == x
3502 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3503 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3505 return last_insn;
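/* Illustrative sketch, not part of the original file: the common pattern of
   moving an arbitrary constant into a fresh pseudo.  emit_move_insn deals
   with constants the target cannot accept directly, forcing them into the
   constant pool as described above.  */

static rtx ATTRIBUTE_UNUSED
sketch_load_constant (enum machine_mode mode, rtx cst)
{
  rtx reg = gen_reg_rtx (mode);

  emit_move_insn (reg, cst);
  return reg;
}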
3508 /* If Y is representable exactly in a narrower mode, and the target can
3509 perform the extension directly from constant or memory, then emit the
3510 move as an extension. */
3512 static rtx
3513 compress_float_constant (rtx x, rtx y)
3515 enum machine_mode dstmode = GET_MODE (x);
3516 enum machine_mode orig_srcmode = GET_MODE (y);
3517 enum machine_mode srcmode;
3518 REAL_VALUE_TYPE r;
3519 int oldcost, newcost;
3520 bool speed = optimize_insn_for_speed_p ();
3522 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3524 if (targetm.legitimate_constant_p (dstmode, y))
3525 oldcost = set_src_cost (y, speed);
3526 else
3527 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3529 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3530 srcmode != orig_srcmode;
3531 srcmode = GET_MODE_WIDER_MODE (srcmode))
3533 enum insn_code ic;
3534 rtx trunc_y, last_insn;
3536 /* Skip if the target can't extend this way. */
3537 ic = can_extend_p (dstmode, srcmode, 0);
3538 if (ic == CODE_FOR_nothing)
3539 continue;
3541 /* Skip if the narrowed value isn't exact. */
3542 if (! exact_real_truncate (srcmode, &r))
3543 continue;
3545 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3547 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3549 /* Skip if the target needs extra instructions to perform
3550 the extension. */
3551 if (!insn_operand_matches (ic, 1, trunc_y))
3552 continue;
3553 /* This is valid, but may not be cheaper than the original. */
3554 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3555 speed);
3556 if (oldcost < newcost)
3557 continue;
3559 else if (float_extend_from_mem[dstmode][srcmode])
3561 trunc_y = force_const_mem (srcmode, trunc_y);
3562 /* This is valid, but may not be cheaper than the original. */
3563 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3564 speed);
3565 if (oldcost < newcost)
3566 continue;
3567 trunc_y = validize_mem (trunc_y);
3569 else
3570 continue;
3572 /* For CSE's benefit, force the compressed constant pool entry
3573 into a new pseudo. This constant may be used in different modes,
3574 and if not, combine will put things back together for us. */
3575 trunc_y = force_reg (srcmode, trunc_y);
3576 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3577 last_insn = get_last_insn ();
3579 if (REG_P (x))
3580 set_unique_reg_note (last_insn, REG_EQUAL, y);
3582 return last_insn;
3585 return NULL_RTX;
3588 /* Pushing data onto the stack. */
3590 /* Push a block of length SIZE (perhaps variable)
3591 and return an rtx to address the beginning of the block.
3592 The value may be virtual_outgoing_args_rtx.
3594 EXTRA is the number of bytes of padding to push in addition to SIZE.
3595 BELOW nonzero means this padding comes at low addresses;
3596 otherwise, the padding comes at high addresses. */
3599 push_block (rtx size, int extra, int below)
3601 rtx temp;
3603 size = convert_modes (Pmode, ptr_mode, size, 1);
3604 if (CONSTANT_P (size))
3605 anti_adjust_stack (plus_constant (Pmode, size, extra));
3606 else if (REG_P (size) && extra == 0)
3607 anti_adjust_stack (size);
3608 else
3610 temp = copy_to_mode_reg (Pmode, size);
3611 if (extra != 0)
3612 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3613 temp, 0, OPTAB_LIB_WIDEN);
3614 anti_adjust_stack (temp);
3617 #ifndef STACK_GROWS_DOWNWARD
3618 if (0)
3619 #else
3620 if (1)
3621 #endif
3623 temp = virtual_outgoing_args_rtx;
3624 if (extra != 0 && below)
3625 temp = plus_constant (Pmode, temp, extra);
3627 else
3629 if (CONST_INT_P (size))
3630 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3631 -INTVAL (size) - (below ? 0 : extra));
3632 else if (extra != 0 && !below)
3633 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3634 negate_rtx (Pmode, plus_constant (Pmode, size,
3635 extra)));
3636 else
3637 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3638 negate_rtx (Pmode, size));
3641 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
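/* Illustrative sketch, not part of the original file: reserving SIZE bytes
   of outgoing argument space and copying the BLKmode value VALUE into it,
   with no extra padding requested.  This roughly mirrors what block-sized
   pushes do when no push instruction can be used.  */

static void ATTRIBUTE_UNUSED
sketch_push_block_arg (rtx value, rtx size)
{
  rtx addr = push_block (size, 0, 0);
  rtx dest = gen_rtx_MEM (BLKmode, addr);

  emit_block_move (dest, value, size, BLOCK_OP_NORMAL);
}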
3644 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3646 static rtx
3647 mem_autoinc_base (rtx mem)
3649 if (MEM_P (mem))
3651 rtx addr = XEXP (mem, 0);
3652 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3653 return XEXP (addr, 0);
3655 return NULL;
3658 /* A utility routine used here, in reload, and in try_split. The insns
3659 after PREV up to and including LAST are known to adjust the stack,
3660 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3661 placing notes as appropriate. PREV may be NULL, indicating the
3662 entire insn sequence prior to LAST should be scanned.
3664 The set of allowed stack pointer modifications is small:
3665 (1) One or more auto-inc style memory references (aka pushes),
3666 (2) One or more addition/subtraction with the SP as destination,
3667 (3) A single move insn with the SP as destination,
3668 (4) A call_pop insn,
3669 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3671 Insns in the sequence that do not modify the SP are ignored,
3672 except for noreturn calls.
3674 The return value is the amount of adjustment that can be trivially
3675 verified, via immediate operand or auto-inc. If the adjustment
3676 cannot be trivially extracted, the return value is INT_MIN. */
3678 HOST_WIDE_INT
3679 find_args_size_adjust (rtx insn)
3681 rtx dest, set, pat;
3682 int i;
3684 pat = PATTERN (insn);
3685 set = NULL;
3687 /* Look for a call_pop pattern. */
3688 if (CALL_P (insn))
3690 /* We have to allow non-call_pop patterns for the case
3691 of emit_single_push_insn of a TLS address. */
3692 if (GET_CODE (pat) != PARALLEL)
3693 return 0;
3695 /* All call_pop have a stack pointer adjust in the parallel.
3696 The call itself is always first, and the stack adjust is
3697 usually last, so search from the end. */
3698 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3700 set = XVECEXP (pat, 0, i);
3701 if (GET_CODE (set) != SET)
3702 continue;
3703 dest = SET_DEST (set);
3704 if (dest == stack_pointer_rtx)
3705 break;
3707 /* We'd better have found the stack pointer adjust. */
3708 if (i == 0)
3709 return 0;
3710 /* Fall through to process the extracted SET and DEST
3711 as if it was a standalone insn. */
3713 else if (GET_CODE (pat) == SET)
3714 set = pat;
3715 else if ((set = single_set (insn)) != NULL)
3717 else if (GET_CODE (pat) == PARALLEL)
3719 /* ??? Some older ports use a parallel with a stack adjust
3720 and a store for a PUSH_ROUNDING pattern, rather than a
3721 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3722 /* ??? See h8300 and m68k, pushqi1. */
3723 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3725 set = XVECEXP (pat, 0, i);
3726 if (GET_CODE (set) != SET)
3727 continue;
3728 dest = SET_DEST (set);
3729 if (dest == stack_pointer_rtx)
3730 break;
3732 /* We do not expect an auto-inc of the sp in the parallel. */
3733 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3734 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3735 != stack_pointer_rtx);
3737 if (i < 0)
3738 return 0;
3740 else
3741 return 0;
3743 dest = SET_DEST (set);
3745 /* Look for direct modifications of the stack pointer. */
3746 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3748 /* Look for a trivial adjustment, otherwise assume nothing. */
3749 /* Note that the SPU restore_stack_block pattern refers to
3750 the stack pointer in V4SImode. Consider that non-trivial. */
3751 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3752 && GET_CODE (SET_SRC (set)) == PLUS
3753 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3754 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3755 return INTVAL (XEXP (SET_SRC (set), 1));
3756 /* ??? Reload can generate no-op moves, which will be cleaned
3757 up later. Recognize it and continue searching. */
3758 else if (rtx_equal_p (dest, SET_SRC (set)))
3759 return 0;
3760 else
3761 return HOST_WIDE_INT_MIN;
3763 else
3765 rtx mem, addr;
3767 /* Otherwise only think about autoinc patterns. */
3768 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3770 mem = dest;
3771 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3772 != stack_pointer_rtx);
3774 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3775 mem = SET_SRC (set);
3776 else
3777 return 0;
3779 addr = XEXP (mem, 0);
3780 switch (GET_CODE (addr))
3782 case PRE_INC:
3783 case POST_INC:
3784 return GET_MODE_SIZE (GET_MODE (mem));
3785 case PRE_DEC:
3786 case POST_DEC:
3787 return -GET_MODE_SIZE (GET_MODE (mem));
3788 case PRE_MODIFY:
3789 case POST_MODIFY:
3790 addr = XEXP (addr, 1);
3791 gcc_assert (GET_CODE (addr) == PLUS);
3792 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3793 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3794 return INTVAL (XEXP (addr, 1));
3795 default:
3796 gcc_unreachable ();
3801 int
3802 fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
3804 int args_size = end_args_size;
3805 bool saw_unknown = false;
3806 rtx insn;
3808 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3810 HOST_WIDE_INT this_delta;
3812 if (!NONDEBUG_INSN_P (insn))
3813 continue;
3815 this_delta = find_args_size_adjust (insn);
3816 if (this_delta == 0)
3818 if (!CALL_P (insn)
3819 || ACCUMULATE_OUTGOING_ARGS
3820 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3821 continue;
3824 gcc_assert (!saw_unknown);
3825 if (this_delta == HOST_WIDE_INT_MIN)
3826 saw_unknown = true;
3828 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3829 #ifdef STACK_GROWS_DOWNWARD
3830 this_delta = -this_delta;
3831 #endif
3832 args_size -= this_delta;
3835 return saw_unknown ? INT_MIN : args_size;
3838 #ifdef PUSH_ROUNDING
3839 /* Emit single push insn. */
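/* As a rough sketch, on a STACK_GROWS_DOWNWARD target with
   STACK_PUSH_CODE == PRE_DEC and no argument padding, pushing an
   SImode value X amounts to

     (set (mem:SI (pre_dec (reg sp))) X)

   while a mode whose size must be rounded up by PUSH_ROUNDING uses a
   PRE_MODIFY address of the form

     (pre_modify (reg sp) (plus (reg sp) (const_int -rounded_size)))

   unless the argument is padded downward, in which case the stack
   pointer is adjusted first and X is stored at an offset from it.  */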
3841 static void
3842 emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
3844 rtx dest_addr;
3845 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3846 rtx dest;
3847 enum insn_code icode;
3849 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3850 /* If there is a push pattern, use it. Otherwise try the old way of
3851 throwing a MEM representing the push operation to the move expander. */
3852 icode = optab_handler (push_optab, mode);
3853 if (icode != CODE_FOR_nothing)
3855 struct expand_operand ops[1];
3857 create_input_operand (&ops[0], x, mode);
3858 if (maybe_expand_insn (icode, 1, ops))
3859 return;
3861 if (GET_MODE_SIZE (mode) == rounded_size)
3862 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3863 /* If we are to pad downward, adjust the stack pointer first and
3864 then store X into the stack location using an offset. This is
3865 because emit_move_insn does not know how to pad; it does not have
3866 access to type. */
3867 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3869 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3870 HOST_WIDE_INT offset;
3872 emit_move_insn (stack_pointer_rtx,
3873 expand_binop (Pmode,
3874 #ifdef STACK_GROWS_DOWNWARD
3875 sub_optab,
3876 #else
3877 add_optab,
3878 #endif
3879 stack_pointer_rtx,
3880 GEN_INT (rounded_size),
3881 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3883 offset = (HOST_WIDE_INT) padding_size;
3884 #ifdef STACK_GROWS_DOWNWARD
3885 if (STACK_PUSH_CODE == POST_DEC)
3886 /* We have already decremented the stack pointer, so get the
3887 previous value. */
3888 offset += (HOST_WIDE_INT) rounded_size;
3889 #else
3890 if (STACK_PUSH_CODE == POST_INC)
3891 /* We have already incremented the stack pointer, so get the
3892 previous value. */
3893 offset -= (HOST_WIDE_INT) rounded_size;
3894 #endif
3895 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3897 else
3899 #ifdef STACK_GROWS_DOWNWARD
3900 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3901 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3902 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3903 #else
3904 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3905 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3906 GEN_INT (rounded_size));
3907 #endif
3908 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3911 dest = gen_rtx_MEM (mode, dest_addr);
3913 if (type != 0)
3915 set_mem_attributes (dest, type, 1);
3917 if (flag_optimize_sibling_calls)
3918 /* Function incoming arguments may overlap with sibling call
3919 outgoing arguments and we cannot allow reordering of reads
3920 from function arguments with stores to outgoing arguments
3921 of sibling calls. */
3922 set_mem_alias_set (dest, 0);
3924 emit_move_insn (dest, x);
3927 /* Emit and annotate a single push insn. */
3929 static void
3930 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3932 int delta, old_delta = stack_pointer_delta;
3933 rtx prev = get_last_insn ();
3934 rtx last;
3936 emit_single_push_insn_1 (mode, x, type);
3938 last = get_last_insn ();
3940 /* Notice the common case where we emitted exactly one insn. */
3941 if (PREV_INSN (last) == prev)
3943 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
3944 return;
3947 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
3948 gcc_assert (delta == INT_MIN || delta == old_delta);
3950 #endif
3952 /* Generate code to push X onto the stack, assuming it has mode MODE and
3953 type TYPE.
3954 MODE is redundant except when X is a CONST_INT (since they don't
3955 carry mode info).
3956 SIZE is an rtx for the size of data to be copied (in bytes),
3957 needed only if X is BLKmode.
3959 ALIGN (in bits) is maximum alignment we can assume.
3961 If PARTIAL and REG are both nonzero, then copy that many of the first
3962 bytes of X into registers starting with REG, and push the rest of X.
3963 The amount of space pushed is decreased by PARTIAL bytes.
3964 REG must be a hard register in this case.
3965 If REG is zero but PARTIAL is not, take all other actions for an
3966 argument partially in registers, but do not actually load any
3967 registers.
3969 EXTRA is the amount in bytes of extra space to leave next to this arg.
3970 This is ignored if an argument block has already been allocated.
3972 On a machine that lacks real push insns, ARGS_ADDR is the address of
3973 the bottom of the argument block for this call. We use indexing off there
3974 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3975 argument block has not been preallocated.
3977 ARGS_SO_FAR is the size of args previously pushed for this call.
3979 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3980 for arguments passed in registers. If nonzero, it will be the number
3981 of bytes required. */
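/* As an illustrative sketch, modelled on the word-by-word recursion
   further below, pushing a single word of an argument that has no
   partial-register part looks roughly like

     emit_push_insn (word, word_mode, NULL_TREE, NULL_RTX, align,
                     0, NULL_RTX, 0, args_addr, args_so_far,
                     reg_parm_stack_space, NULL_RTX);

   i.e. SIZE, PARTIAL, REG and EXTRA are unused and only the address
   bookkeeping arguments matter.  */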
3983 void
3984 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3985 unsigned int align, int partial, rtx reg, int extra,
3986 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3987 rtx alignment_pad)
3989 rtx xinner;
3990 enum direction stack_direction
3991 #ifdef STACK_GROWS_DOWNWARD
3992 = downward;
3993 #else
3994 = upward;
3995 #endif
3997 /* Decide where to pad the argument: `downward' for below,
3998 `upward' for above, or `none' for don't pad it.
3999 Default is below for small data on big-endian machines; else above. */
4000 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4002 /* Invert direction if stack is post-decrement.
4003 FIXME: why? */
4004 if (STACK_PUSH_CODE == POST_DEC)
4005 if (where_pad != none)
4006 where_pad = (where_pad == downward ? upward : downward);
4008 xinner = x;
4010 if (mode == BLKmode
4011 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4013 /* Copy a block into the stack, entirely or partially. */
4015 rtx temp;
4016 int used;
4017 int offset;
4018 int skip;
4020 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4021 used = partial - offset;
4023 if (mode != BLKmode)
4025 /* A value is to be stored in an insufficiently aligned
4026 stack slot; copy via a suitably aligned slot if
4027 necessary. */
4028 size = GEN_INT (GET_MODE_SIZE (mode));
4029 if (!MEM_P (xinner))
4031 temp = assign_temp (type, 0, 1, 1);
4032 emit_move_insn (temp, xinner);
4033 xinner = temp;
4037 gcc_assert (size);
4039 /* USED is now the # of bytes we need not copy to the stack
4040 because registers will take care of them. */
4042 if (partial != 0)
4043 xinner = adjust_address (xinner, BLKmode, used);
4045 /* If the partial register-part of the arg counts in its stack size,
4046 skip the part of stack space corresponding to the registers.
4047 Otherwise, start copying to the beginning of the stack space,
4048 by setting SKIP to 0. */
4049 skip = (reg_parm_stack_space == 0) ? 0 : used;
4051 #ifdef PUSH_ROUNDING
4052 /* Do it with several push insns if that doesn't take lots of insns
4053 and if there is no difficulty with push insns that skip bytes
4054 on the stack for alignment purposes. */
4055 if (args_addr == 0
4056 && PUSH_ARGS
4057 && CONST_INT_P (size)
4058 && skip == 0
4059 && MEM_ALIGN (xinner) >= align
4060 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4061 /* Here we avoid the case of a structure whose weak alignment
4062 forces many pushes of a small amount of data,
4063 and such small pushes do rounding that causes trouble. */
4064 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4065 || align >= BIGGEST_ALIGNMENT
4066 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4067 == (align / BITS_PER_UNIT)))
4068 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4070 /* Push padding now if padding above and stack grows down,
4071 or if padding below and stack grows up.
4072 But if space already allocated, this has already been done. */
4073 if (extra && args_addr == 0
4074 && where_pad != none && where_pad != stack_direction)
4075 anti_adjust_stack (GEN_INT (extra));
4077 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4079 else
4080 #endif /* PUSH_ROUNDING */
4082 rtx target;
4084 /* Otherwise make space on the stack and copy the data
4085 to the address of that space. */
4087 /* Deduct words put into registers from the size we must copy. */
4088 if (partial != 0)
4090 if (CONST_INT_P (size))
4091 size = GEN_INT (INTVAL (size) - used);
4092 else
4093 size = expand_binop (GET_MODE (size), sub_optab, size,
4094 GEN_INT (used), NULL_RTX, 0,
4095 OPTAB_LIB_WIDEN);
4098 /* Get the address of the stack space.
4099 In this case, we do not deal with EXTRA separately.
4100 A single stack adjust will do. */
4101 if (! args_addr)
4103 temp = push_block (size, extra, where_pad == downward);
4104 extra = 0;
4106 else if (CONST_INT_P (args_so_far))
4107 temp = memory_address (BLKmode,
4108 plus_constant (Pmode, args_addr,
4109 skip + INTVAL (args_so_far)));
4110 else
4111 temp = memory_address (BLKmode,
4112 plus_constant (Pmode,
4113 gen_rtx_PLUS (Pmode,
4114 args_addr,
4115 args_so_far),
4116 skip));
4118 if (!ACCUMULATE_OUTGOING_ARGS)
4120 /* If the source is referenced relative to the stack pointer,
4121 copy it to another register to stabilize it. We do not need
4122 to do this if we know that we won't be changing sp. */
4124 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4125 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4126 temp = copy_to_reg (temp);
4129 target = gen_rtx_MEM (BLKmode, temp);
4131 /* We do *not* set_mem_attributes here, because incoming arguments
4132 may overlap with sibling call outgoing arguments and we cannot
4133 allow reordering of reads from function arguments with stores
4134 to outgoing arguments of sibling calls. We do, however, want
4135 to record the alignment of the stack slot. */
4136 /* ALIGN may well be better aligned than TYPE, e.g. due to
4137 PARM_BOUNDARY. Assume the caller isn't lying. */
4138 set_mem_align (target, align);
4140 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4143 else if (partial > 0)
4145 /* Scalar partly in registers. */
4147 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4148 int i;
4149 int not_stack;
4150 /* # bytes of start of argument
4151 that we must make space for but need not store. */
4152 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4153 int args_offset = INTVAL (args_so_far);
4154 int skip;
4156 /* Push padding now if padding above and stack grows down,
4157 or if padding below and stack grows up.
4158 But if space already allocated, this has already been done. */
4159 if (extra && args_addr == 0
4160 && where_pad != none && where_pad != stack_direction)
4161 anti_adjust_stack (GEN_INT (extra));
4163 /* If we make space by pushing it, we might as well push
4164 the real data. Otherwise, we can leave OFFSET nonzero
4165 and leave the space uninitialized. */
4166 if (args_addr == 0)
4167 offset = 0;
4169 /* Now NOT_STACK gets the number of words that we don't need to
4170 allocate on the stack. Convert OFFSET to words too. */
4171 not_stack = (partial - offset) / UNITS_PER_WORD;
4172 offset /= UNITS_PER_WORD;
4174 /* If the partial register-part of the arg counts in its stack size,
4175 skip the part of stack space corresponding to the registers.
4176 Otherwise, start copying to the beginning of the stack space,
4177 by setting SKIP to 0. */
4178 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4180 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4181 x = validize_mem (force_const_mem (mode, x));
4183 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4184 SUBREGs of such registers are not allowed. */
4185 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4186 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4187 x = copy_to_reg (x);
4189 /* Loop over all the words allocated on the stack for this arg. */
4190 /* We can do it by words, because any scalar bigger than a word
4191 has a size that is a multiple of a word. */
4192 #ifndef PUSH_ARGS_REVERSED
4193 for (i = not_stack; i < size; i++)
4194 #else
4195 for (i = size - 1; i >= not_stack; i--)
4196 #endif
4197 if (i >= not_stack + offset)
4198 emit_push_insn (operand_subword_force (x, i, mode),
4199 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4200 0, args_addr,
4201 GEN_INT (args_offset + ((i - not_stack + skip)
4202 * UNITS_PER_WORD)),
4203 reg_parm_stack_space, alignment_pad);
4205 else
4207 rtx addr;
4208 rtx dest;
4210 /* Push padding now if padding above and stack grows down,
4211 or if padding below and stack grows up.
4212 But if space already allocated, this has already been done. */
4213 if (extra && args_addr == 0
4214 && where_pad != none && where_pad != stack_direction)
4215 anti_adjust_stack (GEN_INT (extra));
4217 #ifdef PUSH_ROUNDING
4218 if (args_addr == 0 && PUSH_ARGS)
4219 emit_single_push_insn (mode, x, type);
4220 else
4221 #endif
4223 if (CONST_INT_P (args_so_far))
4224 addr
4225 = memory_address (mode,
4226 plus_constant (Pmode, args_addr,
4227 INTVAL (args_so_far)));
4228 else
4229 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4230 args_so_far));
4231 dest = gen_rtx_MEM (mode, addr);
4233 /* We do *not* set_mem_attributes here, because incoming arguments
4234 may overlap with sibling call outgoing arguments and we cannot
4235 allow reordering of reads from function arguments with stores
4236 to outgoing arguments of sibling calls. We do, however, want
4237 to record the alignment of the stack slot. */
4238 /* ALIGN may well be better aligned than TYPE, e.g. due to
4239 PARM_BOUNDARY. Assume the caller isn't lying. */
4240 set_mem_align (dest, align);
4242 emit_move_insn (dest, x);
4246 /* If part should go in registers, copy that part
4247 into the appropriate registers. Do this now, at the end,
4248 since mem-to-mem copies above may do function calls. */
4249 if (partial > 0 && reg != 0)
4251 /* Handle calls that pass values in multiple non-contiguous locations.
4252 The Irix 6 ABI has examples of this. */
4253 if (GET_CODE (reg) == PARALLEL)
4254 emit_group_load (reg, x, type, -1);
4255 else
4257 gcc_assert (partial % UNITS_PER_WORD == 0);
4258 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4262 if (extra && args_addr == 0 && where_pad == stack_direction)
4263 anti_adjust_stack (GEN_INT (extra));
4265 if (alignment_pad && args_addr == 0)
4266 anti_adjust_stack (alignment_pad);
4269 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4270 operations. */
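/* For instance, when not optimizing, a pseudo register X is returned
   so the expanders can compute intermediate results directly into it;
   a hard register, a MEM, or any X under -O yields 0 instead.  */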
4272 static rtx
4273 get_subtarget (rtx x)
4275 return (optimize
4276 || x == 0
4277 /* Only registers can be subtargets. */
4278 || !REG_P (x)
4279 /* Don't use hard regs to avoid extending their life. */
4280 || REGNO (x) < FIRST_PSEUDO_REGISTER
4281 ? 0 : x);
4284 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4285 FIELD is a bitfield. Returns true if the optimization was successful,
4286 and there's nothing else to do. */
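/* A sketch of the kind of source this matches, assuming a 32-bit int:

     struct S { unsigned a : 1; unsigned b : 31; } s;
     s.a ^= 1;  s.b |= 0x10;

   In the cases handled below, the PLUS/MINUS/IOR/XOR is applied
   directly to the word containing the bit field, with the constant
   shifted (and masked where needed) into place, instead of
   extracting, modifying and re-inserting the field.  */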
4288 static bool
4289 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4290 unsigned HOST_WIDE_INT bitpos,
4291 unsigned HOST_WIDE_INT bitregion_start,
4292 unsigned HOST_WIDE_INT bitregion_end,
4293 enum machine_mode mode1, rtx str_rtx,
4294 tree to, tree src)
4296 enum machine_mode str_mode = GET_MODE (str_rtx);
4297 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4298 tree op0, op1;
4299 rtx value, result;
4300 optab binop;
4301 gimple srcstmt;
4302 enum tree_code code;
4304 if (mode1 != VOIDmode
4305 || bitsize >= BITS_PER_WORD
4306 || str_bitsize > BITS_PER_WORD
4307 || TREE_SIDE_EFFECTS (to)
4308 || TREE_THIS_VOLATILE (to))
4309 return false;
4311 STRIP_NOPS (src);
4312 if (TREE_CODE (src) != SSA_NAME)
4313 return false;
4314 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4315 return false;
4317 srcstmt = get_gimple_for_ssa_name (src);
4318 if (!srcstmt
4319 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4320 return false;
4322 code = gimple_assign_rhs_code (srcstmt);
4324 op0 = gimple_assign_rhs1 (srcstmt);
4326 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4327 to find its initialization. Hopefully the initialization will
4328 be from a bitfield load. */
4329 if (TREE_CODE (op0) == SSA_NAME)
4331 gimple op0stmt = get_gimple_for_ssa_name (op0);
4333 /* We want to eventually have OP0 be the same as TO, which
4334 should be a bitfield. */
4335 if (!op0stmt
4336 || !is_gimple_assign (op0stmt)
4337 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4338 return false;
4339 op0 = gimple_assign_rhs1 (op0stmt);
4342 op1 = gimple_assign_rhs2 (srcstmt);
4344 if (!operand_equal_p (to, op0, 0))
4345 return false;
4347 if (MEM_P (str_rtx))
4349 unsigned HOST_WIDE_INT offset1;
4351 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4352 str_mode = word_mode;
4353 str_mode = get_best_mode (bitsize, bitpos,
4354 bitregion_start, bitregion_end,
4355 MEM_ALIGN (str_rtx), str_mode, 0);
4356 if (str_mode == VOIDmode)
4357 return false;
4358 str_bitsize = GET_MODE_BITSIZE (str_mode);
4360 offset1 = bitpos;
4361 bitpos %= str_bitsize;
4362 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4363 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4365 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4366 return false;
4368 /* If the bit field covers the whole REG/MEM, store_field
4369 will likely generate better code. */
4370 if (bitsize >= str_bitsize)
4371 return false;
4373 /* We can't handle fields split across multiple entities. */
4374 if (bitpos + bitsize > str_bitsize)
4375 return false;
4377 if (BYTES_BIG_ENDIAN)
4378 bitpos = str_bitsize - bitpos - bitsize;
4380 switch (code)
4382 case PLUS_EXPR:
4383 case MINUS_EXPR:
4384 /* For now, just optimize the case of the topmost bitfield
4385 where we don't need to do any masking and also
4386 1 bit bitfields where xor can be used.
4387 We might win by one instruction for the other bitfields
4388 too if insv/extv instructions aren't used, so that
4389 can be added later. */
4390 if (bitpos + bitsize != str_bitsize
4391 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4392 break;
4394 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4395 value = convert_modes (str_mode,
4396 TYPE_MODE (TREE_TYPE (op1)), value,
4397 TYPE_UNSIGNED (TREE_TYPE (op1)));
4399 /* We may be accessing data outside the field, which means
4400 we can alias adjacent data. */
4401 if (MEM_P (str_rtx))
4403 str_rtx = shallow_copy_rtx (str_rtx);
4404 set_mem_alias_set (str_rtx, 0);
4405 set_mem_expr (str_rtx, 0);
4408 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4409 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4411 value = expand_and (str_mode, value, const1_rtx, NULL);
4412 binop = xor_optab;
4414 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4415 result = expand_binop (str_mode, binop, str_rtx,
4416 value, str_rtx, 1, OPTAB_WIDEN);
4417 if (result != str_rtx)
4418 emit_move_insn (str_rtx, result);
4419 return true;
4421 case BIT_IOR_EXPR:
4422 case BIT_XOR_EXPR:
4423 if (TREE_CODE (op1) != INTEGER_CST)
4424 break;
4425 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4426 value = convert_modes (str_mode,
4427 TYPE_MODE (TREE_TYPE (op1)), value,
4428 TYPE_UNSIGNED (TREE_TYPE (op1)));
4430 /* We may be accessing data outside the field, which means
4431 we can alias adjacent data. */
4432 if (MEM_P (str_rtx))
4434 str_rtx = shallow_copy_rtx (str_rtx);
4435 set_mem_alias_set (str_rtx, 0);
4436 set_mem_expr (str_rtx, 0);
4439 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4440 if (bitpos + bitsize != str_bitsize)
4442 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1);
4443 value = expand_and (str_mode, value, mask, NULL_RTX);
4445 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4446 result = expand_binop (str_mode, binop, str_rtx,
4447 value, str_rtx, 1, OPTAB_WIDEN);
4448 if (result != str_rtx)
4449 emit_move_insn (str_rtx, result);
4450 return true;
4452 default:
4453 break;
4456 return false;
4459 /* In the C++ memory model, consecutive bit fields in a structure are
4460 considered one memory location.
4462 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4463 returns the bit range of consecutive bits in which this COMPONENT_REF
4464 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4465 and *OFFSET may be adjusted in the process.
4467 If the access does not need to be restricted, 0 is returned in both
4468 *BITSTART and *BITEND. */
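/* For example, given

     struct S { int a : 3; int b : 5; char c; } s;

   the fields A and B share one memory location under the C++11 memory
   model, so a store to s.b is typically given the bit range of the
   representative covering both A and B, while C lies outside that
   range.  The exact layout is target-dependent.  */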
4470 static void
4471 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4472 unsigned HOST_WIDE_INT *bitend,
4473 tree exp,
4474 HOST_WIDE_INT *bitpos,
4475 tree *offset)
4477 HOST_WIDE_INT bitoffset;
4478 tree field, repr;
4480 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4482 field = TREE_OPERAND (exp, 1);
4483 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4484 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4485 need to limit the range we can access. */
4486 if (!repr)
4488 *bitstart = *bitend = 0;
4489 return;
4492 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4493 part of a larger bit field, then the representative does not serve any
4494 useful purpose. This can occur in Ada. */
4495 if (handled_component_p (TREE_OPERAND (exp, 0)))
4497 enum machine_mode rmode;
4498 HOST_WIDE_INT rbitsize, rbitpos;
4499 tree roffset;
4500 int unsignedp;
4501 int volatilep = 0;
4502 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4503 &roffset, &rmode, &unsignedp, &volatilep, false);
4504 if ((rbitpos % BITS_PER_UNIT) != 0)
4506 *bitstart = *bitend = 0;
4507 return;
4511 /* Compute the adjustment to bitpos from the offset of the field
4512 relative to the representative. DECL_FIELD_OFFSET of field and
4513 repr are the same by construction if they are not constants,
4514 see finish_bitfield_layout. */
4515 if (host_integerp (DECL_FIELD_OFFSET (field), 1)
4516 && host_integerp (DECL_FIELD_OFFSET (repr), 1))
4517 bitoffset = (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
4518 - tree_low_cst (DECL_FIELD_OFFSET (repr), 1)) * BITS_PER_UNIT;
4519 else
4520 bitoffset = 0;
4521 bitoffset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
4522 - tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
4524 /* If the adjustment is larger than bitpos, we would have a negative bit
4525 position for the lower bound and this may wreak havoc later. This can
4526 occur only if we have a non-null offset, so adjust offset and bitpos
4527 to make the lower bound non-negative. */
4528 if (bitoffset > *bitpos)
4530 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4532 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4533 gcc_assert (*offset != NULL_TREE);
4535 *bitpos += adjust;
4536 *offset
4537 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4538 *bitstart = 0;
4540 else
4541 *bitstart = *bitpos - bitoffset;
4543 *bitend = *bitstart + tree_low_cst (DECL_SIZE (repr), 1) - 1;
4546 /* Returns true if the MEM_REF REF refers to an object that does not
4547 reside in memory and has non-BLKmode. */
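/* This matches, for instance, a MEM_REF of the form *&x where the
   local X is not addressable and has been given a register (or a
   CONCAT, for a complex value) as its DECL_RTL.  */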
4549 static bool
4550 mem_ref_refers_to_non_mem_p (tree ref)
4552 tree base = TREE_OPERAND (ref, 0);
4553 if (TREE_CODE (base) != ADDR_EXPR)
4554 return false;
4555 base = TREE_OPERAND (base, 0);
4556 return (DECL_P (base)
4557 && !TREE_ADDRESSABLE (base)
4558 && DECL_MODE (base) != BLKmode
4559 && DECL_RTL_SET_P (base)
4560 && !MEM_P (DECL_RTL (base)));
4563 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4564 is true, try generating a nontemporal store. */
4566 void
4567 expand_assignment (tree to, tree from, bool nontemporal)
4569 rtx to_rtx = 0;
4570 rtx result;
4571 enum machine_mode mode;
4572 unsigned int align;
4573 enum insn_code icode;
4575 /* Don't crash if the lhs of the assignment was erroneous. */
4576 if (TREE_CODE (to) == ERROR_MARK)
4578 expand_normal (from);
4579 return;
4582 /* Optimize away no-op moves without side-effects. */
4583 if (operand_equal_p (to, from, 0))
4584 return;
4586 /* Handle misaligned stores. */
4587 mode = TYPE_MODE (TREE_TYPE (to));
4588 if ((TREE_CODE (to) == MEM_REF
4589 || TREE_CODE (to) == TARGET_MEM_REF)
4590 && mode != BLKmode
4591 && !mem_ref_refers_to_non_mem_p (to)
4592 && ((align = get_object_or_type_alignment (to))
4593 < GET_MODE_ALIGNMENT (mode))
4594 && (((icode = optab_handler (movmisalign_optab, mode))
4595 != CODE_FOR_nothing)
4596 || SLOW_UNALIGNED_ACCESS (mode, align)))
4598 rtx reg, mem;
4600 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4601 reg = force_not_mem (reg);
4602 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4604 if (icode != CODE_FOR_nothing)
4606 struct expand_operand ops[2];
4608 create_fixed_operand (&ops[0], mem);
4609 create_input_operand (&ops[1], reg, mode);
4610 /* The movmisalign<mode> pattern cannot fail, else the assignment
4611 would silently be omitted. */
4612 expand_insn (icode, 2, ops);
4614 else
4615 store_bit_field (mem, GET_MODE_BITSIZE (mode),
4616 0, 0, 0, mode, reg);
4617 return;
4620 /* Assignment of a structure component needs special treatment
4621 if the structure component's rtx is not simply a MEM.
4622 Assignment of an array element at a constant index, and assignment of
4623 an array element in an unaligned packed structure field, have the same
4624 problem. Same for (partially) storing into a non-memory object. */
4625 if (handled_component_p (to)
4626 || (TREE_CODE (to) == MEM_REF
4627 && mem_ref_refers_to_non_mem_p (to))
4628 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4630 enum machine_mode mode1;
4631 HOST_WIDE_INT bitsize, bitpos;
4632 unsigned HOST_WIDE_INT bitregion_start = 0;
4633 unsigned HOST_WIDE_INT bitregion_end = 0;
4634 tree offset;
4635 int unsignedp;
4636 int volatilep = 0;
4637 tree tem;
4638 bool misalignp;
4639 rtx mem = NULL_RTX;
4641 push_temp_slots ();
4642 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4643 &unsignedp, &volatilep, true);
4645 if (TREE_CODE (to) == COMPONENT_REF
4646 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4647 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4649 /* If we are going to use store_bit_field and extract_bit_field,
4650 make sure to_rtx will be safe for multiple use. */
4651 mode = TYPE_MODE (TREE_TYPE (tem));
4652 if (TREE_CODE (tem) == MEM_REF
4653 && mode != BLKmode
4654 && ((align = get_object_or_type_alignment (tem))
4655 < GET_MODE_ALIGNMENT (mode))
4656 && ((icode = optab_handler (movmisalign_optab, mode))
4657 != CODE_FOR_nothing))
4659 struct expand_operand ops[2];
4661 misalignp = true;
4662 to_rtx = gen_reg_rtx (mode);
4663 mem = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4665 /* If the misaligned store doesn't overwrite all bits, perform
4666 rmw cycle on MEM. */
4667 if (bitsize != GET_MODE_BITSIZE (mode))
4669 create_input_operand (&ops[0], to_rtx, mode);
4670 create_fixed_operand (&ops[1], mem);
4671 /* The movmisalign<mode> pattern cannot fail, else the assignment
4672 would silently be omitted. */
4673 expand_insn (icode, 2, ops);
4675 mem = copy_rtx (mem);
4678 else
4680 misalignp = false;
4681 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4684 /* If the bitfield is volatile, we want to access it in the
4685 field's mode, not the computed mode.
4686 If a MEM has VOIDmode (external with incomplete type),
4687 use BLKmode for it instead. */
4688 if (MEM_P (to_rtx))
4690 if (volatilep && flag_strict_volatile_bitfields > 0)
4691 to_rtx = adjust_address (to_rtx, mode1, 0);
4692 else if (GET_MODE (to_rtx) == VOIDmode)
4693 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4696 if (offset != 0)
4698 enum machine_mode address_mode;
4699 rtx offset_rtx;
4701 if (!MEM_P (to_rtx))
4703 /* We can get constant negative offsets into arrays with broken
4704 user code. Translate this to a trap instead of ICEing. */
4705 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4706 expand_builtin_trap ();
4707 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4710 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4711 address_mode = get_address_mode (to_rtx);
4712 if (GET_MODE (offset_rtx) != address_mode)
4713 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4715 /* A constant address in TO_RTX can have VOIDmode, we must not try
4716 to call force_reg for that case. Avoid that case. */
4717 if (MEM_P (to_rtx)
4718 && GET_MODE (to_rtx) == BLKmode
4719 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4720 && bitsize > 0
4721 && (bitpos % bitsize) == 0
4722 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4723 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4725 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4726 bitpos = 0;
4729 to_rtx = offset_address (to_rtx, offset_rtx,
4730 highest_pow2_factor_for_target (to,
4731 offset));
4734 /* No action is needed if the target is not a memory and the field
4735 lies completely outside that target. This can occur if the source
4736 code contains an out-of-bounds access to a small array. */
4737 if (!MEM_P (to_rtx)
4738 && GET_MODE (to_rtx) != BLKmode
4739 && (unsigned HOST_WIDE_INT) bitpos
4740 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4742 expand_normal (from);
4743 result = NULL;
4745 /* Handle expand_expr of a complex value returning a CONCAT. */
4746 else if (GET_CODE (to_rtx) == CONCAT)
4748 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4749 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4750 && bitpos == 0
4751 && bitsize == mode_bitsize)
4752 result = store_expr (from, to_rtx, false, nontemporal);
4753 else if (bitsize == mode_bitsize / 2
4754 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4755 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4756 nontemporal);
4757 else if (bitpos + bitsize <= mode_bitsize / 2)
4758 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4759 bitregion_start, bitregion_end,
4760 mode1, from, TREE_TYPE (tem),
4761 get_alias_set (to), nontemporal);
4762 else if (bitpos >= mode_bitsize / 2)
4763 result = store_field (XEXP (to_rtx, 1), bitsize,
4764 bitpos - mode_bitsize / 2,
4765 bitregion_start, bitregion_end,
4766 mode1, from,
4767 TREE_TYPE (tem), get_alias_set (to),
4768 nontemporal);
4769 else if (bitpos == 0 && bitsize == mode_bitsize)
4771 rtx from_rtx;
4772 result = expand_normal (from);
4773 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4774 TYPE_MODE (TREE_TYPE (from)), 0);
4775 emit_move_insn (XEXP (to_rtx, 0),
4776 read_complex_part (from_rtx, false));
4777 emit_move_insn (XEXP (to_rtx, 1),
4778 read_complex_part (from_rtx, true));
4780 else
4782 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4783 GET_MODE_SIZE (GET_MODE (to_rtx)),
4785 write_complex_part (temp, XEXP (to_rtx, 0), false);
4786 write_complex_part (temp, XEXP (to_rtx, 1), true);
4787 result = store_field (temp, bitsize, bitpos,
4788 bitregion_start, bitregion_end,
4789 mode1, from,
4790 TREE_TYPE (tem), get_alias_set (to),
4791 nontemporal);
4792 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4793 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4796 else
4798 if (MEM_P (to_rtx))
4800 /* If the field is at offset zero, we could have been given the
4801 DECL_RTX of the parent struct. Don't munge it. */
4802 to_rtx = shallow_copy_rtx (to_rtx);
4804 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4806 /* Deal with volatile and readonly fields. The former is only
4807 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4808 if (volatilep)
4809 MEM_VOLATILE_P (to_rtx) = 1;
4810 if (component_uses_parent_alias_set (to))
4811 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4814 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4815 bitregion_start, bitregion_end,
4816 mode1,
4817 to_rtx, to, from))
4818 result = NULL;
4819 else
4820 result = store_field (to_rtx, bitsize, bitpos,
4821 bitregion_start, bitregion_end,
4822 mode1, from,
4823 TREE_TYPE (tem), get_alias_set (to),
4824 nontemporal);
4827 if (misalignp)
4829 struct expand_operand ops[2];
4831 create_fixed_operand (&ops[0], mem);
4832 create_input_operand (&ops[1], to_rtx, mode);
4833 /* The movmisalign<mode> pattern cannot fail, else the assignment
4834 would silently be omitted. */
4835 expand_insn (icode, 2, ops);
4838 if (result)
4839 preserve_temp_slots (result);
4840 free_temp_slots ();
4841 pop_temp_slots ();
4842 return;
4845 /* If the rhs is a function call and its value is not an aggregate,
4846 call the function before we start to compute the lhs.
4847 This is needed for correct code for cases such as
4848 val = setjmp (buf) on machines where reference to val
4849 requires loading up part of an address in a separate insn.
4851 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4852 since it might be a promoted variable where the zero- or sign- extension
4853 needs to be done. Handling this in the normal way is safe because no
4854 computation is done before the call. The same is true for SSA names. */
4855 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4856 && COMPLETE_TYPE_P (TREE_TYPE (from))
4857 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4858 && ! (((TREE_CODE (to) == VAR_DECL
4859 || TREE_CODE (to) == PARM_DECL
4860 || TREE_CODE (to) == RESULT_DECL)
4861 && REG_P (DECL_RTL (to)))
4862 || TREE_CODE (to) == SSA_NAME))
4864 rtx value;
4866 push_temp_slots ();
4867 value = expand_normal (from);
4868 if (to_rtx == 0)
4869 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4871 /* Handle calls that return values in multiple non-contiguous locations.
4872 The Irix 6 ABI has examples of this. */
4873 if (GET_CODE (to_rtx) == PARALLEL)
4874 emit_group_load (to_rtx, value, TREE_TYPE (from),
4875 int_size_in_bytes (TREE_TYPE (from)));
4876 else if (GET_MODE (to_rtx) == BLKmode)
4877 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4878 else
4880 if (POINTER_TYPE_P (TREE_TYPE (to)))
4881 value = convert_memory_address_addr_space
4882 (GET_MODE (to_rtx), value,
4883 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4885 emit_move_insn (to_rtx, value);
4887 preserve_temp_slots (to_rtx);
4888 free_temp_slots ();
4889 pop_temp_slots ();
4890 return;
4893 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
4894 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4896 /* Don't move directly into a return register. */
4897 if (TREE_CODE (to) == RESULT_DECL
4898 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4900 rtx temp;
4902 push_temp_slots ();
4903 if (REG_P (to_rtx) && TYPE_MODE (TREE_TYPE (from)) == BLKmode)
4904 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
4905 else
4906 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4908 if (GET_CODE (to_rtx) == PARALLEL)
4909 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4910 int_size_in_bytes (TREE_TYPE (from)));
4911 else if (temp)
4912 emit_move_insn (to_rtx, temp);
4914 preserve_temp_slots (to_rtx);
4915 free_temp_slots ();
4916 pop_temp_slots ();
4917 return;
4920 /* In case we are returning the contents of an object which overlaps
4921 the place the value is being stored, use a safe function when copying
4922 a value through a pointer into a structure value return block. */
4923 if (TREE_CODE (to) == RESULT_DECL
4924 && TREE_CODE (from) == INDIRECT_REF
4925 && ADDR_SPACE_GENERIC_P
4926 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4927 && refs_may_alias_p (to, from)
4928 && cfun->returns_struct
4929 && !cfun->returns_pcc_struct)
4931 rtx from_rtx, size;
4933 push_temp_slots ();
4934 size = expr_size (from);
4935 from_rtx = expand_normal (from);
4937 emit_library_call (memmove_libfunc, LCT_NORMAL,
4938 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4939 XEXP (from_rtx, 0), Pmode,
4940 convert_to_mode (TYPE_MODE (sizetype),
4941 size, TYPE_UNSIGNED (sizetype)),
4942 TYPE_MODE (sizetype));
4944 preserve_temp_slots (to_rtx);
4945 free_temp_slots ();
4946 pop_temp_slots ();
4947 return;
4950 /* Compute FROM and store the value in the rtx we got. */
4952 push_temp_slots ();
4953 result = store_expr (from, to_rtx, 0, nontemporal);
4954 preserve_temp_slots (result);
4955 free_temp_slots ();
4956 pop_temp_slots ();
4957 return;
4960 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4961 succeeded, false otherwise. */
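/* A minimal sketch of its use (mirroring store_expr below): when a
   nontemporal store is requested, the caller essentially does

     if (! (nontemporal && emit_storent_insn (target, temp)))
       emit_move_insn (target, temp);

   falling back to an ordinary move when the target provides no
   nontemporal store pattern for the mode.  */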
4963 bool
4964 emit_storent_insn (rtx to, rtx from)
4966 struct expand_operand ops[2];
4967 enum machine_mode mode = GET_MODE (to);
4968 enum insn_code code = optab_handler (storent_optab, mode);
4970 if (code == CODE_FOR_nothing)
4971 return false;
4973 create_fixed_operand (&ops[0], to);
4974 create_input_operand (&ops[1], from, mode);
4975 return maybe_expand_insn (code, 2, ops);
4978 /* Generate code for computing expression EXP,
4979 and storing the value into TARGET.
4981 If the mode is BLKmode then we may return TARGET itself.
4982 It turns out that in BLKmode it doesn't cause a problem,
4983 because C has no operators that could combine two different
4984 assignments into the same BLKmode object with different values
4985 with no sequence point. Will other languages need this to
4986 be more thorough?
4988 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4989 stack, and block moves may need to be treated specially.
4991 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4993 rtx
4994 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4996 rtx temp;
4997 rtx alt_rtl = NULL_RTX;
4998 location_t loc = EXPR_LOCATION (exp);
5000 if (VOID_TYPE_P (TREE_TYPE (exp)))
5002 /* C++ can generate ?: expressions with a throw expression in one
5003 branch and an rvalue in the other. Here, we resolve attempts to
5004 store the throw expression's nonexistent result. */
5005 gcc_assert (!call_param_p);
5006 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5007 return NULL_RTX;
5009 if (TREE_CODE (exp) == COMPOUND_EXPR)
5011 /* Perform first part of compound expression, then assign from second
5012 part. */
5013 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5014 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5015 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5016 nontemporal);
5018 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5020 /* For conditional expression, get safe form of the target. Then
5021 test the condition, doing the appropriate assignment on either
5022 side. This avoids the creation of unnecessary temporaries.
5023 For non-BLKmode, it is more efficient not to do this. */
5025 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
5027 do_pending_stack_adjust ();
5028 NO_DEFER_POP;
5029 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5030 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5031 nontemporal);
5032 emit_jump_insn (gen_jump (lab2));
5033 emit_barrier ();
5034 emit_label (lab1);
5035 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5036 nontemporal);
5037 emit_label (lab2);
5038 OK_DEFER_POP;
5040 return NULL_RTX;
5042 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5043 /* If this is a scalar in a register that is stored in a wider mode
5044 than the declared mode, compute the result into its declared mode
5045 and then convert to the wider mode. Our value is the computed
5046 expression. */
5048 rtx inner_target = 0;
5050 /* We can do the conversion inside EXP, which will often result
5051 in some optimizations. Do the conversion in two steps: first
5052 change the signedness, if needed, then the extend. But don't
5053 do this if the type of EXP is a subtype of something else
5054 since then the conversion might involve more than just
5055 converting modes. */
5056 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5057 && TREE_TYPE (TREE_TYPE (exp)) == 0
5058 && GET_MODE_PRECISION (GET_MODE (target))
5059 == TYPE_PRECISION (TREE_TYPE (exp)))
5061 if (TYPE_UNSIGNED (TREE_TYPE (exp))
5062 != SUBREG_PROMOTED_UNSIGNED_P (target))
5064 /* Some types, e.g. Fortran's logical*4, won't have a signed
5065 version, so use the mode instead. */
5066 tree ntype
5067 = (signed_or_unsigned_type_for
5068 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
5069 if (ntype == NULL)
5070 ntype = lang_hooks.types.type_for_mode
5071 (TYPE_MODE (TREE_TYPE (exp)),
5072 SUBREG_PROMOTED_UNSIGNED_P (target));
5074 exp = fold_convert_loc (loc, ntype, exp);
5077 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5078 (GET_MODE (SUBREG_REG (target)),
5079 SUBREG_PROMOTED_UNSIGNED_P (target)),
5080 exp);
5082 inner_target = SUBREG_REG (target);
5085 temp = expand_expr (exp, inner_target, VOIDmode,
5086 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5088 /* If TEMP is a VOIDmode constant, use convert_modes to make
5089 sure that we properly convert it. */
5090 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5092 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5093 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
5094 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5095 GET_MODE (target), temp,
5096 SUBREG_PROMOTED_UNSIGNED_P (target));
5099 convert_move (SUBREG_REG (target), temp,
5100 SUBREG_PROMOTED_UNSIGNED_P (target));
5102 return NULL_RTX;
5104 else if ((TREE_CODE (exp) == STRING_CST
5105 || (TREE_CODE (exp) == MEM_REF
5106 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5107 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5108 == STRING_CST
5109 && integer_zerop (TREE_OPERAND (exp, 1))))
5110 && !nontemporal && !call_param_p
5111 && MEM_P (target))
5113 /* Optimize initialization of an array with a STRING_CST. */
5114 HOST_WIDE_INT exp_len, str_copy_len;
5115 rtx dest_mem;
5116 tree str = TREE_CODE (exp) == STRING_CST
5117 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5119 exp_len = int_expr_size (exp);
5120 if (exp_len <= 0)
5121 goto normal_expr;
5123 if (TREE_STRING_LENGTH (str) <= 0)
5124 goto normal_expr;
5126 str_copy_len = strlen (TREE_STRING_POINTER (str));
5127 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5128 goto normal_expr;
5130 str_copy_len = TREE_STRING_LENGTH (str);
5131 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5132 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5134 str_copy_len += STORE_MAX_PIECES - 1;
5135 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5137 str_copy_len = MIN (str_copy_len, exp_len);
5138 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5139 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5140 MEM_ALIGN (target), false))
5141 goto normal_expr;
5143 dest_mem = target;
5145 dest_mem = store_by_pieces (dest_mem,
5146 str_copy_len, builtin_strncpy_read_str,
5147 CONST_CAST (char *,
5148 TREE_STRING_POINTER (str)),
5149 MEM_ALIGN (target), false,
5150 exp_len > str_copy_len ? 1 : 0);
5151 if (exp_len > str_copy_len)
5152 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5153 GEN_INT (exp_len - str_copy_len),
5154 BLOCK_OP_NORMAL);
5155 return NULL_RTX;
5157 else
5159 rtx tmp_target;
5161 normal_expr:
5162 /* If we want to use a nontemporal store, force the value to
5163 register first. */
5164 tmp_target = nontemporal ? NULL_RTX : target;
5165 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5166 (call_param_p
5167 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5168 &alt_rtl);
5171 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5172 the same as that of TARGET, adjust the constant. This is needed, for
5173 example, in case it is a CONST_DOUBLE and we want only a word-sized
5174 value. */
5175 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5176 && TREE_CODE (exp) != ERROR_MARK
5177 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5178 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5179 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5181 /* If value was not generated in the target, store it there.
5182 Convert the value to TARGET's type first if necessary and emit the
5183 pending incrementations that have been queued when expanding EXP.
5184 Note that we cannot emit the whole queue blindly because this will
5185 effectively disable the POST_INC optimization later.
5187 If TEMP and TARGET compare equal according to rtx_equal_p, but
5188 one or both of them are volatile memory refs, we have to distinguish
5189 two cases:
5190 - expand_expr has used TARGET. In this case, we must not generate
5191 another copy. This can be detected by TARGET being equal according
5192 to == .
5193 - expand_expr has not used TARGET - that means that the source just
5194 happens to have the same RTX form. Since temp will have been created
5195 by expand_expr, it will compare unequal according to == .
5196 We must generate a copy in this case, to reach the correct number
5197 of volatile memory references. */
5199 if ((! rtx_equal_p (temp, target)
5200 || (temp != target && (side_effects_p (temp)
5201 || side_effects_p (target))))
5202 && TREE_CODE (exp) != ERROR_MARK
5203 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5204 but TARGET is not valid memory reference, TEMP will differ
5205 from TARGET although it is really the same location. */
5206 && !(alt_rtl
5207 && rtx_equal_p (alt_rtl, target)
5208 && !side_effects_p (alt_rtl)
5209 && !side_effects_p (target))
5210 /* If there's nothing to copy, don't bother. Don't call
5211 expr_size unless necessary, because some front-ends' (C++)
5212 expr_size hook must not be given objects that are not
5213 supposed to be bit-copied or bit-initialized. */
5214 && expr_size (exp) != const0_rtx)
5216 if (GET_MODE (temp) != GET_MODE (target)
5217 && GET_MODE (temp) != VOIDmode)
5219 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5220 if (GET_MODE (target) == BLKmode
5221 && GET_MODE (temp) == BLKmode)
5222 emit_block_move (target, temp, expr_size (exp),
5223 (call_param_p
5224 ? BLOCK_OP_CALL_PARM
5225 : BLOCK_OP_NORMAL));
5226 else if (GET_MODE (target) == BLKmode)
5227 store_bit_field (target, INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5228 0, 0, 0, GET_MODE (temp), temp);
5229 else
5230 convert_move (target, temp, unsignedp);
5233 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5235 /* Handle copying a string constant into an array. The string
5236 constant may be shorter than the array. So copy just the string's
5237 actual length, and clear the rest. First get the size of the data
5238 type of the string, which is actually the size of the target. */
5239 rtx size = expr_size (exp);
5241 if (CONST_INT_P (size)
5242 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5243 emit_block_move (target, temp, size,
5244 (call_param_p
5245 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5246 else
5248 enum machine_mode pointer_mode
5249 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5250 enum machine_mode address_mode = get_address_mode (target);
5252 /* Compute the size of the data to copy from the string. */
5253 tree copy_size
5254 = size_binop_loc (loc, MIN_EXPR,
5255 make_tree (sizetype, size),
5256 size_int (TREE_STRING_LENGTH (exp)));
5257 rtx copy_size_rtx
5258 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5259 (call_param_p
5260 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5261 rtx label = 0;
5263 /* Copy that much. */
5264 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5265 TYPE_UNSIGNED (sizetype));
5266 emit_block_move (target, temp, copy_size_rtx,
5267 (call_param_p
5268 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5270 /* Figure out how much is left in TARGET that we have to clear.
5271 Do all calculations in pointer_mode. */
5272 if (CONST_INT_P (copy_size_rtx))
5274 size = plus_constant (address_mode, size,
5275 -INTVAL (copy_size_rtx));
5276 target = adjust_address (target, BLKmode,
5277 INTVAL (copy_size_rtx));
5279 else
5281 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5282 copy_size_rtx, NULL_RTX, 0,
5283 OPTAB_LIB_WIDEN);
5285 if (GET_MODE (copy_size_rtx) != address_mode)
5286 copy_size_rtx = convert_to_mode (address_mode,
5287 copy_size_rtx,
5288 TYPE_UNSIGNED (sizetype));
5290 target = offset_address (target, copy_size_rtx,
5291 highest_pow2_factor (copy_size));
5292 label = gen_label_rtx ();
5293 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5294 GET_MODE (size), 0, label);
5297 if (size != const0_rtx)
5298 clear_storage (target, size, BLOCK_OP_NORMAL);
5300 if (label)
5301 emit_label (label);
5304 /* Handle calls that return values in multiple non-contiguous locations.
5305 The Irix 6 ABI has examples of this. */
5306 else if (GET_CODE (target) == PARALLEL)
5307 emit_group_load (target, temp, TREE_TYPE (exp),
5308 int_size_in_bytes (TREE_TYPE (exp)));
5309 else if (GET_MODE (temp) == BLKmode)
5310 emit_block_move (target, temp, expr_size (exp),
5311 (call_param_p
5312 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5313 else if (nontemporal
5314 && emit_storent_insn (target, temp))
5315 /* If we managed to emit a nontemporal store, there is nothing else to
5316 do. */
5318 else
5320 temp = force_operand (temp, target);
5321 if (temp != target)
5322 emit_move_insn (target, temp);
5326 return NULL_RTX;
5329 /* Return true if field F of structure TYPE is a flexible array. */
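/* For example, DATA below is a flexible array member:

     struct packet { int len; char data[]; };

   It must be the last field, have an array type whose domain has a
   zero lower bound and no upper bound, and the enclosing structure
   must have a known size.  */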
5331 static bool
5332 flexible_array_member_p (const_tree f, const_tree type)
5334 const_tree tf;
5336 tf = TREE_TYPE (f);
5337 return (DECL_CHAIN (f) == NULL
5338 && TREE_CODE (tf) == ARRAY_TYPE
5339 && TYPE_DOMAIN (tf)
5340 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5341 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5342 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5343 && int_size_in_bytes (type) >= 0);
5346 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5347 must have in order for it to completely initialize a value of type TYPE.
5348 Return -1 if the number isn't known.
5350 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
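/* For example, for

     struct T { int a; double b[4]; };

   the !FOR_CTOR_P estimate is 5 (one scalar for A plus four for B),
   while the FOR_CTOR_P count is 2, since a complete constructor for T
   has two top-level elements.  */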
5352 static HOST_WIDE_INT
5353 count_type_elements (const_tree type, bool for_ctor_p)
5355 switch (TREE_CODE (type))
5357 case ARRAY_TYPE:
5359 tree nelts;
5361 nelts = array_type_nelts (type);
5362 if (nelts && host_integerp (nelts, 1))
5364 unsigned HOST_WIDE_INT n;
5366 n = tree_low_cst (nelts, 1) + 1;
5367 if (n == 0 || for_ctor_p)
5368 return n;
5369 else
5370 return n * count_type_elements (TREE_TYPE (type), false);
5372 return for_ctor_p ? -1 : 1;
5375 case RECORD_TYPE:
5377 unsigned HOST_WIDE_INT n;
5378 tree f;
5380 n = 0;
5381 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5382 if (TREE_CODE (f) == FIELD_DECL)
5384 if (!for_ctor_p)
5385 n += count_type_elements (TREE_TYPE (f), false);
5386 else if (!flexible_array_member_p (f, type))
5387 /* Don't count flexible arrays, which are not supposed
5388 to be initialized. */
5389 n += 1;
5392 return n;
5395 case UNION_TYPE:
5396 case QUAL_UNION_TYPE:
5398 tree f;
5399 HOST_WIDE_INT n, m;
5401 gcc_assert (!for_ctor_p);
5402 /* Estimate the number of scalars in each field and pick the
5403 maximum. Other estimates would do instead; the idea is simply
5404 to make sure that the estimate is not sensitive to the ordering
5405 of the fields. */
5406 n = 1;
5407 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5408 if (TREE_CODE (f) == FIELD_DECL)
5410 m = count_type_elements (TREE_TYPE (f), false);
5411 /* If the field doesn't span the whole union, add an extra
5412 scalar for the rest. */
5413 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5414 TYPE_SIZE (type)) != 1)
5415 m++;
5416 if (n < m)
5417 n = m;
5419 return n;
5422 case COMPLEX_TYPE:
5423 return 2;
5425 case VECTOR_TYPE:
5426 return TYPE_VECTOR_SUBPARTS (type);
5428 case INTEGER_TYPE:
5429 case REAL_TYPE:
5430 case FIXED_POINT_TYPE:
5431 case ENUMERAL_TYPE:
5432 case BOOLEAN_TYPE:
5433 case POINTER_TYPE:
5434 case OFFSET_TYPE:
5435 case REFERENCE_TYPE:
5436 case NULLPTR_TYPE:
5437 return 1;
5439 case ERROR_MARK:
5440 return 0;
5442 case VOID_TYPE:
5443 case METHOD_TYPE:
5444 case FUNCTION_TYPE:
5445 case LANG_TYPE:
5446 default:
5447 gcc_unreachable ();
5451 /* Helper for categorize_ctor_elements. Identical interface. */
5453 static bool
5454 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5455 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5457 unsigned HOST_WIDE_INT idx;
5458 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5459 tree value, purpose, elt_type;
5461 /* Whether CTOR is a valid constant initializer, in accordance with what
5462 initializer_constant_valid_p does. If inferred from the constructor
5463 elements, true until proven otherwise. */
5464 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5465 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5467 nz_elts = 0;
5468 init_elts = 0;
5469 num_fields = 0;
5470 elt_type = NULL_TREE;
5472 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5474 HOST_WIDE_INT mult = 1;
5476 if (TREE_CODE (purpose) == RANGE_EXPR)
5478 tree lo_index = TREE_OPERAND (purpose, 0);
5479 tree hi_index = TREE_OPERAND (purpose, 1);
5481 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
5482 mult = (tree_low_cst (hi_index, 1)
5483 - tree_low_cst (lo_index, 1) + 1);
5485 num_fields += mult;
5486 elt_type = TREE_TYPE (value);
5488 switch (TREE_CODE (value))
5490 case CONSTRUCTOR:
5492 HOST_WIDE_INT nz = 0, ic = 0;
5494 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5495 p_complete);
5497 nz_elts += mult * nz;
5498 init_elts += mult * ic;
5500 if (const_from_elts_p && const_p)
5501 const_p = const_elt_p;
5503 break;
5505 case INTEGER_CST:
5506 case REAL_CST:
5507 case FIXED_CST:
5508 if (!initializer_zerop (value))
5509 nz_elts += mult;
5510 init_elts += mult;
5511 break;
5513 case STRING_CST:
5514 nz_elts += mult * TREE_STRING_LENGTH (value);
5515 init_elts += mult * TREE_STRING_LENGTH (value);
5516 break;
5518 case COMPLEX_CST:
5519 if (!initializer_zerop (TREE_REALPART (value)))
5520 nz_elts += mult;
5521 if (!initializer_zerop (TREE_IMAGPART (value)))
5522 nz_elts += mult;
5523 init_elts += mult;
5524 break;
5526 case VECTOR_CST:
5528 unsigned i;
5529 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5531 tree v = VECTOR_CST_ELT (value, i);
5532 if (!initializer_zerop (v))
5533 nz_elts += mult;
5534 init_elts += mult;
5537 break;
5539 default:
5541 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5542 nz_elts += mult * tc;
5543 init_elts += mult * tc;
5545 if (const_from_elts_p && const_p)
5546 const_p = initializer_constant_valid_p (value, elt_type)
5547 != NULL_TREE;
5549 break;
5553 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5554 num_fields, elt_type))
5555 *p_complete = false;
5557 *p_nz_elts += nz_elts;
5558 *p_init_elts += init_elts;
5560 return const_p;
5563 /* Examine CTOR to discover:
5564 * how many scalar fields are set to nonzero values,
5565 and place it in *P_NZ_ELTS;
5566 * how many scalar fields in total are in CTOR,
5567 and place it in *P_INIT_ELTS.
5568 * whether the constructor is complete -- in the sense that every
5569 meaningful byte is explicitly given a value --
5570 and place it in *P_COMPLETE.
5572 Return whether or not CTOR is a valid static constant initializer, the same
5573 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5575 bool
5576 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5577 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5579 *p_nz_elts = 0;
5580 *p_init_elts = 0;
5581 *p_complete = true;
5583 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
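/* As an illustrative sketch (assuming a hypothetical initializer, not one
   taken from the code above):

     struct s { int a; int b[3]; };
     struct s x = { 0, { 1, 0, 2 } };

   categorize_ctor_elements would set *P_INIT_ELTS to 4 (four scalar fields
   are explicitly initialized), *P_NZ_ELTS to 2 (only the 1 and the 2 are
   nonzero) and *P_COMPLETE to true, since every field at every level is
   given a value; the return value would be true because the initializer is
   a valid static constant.  */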
5586 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5587 of which had type LAST_TYPE. Each element was itself a complete
5588 initializer, in the sense that every meaningful byte was explicitly
5589 given a value. Return true if the same is true for the constructor
5590 as a whole. */
5592 bool
5593 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5594 const_tree last_type)
5596 if (TREE_CODE (type) == UNION_TYPE
5597 || TREE_CODE (type) == QUAL_UNION_TYPE)
5599 if (num_elts == 0)
5600 return false;
5602 gcc_assert (num_elts == 1 && last_type);
5604 /* ??? We could look at each element of the union, and find the
5605 largest element, which would avoid comparing the size of the
5606 initialized element against any tail padding in the union.
5607 Doesn't seem worth the effort... */
5608 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5611 return count_type_elements (type, true) == num_elts;
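/* For illustration, under the usual layout assumption that int is wider
   than char:

     union u { char c; int i; };
     union u x = { .i = 5 };
     union u y = { .c = 'a' };

   the constructor of x is complete at this level, because TYPE_SIZE of the
   initialized member equals TYPE_SIZE of the union, while the constructor
   of y is not, since the bytes beyond the char member are left uncovered.  */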
5614 /* Return 1 if EXP contains mostly (3/4) zeros. */
5616 static int
5617 mostly_zeros_p (const_tree exp)
5619 if (TREE_CODE (exp) == CONSTRUCTOR)
5621 HOST_WIDE_INT nz_elts, init_elts;
5622 bool complete_p;
5624 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5625 return !complete_p || nz_elts < init_elts / 4;
5628 return initializer_zerop (exp);
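/* A quick sketch of the heuristic above with a hypothetical initializer:
   for

     int a[8] = { 0, 0, 5, 0, 0, 0, 0, 0 };

   categorize_ctor_elements reports nz_elts == 1 and init_elts == 8, so
   1 < 8 / 4 holds and mostly_zeros_p returns nonzero; for { 1, 2, 0, 0 }
   it returns zero, since 2 < 4 / 4 does not hold.  */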
5631 /* Return 1 if EXP contains all zeros. */
5633 static int
5634 all_zeros_p (const_tree exp)
5636 if (TREE_CODE (exp) == CONSTRUCTOR)
5638 HOST_WIDE_INT nz_elts, init_elts;
5639 bool complete_p;
5641 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5642 return nz_elts == 0;
5645 return initializer_zerop (exp);
5648 /* Helper function for store_constructor.
5649 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5650 TYPE is the type of the CONSTRUCTOR, not the element type.
5651 CLEARED is as for store_constructor.
5652 ALIAS_SET is the alias set to use for any stores.
5654 This provides a recursive shortcut back to store_constructor when it isn't
5655 necessary to go through store_field. This is so that we can pass through
5656 the cleared field to let store_constructor know that we may not have to
5657 clear a substructure if the outer structure has already been cleared. */
5659 static void
5660 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5661 HOST_WIDE_INT bitpos, enum machine_mode mode,
5662 tree exp, tree type, int cleared,
5663 alias_set_type alias_set)
5665 if (TREE_CODE (exp) == CONSTRUCTOR
5666 /* We can only call store_constructor recursively if the size and
5667 bit position are on a byte boundary. */
5668 && bitpos % BITS_PER_UNIT == 0
5669 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5670 /* If we have a nonzero bitpos for a register target, then we just
5671 let store_field do the bitfield handling. This is unlikely to
5672 generate unnecessary clear instructions anyway. */
5673 && (bitpos == 0 || MEM_P (target)))
5675 if (MEM_P (target))
5676 target
5677 = adjust_address (target,
5678 GET_MODE (target) == BLKmode
5679 || 0 != (bitpos
5680 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5681 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5684 /* Update the alias set, if required. */
5685 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5686 && MEM_ALIAS_SET (target) != 0)
5688 target = copy_rtx (target);
5689 set_mem_alias_set (target, alias_set);
5692 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5694 else
5695 store_field (target, bitsize, bitpos, 0, 0, mode, exp, type, alias_set,
5696 false);
5699 /* Store the value of constructor EXP into the rtx TARGET.
5700 TARGET is either a REG or a MEM; we know it cannot conflict, since
5701 safe_from_p has been called.
5702 CLEARED is true if TARGET is known to have been zero'd.
5703 SIZE is the number of bytes of TARGET we are allowed to modify: this
5704 may not be the same as the size of EXP if we are assigning to a field
5705 which has been packed to exclude padding bits. */
5707 static void
5708 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5710 tree type = TREE_TYPE (exp);
5711 #ifdef WORD_REGISTER_OPERATIONS
5712 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5713 #endif
5715 switch (TREE_CODE (type))
5717 case RECORD_TYPE:
5718 case UNION_TYPE:
5719 case QUAL_UNION_TYPE:
5721 unsigned HOST_WIDE_INT idx;
5722 tree field, value;
5724 /* If size is zero or the target is already cleared, do nothing. */
5725 if (size == 0 || cleared)
5726 cleared = 1;
5727 /* We either clear the aggregate or indicate the value is dead. */
5728 else if ((TREE_CODE (type) == UNION_TYPE
5729 || TREE_CODE (type) == QUAL_UNION_TYPE)
5730 && ! CONSTRUCTOR_ELTS (exp))
5731 /* If the constructor is empty, clear the union. */
5733 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5734 cleared = 1;
5737 /* If we are building a static constructor into a register,
5738 set the initial value as zero so we can fold the value into
5739 a constant. But if more than one register is involved,
5740 this probably loses. */
5741 else if (REG_P (target) && TREE_STATIC (exp)
5742 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5744 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5745 cleared = 1;
5748 /* If the constructor has fewer fields than the structure or
5749 if we are initializing the structure to mostly zeros, clear
5750 the whole structure first. Don't do this if TARGET is a
5751 register whose mode size isn't equal to SIZE since
5752 clear_storage can't handle this case. */
5753 else if (size > 0
5754 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5755 != fields_length (type))
5756 || mostly_zeros_p (exp))
5757 && (!REG_P (target)
5758 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5759 == size)))
5761 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5762 cleared = 1;
5765 if (REG_P (target) && !cleared)
5766 emit_clobber (target);
5768 /* Store each element of the constructor into the
5769 corresponding field of TARGET. */
5770 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5772 enum machine_mode mode;
5773 HOST_WIDE_INT bitsize;
5774 HOST_WIDE_INT bitpos = 0;
5775 tree offset;
5776 rtx to_rtx = target;
5778 /* Just ignore missing fields. We cleared the whole
5779 structure, above, if any fields are missing. */
5780 if (field == 0)
5781 continue;
5783 if (cleared && initializer_zerop (value))
5784 continue;
5786 if (host_integerp (DECL_SIZE (field), 1))
5787 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5788 else
5789 bitsize = -1;
5791 mode = DECL_MODE (field);
5792 if (DECL_BIT_FIELD (field))
5793 mode = VOIDmode;
5795 offset = DECL_FIELD_OFFSET (field);
5796 if (host_integerp (offset, 0)
5797 && host_integerp (bit_position (field), 0))
5799 bitpos = int_bit_position (field);
5800 offset = 0;
5802 else
5803 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5805 if (offset)
5807 enum machine_mode address_mode;
5808 rtx offset_rtx;
5810 offset
5811 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5812 make_tree (TREE_TYPE (exp),
5813 target));
5815 offset_rtx = expand_normal (offset);
5816 gcc_assert (MEM_P (to_rtx));
5818 address_mode = get_address_mode (to_rtx);
5819 if (GET_MODE (offset_rtx) != address_mode)
5820 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5822 to_rtx = offset_address (to_rtx, offset_rtx,
5823 highest_pow2_factor (offset));
5826 #ifdef WORD_REGISTER_OPERATIONS
5827 /* If this initializes a field that is smaller than a
5828 word, at the start of a word, try to widen it to a full
5829 word. This special case allows us to output C++ member
5830 function initializations in a form that the optimizers
5831 can understand. */
5832 if (REG_P (target)
5833 && bitsize < BITS_PER_WORD
5834 && bitpos % BITS_PER_WORD == 0
5835 && GET_MODE_CLASS (mode) == MODE_INT
5836 && TREE_CODE (value) == INTEGER_CST
5837 && exp_size >= 0
5838 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5840 tree type = TREE_TYPE (value);
5842 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5844 type = lang_hooks.types.type_for_mode
5845 (word_mode, TYPE_UNSIGNED (type));
5846 value = fold_convert (type, value);
5849 if (BYTES_BIG_ENDIAN)
5850 value
5851 = fold_build2 (LSHIFT_EXPR, type, value,
5852 build_int_cst (type,
5853 BITS_PER_WORD - bitsize));
5854 bitsize = BITS_PER_WORD;
5855 mode = word_mode;
5857 #endif
5859 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5860 && DECL_NONADDRESSABLE_P (field))
5862 to_rtx = copy_rtx (to_rtx);
5863 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5866 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5867 value, type, cleared,
5868 get_alias_set (TREE_TYPE (field)));
5870 break;
5872 case ARRAY_TYPE:
5874 tree value, index;
5875 unsigned HOST_WIDE_INT i;
5876 int need_to_clear;
5877 tree domain;
5878 tree elttype = TREE_TYPE (type);
5879 int const_bounds_p;
5880 HOST_WIDE_INT minelt = 0;
5881 HOST_WIDE_INT maxelt = 0;
5883 domain = TYPE_DOMAIN (type);
5884 const_bounds_p = (TYPE_MIN_VALUE (domain)
5885 && TYPE_MAX_VALUE (domain)
5886 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5887 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5889 /* If we have constant bounds for the range of the type, get them. */
5890 if (const_bounds_p)
5892 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5893 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5896 /* If the constructor has fewer elements than the array, clear
5897 the whole array first. Similarly if this is a static
5898 constructor of a non-BLKmode object. */
5899 if (cleared)
5900 need_to_clear = 0;
5901 else if (REG_P (target) && TREE_STATIC (exp))
5902 need_to_clear = 1;
5903 else
5905 unsigned HOST_WIDE_INT idx;
5906 tree index, value;
5907 HOST_WIDE_INT count = 0, zero_count = 0;
5908 need_to_clear = ! const_bounds_p;
5910 /* This loop is a more accurate version of the loop in
5911 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5912 is also needed to check for missing elements. */
5913 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5915 HOST_WIDE_INT this_node_count;
5917 if (need_to_clear)
5918 break;
5920 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5922 tree lo_index = TREE_OPERAND (index, 0);
5923 tree hi_index = TREE_OPERAND (index, 1);
5925 if (! host_integerp (lo_index, 1)
5926 || ! host_integerp (hi_index, 1))
5928 need_to_clear = 1;
5929 break;
5932 this_node_count = (tree_low_cst (hi_index, 1)
5933 - tree_low_cst (lo_index, 1) + 1);
5935 else
5936 this_node_count = 1;
5938 count += this_node_count;
5939 if (mostly_zeros_p (value))
5940 zero_count += this_node_count;
5943 /* Clear the entire array first if there are any missing
5944 elements, or if the incidence of zero elements is >=
5945 75%. */
5946 if (! need_to_clear
5947 && (count < maxelt - minelt + 1
5948 || 4 * zero_count >= 3 * count))
5949 need_to_clear = 1;
5952 if (need_to_clear && size > 0)
5954 if (REG_P (target))
5955 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5956 else
5957 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5958 cleared = 1;
5961 if (!cleared && REG_P (target))
5962 /* Inform later passes that the old value is dead. */
5963 emit_clobber (target);
5965 /* Store each element of the constructor into the
5966 corresponding element of TARGET, determined by counting the
5967 elements. */
5968 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5970 enum machine_mode mode;
5971 HOST_WIDE_INT bitsize;
5972 HOST_WIDE_INT bitpos;
5973 rtx xtarget = target;
5975 if (cleared && initializer_zerop (value))
5976 continue;
5978 mode = TYPE_MODE (elttype);
5979 if (mode == BLKmode)
5980 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5981 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5982 : -1);
5983 else
5984 bitsize = GET_MODE_BITSIZE (mode);
5986 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5988 tree lo_index = TREE_OPERAND (index, 0);
5989 tree hi_index = TREE_OPERAND (index, 1);
5990 rtx index_r, pos_rtx;
5991 HOST_WIDE_INT lo, hi, count;
5992 tree position;
5994 /* If the range is constant and "small", unroll the loop. */
5995 if (const_bounds_p
5996 && host_integerp (lo_index, 0)
5997 && host_integerp (hi_index, 0)
5998 && (lo = tree_low_cst (lo_index, 0),
5999 hi = tree_low_cst (hi_index, 0),
6000 count = hi - lo + 1,
6001 (!MEM_P (target)
6002 || count <= 2
6003 || (host_integerp (TYPE_SIZE (elttype), 1)
6004 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
6005 <= 40 * 8)))))
6007 lo -= minelt; hi -= minelt;
6008 for (; lo <= hi; lo++)
6010 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
6012 if (MEM_P (target)
6013 && !MEM_KEEP_ALIAS_SET_P (target)
6014 && TREE_CODE (type) == ARRAY_TYPE
6015 && TYPE_NONALIASED_COMPONENT (type))
6017 target = copy_rtx (target);
6018 MEM_KEEP_ALIAS_SET_P (target) = 1;
6021 store_constructor_field
6022 (target, bitsize, bitpos, mode, value, type, cleared,
6023 get_alias_set (elttype));
6026 else
6028 rtx loop_start = gen_label_rtx ();
6029 rtx loop_end = gen_label_rtx ();
6030 tree exit_cond;
6032 expand_normal (hi_index);
6034 index = build_decl (EXPR_LOCATION (exp),
6035 VAR_DECL, NULL_TREE, domain);
6036 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6037 SET_DECL_RTL (index, index_r);
6038 store_expr (lo_index, index_r, 0, false);
6040 /* Build the head of the loop. */
6041 do_pending_stack_adjust ();
6042 emit_label (loop_start);
6044 /* Assign value to element index. */
6045 position =
6046 fold_convert (ssizetype,
6047 fold_build2 (MINUS_EXPR,
6048 TREE_TYPE (index),
6049 index,
6050 TYPE_MIN_VALUE (domain)));
6052 position =
6053 size_binop (MULT_EXPR, position,
6054 fold_convert (ssizetype,
6055 TYPE_SIZE_UNIT (elttype)));
6057 pos_rtx = expand_normal (position);
6058 xtarget = offset_address (target, pos_rtx,
6059 highest_pow2_factor (position));
6060 xtarget = adjust_address (xtarget, mode, 0);
6061 if (TREE_CODE (value) == CONSTRUCTOR)
6062 store_constructor (value, xtarget, cleared,
6063 bitsize / BITS_PER_UNIT);
6064 else
6065 store_expr (value, xtarget, 0, false);
6067 /* Generate a conditional jump to exit the loop. */
6068 exit_cond = build2 (LT_EXPR, integer_type_node,
6069 index, hi_index);
6070 jumpif (exit_cond, loop_end, -1);
6072 /* Update the loop counter, and jump to the head of
6073 the loop. */
6074 expand_assignment (index,
6075 build2 (PLUS_EXPR, TREE_TYPE (index),
6076 index, integer_one_node),
6077 false);
6079 emit_jump (loop_start);
6081 /* Build the end of the loop. */
6082 emit_label (loop_end);
6085 else if ((index != 0 && ! host_integerp (index, 0))
6086 || ! host_integerp (TYPE_SIZE (elttype), 1))
6088 tree position;
6090 if (index == 0)
6091 index = ssize_int (1);
6093 if (minelt)
6094 index = fold_convert (ssizetype,
6095 fold_build2 (MINUS_EXPR,
6096 TREE_TYPE (index),
6097 index,
6098 TYPE_MIN_VALUE (domain)));
6100 position =
6101 size_binop (MULT_EXPR, index,
6102 fold_convert (ssizetype,
6103 TYPE_SIZE_UNIT (elttype)));
6104 xtarget = offset_address (target,
6105 expand_normal (position),
6106 highest_pow2_factor (position));
6107 xtarget = adjust_address (xtarget, mode, 0);
6108 store_expr (value, xtarget, 0, false);
6110 else
6112 if (index != 0)
6113 bitpos = ((tree_low_cst (index, 0) - minelt)
6114 * tree_low_cst (TYPE_SIZE (elttype), 1));
6115 else
6116 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
6118 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6119 && TREE_CODE (type) == ARRAY_TYPE
6120 && TYPE_NONALIASED_COMPONENT (type))
6122 target = copy_rtx (target);
6123 MEM_KEEP_ALIAS_SET_P (target) = 1;
6125 store_constructor_field (target, bitsize, bitpos, mode, value,
6126 type, cleared, get_alias_set (elttype));
6129 break;
6132 case VECTOR_TYPE:
6134 unsigned HOST_WIDE_INT idx;
6135 constructor_elt *ce;
6136 int i;
6137 int need_to_clear;
6138 int icode = 0;
6139 tree elttype = TREE_TYPE (type);
6140 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
6141 enum machine_mode eltmode = TYPE_MODE (elttype);
6142 HOST_WIDE_INT bitsize;
6143 HOST_WIDE_INT bitpos;
6144 rtvec vector = NULL;
6145 unsigned n_elts;
6146 alias_set_type alias;
6148 gcc_assert (eltmode != BLKmode);
6150 n_elts = TYPE_VECTOR_SUBPARTS (type);
6151 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6153 enum machine_mode mode = GET_MODE (target);
6155 icode = (int) optab_handler (vec_init_optab, mode);
6156 if (icode != CODE_FOR_nothing)
6158 unsigned int i;
6160 vector = rtvec_alloc (n_elts);
6161 for (i = 0; i < n_elts; i++)
6162 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6166 /* If the constructor has fewer elements than the vector,
6167 clear the whole vector first. Similarly if this is a static
6168 constructor of a non-BLKmode object. */
6169 if (cleared)
6170 need_to_clear = 0;
6171 else if (REG_P (target) && TREE_STATIC (exp))
6172 need_to_clear = 1;
6173 else
6175 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6176 tree value;
6178 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6180 int n_elts_here = tree_low_cst
6181 (int_const_binop (TRUNC_DIV_EXPR,
6182 TYPE_SIZE (TREE_TYPE (value)),
6183 TYPE_SIZE (elttype)), 1);
6185 count += n_elts_here;
6186 if (mostly_zeros_p (value))
6187 zero_count += n_elts_here;
6190 /* Clear the entire vector first if there are any missing elements,
6191 or if the incidence of zero elements is >= 75%. */
6192 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6195 if (need_to_clear && size > 0 && !vector)
6197 if (REG_P (target))
6198 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6199 else
6200 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6201 cleared = 1;
6204 /* Inform later passes that the old value is dead. */
6205 if (!cleared && !vector && REG_P (target))
6206 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6208 if (MEM_P (target))
6209 alias = MEM_ALIAS_SET (target);
6210 else
6211 alias = get_alias_set (elttype);
6213 /* Store each element of the constructor into the corresponding
6214 element of TARGET, determined by counting the elements. */
6215 for (idx = 0, i = 0;
6216 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6217 idx++, i += bitsize / elt_size)
6219 HOST_WIDE_INT eltpos;
6220 tree value = ce->value;
6222 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
6223 if (cleared && initializer_zerop (value))
6224 continue;
6226 if (ce->index)
6227 eltpos = tree_low_cst (ce->index, 1);
6228 else
6229 eltpos = i;
6231 if (vector)
6233 /* Vector CONSTRUCTORs should only be built from smaller
6234 vectors in the case of BLKmode vectors. */
6235 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6236 RTVEC_ELT (vector, eltpos)
6237 = expand_normal (value);
6239 else
6241 enum machine_mode value_mode =
6242 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6243 ? TYPE_MODE (TREE_TYPE (value))
6244 : eltmode;
6245 bitpos = eltpos * elt_size;
6246 store_constructor_field (target, bitsize, bitpos,
6247 value_mode, value, type,
6248 cleared, alias);
6252 if (vector)
6253 emit_insn (GEN_FCN (icode)
6254 (target,
6255 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6256 break;
6259 default:
6260 gcc_unreachable ();
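/* A rough illustration of the clearing strategy above, for a hypothetical
   aggregate in memory:

     struct s { int a, b, c, d, e, f, g, h; };
     struct s x = { .c = 7 };

   The constructor names fewer fields than the structure has (and is mostly
   zeros anyway), so the RECORD_TYPE case first clears the whole object with
   clear_storage and then stores only the one nonzero element, instead of
   storing each of the eight fields separately.  */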
6264 /* Store the value of EXP (an expression tree)
6265 into a subfield of TARGET which has mode MODE and occupies
6266 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6267 If MODE is VOIDmode, it means that we are storing into a bit-field.
6269 BITREGION_START is the bitpos of the first bitfield in this region.
6270 BITREGION_END is the bitpos of the ending bitfield in this region.
6271 These two fields are 0 if the C++ memory model does not apply,
6272 or we are not interested in keeping track of bitfield regions.
6274 Always return const0_rtx unless we have something particular to
6275 return.
6277 TYPE is the type of the underlying object,
6279 ALIAS_SET is the alias set for the destination. This value will
6280 (in general) be different from that for TARGET, since TARGET is a
6281 reference to the containing structure.
6283 If NONTEMPORAL is true, try generating a nontemporal store. */
6285 static rtx
6286 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6287 unsigned HOST_WIDE_INT bitregion_start,
6288 unsigned HOST_WIDE_INT bitregion_end,
6289 enum machine_mode mode, tree exp, tree type,
6290 alias_set_type alias_set, bool nontemporal)
6292 if (TREE_CODE (exp) == ERROR_MARK)
6293 return const0_rtx;
6295 /* If we have nothing to store, do nothing unless the expression has
6296 side-effects. */
6297 if (bitsize == 0)
6298 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6300 /* If we are storing into an unaligned field of an aligned union that is
6301 in a register, we may have the mode of TARGET being an integer mode but
6302 MODE == BLKmode. In that case, get an aligned object whose size and
6303 alignment are the same as TARGET and store TARGET into it (we can avoid
6304 the store if the field being stored is the entire width of TARGET). Then
6305 call ourselves recursively to store the field into a BLKmode version of
6306 that object. Finally, load from the object into TARGET. This is not
6307 very efficient in general, but should only be slightly more expensive
6308 than the otherwise-required unaligned accesses. Perhaps this can be
6309 cleaned up later. It's tempting to make OBJECT readonly, but it's set
6310 twice, once with emit_move_insn and once via store_field. */
6312 if (mode == BLKmode
6313 && (REG_P (target) || GET_CODE (target) == SUBREG))
6315 rtx object = assign_temp (type, 0, 1, 1);
6316 rtx blk_object = adjust_address (object, BLKmode, 0);
6318 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
6319 emit_move_insn (object, target);
6321 store_field (blk_object, bitsize, bitpos,
6322 bitregion_start, bitregion_end,
6323 mode, exp, type, MEM_ALIAS_SET (blk_object), nontemporal);
6325 emit_move_insn (target, object);
6327 /* We want to return the BLKmode version of the data. */
6328 return blk_object;
6331 if (GET_CODE (target) == CONCAT)
6333 /* We're storing into a struct containing a single __complex. */
6335 gcc_assert (!bitpos);
6336 return store_expr (exp, target, 0, nontemporal);
6339 /* If the structure is in a register or if the component
6340 is a bit field, we cannot use addressing to access it.
6341 Use bit-field techniques or SUBREG to store in it. */
6343 if (mode == VOIDmode
6344 || (mode != BLKmode && ! direct_store[(int) mode]
6345 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6346 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6347 || REG_P (target)
6348 || GET_CODE (target) == SUBREG
6349 /* If the field isn't aligned enough to store as an ordinary memref,
6350 store it as a bit field. */
6351 || (mode != BLKmode
6352 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6353 || bitpos % GET_MODE_ALIGNMENT (mode))
6354 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6355 || (bitpos % BITS_PER_UNIT != 0)))
6356 || (bitsize >= 0 && mode != BLKmode
6357 && GET_MODE_BITSIZE (mode) > bitsize)
6358 /* If the RHS and field are a constant size and the size of the
6359 RHS isn't the same size as the bitfield, we must use bitfield
6360 operations. */
6361 || (bitsize >= 0
6362 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6363 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6364 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6365 decl, we must use bitfield operations. */
6366 || (bitsize >= 0
6367 && TREE_CODE (exp) == MEM_REF
6368 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6369 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6370 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6371 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6373 rtx temp;
6374 gimple nop_def;
6376 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6377 implies a mask operation. If the precision is the same size as
6378 the field we're storing into, that mask is redundant. This is
6379 particularly common with bit field assignments generated by the
6380 C front end. */
6381 nop_def = get_def_for_expr (exp, NOP_EXPR);
6382 if (nop_def)
6384 tree type = TREE_TYPE (exp);
6385 if (INTEGRAL_TYPE_P (type)
6386 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6387 && bitsize == TYPE_PRECISION (type))
6389 tree op = gimple_assign_rhs1 (nop_def);
6390 type = TREE_TYPE (op);
6391 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6392 exp = op;
6396 temp = expand_normal (exp);
6398 /* If BITSIZE is narrower than the size of the type of EXP
6399 we will be narrowing TEMP. Normally, what's wanted are the
6400 low-order bits. However, if EXP's type is a record and this is a
6401 big-endian machine, we want the upper BITSIZE bits. */
6402 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6403 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6404 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6405 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6406 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6407 NULL_RTX, 1);
6409 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6410 if (mode != VOIDmode && mode != BLKmode
6411 && mode != TYPE_MODE (TREE_TYPE (exp)))
6412 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6414 /* If the modes of TEMP and TARGET are both BLKmode, both
6415 must be in memory and BITPOS must be aligned on a byte
6416 boundary. If so, we simply do a block copy. Likewise
6417 for a BLKmode-like TARGET. */
6418 if (GET_MODE (temp) == BLKmode
6419 && (GET_MODE (target) == BLKmode
6420 || (MEM_P (target)
6421 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6422 && (bitpos % BITS_PER_UNIT) == 0
6423 && (bitsize % BITS_PER_UNIT) == 0)))
6425 gcc_assert (MEM_P (target) && MEM_P (temp)
6426 && (bitpos % BITS_PER_UNIT) == 0);
6428 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6429 emit_block_move (target, temp,
6430 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6431 / BITS_PER_UNIT),
6432 BLOCK_OP_NORMAL);
6434 return const0_rtx;
6437 /* Store the value in the bitfield. */
6438 store_bit_field (target, bitsize, bitpos,
6439 bitregion_start, bitregion_end,
6440 mode, temp);
6442 return const0_rtx;
6444 else
6446 /* Now build a reference to just the desired component. */
6447 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6449 if (to_rtx == target)
6450 to_rtx = copy_rtx (to_rtx);
6452 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6453 set_mem_alias_set (to_rtx, alias_set);
6455 return store_expr (exp, to_rtx, 0, nontemporal);
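/* To make the dispatch above concrete, consider a hypothetical bit-field
   assignment such as

     struct s { unsigned int x : 3, y : 5; } *p;
     p->y = 21;

   MODE is VOIDmode for the bit-field, so the first branch is taken: the
   right-hand side is expanded with expand_normal and written with
   store_bit_field (at bit position 3 and width 5 under a typical layout).
   A store to an ordinary, naturally aligned field instead falls through to
   the adjust_address / store_expr path at the end of the function.  */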
6459 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6460 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6461 codes and find the ultimate containing object, which we return.
6463 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6464 bit position, and *PUNSIGNEDP to the signedness of the field.
6465 If the position of the field is variable, we store a tree
6466 giving the variable offset (in units) in *POFFSET.
6467 This offset is in addition to the bit position.
6468 If the position is not variable, we store 0 in *POFFSET.
6470 If any of the extraction expressions is volatile,
6471 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6473 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6474 Otherwise, it is a mode that can be used to access the field.
6476 If the field describes a variable-sized object, *PMODE is set to
6477 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6478 this case, but the address of the object can be found.
6480 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6481 look through nodes that serve as markers of a greater alignment than
6482 the one that can be deduced from the expression. These nodes make it
6483 possible for front-ends to prevent temporaries from being created by
6484 the middle-end on alignment considerations. For that purpose, the
6485 normal operating mode at high-level is to always pass FALSE so that
6486 the ultimate containing object is really returned; moreover, the
6487 associated predicate handled_component_p will always return TRUE
6488 on these nodes, thus indicating that they are essentially handled
6489 by get_inner_reference. TRUE should only be passed when the caller
6490 is scanning the expression in order to build another representation
6491 and specifically knows how to handle these nodes; as such, this is
6492 the normal operating mode in the RTL expanders. */
6494 tree
6495 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6496 HOST_WIDE_INT *pbitpos, tree *poffset,
6497 enum machine_mode *pmode, int *punsignedp,
6498 int *pvolatilep, bool keep_aligning)
6500 tree size_tree = 0;
6501 enum machine_mode mode = VOIDmode;
6502 bool blkmode_bitfield = false;
6503 tree offset = size_zero_node;
6504 double_int bit_offset = double_int_zero;
6506 /* First get the mode, signedness, and size. We do this from just the
6507 outermost expression. */
6508 *pbitsize = -1;
6509 if (TREE_CODE (exp) == COMPONENT_REF)
6511 tree field = TREE_OPERAND (exp, 1);
6512 size_tree = DECL_SIZE (field);
6513 if (!DECL_BIT_FIELD (field))
6514 mode = DECL_MODE (field);
6515 else if (DECL_MODE (field) == BLKmode)
6516 blkmode_bitfield = true;
6517 else if (TREE_THIS_VOLATILE (exp)
6518 && flag_strict_volatile_bitfields > 0)
6519 /* Volatile bitfields should be accessed in the mode of the
6520 field's type, not the mode computed based on the bit
6521 size. */
6522 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6524 *punsignedp = DECL_UNSIGNED (field);
6526 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6528 size_tree = TREE_OPERAND (exp, 1);
6529 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6530 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6532 /* For vector types, with the correct size of access, use the mode of
6533 the inner type. */
6534 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6535 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6536 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6537 mode = TYPE_MODE (TREE_TYPE (exp));
6539 else
6541 mode = TYPE_MODE (TREE_TYPE (exp));
6542 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6544 if (mode == BLKmode)
6545 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6546 else
6547 *pbitsize = GET_MODE_BITSIZE (mode);
6550 if (size_tree != 0)
6552 if (! host_integerp (size_tree, 1))
6553 mode = BLKmode, *pbitsize = -1;
6554 else
6555 *pbitsize = tree_low_cst (size_tree, 1);
6558 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6559 and find the ultimate containing object. */
6560 while (1)
6562 switch (TREE_CODE (exp))
6564 case BIT_FIELD_REF:
6565 bit_offset
6566 = double_int_add (bit_offset,
6567 tree_to_double_int (TREE_OPERAND (exp, 2)));
6568 break;
6570 case COMPONENT_REF:
6572 tree field = TREE_OPERAND (exp, 1);
6573 tree this_offset = component_ref_field_offset (exp);
6575 /* If this field hasn't been filled in yet, don't go past it.
6576 This should only happen when folding expressions made during
6577 type construction. */
6578 if (this_offset == 0)
6579 break;
6581 offset = size_binop (PLUS_EXPR, offset, this_offset);
6582 bit_offset = double_int_add (bit_offset,
6583 tree_to_double_int
6584 (DECL_FIELD_BIT_OFFSET (field)));
6586 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6588 break;
6590 case ARRAY_REF:
6591 case ARRAY_RANGE_REF:
6593 tree index = TREE_OPERAND (exp, 1);
6594 tree low_bound = array_ref_low_bound (exp);
6595 tree unit_size = array_ref_element_size (exp);
6597 /* We assume all arrays have sizes that are a multiple of a byte.
6598 First subtract the lower bound, if any, in the type of the
6599 index, then convert to sizetype and multiply by the size of
6600 the array element. */
6601 if (! integer_zerop (low_bound))
6602 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6603 index, low_bound);
6605 offset = size_binop (PLUS_EXPR, offset,
6606 size_binop (MULT_EXPR,
6607 fold_convert (sizetype, index),
6608 unit_size));
6610 break;
6612 case REALPART_EXPR:
6613 break;
6615 case IMAGPART_EXPR:
6616 bit_offset = double_int_add (bit_offset,
6617 uhwi_to_double_int (*pbitsize));
6618 break;
6620 case VIEW_CONVERT_EXPR:
6621 if (keep_aligning && STRICT_ALIGNMENT
6622 && (TYPE_ALIGN (TREE_TYPE (exp))
6623 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6624 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6625 < BIGGEST_ALIGNMENT)
6626 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6627 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6628 goto done;
6629 break;
6631 case MEM_REF:
6632 /* Hand back the decl for MEM[&decl, off]. */
6633 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6635 tree off = TREE_OPERAND (exp, 1);
6636 if (!integer_zerop (off))
6638 double_int boff, coff = mem_ref_offset (exp);
6639 boff = double_int_lshift (coff,
6640 BITS_PER_UNIT == 8
6641 ? 3 : exact_log2 (BITS_PER_UNIT),
6642 HOST_BITS_PER_DOUBLE_INT, true);
6643 bit_offset = double_int_add (bit_offset, boff);
6645 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6647 goto done;
6649 default:
6650 goto done;
6653 /* If any reference in the chain is volatile, the effect is volatile. */
6654 if (TREE_THIS_VOLATILE (exp))
6655 *pvolatilep = 1;
6657 exp = TREE_OPERAND (exp, 0);
6659 done:
6661 /* If OFFSET is constant, see if we can return the whole thing as a
6662 constant bit position. Make sure to handle overflow during
6663 this conversion. */
6664 if (TREE_CODE (offset) == INTEGER_CST)
6666 double_int tem = tree_to_double_int (offset);
6667 tem = double_int_sext (tem, TYPE_PRECISION (sizetype));
6668 tem = double_int_lshift (tem,
6669 BITS_PER_UNIT == 8
6670 ? 3 : exact_log2 (BITS_PER_UNIT),
6671 HOST_BITS_PER_DOUBLE_INT, true);
6672 tem = double_int_add (tem, bit_offset);
6673 if (double_int_fits_in_shwi_p (tem))
6675 *pbitpos = double_int_to_shwi (tem);
6676 *poffset = offset = NULL_TREE;
6680 /* Otherwise, split it up. */
6681 if (offset)
6683 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6684 if (double_int_negative_p (bit_offset))
6686 double_int mask
6687 = double_int_mask (BITS_PER_UNIT == 8
6688 ? 3 : exact_log2 (BITS_PER_UNIT));
6689 double_int tem = double_int_and_not (bit_offset, mask);
6690 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6691 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6692 bit_offset = double_int_sub (bit_offset, tem);
6693 tem = double_int_rshift (tem,
6694 BITS_PER_UNIT == 8
6695 ? 3 : exact_log2 (BITS_PER_UNIT),
6696 HOST_BITS_PER_DOUBLE_INT, true);
6697 offset = size_binop (PLUS_EXPR, offset,
6698 double_int_to_tree (sizetype, tem));
6701 *pbitpos = double_int_to_shwi (bit_offset);
6702 *poffset = offset;
6705 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6706 if (mode == VOIDmode
6707 && blkmode_bitfield
6708 && (*pbitpos % BITS_PER_UNIT) == 0
6709 && (*pbitsize % BITS_PER_UNIT) == 0)
6710 *pmode = BLKmode;
6711 else
6712 *pmode = mode;
6714 return exp;
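/* A small worked example of the interface above, assuming a typical target
   where int is 32 bits and short is 16 bits: for

     struct s { int i; short h; } x;

   calling get_inner_reference on the COMPONENT_REF x.h returns the VAR_DECL
   for x with *PBITSIZE == 16, *PBITPOS == 32, *POFFSET == NULL_TREE,
   *PMODE == HImode and *PUNSIGNEDP == 0.  A reference with a variable array
   index would instead come back with the variable part of the displacement
   in *POFFSET.  */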
6717 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6718 ARRAY_RANGE_REF, look for whether EXP or any nested component-ref within
6719 EXP is marked as PACKED. */
6721 bool
6722 contains_packed_reference (const_tree exp)
6724 bool packed_p = false;
6726 while (1)
6728 switch (TREE_CODE (exp))
6730 case COMPONENT_REF:
6732 tree field = TREE_OPERAND (exp, 1);
6733 packed_p = DECL_PACKED (field)
6734 || TYPE_PACKED (TREE_TYPE (field))
6735 || TYPE_PACKED (TREE_TYPE (exp));
6736 if (packed_p)
6737 goto done;
6739 break;
6741 case BIT_FIELD_REF:
6742 case ARRAY_REF:
6743 case ARRAY_RANGE_REF:
6744 case REALPART_EXPR:
6745 case IMAGPART_EXPR:
6746 case VIEW_CONVERT_EXPR:
6747 break;
6749 default:
6750 goto done;
6752 exp = TREE_OPERAND (exp, 0);
6754 done:
6755 return packed_p;
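/* For instance, given a hypothetical

     struct __attribute__ ((packed)) p { char c; int i; } *q;

   contains_packed_reference returns true for the COMPONENT_REF q->i,
   because the field (or its containing type) is marked packed, and false
   for a reference into an ordinarily laid out structure.  */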
6758 /* Return a tree of sizetype representing the size, in bytes, of the element
6759 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6761 tree
6762 array_ref_element_size (tree exp)
6764 tree aligned_size = TREE_OPERAND (exp, 3);
6765 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6766 location_t loc = EXPR_LOCATION (exp);
6768 /* If a size was specified in the ARRAY_REF, it's the size measured
6769 in alignment units of the element type. So multiply by that value. */
6770 if (aligned_size)
6772 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6773 sizetype from another type of the same width and signedness. */
6774 if (TREE_TYPE (aligned_size) != sizetype)
6775 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6776 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6777 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6780 /* Otherwise, take the size from that of the element type. Substitute
6781 any PLACEHOLDER_EXPR that we have. */
6782 else
6783 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6786 /* Return a tree representing the lower bound of the array mentioned in
6787 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6789 tree
6790 array_ref_low_bound (tree exp)
6792 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6794 /* If a lower bound is specified in EXP, use it. */
6795 if (TREE_OPERAND (exp, 2))
6796 return TREE_OPERAND (exp, 2);
6798 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6799 substituting for a PLACEHOLDER_EXPR as needed. */
6800 if (domain_type && TYPE_MIN_VALUE (domain_type))
6801 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6803 /* Otherwise, return a zero of the appropriate type. */
6804 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
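/* For ordinary C arrays the domain starts at zero, so this returns a zero
   constant; a front end with non-zero-based arrays (an Ada or Fortran array
   declared over 1 .. 10, say) has TYPE_MIN_VALUE set to 1, and that tree is
   what get_inner_reference and the expanders subtract from the index.  */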
6807 /* Returns true if REF is an array reference to an array at the end of
6808 a structure. If this is the case, the array may be allocated larger
6809 than its upper bound implies. */
6811 bool
6812 array_at_struct_end_p (tree ref)
6814 if (TREE_CODE (ref) != ARRAY_REF
6815 && TREE_CODE (ref) != ARRAY_RANGE_REF)
6816 return false;
6818 while (handled_component_p (ref))
6820 /* If the reference chain contains a component reference to a
6821 non-union type and another field follows it, the reference
6822 is not at the end of a structure. */
6823 if (TREE_CODE (ref) == COMPONENT_REF
6824 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6826 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6827 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6828 nextf = DECL_CHAIN (nextf);
6829 if (nextf)
6830 return false;
6833 ref = TREE_OPERAND (ref, 0);
6836 /* If the reference is based on a declared entity, the size of the array
6837 is constrained by its given domain. */
6838 if (DECL_P (ref))
6839 return false;
6841 return true;
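/* As a sketch of the intent, given a hypothetical trailing array

     struct msg { int len; char data[1]; };
     struct msg *m;
     struct msg buf;

   array_at_struct_end_p returns true for the ARRAY_REF m->data[i], since
   the object behind the pointer may have been allocated with room past the
   declared bound, but false for buf.data[i], whose base is a declared
   object with a fixed size.  */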
6844 /* Return a tree representing the upper bound of the array mentioned in
6845 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6847 tree
6848 array_ref_up_bound (tree exp)
6850 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6852 /* If there is a domain type and it has an upper bound, use it, substituting
6853 for a PLACEHOLDER_EXPR as needed. */
6854 if (domain_type && TYPE_MAX_VALUE (domain_type))
6855 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6857 /* Otherwise fail. */
6858 return NULL_TREE;
6861 /* Return a tree representing the offset, in bytes, of the field referenced
6862 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6864 tree
6865 component_ref_field_offset (tree exp)
6867 tree aligned_offset = TREE_OPERAND (exp, 2);
6868 tree field = TREE_OPERAND (exp, 1);
6869 location_t loc = EXPR_LOCATION (exp);
6871 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6872 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6873 value. */
6874 if (aligned_offset)
6876 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6877 sizetype from another type of the same width and signedness. */
6878 if (TREE_TYPE (aligned_offset) != sizetype)
6879 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6880 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6881 size_int (DECL_OFFSET_ALIGN (field)
6882 / BITS_PER_UNIT));
6885 /* Otherwise, take the offset from that of the field. Substitute
6886 any PLACEHOLDER_EXPR that we have. */
6887 else
6888 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6891 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6893 static unsigned HOST_WIDE_INT
6894 target_align (const_tree target)
6896 /* We might have a chain of nested references with intermediate misaligning
6897 bitfield components, so we need to recurse to find out. */
6899 unsigned HOST_WIDE_INT this_align, outer_align;
6901 switch (TREE_CODE (target))
6903 case BIT_FIELD_REF:
6904 return 1;
6906 case COMPONENT_REF:
6907 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6908 outer_align = target_align (TREE_OPERAND (target, 0));
6909 return MIN (this_align, outer_align);
6911 case ARRAY_REF:
6912 case ARRAY_RANGE_REF:
6913 this_align = TYPE_ALIGN (TREE_TYPE (target));
6914 outer_align = target_align (TREE_OPERAND (target, 0));
6915 return MIN (this_align, outer_align);
6917 CASE_CONVERT:
6918 case NON_LVALUE_EXPR:
6919 case VIEW_CONVERT_EXPR:
6920 this_align = TYPE_ALIGN (TREE_TYPE (target));
6921 outer_align = target_align (TREE_OPERAND (target, 0));
6922 return MAX (this_align, outer_align);
6924 default:
6925 return TYPE_ALIGN (TREE_TYPE (target));
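/* A brief illustration of the recursion above, on a typical target where
   int has 32-bit alignment: for an assignment target such as x.arr[i], the
   ARRAY_REF case takes the MIN of TYPE_ALIGN (int) and the alignment
   computed for x.arr, and the COMPONENT_REF case in turn folds in
   DECL_ALIGN of the arr field, so the result is 32 unless an outer
   reference (a BIT_FIELD_REF, say, which conservatively reports 1) forces
   it lower.  */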
6930 /* Given an rtx VALUE that may contain additions and multiplications, return
6931 an equivalent value that just refers to a register, memory, or constant.
6932 This is done by generating instructions to perform the arithmetic and
6933 returning a pseudo-register containing the value.
6935 The returned value may be a REG, SUBREG, MEM or constant. */
6937 rtx
6938 force_operand (rtx value, rtx target)
6940 rtx op1, op2;
6941 /* Use subtarget as the target for operand 0 of a binary operation. */
6942 rtx subtarget = get_subtarget (target);
6943 enum rtx_code code = GET_CODE (value);
6945 /* Check for subreg applied to an expression produced by loop optimizer. */
6946 if (code == SUBREG
6947 && !REG_P (SUBREG_REG (value))
6948 && !MEM_P (SUBREG_REG (value)))
6950 value
6951 = simplify_gen_subreg (GET_MODE (value),
6952 force_reg (GET_MODE (SUBREG_REG (value)),
6953 force_operand (SUBREG_REG (value),
6954 NULL_RTX)),
6955 GET_MODE (SUBREG_REG (value)),
6956 SUBREG_BYTE (value));
6957 code = GET_CODE (value);
6960 /* Check for a PIC address load. */
6961 if ((code == PLUS || code == MINUS)
6962 && XEXP (value, 0) == pic_offset_table_rtx
6963 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6964 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6965 || GET_CODE (XEXP (value, 1)) == CONST))
6967 if (!subtarget)
6968 subtarget = gen_reg_rtx (GET_MODE (value));
6969 emit_move_insn (subtarget, value);
6970 return subtarget;
6973 if (ARITHMETIC_P (value))
6975 op2 = XEXP (value, 1);
6976 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6977 subtarget = 0;
6978 if (code == MINUS && CONST_INT_P (op2))
6980 code = PLUS;
6981 op2 = negate_rtx (GET_MODE (value), op2);
6984 /* Check for an addition with OP2 a constant integer and our first
6985 operand a PLUS of a virtual register and something else. In that
6986 case, we want to emit the sum of the virtual register and the
6987 constant first and then add the other value. This allows virtual
6988 register instantiation to simply modify the constant rather than
6989 creating another one around this addition. */
6990 if (code == PLUS && CONST_INT_P (op2)
6991 && GET_CODE (XEXP (value, 0)) == PLUS
6992 && REG_P (XEXP (XEXP (value, 0), 0))
6993 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6994 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6996 rtx temp = expand_simple_binop (GET_MODE (value), code,
6997 XEXP (XEXP (value, 0), 0), op2,
6998 subtarget, 0, OPTAB_LIB_WIDEN);
6999 return expand_simple_binop (GET_MODE (value), code, temp,
7000 force_operand (XEXP (XEXP (value,
7001 0), 1), 0),
7002 target, 0, OPTAB_LIB_WIDEN);
7005 op1 = force_operand (XEXP (value, 0), subtarget);
7006 op2 = force_operand (op2, NULL_RTX);
7007 switch (code)
7009 case MULT:
7010 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7011 case DIV:
7012 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7013 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7014 target, 1, OPTAB_LIB_WIDEN);
7015 else
7016 return expand_divmod (0,
7017 FLOAT_MODE_P (GET_MODE (value))
7018 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7019 GET_MODE (value), op1, op2, target, 0);
7020 case MOD:
7021 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7022 target, 0);
7023 case UDIV:
7024 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7025 target, 1);
7026 case UMOD:
7027 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7028 target, 1);
7029 case ASHIFTRT:
7030 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7031 target, 0, OPTAB_LIB_WIDEN);
7032 default:
7033 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7034 target, 1, OPTAB_LIB_WIDEN);
7037 if (UNARY_P (value))
7039 if (!target)
7040 target = gen_reg_rtx (GET_MODE (value));
7041 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7042 switch (code)
7044 case ZERO_EXTEND:
7045 case SIGN_EXTEND:
7046 case TRUNCATE:
7047 case FLOAT_EXTEND:
7048 case FLOAT_TRUNCATE:
7049 convert_move (target, op1, code == ZERO_EXTEND);
7050 return target;
7052 case FIX:
7053 case UNSIGNED_FIX:
7054 expand_fix (target, op1, code == UNSIGNED_FIX);
7055 return target;
7057 case FLOAT:
7058 case UNSIGNED_FLOAT:
7059 expand_float (target, op1, code == UNSIGNED_FLOAT);
7060 return target;
7062 default:
7063 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7067 #ifdef INSN_SCHEDULING
7068 /* On machines that have insn scheduling, we want all memory references to be
7069 explicit, so we need to deal with such paradoxical SUBREGs. */
7070 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7071 value
7072 = simplify_gen_subreg (GET_MODE (value),
7073 force_reg (GET_MODE (SUBREG_REG (value)),
7074 force_operand (SUBREG_REG (value),
7075 NULL_RTX)),
7076 GET_MODE (SUBREG_REG (value)),
7077 SUBREG_BYTE (value));
7078 #endif
7080 return value;
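/* A hedged sketch of typical behaviour: given something like
   (plus:SI (mult:SI (reg:SI 100) (const_int 4)) (reg:SI 101)),
   force_operand emits the multiplication (through expand_mult) and the
   addition, returning a pseudo (or TARGET) holding the result, so that
   callers such as expand_expr_addr_expr_1 below always end up with a REG,
   MEM or constant rather than arbitrary arithmetic.  */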
7083 /* Subroutine of expand_expr: return nonzero iff there is no way that
7084 EXP can reference X, which is being modified. TOP_P is nonzero if this
7085 call is going to be used to determine whether we need a temporary
7086 for EXP, as opposed to a recursive call to this function.
7088 It is always safe for this routine to return zero since it merely
7089 searches for optimization opportunities. */
7091 static int
7092 safe_from_p (const_rtx x, tree exp, int top_p)
7094 rtx exp_rtl = 0;
7095 int i, nops;
7097 if (x == 0
7098 /* If EXP has varying size, we MUST use a target since we currently
7099 have no way of allocating temporaries of variable size
7100 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7101 So we assume here that something at a higher level has prevented a
7102 clash. This is somewhat bogus, but the best we can do. Only
7103 do this when X is BLKmode and when we are at the top level. */
7104 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7105 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7106 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7107 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7108 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7109 != INTEGER_CST)
7110 && GET_MODE (x) == BLKmode)
7111 /* If X is in the outgoing argument area, it is always safe. */
7112 || (MEM_P (x)
7113 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7114 || (GET_CODE (XEXP (x, 0)) == PLUS
7115 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7116 return 1;
7118 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
7119 find the underlying pseudo. */
7120 if (GET_CODE (x) == SUBREG)
7122 x = SUBREG_REG (x);
7123 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7124 return 0;
7127 /* Now look at our tree code and possibly recurse. */
7128 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7130 case tcc_declaration:
7131 exp_rtl = DECL_RTL_IF_SET (exp);
7132 break;
7134 case tcc_constant:
7135 return 1;
7137 case tcc_exceptional:
7138 if (TREE_CODE (exp) == TREE_LIST)
7140 while (1)
7142 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7143 return 0;
7144 exp = TREE_CHAIN (exp);
7145 if (!exp)
7146 return 1;
7147 if (TREE_CODE (exp) != TREE_LIST)
7148 return safe_from_p (x, exp, 0);
7151 else if (TREE_CODE (exp) == CONSTRUCTOR)
7153 constructor_elt *ce;
7154 unsigned HOST_WIDE_INT idx;
7156 FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce)
7157 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7158 || !safe_from_p (x, ce->value, 0))
7159 return 0;
7160 return 1;
7162 else if (TREE_CODE (exp) == ERROR_MARK)
7163 return 1; /* An already-visited SAVE_EXPR? */
7164 else
7165 return 0;
7167 case tcc_statement:
7168 /* The only case we look at here is the DECL_INITIAL inside a
7169 DECL_EXPR. */
7170 return (TREE_CODE (exp) != DECL_EXPR
7171 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7172 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7173 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7175 case tcc_binary:
7176 case tcc_comparison:
7177 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7178 return 0;
7179 /* Fall through. */
7181 case tcc_unary:
7182 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7184 case tcc_expression:
7185 case tcc_reference:
7186 case tcc_vl_exp:
7187 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7188 the expression. If it is set, we conflict iff we are that rtx or
7189 both are in memory. Otherwise, we check all operands of the
7190 expression recursively. */
7192 switch (TREE_CODE (exp))
7194 case ADDR_EXPR:
7195 /* If the operand is static or we are static, we can't conflict.
7196 Likewise if we don't conflict with the operand at all. */
7197 if (staticp (TREE_OPERAND (exp, 0))
7198 || TREE_STATIC (exp)
7199 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7200 return 1;
7202 /* Otherwise, the only way this can conflict is if we are taking
7203 the address of a DECL whose address is part of X, which is
7204 very rare. */
7205 exp = TREE_OPERAND (exp, 0);
7206 if (DECL_P (exp))
7208 if (!DECL_RTL_SET_P (exp)
7209 || !MEM_P (DECL_RTL (exp)))
7210 return 0;
7211 else
7212 exp_rtl = XEXP (DECL_RTL (exp), 0);
7214 break;
7216 case MEM_REF:
7217 if (MEM_P (x)
7218 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7219 get_alias_set (exp)))
7220 return 0;
7221 break;
7223 case CALL_EXPR:
7224 /* Assume that the call will clobber all hard registers and
7225 all of memory. */
7226 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7227 || MEM_P (x))
7228 return 0;
7229 break;
7231 case WITH_CLEANUP_EXPR:
7232 case CLEANUP_POINT_EXPR:
7233 /* Lowered by gimplify.c. */
7234 gcc_unreachable ();
7236 case SAVE_EXPR:
7237 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7239 default:
7240 break;
7243 /* If we have an rtx, we do not need to scan our operands. */
7244 if (exp_rtl)
7245 break;
7247 nops = TREE_OPERAND_LENGTH (exp);
7248 for (i = 0; i < nops; i++)
7249 if (TREE_OPERAND (exp, i) != 0
7250 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7251 return 0;
7253 break;
7255 case tcc_type:
7256 /* Should never get a type here. */
7257 gcc_unreachable ();
7260 /* If we have an rtl, find any enclosed object. Then see if we conflict
7261 with it. */
7262 if (exp_rtl)
7264 if (GET_CODE (exp_rtl) == SUBREG)
7266 exp_rtl = SUBREG_REG (exp_rtl);
7267 if (REG_P (exp_rtl)
7268 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7269 return 0;
7272 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7273 are memory and they conflict. */
7274 return ! (rtx_equal_p (x, exp_rtl)
7275 || (MEM_P (x) && MEM_P (exp_rtl)
7276 && true_dependence (exp_rtl, VOIDmode, x)));
7279 /* If we reach here, it is safe. */
7280 return 1;
7284 /* Return the highest power of two that EXP is known to be a multiple of.
7285 This is used in updating alignment of MEMs in array references. */
7287 unsigned HOST_WIDE_INT
7288 highest_pow2_factor (const_tree exp)
7290 unsigned HOST_WIDE_INT c0, c1;
7292 switch (TREE_CODE (exp))
7294 case INTEGER_CST:
7295 /* We can find the lowest bit that's a one. If the low
7296 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
7297 We need to handle this case since we can find it in a COND_EXPR,
7298 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
7299 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
7300 later ICE. */
7301 if (TREE_OVERFLOW (exp))
7302 return BIGGEST_ALIGNMENT;
7303 else
7305 /* Note: tree_low_cst is intentionally not used here;
7306 we don't care about the upper bits. */
7307 c0 = TREE_INT_CST_LOW (exp);
7308 c0 &= -c0;
7309 return c0 ? c0 : BIGGEST_ALIGNMENT;
7311 break;
7313 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
7314 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7315 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7316 return MIN (c0, c1);
7318 case MULT_EXPR:
7319 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7320 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7321 return c0 * c1;
7323 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
7324 case CEIL_DIV_EXPR:
7325 if (integer_pow2p (TREE_OPERAND (exp, 1))
7326 && host_integerp (TREE_OPERAND (exp, 1), 1))
7328 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7329 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
7330 return MAX (1, c0 / c1);
7332 break;
7334 case BIT_AND_EXPR:
7335 /* The highest power of two of a bit-and expression is the maximum of
7336 that of its operands. We typically get here for a complex LHS and
7337 a constant negative power of two on the RHS to force an explicit
7338 alignment, so don't bother looking at the LHS. */
7339 return highest_pow2_factor (TREE_OPERAND (exp, 1));
7341 CASE_CONVERT:
7342 case SAVE_EXPR:
7343 return highest_pow2_factor (TREE_OPERAND (exp, 0));
7345 case COMPOUND_EXPR:
7346 return highest_pow2_factor (TREE_OPERAND (exp, 1));
7348 case COND_EXPR:
7349 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7350 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
7351 return MIN (c0, c1);
7353 default:
7354 break;
7357 return 1;
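/* Two small worked cases for the function above, treating i as a variable
   whose factor is unknown (1): for the tree i * 16 the MULT_EXPR case
   returns 1 * 16 == 16, while for (i + 4) * 8 the PLUS_EXPR case first
   takes MIN (1, 4) == 1 and the multiplication then yields 8.  These
   factors are what highest_pow2_factor_for_target below combines with the
   target's own alignment.  */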
7360 /* Similar, except that the alignment requirements of TARGET are
7361 taken into account. Assume it is at least as aligned as its
7362 type, unless it is a COMPONENT_REF in which case the layout of
7363 the structure gives the alignment. */
7365 static unsigned HOST_WIDE_INT
7366 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7368 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7369 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7371 return MAX (factor, talign);
7374 /* Convert the tree comparison code TCODE to the RTL one where the
7375 signedness is UNSIGNEDP. */
7377 static enum rtx_code
7378 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7380 enum rtx_code code;
7381 switch (tcode)
7383 case EQ_EXPR:
7384 code = EQ;
7385 break;
7386 case NE_EXPR:
7387 code = NE;
7388 break;
7389 case LT_EXPR:
7390 code = unsignedp ? LTU : LT;
7391 break;
7392 case LE_EXPR:
7393 code = unsignedp ? LEU : LE;
7394 break;
7395 case GT_EXPR:
7396 code = unsignedp ? GTU : GT;
7397 break;
7398 case GE_EXPR:
7399 code = unsignedp ? GEU : GE;
7400 break;
7401 case UNORDERED_EXPR:
7402 code = UNORDERED;
7403 break;
7404 case ORDERED_EXPR:
7405 code = ORDERED;
7406 break;
7407 case UNLT_EXPR:
7408 code = UNLT;
7409 break;
7410 case UNLE_EXPR:
7411 code = UNLE;
7412 break;
7413 case UNGT_EXPR:
7414 code = UNGT;
7415 break;
7416 case UNGE_EXPR:
7417 code = UNGE;
7418 break;
7419 case UNEQ_EXPR:
7420 code = UNEQ;
7421 break;
7422 case LTGT_EXPR:
7423 code = LTGT;
7424 break;
7426 default:
7427 gcc_unreachable ();
7429 return code;
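/* For example, LT_EXPR maps to LTU when UNSIGNEDP is nonzero and to LT
   otherwise, while codes that only matter for floating point, such as
   UNORDERED_EXPR and LTGT_EXPR, map to their RTL counterparts unchanged
   regardless of signedness.  */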
7432 /* Subroutine of expand_expr. Expand the two operands of a binary
7433 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7434 The value may be stored in TARGET if TARGET is nonzero. The
7435 MODIFIER argument is as documented by expand_expr. */
7437 static void
7438 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7439 enum expand_modifier modifier)
7441 if (! safe_from_p (target, exp1, 1))
7442 target = 0;
7443 if (operand_equal_p (exp0, exp1, 0))
7445 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7446 *op1 = copy_rtx (*op0);
7448 else
7450 /* If we need to preserve evaluation order, copy exp0 into its own
7451 temporary variable so that it can't be clobbered by exp1. */
7452 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7453 exp0 = save_expr (exp0);
7454 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7455 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
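/* For illustration: in an expression such as X + X the two operands compare
   equal under operand_equal_p, so X is expanded only once and the resulting
   rtx is duplicated with copy_rtx to avoid invalid sharing of RTL between
   the two uses. */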
7460 /* Return a MEM that contains constant EXP. DEFER is as for
7461 output_constant_def and MODIFIER is as for expand_expr. */
7463 static rtx
7464 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7466 rtx mem;
7468 mem = output_constant_def (exp, defer);
7469 if (modifier != EXPAND_INITIALIZER)
7470 mem = use_anchored_address (mem);
7471 return mem;
7474 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7475 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7477 static rtx
7478 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7479 enum expand_modifier modifier, addr_space_t as)
7481 rtx result, subtarget;
7482 tree inner, offset;
7483 HOST_WIDE_INT bitsize, bitpos;
7484 int volatilep, unsignedp;
7485 enum machine_mode mode1;
7487 /* If we are taking the address of a constant and are at the top level,
7488 we have to use output_constant_def since we can't call force_const_mem
7489 at top level. */
7490 /* ??? This should be considered a front-end bug. We should not be
7491 generating ADDR_EXPR of something that isn't an LVALUE. The only
7492 exception here is STRING_CST. */
7493 if (CONSTANT_CLASS_P (exp))
7495 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7496 if (modifier < EXPAND_SUM)
7497 result = force_operand (result, target);
7498 return result;
7501 /* Everything must be something allowed by is_gimple_addressable. */
7502 switch (TREE_CODE (exp))
7504 case INDIRECT_REF:
7505 /* This case will happen via recursion for &a->b. */
7506 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7508 case MEM_REF:
7510 tree tem = TREE_OPERAND (exp, 0);
7511 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7512 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7513 return expand_expr (tem, target, tmode, modifier);
7516 case CONST_DECL:
7517 /* Expand the initializer like constants above. */
7518 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7519 0, modifier), 0);
7520 if (modifier < EXPAND_SUM)
7521 result = force_operand (result, target);
7522 return result;
7524 case REALPART_EXPR:
7525 /* The real part of the complex number is always first, therefore
7526 the address is the same as the address of the parent object. */
7527 offset = 0;
7528 bitpos = 0;
7529 inner = TREE_OPERAND (exp, 0);
7530 break;
7532 case IMAGPART_EXPR:
7533 /* The imaginary part of the complex number is always second.
7534 The expression is therefore always offset by the size of the
7535 scalar type. */
7536 offset = 0;
7537 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7538 inner = TREE_OPERAND (exp, 0);
7539 break;
7541 default:
7542 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7543 expand_expr, as that can have various side effects; LABEL_DECLs for
7544 example, may not have their DECL_RTL set yet. Expand the rtl of
7545 CONSTRUCTORs too, which should yield a memory reference for the
7546 constructor's contents. Assume language specific tree nodes can
7547 be expanded in some interesting way. */
7548 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7549 if (DECL_P (exp)
7550 || TREE_CODE (exp) == CONSTRUCTOR
7551 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7553 result = expand_expr (exp, target, tmode,
7554 modifier == EXPAND_INITIALIZER
7555 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7557 /* If the DECL isn't in memory, then the DECL wasn't properly
7558 marked TREE_ADDRESSABLE, which will be either a front-end
7559 or a tree optimizer bug. */
7561 if (TREE_ADDRESSABLE (exp)
7562 && ! MEM_P (result)
7563 && ! targetm.calls.allocate_stack_slots_for_args())
7565 error ("local frame unavailable (naked function?)");
7566 return result;
7568 else
7569 gcc_assert (MEM_P (result));
7570 result = XEXP (result, 0);
7572 /* ??? Is this needed anymore? */
7573 if (DECL_P (exp))
7574 TREE_USED (exp) = 1;
7576 if (modifier != EXPAND_INITIALIZER
7577 && modifier != EXPAND_CONST_ADDRESS
7578 && modifier != EXPAND_SUM)
7579 result = force_operand (result, target);
7580 return result;
7583 /* Pass FALSE as the last argument to get_inner_reference although
7584 we are expanding to RTL. The rationale is that we know how to
7585 handle "aligning nodes" here: we can just bypass them because
7586 they won't change the final object whose address will be returned
7587 (they actually exist only for that purpose). */
7588 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7589 &mode1, &unsignedp, &volatilep, false);
7590 break;
7593 /* We must have made progress. */
7594 gcc_assert (inner != exp);
7596 subtarget = offset || bitpos ? NULL_RTX : target;
7597 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7598 inner alignment, force the inner to be sufficiently aligned. */
7599 if (CONSTANT_CLASS_P (inner)
7600 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7602 inner = copy_node (inner);
7603 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7604 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7605 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7607 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7609 if (offset)
7611 rtx tmp;
7613 if (modifier != EXPAND_NORMAL)
7614 result = force_operand (result, NULL);
7615 tmp = expand_expr (offset, NULL_RTX, tmode,
7616 modifier == EXPAND_INITIALIZER
7617 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7619 result = convert_memory_address_addr_space (tmode, result, as);
7620 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7622 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7623 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7624 else
7626 subtarget = bitpos ? NULL_RTX : target;
7627 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7628 1, OPTAB_LIB_WIDEN);
7632 if (bitpos)
7634 /* Someone beforehand should have rejected taking the address
7635 of such an object. */
7636 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7638 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7639 if (modifier < EXPAND_SUM)
7640 result = force_operand (result, target);
7643 return result;
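/* Illustrative walk-through, assuming the usual GIMPLE form: for &p->f the
   operand of the ADDR_EXPR is COMPONENT_REF <MEM_REF <p>, f>.
   get_inner_reference peels off the COMPONENT_REF, leaving
   INNER = MEM_REF <p> and BITPOS = the byte offset of F scaled by
   BITS_PER_UNIT; the recursive call above then handles the MEM_REF by
   expanding P itself, and the field offset is folded back in with
   plus_constant. */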
7646 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7647 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7649 static rtx
7650 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7651 enum expand_modifier modifier)
7653 addr_space_t as = ADDR_SPACE_GENERIC;
7654 enum machine_mode address_mode = Pmode;
7655 enum machine_mode pointer_mode = ptr_mode;
7656 enum machine_mode rmode;
7657 rtx result;
7659 /* Target mode of VOIDmode says "whatever's natural". */
7660 if (tmode == VOIDmode)
7661 tmode = TYPE_MODE (TREE_TYPE (exp));
7663 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7665 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7666 address_mode = targetm.addr_space.address_mode (as);
7667 pointer_mode = targetm.addr_space.pointer_mode (as);
7670 /* We can get called with some Weird Things if the user does silliness
7671 like "(short) &a". In that case, convert_memory_address won't do
7672 the right thing, so ignore the given target mode. */
7673 if (tmode != address_mode && tmode != pointer_mode)
7674 tmode = address_mode;
7676 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7677 tmode, modifier, as);
7679 /* Despite expand_expr's claims concerning ignoring TMODE when not
7680 strictly convenient, stuff breaks if we don't honor it. Note
7681 that combined with the above, we only do this for pointer modes. */
7682 rmode = GET_MODE (result);
7683 if (rmode == VOIDmode)
7684 rmode = tmode;
7685 if (rmode != tmode)
7686 result = convert_memory_address_addr_space (tmode, result, as);
7688 return result;
7691 /* Generate code for computing CONSTRUCTOR EXP.
7692 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7693 is TRUE, instead of creating a temporary variable in memory,
7694 NULL is returned and the caller needs to handle it differently. */
7696 static rtx
7697 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7698 bool avoid_temp_mem)
7700 tree type = TREE_TYPE (exp);
7701 enum machine_mode mode = TYPE_MODE (type);
7703 /* Try to avoid creating a temporary at all. This is possible
7704 if all of the initializer is zero.
7705 FIXME: try to handle all [0..255] initializers we can handle
7706 with memset. */
7707 if (TREE_STATIC (exp)
7708 && !TREE_ADDRESSABLE (exp)
7709 && target != 0 && mode == BLKmode
7710 && all_zeros_p (exp))
7712 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7713 return target;
7716 /* All elts simple constants => refer to a constant in memory. But
7717 if this is a non-BLKmode mode, let it store a field at a time
7718 since that should make a CONST_INT or CONST_DOUBLE when we
7719 fold. Likewise, if we have a target we can use, it is best to
7720 store directly into the target unless the type is large enough
7721 that memcpy will be used. If we are making an initializer and
7722 all operands are constant, put it in memory as well.
7724 FIXME: Avoid trying to fill vector constructors piece-meal.
7725 Output them with output_constant_def below unless we're sure
7726 they're zeros. This should go away when vector initializers
7727 are treated like VECTOR_CST instead of arrays. */
7728 if ((TREE_STATIC (exp)
7729 && ((mode == BLKmode
7730 && ! (target != 0 && safe_from_p (target, exp, 1)))
7731 || TREE_ADDRESSABLE (exp)
7732 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7733 && (! MOVE_BY_PIECES_P
7734 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7735 TYPE_ALIGN (type)))
7736 && ! mostly_zeros_p (exp))))
7737 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7738 && TREE_CONSTANT (exp)))
7740 rtx constructor;
7742 if (avoid_temp_mem)
7743 return NULL_RTX;
7745 constructor = expand_expr_constant (exp, 1, modifier);
7747 if (modifier != EXPAND_CONST_ADDRESS
7748 && modifier != EXPAND_INITIALIZER
7749 && modifier != EXPAND_SUM)
7750 constructor = validize_mem (constructor);
7752 return constructor;
7755 /* Handle calls that pass values in multiple non-contiguous
7756 locations. The Irix 6 ABI has examples of this. */
7757 if (target == 0 || ! safe_from_p (target, exp, 1)
7758 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7760 if (avoid_temp_mem)
7761 return NULL_RTX;
7763 target
7764 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7765 | (TREE_READONLY (exp)
7766 * TYPE_QUAL_CONST))),
7767 0, TREE_ADDRESSABLE (exp), 1);
7770 store_constructor (exp, target, 0, int_expr_size (exp));
7771 return target;
7775 /* expand_expr: generate code for computing expression EXP.
7776 An rtx for the computed value is returned. The value is never null.
7777 In the case of a void EXP, const0_rtx is returned.
7779 The value may be stored in TARGET if TARGET is nonzero.
7780 TARGET is just a suggestion; callers must assume that
7781 the rtx returned may not be the same as TARGET.
7783 If TARGET is CONST0_RTX, it means that the value will be ignored.
7785 If TMODE is not VOIDmode, it suggests generating the
7786 result in mode TMODE. But this is done only when convenient.
7787 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7788 TMODE is just a suggestion; callers must assume that
7789 the rtx returned may not have mode TMODE.
7791 Note that TARGET may have neither TMODE nor MODE. In that case, it
7792 probably will not be used.
7794 If MODIFIER is EXPAND_SUM then when EXP is an addition
7795 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7796 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7797 products as above, or REG or MEM, or constant.
7798 Ordinarily in such cases we would output mul or add instructions
7799 and then return a pseudo reg containing the sum.
7801 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7802 it also marks a label as absolutely required (it can't be dead).
7803 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7804 This is used for outputting expressions used in initializers.
7806 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7807 with a constant address even if that address is not normally legitimate.
7808 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7810 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7811 a call parameter. Such targets require special care as we haven't yet
7812 marked TARGET so that it's safe from being trashed by libcalls. We
7813 don't want to use TARGET for anything but the final result;
7814 intermediate values must go elsewhere. Additionally, calls to
7815 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7817 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7818 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7819 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7820 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7821 recursively. */
7824 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7825 enum expand_modifier modifier, rtx *alt_rtl)
7827 rtx ret;
7829 /* Handle ERROR_MARK before anybody tries to access its type. */
7830 if (TREE_CODE (exp) == ERROR_MARK
7831 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7833 ret = CONST0_RTX (tmode);
7834 return ret ? ret : const0_rtx;
7837 /* If this is an expression of some kind and it has an associated line
7838 number, then emit the line number before expanding the expression.
7840 We need to save and restore the file and line information so that
7841 errors discovered during expansion are emitted with the right
7842 information. It would be better if the diagnostic routines
7843 used the file/line information embedded in the tree nodes rather
7844 than globals. */
7845 if (cfun && EXPR_HAS_LOCATION (exp))
7847 location_t saved_location = input_location;
7848 location_t saved_curr_loc = get_curr_insn_source_location ();
7849 tree saved_block = get_curr_insn_block ();
7850 input_location = EXPR_LOCATION (exp);
7851 set_curr_insn_source_location (input_location);
7853 /* Record where the insns produced belong. */
7854 set_curr_insn_block (TREE_BLOCK (exp));
7856 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7858 input_location = saved_location;
7859 set_curr_insn_block (saved_block);
7860 set_curr_insn_source_location (saved_curr_loc);
7862 else
7864 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7867 return ret;
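/* A minimal sketch of a hypothetical caller, kept under "#if 0" because it
   is illustrative only and not part of this file. It shows the contract
   documented above: TARGET and TMODE are merely hints, so a caller must
   always use the rtx that comes back rather than assuming the value landed
   in TARGET. The function name is made up for the example. */
#if 0
static rtx
expand_plus_example (tree a, tree b, rtx target)
{
  tree sum = build2 (PLUS_EXPR, TREE_TYPE (a), a, b);
  /* The expansion may ignore TARGET entirely; keep whatever comes back. */
  rtx val = expand_expr (sum, target, TYPE_MODE (TREE_TYPE (a)),
                         EXPAND_NORMAL);
  return val;
}
#endif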
7870 /* Try to expand the conditional expression which is represented by
7871 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7872 return the rtl reg which represents the result. Otherwise return
7873 NULL_RTX. */
7875 static rtx
7876 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7877 tree treeop1 ATTRIBUTE_UNUSED,
7878 tree treeop2 ATTRIBUTE_UNUSED)
7880 #ifdef HAVE_conditional_move
7881 rtx insn;
7882 rtx op00, op01, op1, op2;
7883 enum rtx_code comparison_code;
7884 enum machine_mode comparison_mode;
7885 gimple srcstmt;
7886 rtx temp;
7887 tree type = TREE_TYPE (treeop1);
7888 int unsignedp = TYPE_UNSIGNED (type);
7889 enum machine_mode mode = TYPE_MODE (type);
7891 temp = assign_temp (type, 0, 0, 1);
7893 /* If we cannot do a conditional move on the mode, try doing it
7894 with the promoted mode. */
7895 if (!can_conditionally_move_p (mode))
7896 mode = promote_mode (type, mode, &unsignedp);
7898 if (!can_conditionally_move_p (mode))
7899 return NULL_RTX;
7901 start_sequence ();
7902 expand_operands (treeop1, treeop2,
7903 temp, &op1, &op2, EXPAND_NORMAL);
7905 if (TREE_CODE (treeop0) == SSA_NAME
7906 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7908 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7909 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7910 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7911 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7912 comparison_mode = TYPE_MODE (type);
7913 unsignedp = TYPE_UNSIGNED (type);
7914 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7916 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
7918 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7919 enum tree_code cmpcode = TREE_CODE (treeop0);
7920 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7921 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7922 unsignedp = TYPE_UNSIGNED (type);
7923 comparison_mode = TYPE_MODE (type);
7924 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7926 else
7928 op00 = expand_normal (treeop0);
7929 op01 = const0_rtx;
7930 comparison_code = NE;
7931 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7934 if (GET_MODE (op1) != mode)
7935 op1 = gen_lowpart (mode, op1);
7937 if (GET_MODE (op2) != mode)
7938 op2 = gen_lowpart (mode, op2);
7940 /* Try to emit the conditional move. */
7941 insn = emit_conditional_move (temp, comparison_code,
7942 op00, op01, comparison_mode,
7943 op1, op2, mode,
7944 unsignedp);
7946 /* If we could do the conditional move, emit the sequence,
7947 and return. */
7948 if (insn)
7950 rtx seq = get_insns ();
7951 end_sequence ();
7952 emit_insn (seq);
7953 return temp;
7956 /* Otherwise discard the sequence and fall back to code with
7957 branches. */
7958 end_sequence ();
7959 #endif
7960 return NULL_RTX;
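/* For illustration: given a GIMPLE assignment such as x = a < b ? c : d,
   the code above expands C and D, recovers the a < b comparison from the
   defining statement of the predicate, and asks emit_conditional_move for
   a single conditional-move sequence; only when that fails does the
   COND_EXPR case below fall back to the branch-based expansion. */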
7964 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7965 enum expand_modifier modifier)
7967 rtx op0, op1, op2, temp;
7968 tree type;
7969 int unsignedp;
7970 enum machine_mode mode;
7971 enum tree_code code = ops->code;
7972 optab this_optab;
7973 rtx subtarget, original_target;
7974 int ignore;
7975 bool reduce_bit_field;
7976 location_t loc = ops->location;
7977 tree treeop0, treeop1, treeop2;
7978 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7979 ? reduce_to_bit_field_precision ((expr), \
7980 target, \
7981 type) \
7982 : (expr))
7984 type = ops->type;
7985 mode = TYPE_MODE (type);
7986 unsignedp = TYPE_UNSIGNED (type);
7988 treeop0 = ops->op0;
7989 treeop1 = ops->op1;
7990 treeop2 = ops->op2;
7992 /* We should be called only on simple (binary or unary) expressions,
7993 exactly those that are valid in gimple expressions that aren't
7994 GIMPLE_SINGLE_RHS (or invalid). */
7995 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7996 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7997 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
7999 ignore = (target == const0_rtx
8000 || ((CONVERT_EXPR_CODE_P (code)
8001 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8002 && TREE_CODE (type) == VOID_TYPE));
8004 /* We should be called only if we need the result. */
8005 gcc_assert (!ignore);
8007 /* An operation in what may be a bit-field type needs the
8008 result to be reduced to the precision of the bit-field type,
8009 which is narrower than that of the type's mode. */
8010 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8011 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8013 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8014 target = 0;
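/* For illustration: for a bit-field type of precision 3 (e.g. from
   unsigned int x : 3), the mode used for the arithmetic has at least 8
   bits, so REDUCE_BIT_FIELD masks results back down to 3 bits; 7 + 1
   computed in the wider mode gives 8, which is reduced to 0. */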
8016 /* Use subtarget as the target for operand 0 of a binary operation. */
8017 subtarget = get_subtarget (target);
8018 original_target = target;
8020 switch (code)
8022 case NON_LVALUE_EXPR:
8023 case PAREN_EXPR:
8024 CASE_CONVERT:
8025 if (treeop0 == error_mark_node)
8026 return const0_rtx;
8028 if (TREE_CODE (type) == UNION_TYPE)
8030 tree valtype = TREE_TYPE (treeop0);
8032 /* If both input and output are BLKmode, this conversion isn't doing
8033 anything except possibly changing memory attribute. */
8034 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8036 rtx result = expand_expr (treeop0, target, tmode,
8037 modifier);
8039 result = copy_rtx (result);
8040 set_mem_attributes (result, type, 0);
8041 return result;
8044 if (target == 0)
8046 if (TYPE_MODE (type) != BLKmode)
8047 target = gen_reg_rtx (TYPE_MODE (type));
8048 else
8049 target = assign_temp (type, 0, 1, 1);
8052 if (MEM_P (target))
8053 /* Store data into beginning of memory target. */
8054 store_expr (treeop0,
8055 adjust_address (target, TYPE_MODE (valtype), 0),
8056 modifier == EXPAND_STACK_PARM,
8057 false);
8059 else
8061 gcc_assert (REG_P (target));
8063 /* Store this field into a union of the proper type. */
8064 store_field (target,
8065 MIN ((int_size_in_bytes (TREE_TYPE
8066 (treeop0))
8067 * BITS_PER_UNIT),
8068 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8069 0, 0, 0, TYPE_MODE (valtype), treeop0,
8070 type, 0, false);
8073 /* Return the entire union. */
8074 return target;
8077 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8079 op0 = expand_expr (treeop0, target, VOIDmode,
8080 modifier);
8082 /* If the signedness of the conversion differs and OP0 is
8083 a promoted SUBREG, clear that indication since we now
8084 have to do the proper extension. */
8085 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8086 && GET_CODE (op0) == SUBREG)
8087 SUBREG_PROMOTED_VAR_P (op0) = 0;
8089 return REDUCE_BIT_FIELD (op0);
8092 op0 = expand_expr (treeop0, NULL_RTX, mode,
8093 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8094 if (GET_MODE (op0) == mode)
8097 /* If OP0 is a constant, just convert it into the proper mode. */
8098 else if (CONSTANT_P (op0))
8100 tree inner_type = TREE_TYPE (treeop0);
8101 enum machine_mode inner_mode = GET_MODE (op0);
8103 if (inner_mode == VOIDmode)
8104 inner_mode = TYPE_MODE (inner_type);
8106 if (modifier == EXPAND_INITIALIZER)
8107 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8108 subreg_lowpart_offset (mode,
8109 inner_mode));
8110 else
8111 op0 = convert_modes (mode, inner_mode, op0,
8112 TYPE_UNSIGNED (inner_type));
8115 else if (modifier == EXPAND_INITIALIZER)
8116 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8118 else if (target == 0)
8119 op0 = convert_to_mode (mode, op0,
8120 TYPE_UNSIGNED (TREE_TYPE
8121 (treeop0)));
8122 else
8124 convert_move (target, op0,
8125 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8126 op0 = target;
8129 return REDUCE_BIT_FIELD (op0);
8131 case ADDR_SPACE_CONVERT_EXPR:
8133 tree treeop0_type = TREE_TYPE (treeop0);
8134 addr_space_t as_to;
8135 addr_space_t as_from;
8137 gcc_assert (POINTER_TYPE_P (type));
8138 gcc_assert (POINTER_TYPE_P (treeop0_type));
8140 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8141 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8143 /* Conversions between pointers to the same address space should
8144 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8145 gcc_assert (as_to != as_from);
8147 /* Ask target code to handle conversion between pointers
8148 to overlapping address spaces. */
8149 if (targetm.addr_space.subset_p (as_to, as_from)
8150 || targetm.addr_space.subset_p (as_from, as_to))
8152 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8153 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8154 gcc_assert (op0);
8155 return op0;
8158 /* For disjoint address spaces, converting anything but
8159 a null pointer invokes undefined behaviour. We simply
8160 always return a null pointer here. */
8161 return CONST0_RTX (mode);
8164 case POINTER_PLUS_EXPR:
8165 /* Even though the sizetype mode and the pointer's mode can be different,
8166 expand is able to handle this correctly and get the correct result out
8167 of the PLUS_EXPR code. */
8168 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8169 if sizetype precision is smaller than pointer precision. */
8170 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8171 treeop1 = fold_convert_loc (loc, type,
8172 fold_convert_loc (loc, ssizetype,
8173 treeop1));
8174 /* If sizetype precision is larger than pointer precision, truncate the
8175 offset to have matching modes. */
8176 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8177 treeop1 = fold_convert_loc (loc, type, treeop1);
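/* For illustration: with a 32-bit sizetype and 64-bit pointers, an offset
   of (sizetype) -4 must reach the PLUS_EXPR handling below as the
   sign-extended value -4 rather than as the zero-extended 0xfffffffc, so
   that the pointer still steps backwards by four bytes. */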
8179 case PLUS_EXPR:
8180 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8181 something else, make sure we add the register to the constant and
8182 then to the other thing. This case can occur during strength
8183 reduction and doing it this way will produce better code if the
8184 frame pointer or argument pointer is eliminated.
8186 fold-const.c will ensure that the constant is always in the inner
8187 PLUS_EXPR, so the only case we need to do anything about is if
8188 sp, ap, or fp is our second argument, in which case we must swap
8189 the innermost first argument and our second argument. */
8191 if (TREE_CODE (treeop0) == PLUS_EXPR
8192 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8193 && TREE_CODE (treeop1) == VAR_DECL
8194 && (DECL_RTL (treeop1) == frame_pointer_rtx
8195 || DECL_RTL (treeop1) == stack_pointer_rtx
8196 || DECL_RTL (treeop1) == arg_pointer_rtx))
8198 gcc_unreachable ();
8201 /* If the result is to be ptr_mode and we are adding an integer to
8202 something, we might be forming a constant. So try to use
8203 plus_constant. If it produces a sum and we can't accept it,
8204 use force_operand. This allows P = &ARR[const] to generate
8205 efficient code on machines where a SYMBOL_REF is not a valid
8206 address.
8208 If this is an EXPAND_SUM call, always return the sum. */
8209 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8210 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8212 if (modifier == EXPAND_STACK_PARM)
8213 target = 0;
8214 if (TREE_CODE (treeop0) == INTEGER_CST
8215 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8216 && TREE_CONSTANT (treeop1))
8218 rtx constant_part;
8220 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8221 EXPAND_SUM);
8222 /* Use immed_double_const to ensure that the constant is
8223 truncated according to the mode of OP1, then sign extended
8224 to a HOST_WIDE_INT. Using the constant directly can result
8225 in non-canonical RTL in a 64x32 cross compile. */
8226 constant_part
8227 = immed_double_const (TREE_INT_CST_LOW (treeop0),
8228 (HOST_WIDE_INT) 0,
8229 TYPE_MODE (TREE_TYPE (treeop1)));
8230 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8231 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8232 op1 = force_operand (op1, target);
8233 return REDUCE_BIT_FIELD (op1);
8236 else if (TREE_CODE (treeop1) == INTEGER_CST
8237 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8238 && TREE_CONSTANT (treeop0))
8240 rtx constant_part;
8242 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8243 (modifier == EXPAND_INITIALIZER
8244 ? EXPAND_INITIALIZER : EXPAND_SUM));
8245 if (! CONSTANT_P (op0))
8247 op1 = expand_expr (treeop1, NULL_RTX,
8248 VOIDmode, modifier);
8249 /* Return a PLUS if modifier says it's OK. */
8250 if (modifier == EXPAND_SUM
8251 || modifier == EXPAND_INITIALIZER)
8252 return simplify_gen_binary (PLUS, mode, op0, op1);
8253 goto binop2;
8255 /* Use immed_double_const to ensure that the constant is
8256 truncated according to the mode of OP1, then sign extended
8257 to a HOST_WIDE_INT. Using the constant directly can result
8258 in non-canonical RTL in a 64x32 cross compile. */
8259 constant_part
8260 = immed_double_const (TREE_INT_CST_LOW (treeop1),
8261 (HOST_WIDE_INT) 0,
8262 TYPE_MODE (TREE_TYPE (treeop0)));
8263 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8264 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8265 op0 = force_operand (op0, target);
8266 return REDUCE_BIT_FIELD (op0);
8270 /* Use TER to expand pointer addition of a negated value
8271 as pointer subtraction. */
8272 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8273 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8274 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8275 && TREE_CODE (treeop1) == SSA_NAME
8276 && TYPE_MODE (TREE_TYPE (treeop0))
8277 == TYPE_MODE (TREE_TYPE (treeop1)))
8279 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8280 if (def)
8282 treeop1 = gimple_assign_rhs1 (def);
8283 code = MINUS_EXPR;
8284 goto do_minus;
8288 /* No sense saving up arithmetic to be done
8289 if it's all in the wrong mode to form part of an address.
8290 And force_operand won't know whether to sign-extend or
8291 zero-extend. */
8292 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8293 || mode != ptr_mode)
8295 expand_operands (treeop0, treeop1,
8296 subtarget, &op0, &op1, EXPAND_NORMAL);
8297 if (op0 == const0_rtx)
8298 return op1;
8299 if (op1 == const0_rtx)
8300 return op0;
8301 goto binop2;
8304 expand_operands (treeop0, treeop1,
8305 subtarget, &op0, &op1, modifier);
8306 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8308 case MINUS_EXPR:
8309 do_minus:
8310 /* For initializers, we are allowed to return a MINUS of two
8311 symbolic constants. Here we handle all cases when both operands
8312 are constant. */
8313 /* Handle difference of two symbolic constants,
8314 for the sake of an initializer. */
8315 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8316 && really_constant_p (treeop0)
8317 && really_constant_p (treeop1))
8319 expand_operands (treeop0, treeop1,
8320 NULL_RTX, &op0, &op1, modifier);
8322 /* If the last operand is a CONST_INT, use plus_constant of
8323 the negated constant. Else make the MINUS. */
8324 if (CONST_INT_P (op1))
8325 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8326 -INTVAL (op1)));
8327 else
8328 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8331 /* No sense saving up arithmetic to be done
8332 if it's all in the wrong mode to form part of an address.
8333 And force_operand won't know whether to sign-extend or
8334 zero-extend. */
8335 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8336 || mode != ptr_mode)
8337 goto binop;
8339 expand_operands (treeop0, treeop1,
8340 subtarget, &op0, &op1, modifier);
8342 /* Convert A - const to A + (-const). */
8343 if (CONST_INT_P (op1))
8345 op1 = negate_rtx (mode, op1);
8346 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8349 goto binop2;
8351 case WIDEN_MULT_PLUS_EXPR:
8352 case WIDEN_MULT_MINUS_EXPR:
8353 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8354 op2 = expand_normal (treeop2);
8355 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8356 target, unsignedp);
8357 return target;
8359 case WIDEN_MULT_EXPR:
8360 /* If first operand is constant, swap them.
8361 Thus the following special case checks need only
8362 check the second operand. */
8363 if (TREE_CODE (treeop0) == INTEGER_CST)
8365 tree t1 = treeop0;
8366 treeop0 = treeop1;
8367 treeop1 = t1;
8370 /* First, check if we have a multiplication of one signed and one
8371 unsigned operand. */
8372 if (TREE_CODE (treeop1) != INTEGER_CST
8373 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8374 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8376 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8377 this_optab = usmul_widen_optab;
8378 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8379 != CODE_FOR_nothing)
8381 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8382 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8383 EXPAND_NORMAL);
8384 else
8385 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8386 EXPAND_NORMAL);
8387 goto binop3;
8390 /* Check for a multiplication with matching signedness. */
8391 else if ((TREE_CODE (treeop1) == INTEGER_CST
8392 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8393 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8394 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8396 tree op0type = TREE_TYPE (treeop0);
8397 enum machine_mode innermode = TYPE_MODE (op0type);
8398 bool zextend_p = TYPE_UNSIGNED (op0type);
8399 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8400 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8402 if (TREE_CODE (treeop0) != INTEGER_CST)
8404 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8405 != CODE_FOR_nothing)
8407 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8408 EXPAND_NORMAL);
8409 temp = expand_widening_mult (mode, op0, op1, target,
8410 unsignedp, this_optab);
8411 return REDUCE_BIT_FIELD (temp);
8413 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8414 != CODE_FOR_nothing
8415 && innermode == word_mode)
8417 rtx htem, hipart;
8418 op0 = expand_normal (treeop0);
8419 if (TREE_CODE (treeop1) == INTEGER_CST)
8420 op1 = convert_modes (innermode, mode,
8421 expand_normal (treeop1), unsignedp);
8422 else
8423 op1 = expand_normal (treeop1);
8424 temp = expand_binop (mode, other_optab, op0, op1, target,
8425 unsignedp, OPTAB_LIB_WIDEN);
8426 hipart = gen_highpart (innermode, temp);
8427 htem = expand_mult_highpart_adjust (innermode, hipart,
8428 op0, op1, hipart,
8429 zextend_p);
8430 if (htem != hipart)
8431 emit_move_insn (hipart, htem);
8432 return REDUCE_BIT_FIELD (temp);
8436 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8437 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8438 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8439 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8441 case FMA_EXPR:
8443 optab opt = fma_optab;
8444 gimple def0, def2;
8446 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8447 call. */
8448 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8450 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8451 tree call_expr;
8453 gcc_assert (fn != NULL_TREE);
8454 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8455 return expand_builtin (call_expr, target, subtarget, mode, false);
8458 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8459 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8461 op0 = op2 = NULL;
8463 if (def0 && def2
8464 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8466 opt = fnms_optab;
8467 op0 = expand_normal (gimple_assign_rhs1 (def0));
8468 op2 = expand_normal (gimple_assign_rhs1 (def2));
8470 else if (def0
8471 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8473 opt = fnma_optab;
8474 op0 = expand_normal (gimple_assign_rhs1 (def0));
8476 else if (def2
8477 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8479 opt = fms_optab;
8480 op2 = expand_normal (gimple_assign_rhs1 (def2));
8483 if (op0 == NULL)
8484 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8485 if (op2 == NULL)
8486 op2 = expand_normal (treeop2);
8487 op1 = expand_normal (treeop1);
8489 return expand_ternary_op (TYPE_MODE (type), opt,
8490 op0, op1, op2, target, 0);
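/* For illustration: the optab choice above folds negations that are already
   present in the operands, so (-a) * b + c is expanded with fnma_optab,
   a * b + (-c) with fms_optab, and (-a) * b + (-c) with fnms_optab, without
   emitting separate negation insns. */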
8493 case MULT_EXPR:
8494 /* If this is a fixed-point operation, then we cannot use the code
8495 below because "expand_mult" doesn't support sat/no-sat fixed-point
8496 multiplications. */
8497 if (ALL_FIXED_POINT_MODE_P (mode))
8498 goto binop;
8500 /* If first operand is constant, swap them.
8501 Thus the following special case checks need only
8502 check the second operand. */
8503 if (TREE_CODE (treeop0) == INTEGER_CST)
8505 tree t1 = treeop0;
8506 treeop0 = treeop1;
8507 treeop1 = t1;
8510 /* Attempt to return something suitable for generating an
8511 indexed address, for machines that support that. */
8513 if (modifier == EXPAND_SUM && mode == ptr_mode
8514 && host_integerp (treeop1, 0))
8516 tree exp1 = treeop1;
8518 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8519 EXPAND_SUM);
8521 if (!REG_P (op0))
8522 op0 = force_operand (op0, NULL_RTX);
8523 if (!REG_P (op0))
8524 op0 = copy_to_mode_reg (mode, op0);
8526 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8527 gen_int_mode (tree_low_cst (exp1, 0),
8528 TYPE_MODE (TREE_TYPE (exp1)))));
8531 if (modifier == EXPAND_STACK_PARM)
8532 target = 0;
8534 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8535 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8537 case TRUNC_DIV_EXPR:
8538 case FLOOR_DIV_EXPR:
8539 case CEIL_DIV_EXPR:
8540 case ROUND_DIV_EXPR:
8541 case EXACT_DIV_EXPR:
8542 /* If this is a fixed-point operation, then we cannot use the code
8543 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8544 divisions. */
8545 if (ALL_FIXED_POINT_MODE_P (mode))
8546 goto binop;
8548 if (modifier == EXPAND_STACK_PARM)
8549 target = 0;
8550 /* Possible optimization: compute the dividend with EXPAND_SUM
8551 then if the divisor is constant we can optimize the case
8552 where some terms of the dividend have coeffs divisible by it. */
8553 expand_operands (treeop0, treeop1,
8554 subtarget, &op0, &op1, EXPAND_NORMAL);
8555 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8557 case RDIV_EXPR:
8558 goto binop;
8560 case TRUNC_MOD_EXPR:
8561 case FLOOR_MOD_EXPR:
8562 case CEIL_MOD_EXPR:
8563 case ROUND_MOD_EXPR:
8564 if (modifier == EXPAND_STACK_PARM)
8565 target = 0;
8566 expand_operands (treeop0, treeop1,
8567 subtarget, &op0, &op1, EXPAND_NORMAL);
8568 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8570 case FIXED_CONVERT_EXPR:
8571 op0 = expand_normal (treeop0);
8572 if (target == 0 || modifier == EXPAND_STACK_PARM)
8573 target = gen_reg_rtx (mode);
8575 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8576 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8577 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8578 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8579 else
8580 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8581 return target;
8583 case FIX_TRUNC_EXPR:
8584 op0 = expand_normal (treeop0);
8585 if (target == 0 || modifier == EXPAND_STACK_PARM)
8586 target = gen_reg_rtx (mode);
8587 expand_fix (target, op0, unsignedp);
8588 return target;
8590 case FLOAT_EXPR:
8591 op0 = expand_normal (treeop0);
8592 if (target == 0 || modifier == EXPAND_STACK_PARM)
8593 target = gen_reg_rtx (mode);
8594 /* expand_float can't figure out what to do if FROM has VOIDmode.
8595 So give it the correct mode. With -O, cse will optimize this. */
8596 if (GET_MODE (op0) == VOIDmode)
8597 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8598 op0);
8599 expand_float (target, op0,
8600 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8601 return target;
8603 case NEGATE_EXPR:
8604 op0 = expand_expr (treeop0, subtarget,
8605 VOIDmode, EXPAND_NORMAL);
8606 if (modifier == EXPAND_STACK_PARM)
8607 target = 0;
8608 temp = expand_unop (mode,
8609 optab_for_tree_code (NEGATE_EXPR, type,
8610 optab_default),
8611 op0, target, 0);
8612 gcc_assert (temp);
8613 return REDUCE_BIT_FIELD (temp);
8615 case ABS_EXPR:
8616 op0 = expand_expr (treeop0, subtarget,
8617 VOIDmode, EXPAND_NORMAL);
8618 if (modifier == EXPAND_STACK_PARM)
8619 target = 0;
8621 /* ABS_EXPR is not valid for complex arguments. */
8622 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8623 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8625 /* Unsigned abs is simply the operand. Testing here means we don't
8626 risk generating incorrect code below. */
8627 if (TYPE_UNSIGNED (type))
8628 return op0;
8630 return expand_abs (mode, op0, target, unsignedp,
8631 safe_from_p (target, treeop0, 1));
8633 case MAX_EXPR:
8634 case MIN_EXPR:
8635 target = original_target;
8636 if (target == 0
8637 || modifier == EXPAND_STACK_PARM
8638 || (MEM_P (target) && MEM_VOLATILE_P (target))
8639 || GET_MODE (target) != mode
8640 || (REG_P (target)
8641 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8642 target = gen_reg_rtx (mode);
8643 expand_operands (treeop0, treeop1,
8644 target, &op0, &op1, EXPAND_NORMAL);
8646 /* First try to do it with a special MIN or MAX instruction.
8647 If that does not win, use a conditional jump to select the proper
8648 value. */
8649 this_optab = optab_for_tree_code (code, type, optab_default);
8650 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8651 OPTAB_WIDEN);
8652 if (temp != 0)
8653 return temp;
8655 /* At this point, a MEM target is no longer useful; we will get better
8656 code without it. */
8658 if (! REG_P (target))
8659 target = gen_reg_rtx (mode);
8661 /* If op1 was placed in target, swap op0 and op1. */
8662 if (target != op0 && target == op1)
8664 temp = op0;
8665 op0 = op1;
8666 op1 = temp;
8669 /* We generate better code and avoid problems with op1 mentioning
8670 target by forcing op1 into a pseudo if it isn't a constant. */
8671 if (! CONSTANT_P (op1))
8672 op1 = force_reg (mode, op1);
8675 enum rtx_code comparison_code;
8676 rtx cmpop1 = op1;
8678 if (code == MAX_EXPR)
8679 comparison_code = unsignedp ? GEU : GE;
8680 else
8681 comparison_code = unsignedp ? LEU : LE;
8683 /* Canonicalize to comparisons against 0. */
8684 if (op1 == const1_rtx)
8686 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8687 or (a != 0 ? a : 1) for unsigned.
8688 For MIN we are safe converting (a <= 1 ? a : 1)
8689 into (a <= 0 ? a : 1) */
8690 cmpop1 = const0_rtx;
8691 if (code == MAX_EXPR)
8692 comparison_code = unsignedp ? NE : GT;
8694 if (op1 == constm1_rtx && !unsignedp)
8696 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8697 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8698 cmpop1 = const0_rtx;
8699 if (code == MIN_EXPR)
8700 comparison_code = LT;
8702 #ifdef HAVE_conditional_move
8703 /* Use a conditional move if possible. */
8704 if (can_conditionally_move_p (mode))
8706 rtx insn;
8708 /* ??? Same problem as in expmed.c: emit_conditional_move
8709 forces a stack adjustment via compare_from_rtx, and we
8710 lose the stack adjustment if the sequence we are about
8711 to create is discarded. */
8712 do_pending_stack_adjust ();
8714 start_sequence ();
8716 /* Try to emit the conditional move. */
8717 insn = emit_conditional_move (target, comparison_code,
8718 op0, cmpop1, mode,
8719 op0, op1, mode,
8720 unsignedp);
8722 /* If we could do the conditional move, emit the sequence,
8723 and return. */
8724 if (insn)
8726 rtx seq = get_insns ();
8727 end_sequence ();
8728 emit_insn (seq);
8729 return target;
8732 /* Otherwise discard the sequence and fall back to code with
8733 branches. */
8734 end_sequence ();
8736 #endif
8737 if (target != op0)
8738 emit_move_insn (target, op0);
8740 temp = gen_label_rtx ();
8741 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8742 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8743 -1);
8745 emit_move_insn (target, op1);
8746 emit_label (temp);
8747 return target;
8749 case BIT_NOT_EXPR:
8750 op0 = expand_expr (treeop0, subtarget,
8751 VOIDmode, EXPAND_NORMAL);
8752 if (modifier == EXPAND_STACK_PARM)
8753 target = 0;
8754 /* In case we have to reduce the result to bitfield precision
8755 for an unsigned bitfield, expand this as XOR with a proper constant
8756 instead. */
8757 if (reduce_bit_field && TYPE_UNSIGNED (type))
8758 temp = expand_binop (mode, xor_optab, op0,
8759 immed_double_int_const
8760 (double_int_mask (TYPE_PRECISION (type)), mode),
8761 target, 1, OPTAB_LIB_WIDEN);
8762 else
8763 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8764 gcc_assert (temp);
8765 return temp;
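/* For illustration: for an unsigned bit-field type of precision 3, ~X is
   expanded as X ^ 7 (the mask for 3 bits), which keeps the result within
   the bit-field precision without a separate reduction step. */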
8767 /* ??? Can optimize bitwise operations with one arg constant.
8768 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8769 and (a bitwise1 b) bitwise2 b (etc)
8770 but that is probably not worth while. */
8772 case BIT_AND_EXPR:
8773 case BIT_IOR_EXPR:
8774 case BIT_XOR_EXPR:
8775 goto binop;
8777 case LROTATE_EXPR:
8778 case RROTATE_EXPR:
8779 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8780 || (GET_MODE_PRECISION (TYPE_MODE (type))
8781 == TYPE_PRECISION (type)));
8782 /* fall through */
8784 case LSHIFT_EXPR:
8785 case RSHIFT_EXPR:
8786 /* If this is a fixed-point operation, then we cannot use the code
8787 below because "expand_shift" doesn't support sat/no-sat fixed-point
8788 shifts. */
8789 if (ALL_FIXED_POINT_MODE_P (mode))
8790 goto binop;
8792 if (! safe_from_p (subtarget, treeop1, 1))
8793 subtarget = 0;
8794 if (modifier == EXPAND_STACK_PARM)
8795 target = 0;
8796 op0 = expand_expr (treeop0, subtarget,
8797 VOIDmode, EXPAND_NORMAL);
8798 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8799 unsignedp);
8800 if (code == LSHIFT_EXPR)
8801 temp = REDUCE_BIT_FIELD (temp);
8802 return temp;
8804 /* Could determine the answer when only additive constants differ. Also,
8805 the addition of one can be handled by changing the condition. */
8806 case LT_EXPR:
8807 case LE_EXPR:
8808 case GT_EXPR:
8809 case GE_EXPR:
8810 case EQ_EXPR:
8811 case NE_EXPR:
8812 case UNORDERED_EXPR:
8813 case ORDERED_EXPR:
8814 case UNLT_EXPR:
8815 case UNLE_EXPR:
8816 case UNGT_EXPR:
8817 case UNGE_EXPR:
8818 case UNEQ_EXPR:
8819 case LTGT_EXPR:
8820 temp = do_store_flag (ops,
8821 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8822 tmode != VOIDmode ? tmode : mode);
8823 if (temp)
8824 return temp;
8826 /* Use a compare and a jump for BLKmode comparisons, or for function
8827 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8829 if ((target == 0
8830 || modifier == EXPAND_STACK_PARM
8831 || ! safe_from_p (target, treeop0, 1)
8832 || ! safe_from_p (target, treeop1, 1)
8833 /* Make sure we don't have a hard reg (such as function's return
8834 value) live across basic blocks, if not optimizing. */
8835 || (!optimize && REG_P (target)
8836 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8837 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8839 emit_move_insn (target, const0_rtx);
8841 op1 = gen_label_rtx ();
8842 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8844 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8845 emit_move_insn (target, constm1_rtx);
8846 else
8847 emit_move_insn (target, const1_rtx);
8849 emit_label (op1);
8850 return target;
8852 case COMPLEX_EXPR:
8853 /* Get the rtx for the operands. */
8854 op0 = expand_normal (treeop0);
8855 op1 = expand_normal (treeop1);
8857 if (!target)
8858 target = gen_reg_rtx (TYPE_MODE (type));
8860 /* Move the real (op0) and imaginary (op1) parts to their location. */
8861 write_complex_part (target, op0, false);
8862 write_complex_part (target, op1, true);
8864 return target;
8866 case WIDEN_SUM_EXPR:
8868 tree oprnd0 = treeop0;
8869 tree oprnd1 = treeop1;
8871 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8872 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8873 target, unsignedp);
8874 return target;
8877 case REDUC_MAX_EXPR:
8878 case REDUC_MIN_EXPR:
8879 case REDUC_PLUS_EXPR:
8881 op0 = expand_normal (treeop0);
8882 this_optab = optab_for_tree_code (code, type, optab_default);
8883 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8884 gcc_assert (temp);
8885 return temp;
8888 case VEC_LSHIFT_EXPR:
8889 case VEC_RSHIFT_EXPR:
8891 target = expand_vec_shift_expr (ops, target);
8892 return target;
8895 case VEC_UNPACK_HI_EXPR:
8896 case VEC_UNPACK_LO_EXPR:
8898 op0 = expand_normal (treeop0);
8899 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8900 target, unsignedp);
8901 gcc_assert (temp);
8902 return temp;
8905 case VEC_UNPACK_FLOAT_HI_EXPR:
8906 case VEC_UNPACK_FLOAT_LO_EXPR:
8908 op0 = expand_normal (treeop0);
8909 /* The signedness is determined from the input operand. */
8910 temp = expand_widen_pattern_expr
8911 (ops, op0, NULL_RTX, NULL_RTX,
8912 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8914 gcc_assert (temp);
8915 return temp;
8918 case VEC_WIDEN_MULT_HI_EXPR:
8919 case VEC_WIDEN_MULT_LO_EXPR:
8921 tree oprnd0 = treeop0;
8922 tree oprnd1 = treeop1;
8924 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8925 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8926 target, unsignedp);
8927 gcc_assert (target);
8928 return target;
8931 case VEC_WIDEN_LSHIFT_HI_EXPR:
8932 case VEC_WIDEN_LSHIFT_LO_EXPR:
8934 tree oprnd0 = treeop0;
8935 tree oprnd1 = treeop1;
8937 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8938 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8939 target, unsignedp);
8940 gcc_assert (target);
8941 return target;
8944 case VEC_PACK_TRUNC_EXPR:
8945 case VEC_PACK_SAT_EXPR:
8946 case VEC_PACK_FIX_TRUNC_EXPR:
8947 mode = TYPE_MODE (TREE_TYPE (treeop0));
8948 goto binop;
8950 case VEC_PERM_EXPR:
8951 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
8952 op2 = expand_normal (treeop2);
8954 /* Careful here: if the target doesn't support integral vector modes,
8955 a constant selection vector could wind up smooshed into a normal
8956 integral constant. */
8957 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
8959 tree sel_type = TREE_TYPE (treeop2);
8960 enum machine_mode vmode
8961 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
8962 TYPE_VECTOR_SUBPARTS (sel_type));
8963 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
8964 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
8965 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
8967 else
8968 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
8970 temp = expand_vec_perm (mode, op0, op1, op2, target);
8971 gcc_assert (temp);
8972 return temp;
8974 case DOT_PROD_EXPR:
8976 tree oprnd0 = treeop0;
8977 tree oprnd1 = treeop1;
8978 tree oprnd2 = treeop2;
8979 rtx op2;
8981 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8982 op2 = expand_normal (oprnd2);
8983 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8984 target, unsignedp);
8985 return target;
8988 case REALIGN_LOAD_EXPR:
8990 tree oprnd0 = treeop0;
8991 tree oprnd1 = treeop1;
8992 tree oprnd2 = treeop2;
8993 rtx op2;
8995 this_optab = optab_for_tree_code (code, type, optab_default);
8996 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8997 op2 = expand_normal (oprnd2);
8998 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8999 target, unsignedp);
9000 gcc_assert (temp);
9001 return temp;
9004 case COND_EXPR:
9005 /* A COND_EXPR with its type being VOID_TYPE represents a
9006 conditional jump and is handled in
9007 expand_gimple_cond_expr. */
9008 gcc_assert (!VOID_TYPE_P (type));
9010 /* Note that COND_EXPRs whose type is a structure or union
9011 are required to be constructed to contain assignments of
9012 a temporary variable, so that we can evaluate them here
9013 for side effect only. If type is void, we must do likewise. */
9015 gcc_assert (!TREE_ADDRESSABLE (type)
9016 && !ignore
9017 && TREE_TYPE (treeop1) != void_type_node
9018 && TREE_TYPE (treeop2) != void_type_node);
9020 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9021 if (temp)
9022 return temp;
9024 /* If we are not to produce a result, we have no target. Otherwise,
9025 if a target was specified use it; it will not be used as an
9026 intermediate target unless it is safe. If no target, use a
9027 temporary. */
9029 if (modifier != EXPAND_STACK_PARM
9030 && original_target
9031 && safe_from_p (original_target, treeop0, 1)
9032 && GET_MODE (original_target) == mode
9033 && !MEM_P (original_target))
9034 temp = original_target;
9035 else
9036 temp = assign_temp (type, 0, 0, 1);
9038 do_pending_stack_adjust ();
9039 NO_DEFER_POP;
9040 op0 = gen_label_rtx ();
9041 op1 = gen_label_rtx ();
9042 jumpifnot (treeop0, op0, -1);
9043 store_expr (treeop1, temp,
9044 modifier == EXPAND_STACK_PARM,
9045 false);
9047 emit_jump_insn (gen_jump (op1));
9048 emit_barrier ();
9049 emit_label (op0);
9050 store_expr (treeop2, temp,
9051 modifier == EXPAND_STACK_PARM,
9052 false);
9054 emit_label (op1);
9055 OK_DEFER_POP;
9056 return temp;
9058 case VEC_COND_EXPR:
9059 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9060 return target;
9062 default:
9063 gcc_unreachable ();
9066 /* Here to do an ordinary binary operator. */
9067 binop:
9068 expand_operands (treeop0, treeop1,
9069 subtarget, &op0, &op1, EXPAND_NORMAL);
9070 binop2:
9071 this_optab = optab_for_tree_code (code, type, optab_default);
9072 binop3:
9073 if (modifier == EXPAND_STACK_PARM)
9074 target = 0;
9075 temp = expand_binop (mode, this_optab, op0, op1, target,
9076 unsignedp, OPTAB_LIB_WIDEN);
9077 gcc_assert (temp);
9078 /* Bitwise operations do not need bitfield reduction as we expect their
9079 operands to be properly truncated. */
9080 if (code == BIT_XOR_EXPR
9081 || code == BIT_AND_EXPR
9082 || code == BIT_IOR_EXPR)
9083 return temp;
9084 return REDUCE_BIT_FIELD (temp);
9086 #undef REDUCE_BIT_FIELD
9089 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
9090 enum expand_modifier modifier, rtx *alt_rtl)
9092 rtx op0, op1, temp, decl_rtl;
9093 tree type;
9094 int unsignedp;
9095 enum machine_mode mode;
9096 enum tree_code code = TREE_CODE (exp);
9097 rtx subtarget, original_target;
9098 int ignore;
9099 tree context;
9100 bool reduce_bit_field;
9101 location_t loc = EXPR_LOCATION (exp);
9102 struct separate_ops ops;
9103 tree treeop0, treeop1, treeop2;
9104 tree ssa_name = NULL_TREE;
9105 gimple g;
9107 type = TREE_TYPE (exp);
9108 mode = TYPE_MODE (type);
9109 unsignedp = TYPE_UNSIGNED (type);
9111 treeop0 = treeop1 = treeop2 = NULL_TREE;
9112 if (!VL_EXP_CLASS_P (exp))
9113 switch (TREE_CODE_LENGTH (code))
9115 default:
9116 case 3: treeop2 = TREE_OPERAND (exp, 2);
9117 case 2: treeop1 = TREE_OPERAND (exp, 1);
9118 case 1: treeop0 = TREE_OPERAND (exp, 0);
9119 case 0: break;
9121 ops.code = code;
9122 ops.type = type;
9123 ops.op0 = treeop0;
9124 ops.op1 = treeop1;
9125 ops.op2 = treeop2;
9126 ops.location = loc;
9128 ignore = (target == const0_rtx
9129 || ((CONVERT_EXPR_CODE_P (code)
9130 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9131 && TREE_CODE (type) == VOID_TYPE));
9133 /* An operation in what may be a bit-field type needs the
9134 result to be reduced to the precision of the bit-field type,
9135 which is narrower than that of the type's mode. */
9136 reduce_bit_field = (!ignore
9137 && INTEGRAL_TYPE_P (type)
9138 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9140 /* If we are going to ignore this result, we need only do something
9141 if there is a side-effect somewhere in the expression. If there
9142 is, short-circuit the most common cases here. Note that we must
9143 not call expand_expr with anything but const0_rtx in case this
9144 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9146 if (ignore)
9148 if (! TREE_SIDE_EFFECTS (exp))
9149 return const0_rtx;
9151 /* Ensure we reference a volatile object even if value is ignored, but
9152 don't do this if all we are doing is taking its address. */
9153 if (TREE_THIS_VOLATILE (exp)
9154 && TREE_CODE (exp) != FUNCTION_DECL
9155 && mode != VOIDmode && mode != BLKmode
9156 && modifier != EXPAND_CONST_ADDRESS)
9158 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9159 if (MEM_P (temp))
9160 copy_to_reg (temp);
9161 return const0_rtx;
9164 if (TREE_CODE_CLASS (code) == tcc_unary
9165 || code == COMPONENT_REF || code == INDIRECT_REF)
9166 return expand_expr (treeop0, const0_rtx, VOIDmode,
9167 modifier);
9169 else if (TREE_CODE_CLASS (code) == tcc_binary
9170 || TREE_CODE_CLASS (code) == tcc_comparison
9171 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9173 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9174 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9175 return const0_rtx;
9177 else if (code == BIT_FIELD_REF)
9179 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9180 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9181 expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
9182 return const0_rtx;
9185 target = 0;
9188 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9189 target = 0;
9191 /* Use subtarget as the target for operand 0 of a binary operation. */
9192 subtarget = get_subtarget (target);
9193 original_target = target;
9195 switch (code)
9197 case LABEL_DECL:
9199 tree function = decl_function_context (exp);
9201 temp = label_rtx (exp);
9202 temp = gen_rtx_LABEL_REF (Pmode, temp);
9204 if (function != current_function_decl
9205 && function != 0)
9206 LABEL_REF_NONLOCAL_P (temp) = 1;
9208 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9209 return temp;
9212 case SSA_NAME:
9213 /* ??? ivopts calls the expander without any preparation from
9214 out-of-ssa. So fake instructions as if this were an access to the
9215 base variable. This unnecessarily allocates a pseudo; see whether we
9216 can reuse it if partition base vars have it set already. */
9217 if (!currently_expanding_to_rtl)
9218 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
9219 NULL);
9221 g = get_gimple_for_ssa_name (exp);
9222 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9223 if (g == NULL
9224 && modifier == EXPAND_INITIALIZER
9225 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9226 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9227 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9228 g = SSA_NAME_DEF_STMT (exp);
9229 if (g)
9231 rtx r = expand_expr_real (gimple_assign_rhs_to_tree (g), target,
9232 tmode, modifier, NULL);
9233 if (REG_P (r) && !REG_EXPR (r))
9234 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9235 return r;
9238 ssa_name = exp;
9239 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9240 exp = SSA_NAME_VAR (ssa_name);
9241 goto expand_decl_rtl;
9243 case PARM_DECL:
9244 case VAR_DECL:
9245 /* If a static var's type was incomplete when the decl was written,
9246 but the type is complete now, lay out the decl now. */
9247 if (DECL_SIZE (exp) == 0
9248 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9249 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9250 layout_decl (exp, 0);
9252 /* ... fall through ... */
9254 case FUNCTION_DECL:
9255 case RESULT_DECL:
9256 decl_rtl = DECL_RTL (exp);
9257 expand_decl_rtl:
9258 gcc_assert (decl_rtl);
9259 decl_rtl = copy_rtx (decl_rtl);
9260 /* Record writes to register variables. */
9261 if (modifier == EXPAND_WRITE
9262 && REG_P (decl_rtl)
9263 && HARD_REGISTER_P (decl_rtl))
9264 add_to_hard_reg_set (&crtl->asm_clobbers,
9265 GET_MODE (decl_rtl), REGNO (decl_rtl));
9267 /* Ensure variable marked as used even if it doesn't go through
9268 a parser. If it hasn't been used yet, write out an external
9269 definition. */
9270 TREE_USED (exp) = 1;
9272 /* Show we haven't gotten RTL for this yet. */
9273 temp = 0;
9275 /* Variables inherited from containing functions should have
9276 been lowered by this point. */
9277 context = decl_function_context (exp);
9278 gcc_assert (!context
9279 || context == current_function_decl
9280 || TREE_STATIC (exp)
9281 || DECL_EXTERNAL (exp)
9282 /* ??? C++ creates functions that are not TREE_STATIC. */
9283 || TREE_CODE (exp) == FUNCTION_DECL);
9285 /* This is the case of an array whose size is to be determined
9286 from its initializer, while the initializer is still being parsed.
9287 See expand_decl. */
9289 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9290 temp = validize_mem (decl_rtl);
9292 /* If DECL_RTL is memory, we are in the normal case and the
9293 address is not valid, get the address into a register. */
9295 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9297 if (alt_rtl)
9298 *alt_rtl = decl_rtl;
9299 decl_rtl = use_anchored_address (decl_rtl);
9300 if (modifier != EXPAND_CONST_ADDRESS
9301 && modifier != EXPAND_SUM
9302 && !memory_address_addr_space_p (DECL_MODE (exp),
9303 XEXP (decl_rtl, 0),
9304 MEM_ADDR_SPACE (decl_rtl)))
9305 temp = replace_equiv_address (decl_rtl,
9306 copy_rtx (XEXP (decl_rtl, 0)));
9309 /* If we got something, return it. But first, set the alignment
9310 if the address is a register. */
9311 if (temp != 0)
9313 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9314 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9316 return temp;
9319 /* If the mode of DECL_RTL does not match that of the decl,
9320 there are two cases: we are dealing with a BLKmode value
9321 that is returned in a register, or we are dealing with
9322 a promoted value. In the latter case, return a SUBREG
9323 of the wanted mode, but mark it so that we know that it
9324 was already extended. */
9325 if (REG_P (decl_rtl)
9326 && DECL_MODE (exp) != BLKmode
9327 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9329 enum machine_mode pmode;
9331 /* Get the signedness to be used for this variable. Ensure we get
9332 the same mode we got when the variable was declared. */
9333 if (code == SSA_NAME
9334 && (g = SSA_NAME_DEF_STMT (ssa_name))
9335 && gimple_code (g) == GIMPLE_CALL)
9337 gcc_assert (!gimple_call_internal_p (g));
9338 pmode = promote_function_mode (type, mode, &unsignedp,
9339 gimple_call_fntype (g),
9342 else
9343 pmode = promote_decl_mode (exp, &unsignedp);
9344 gcc_assert (GET_MODE (decl_rtl) == pmode);
9346 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9347 SUBREG_PROMOTED_VAR_P (temp) = 1;
9348 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
9349 return temp;
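/* Illustration (added for exposition, not part of the original sources).
   On a target that promotes sub-word values to whole registers, e.g. a
   'short' variable kept sign-extended in an SImode pseudo, DECL_RTL has
   SImode while DECL_MODE is HImode.  The code above then returns
   something like

       (subreg:HI (reg:SI 123) 0)

   with SUBREG_PROMOTED_VAR_P set, so later consumers know the high bits
   already contain the extension and need not redo it.  */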
9352 return decl_rtl;
9354 case INTEGER_CST:
9355 temp = immed_double_const (TREE_INT_CST_LOW (exp),
9356 TREE_INT_CST_HIGH (exp), mode);
9358 return temp;
9360 case VECTOR_CST:
9362 tree tmp = NULL_TREE;
9363 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9364 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9365 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9366 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9367 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9368 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9369 return const_vector_from_tree (exp);
9370 if (GET_MODE_CLASS (mode) == MODE_INT)
9372 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9373 if (type_for_mode)
9374 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9376 if (!tmp)
9378 VEC(constructor_elt,gc) *v;
9379 unsigned i;
9380 v = VEC_alloc (constructor_elt, gc, VECTOR_CST_NELTS (exp));
9381 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9382 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9383 tmp = build_constructor (type, v);
9385 return expand_expr (tmp, ignore ? const0_rtx : target,
9386 tmode, modifier);
9389 case CONST_DECL:
9390 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9392 case REAL_CST:
9393 /* If optimized, generate immediate CONST_DOUBLE
9394 which will be turned into memory by reload if necessary.
9396 We used to force a register so that loop.c could see it. But
9397 this does not allow gen_* patterns to perform optimizations with
9398 the constants. It also produces two insns in cases like "x = 1.0;".
9399 On most machines, floating-point constants are not permitted in
9400 many insns, so we'd end up copying it to a register in any case.
9402 Now, we do the copying in expand_binop, if appropriate. */
9403 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9404 TYPE_MODE (TREE_TYPE (exp)));
9406 case FIXED_CST:
9407 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9408 TYPE_MODE (TREE_TYPE (exp)));
9410 case COMPLEX_CST:
9411 /* Handle evaluating a complex constant in a CONCAT target. */
9412 if (original_target && GET_CODE (original_target) == CONCAT)
9414 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9415 rtx rtarg, itarg;
9417 rtarg = XEXP (original_target, 0);
9418 itarg = XEXP (original_target, 1);
9420 /* Move the real and imaginary parts separately. */
9421 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9422 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9424 if (op0 != rtarg)
9425 emit_move_insn (rtarg, op0);
9426 if (op1 != itarg)
9427 emit_move_insn (itarg, op1);
9429 return original_target;
9432 /* ... fall through ... */
9434 case STRING_CST:
9435 temp = expand_expr_constant (exp, 1, modifier);
9437 /* temp contains a constant address.
9438 On RISC machines where a constant address isn't valid,
9439 make some insns to get that address into a register. */
9440 if (modifier != EXPAND_CONST_ADDRESS
9441 && modifier != EXPAND_INITIALIZER
9442 && modifier != EXPAND_SUM
9443 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9444 MEM_ADDR_SPACE (temp)))
9445 return replace_equiv_address (temp,
9446 copy_rtx (XEXP (temp, 0)));
9447 return temp;
9449 case SAVE_EXPR:
9451 tree val = treeop0;
9452 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
9454 if (!SAVE_EXPR_RESOLVED_P (exp))
9456 /* We can indeed still hit this case, typically via builtin
9457 expanders calling save_expr immediately before expanding
9458 something. Assume this means that we only have to deal
9459 with non-BLKmode values. */
9460 gcc_assert (GET_MODE (ret) != BLKmode);
9462 val = build_decl (EXPR_LOCATION (exp),
9463 VAR_DECL, NULL, TREE_TYPE (exp));
9464 DECL_ARTIFICIAL (val) = 1;
9465 DECL_IGNORED_P (val) = 1;
9466 treeop0 = val;
9467 TREE_OPERAND (exp, 0) = treeop0;
9468 SAVE_EXPR_RESOLVED_P (exp) = 1;
9470 if (!CONSTANT_P (ret))
9471 ret = copy_to_reg (ret);
9472 SET_DECL_RTL (val, ret);
9475 return ret;
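/* Illustration (added for exposition, not part of the original sources).
   The first expansion of an unresolved SAVE_EXPR copies its value into a
   pseudo and records that pseudo as the DECL_RTL of an artificial
   VAR_DECL substituted for the operand; any later expansion of the same
   SAVE_EXPR then reuses that register instead of re-evaluating the
   operand, which is what gives SAVE_EXPR its evaluate-once semantics.  */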
9479 case CONSTRUCTOR:
9480 /* If we don't need the result, just ensure we evaluate any
9481 subexpressions. */
9482 if (ignore)
9484 unsigned HOST_WIDE_INT idx;
9485 tree value;
9487 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9488 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9490 return const0_rtx;
9493 return expand_constructor (exp, target, modifier, false);
9495 case TARGET_MEM_REF:
9497 addr_space_t as
9498 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9499 struct mem_address addr;
9500 enum insn_code icode;
9501 unsigned int align;
9503 get_address_description (exp, &addr);
9504 op0 = addr_for_mem_ref (&addr, as, true);
9505 op0 = memory_address_addr_space (mode, op0, as);
9506 temp = gen_rtx_MEM (mode, op0);
9507 set_mem_attributes (temp, exp, 0);
9508 set_mem_addr_space (temp, as);
9509 align = get_object_or_type_alignment (exp);
9510 if (modifier != EXPAND_WRITE
9511 && mode != BLKmode
9512 && align < GET_MODE_ALIGNMENT (mode)
9513 /* If the target does not have special handling for unaligned
9514 loads of mode then it can use regular moves for them. */
9515 && ((icode = optab_handler (movmisalign_optab, mode))
9516 != CODE_FOR_nothing))
9518 struct expand_operand ops[2];
9520 /* We've already validated the memory, and we're creating a
9521 new pseudo destination. The predicates really can't fail,
9522 nor can the generator. */
9523 create_output_operand (&ops[0], NULL_RTX, mode);
9524 create_fixed_operand (&ops[1], temp);
9525 expand_insn (icode, 2, ops);
9526 return ops[0].value;
9528 return temp;
9531 case MEM_REF:
9533 addr_space_t as
9534 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9535 enum machine_mode address_mode;
9536 tree base = TREE_OPERAND (exp, 0);
9537 gimple def_stmt;
9538 enum insn_code icode;
9539 unsigned align;
9540 /* Handle expansion of non-aliased memory with non-BLKmode. That
9541 might end up in a register. */
9542 if (mem_ref_refers_to_non_mem_p (exp))
9544 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
9545 tree bit_offset;
9546 tree bftype;
9547 base = TREE_OPERAND (base, 0);
9548 if (offset == 0
9549 && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
9550 && (GET_MODE_BITSIZE (DECL_MODE (base))
9551 == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
9552 return expand_expr (build1 (VIEW_CONVERT_EXPR,
9553 TREE_TYPE (exp), base),
9554 target, tmode, modifier);
9555 bit_offset = bitsize_int (offset * BITS_PER_UNIT);
9556 bftype = TREE_TYPE (base);
9557 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
9558 bftype = TREE_TYPE (exp);
9559 else
9561 temp = assign_stack_temp (DECL_MODE (base),
9562 GET_MODE_SIZE (DECL_MODE (base)),
9564 store_expr (base, temp, 0, false);
9565 temp = adjust_address (temp, BLKmode, offset);
9566 set_mem_size (temp, int_size_in_bytes (TREE_TYPE (exp)));
9567 return temp;
9569 return expand_expr (build3 (BIT_FIELD_REF, bftype,
9570 base,
9571 TYPE_SIZE (TREE_TYPE (exp)),
9572 bit_offset),
9573 target, tmode, modifier);
9575 address_mode = targetm.addr_space.address_mode (as);
9576 base = TREE_OPERAND (exp, 0);
9577 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9579 tree mask = gimple_assign_rhs2 (def_stmt);
9580 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9581 gimple_assign_rhs1 (def_stmt), mask);
9582 TREE_OPERAND (exp, 0) = base;
9584 align = get_object_or_type_alignment (exp);
9585 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9586 op0 = memory_address_addr_space (address_mode, op0, as);
9587 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9589 rtx off
9590 = immed_double_int_const (mem_ref_offset (exp), address_mode);
9591 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9593 op0 = memory_address_addr_space (mode, op0, as);
9594 temp = gen_rtx_MEM (mode, op0);
9595 set_mem_attributes (temp, exp, 0);
9596 set_mem_addr_space (temp, as);
9597 if (TREE_THIS_VOLATILE (exp))
9598 MEM_VOLATILE_P (temp) = 1;
9599 if (modifier != EXPAND_WRITE
9600 && mode != BLKmode
9601 && align < GET_MODE_ALIGNMENT (mode))
9603 if ((icode = optab_handler (movmisalign_optab, mode))
9604 != CODE_FOR_nothing)
9606 struct expand_operand ops[2];
9608 /* We've already validated the memory, and we're creating a
9609 new pseudo destination. The predicates really can't fail,
9610 nor can the generator. */
9611 create_output_operand (&ops[0], NULL_RTX, mode);
9612 create_fixed_operand (&ops[1], temp);
9613 expand_insn (icode, 2, ops);
9614 return ops[0].value;
9616 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9617 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9618 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9619 true, (modifier == EXPAND_STACK_PARM
9620 ? NULL_RTX : target),
9621 mode, mode);
9623 return temp;
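/* Illustration (added for exposition; a hedged sketch).  The
   misaligned-load handling above matters when the MEM_REF's known
   alignment is below the natural alignment of its mode, e.g. an SImode
   load through a pointer that is only known to be 1-byte aligned (as can
   result from folding a memcpy from a char buffer).  On a
   strict-alignment target we then either use the target's movmisalign
   pattern, if it has one, or fall back to extract_bit_field to assemble
   the value from narrower aligned accesses.  */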
9626 case ARRAY_REF:
9629 tree array = treeop0;
9630 tree index = treeop1;
9632 /* Fold an expression like: "foo"[2].
9633 This is not done in fold so it won't happen inside &.
9634 Don't fold if this is for wide characters since it's too
9635 difficult to do correctly and this is a very rare case. */
9637 if (modifier != EXPAND_CONST_ADDRESS
9638 && modifier != EXPAND_INITIALIZER
9639 && modifier != EXPAND_MEMORY)
9641 tree t = fold_read_from_constant_string (exp);
9643 if (t)
9644 return expand_expr (t, target, tmode, modifier);
9647 /* If this is a constant index into a constant array,
9648 just get the value from the array. Handle both cases: when
9649 we have an explicit constructor and when our operand is a variable
9650 that was declared const. */
9652 if (modifier != EXPAND_CONST_ADDRESS
9653 && modifier != EXPAND_INITIALIZER
9654 && modifier != EXPAND_MEMORY
9655 && TREE_CODE (array) == CONSTRUCTOR
9656 && ! TREE_SIDE_EFFECTS (array)
9657 && TREE_CODE (index) == INTEGER_CST)
9659 unsigned HOST_WIDE_INT ix;
9660 tree field, value;
9662 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9663 field, value)
9664 if (tree_int_cst_equal (field, index))
9666 if (!TREE_SIDE_EFFECTS (value))
9667 return expand_expr (fold (value), target, tmode, modifier);
9668 break;
9672 else if (optimize >= 1
9673 && modifier != EXPAND_CONST_ADDRESS
9674 && modifier != EXPAND_INITIALIZER
9675 && modifier != EXPAND_MEMORY
9676 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9677 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
9678 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
9679 && const_value_known_p (array))
9681 if (TREE_CODE (index) == INTEGER_CST)
9683 tree init = DECL_INITIAL (array);
9685 if (TREE_CODE (init) == CONSTRUCTOR)
9687 unsigned HOST_WIDE_INT ix;
9688 tree field, value;
9690 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9691 field, value)
9692 if (tree_int_cst_equal (field, index))
9694 if (TREE_SIDE_EFFECTS (value))
9695 break;
9697 if (TREE_CODE (value) == CONSTRUCTOR)
9699 /* If VALUE is a CONSTRUCTOR, this
9700 optimization is only useful if
9701 this doesn't store the CONSTRUCTOR
9702 into memory. If it does, it is more
9703 efficient to just load the data from
9704 the array directly. */
9705 rtx ret = expand_constructor (value, target,
9706 modifier, true);
9707 if (ret == NULL_RTX)
9708 break;
9711 return expand_expr (fold (value), target, tmode,
9712 modifier);
9715 else if(TREE_CODE (init) == STRING_CST)
9717 tree index1 = index;
9718 tree low_bound = array_ref_low_bound (exp);
9719 index1 = fold_convert_loc (loc, sizetype,
9720 treeop1);
9722 /* Optimize the special case of a zero lower bound.
9724 We convert the low_bound to sizetype to avoid some problems
9725 with constant folding. (E.g. suppose the lower bound is 1,
9726 and its mode is QI. Without the conversion, (ARRAY
9727 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
9728 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
9730 if (! integer_zerop (low_bound))
9731 index1 = size_diffop_loc (loc, index1,
9732 fold_convert_loc (loc, sizetype,
9733 low_bound));
9735 if (0 > compare_tree_int (index1,
9736 TREE_STRING_LENGTH (init)))
9738 tree type = TREE_TYPE (TREE_TYPE (init));
9739 enum machine_mode mode = TYPE_MODE (type);
9741 if (GET_MODE_CLASS (mode) == MODE_INT
9742 && GET_MODE_SIZE (mode) == 1)
9743 return gen_int_mode (TREE_STRING_POINTER (init)
9744 [TREE_INT_CST_LOW (index1)],
9745 mode);
9751 goto normal_inner_ref;
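/* Illustration (added for exposition, not part of the original sources).
   The constant-folding paths above let a constant index into a constant
   array be resolved at expansion time, e.g.

       static const char foo[] = "abcdef";
       char c = foo[2];        -- expands to the constant 'c'

   provided the index is an INTEGER_CST, the initializer is known, and
   the caller is not asking for an address (EXPAND_CONST_ADDRESS and
   friends).  */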
9753 case COMPONENT_REF:
9754 /* If the operand is a CONSTRUCTOR, we can just extract the
9755 appropriate field if it is present. */
9756 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9758 unsigned HOST_WIDE_INT idx;
9759 tree field, value;
9761 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9762 idx, field, value)
9763 if (field == treeop1
9764 /* We can normally use the value of the field in the
9765 CONSTRUCTOR. However, if this is a bitfield in
9766 an integral mode that we can fit in a HOST_WIDE_INT,
9767 we must mask only the number of bits in the bitfield,
9768 since this is done implicitly by the constructor. If
9769 the bitfield does not meet either of those conditions,
9770 we can't do this optimization. */
9771 && (! DECL_BIT_FIELD (field)
9772 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9773 && (GET_MODE_PRECISION (DECL_MODE (field))
9774 <= HOST_BITS_PER_WIDE_INT))))
9776 if (DECL_BIT_FIELD (field)
9777 && modifier == EXPAND_STACK_PARM)
9778 target = 0;
9779 op0 = expand_expr (value, target, tmode, modifier);
9780 if (DECL_BIT_FIELD (field))
9782 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9783 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9785 if (TYPE_UNSIGNED (TREE_TYPE (field)))
9787 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
9788 op0 = expand_and (imode, op0, op1, target);
9790 else
9792 int count = GET_MODE_PRECISION (imode) - bitsize;
9794 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9795 target, 0);
9796 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9797 target, 0);
9801 return op0;
9804 goto normal_inner_ref;
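/* Illustration (added for exposition, not part of the original sources).
   For a reference like ((struct S) { .bits = 5 }).bits the value is
   pulled straight out of the CONSTRUCTOR; if .bits is a 3-bit unsigned
   bit-field the code above masks the expanded value with (1 << 3) - 1,
   and for a signed bit-field it shifts left and then arithmetically
   right by (precision - 3), reproducing what storing to and re-reading
   the field would have yielded.  */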
9806 case BIT_FIELD_REF:
9807 case ARRAY_RANGE_REF:
9808 normal_inner_ref:
9810 enum machine_mode mode1, mode2;
9811 HOST_WIDE_INT bitsize, bitpos;
9812 tree offset;
9813 int volatilep = 0, must_force_mem;
9814 bool packedp = false;
9815 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9816 &mode1, &unsignedp, &volatilep, true);
9817 rtx orig_op0, memloc;
9818 bool mem_attrs_from_type = false;
9820 /* If we got back the original object, something is wrong. Perhaps
9821 we are evaluating an expression too early. In any event, don't
9822 infinitely recurse. */
9823 gcc_assert (tem != exp);
9825 if (TYPE_PACKED (TREE_TYPE (TREE_OPERAND (exp, 0)))
9826 || (TREE_CODE (TREE_OPERAND (exp, 1)) == FIELD_DECL
9827 && DECL_PACKED (TREE_OPERAND (exp, 1))))
9828 packedp = true;
9830 /* If TEM's type is a union of variable size, pass TARGET to the inner
9831 computation, since it will need a temporary and TARGET is known
9832 to be suitable for that. This occurs in unchecked conversion in Ada. */
9833 orig_op0 = op0
9834 = expand_expr (tem,
9835 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9836 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9837 != INTEGER_CST)
9838 && modifier != EXPAND_STACK_PARM
9839 ? target : NULL_RTX),
9840 VOIDmode,
9841 (modifier == EXPAND_INITIALIZER
9842 || modifier == EXPAND_CONST_ADDRESS
9843 || modifier == EXPAND_STACK_PARM)
9844 ? modifier : EXPAND_NORMAL);
9847 /* If the bitfield is volatile, we want to access it in the
9848 field's mode, not the computed mode.
9849 If a MEM has VOIDmode (external with incomplete type),
9850 use BLKmode for it instead. */
9851 if (MEM_P (op0))
9853 if (volatilep && flag_strict_volatile_bitfields > 0)
9854 op0 = adjust_address (op0, mode1, 0);
9855 else if (GET_MODE (op0) == VOIDmode)
9856 op0 = adjust_address (op0, BLKmode, 0);
9859 mode2
9860 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9862 /* If we have either an offset, a BLKmode result, or a reference
9863 outside the underlying object, we must force it to memory.
9864 Such a case can occur in Ada if we have unchecked conversion
9865 of an expression from a scalar type to an aggregate type or
9866 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9867 passed a partially uninitialized object or a view-conversion
9868 to a larger size. */
9869 must_force_mem = (offset
9870 || mode1 == BLKmode
9871 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9873 /* Handle CONCAT first. */
9874 if (GET_CODE (op0) == CONCAT && !must_force_mem)
9876 if (bitpos == 0
9877 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9878 return op0;
9879 if (bitpos == 0
9880 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9881 && bitsize)
9883 op0 = XEXP (op0, 0);
9884 mode2 = GET_MODE (op0);
9886 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9887 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9888 && bitpos
9889 && bitsize)
9891 op0 = XEXP (op0, 1);
9892 bitpos = 0;
9893 mode2 = GET_MODE (op0);
9895 else
9896 /* Otherwise force into memory. */
9897 must_force_mem = 1;
9900 /* If this is a constant, put it in a register if it is a legitimate
9901 constant and we don't need a memory reference. */
9902 if (CONSTANT_P (op0)
9903 && mode2 != BLKmode
9904 && targetm.legitimate_constant_p (mode2, op0)
9905 && !must_force_mem)
9906 op0 = force_reg (mode2, op0);
9908 /* Otherwise, if this is a constant, try to force it to the constant
9909 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9910 is a legitimate constant. */
9911 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9912 op0 = validize_mem (memloc);
9914 /* Otherwise, if this is a constant or the object is not in memory
9915 and need be, put it there. */
9916 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9918 tree nt = build_qualified_type (TREE_TYPE (tem),
9919 (TYPE_QUALS (TREE_TYPE (tem))
9920 | TYPE_QUAL_CONST));
9921 memloc = assign_temp (nt, 1, 1, 1);
9922 emit_move_insn (memloc, op0);
9923 op0 = memloc;
9924 mem_attrs_from_type = true;
9927 if (offset)
9929 enum machine_mode address_mode;
9930 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9931 EXPAND_SUM);
9933 gcc_assert (MEM_P (op0));
9935 address_mode = get_address_mode (op0);
9936 if (GET_MODE (offset_rtx) != address_mode)
9937 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9939 if (GET_MODE (op0) == BLKmode
9940 /* A constant address in OP0 can have VOIDmode, we must
9941 not try to call force_reg in that case. */
9942 && GET_MODE (XEXP (op0, 0)) != VOIDmode
9943 && bitsize != 0
9944 && (bitpos % bitsize) == 0
9945 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9946 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9948 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9949 bitpos = 0;
9952 op0 = offset_address (op0, offset_rtx,
9953 highest_pow2_factor (offset));
9956 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9957 record its alignment as BIGGEST_ALIGNMENT. */
9958 if (MEM_P (op0) && bitpos == 0 && offset != 0
9959 && is_aligning_offset (offset, tem))
9960 set_mem_align (op0, BIGGEST_ALIGNMENT);
9962 /* Don't forget about volatility even if this is a bitfield. */
9963 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9965 if (op0 == orig_op0)
9966 op0 = copy_rtx (op0);
9968 MEM_VOLATILE_P (op0) = 1;
9971 /* In cases where an aligned union has an unaligned object
9972 as a field, we might be extracting a BLKmode value from
9973 an integer-mode (e.g., SImode) object. Handle this case
9974 by doing the extract into an object as wide as the field
9975 (which we know to be the width of a basic mode), then
9976 storing into memory, and changing the mode to BLKmode. */
9977 if (mode1 == VOIDmode
9978 || REG_P (op0) || GET_CODE (op0) == SUBREG
9979 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9980 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9981 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9982 && modifier != EXPAND_CONST_ADDRESS
9983 && modifier != EXPAND_INITIALIZER)
9984 /* If the field is volatile, we always want an aligned
9985 access. Do this in the following two situations:
9986 1. the access is not already naturally
9987 aligned, otherwise "normal" (non-bitfield) volatile fields
9988 become non-addressable.
9989 2. the bitsize is narrower than the access size. Need
9990 to extract bitfields from the access. */
9991 || (volatilep && flag_strict_volatile_bitfields > 0
9992 && (bitpos % GET_MODE_ALIGNMENT (mode) != 0
9993 || (mode1 != BLKmode
9994 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)))
9995 /* If the field isn't aligned enough to fetch as a memref,
9996 fetch it as a bit field. */
9997 || (mode1 != BLKmode
9998 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9999 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10000 || (MEM_P (op0)
10001 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10002 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10003 && ((modifier == EXPAND_CONST_ADDRESS
10004 || modifier == EXPAND_INITIALIZER)
10005 ? STRICT_ALIGNMENT
10006 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10007 || (bitpos % BITS_PER_UNIT != 0)))
10008 /* If the type and the field are a constant size and the
10009 size of the type isn't the same size as the bitfield,
10010 we must use bitfield operations. */
10011 || (bitsize >= 0
10012 && TYPE_SIZE (TREE_TYPE (exp))
10013 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10014 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10015 bitsize)))
10017 enum machine_mode ext_mode = mode;
10019 if (ext_mode == BLKmode
10020 && ! (target != 0 && MEM_P (op0)
10021 && MEM_P (target)
10022 && bitpos % BITS_PER_UNIT == 0))
10023 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10025 if (ext_mode == BLKmode)
10027 if (target == 0)
10028 target = assign_temp (type, 0, 1, 1);
10030 if (bitsize == 0)
10031 return target;
10033 /* In this case, BITPOS must start at a byte boundary and
10034 TARGET, if specified, must be a MEM. */
10035 gcc_assert (MEM_P (op0)
10036 && (!target || MEM_P (target))
10037 && !(bitpos % BITS_PER_UNIT));
10039 emit_block_move (target,
10040 adjust_address (op0, VOIDmode,
10041 bitpos / BITS_PER_UNIT),
10042 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10043 / BITS_PER_UNIT),
10044 (modifier == EXPAND_STACK_PARM
10045 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10047 return target;
10050 op0 = validize_mem (op0);
10052 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10053 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10055 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, packedp,
10056 (modifier == EXPAND_STACK_PARM
10057 ? NULL_RTX : target),
10058 ext_mode, ext_mode);
10060 /* If the result is a record type and BITSIZE is narrower than
10061 the mode of OP0, an integral mode, and this is a big endian
10062 machine, we must put the field into the high-order bits. */
10063 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10064 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10065 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10066 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10067 GET_MODE_BITSIZE (GET_MODE (op0))
10068 - bitsize, op0, 1);
10070 /* If the result type is BLKmode, store the data into a temporary
10071 of the appropriate type, but with the mode corresponding to the
10072 mode for the data we have (op0's mode). It's tempting to make
10073 this a constant type, since we know it's only being stored once,
10074 but that can cause problems if we are taking the address of this
10075 COMPONENT_REF because the MEM of any reference via that address
10076 will have flags corresponding to the type, which will not
10077 necessarily be constant. */
10078 if (mode == BLKmode)
10080 rtx new_rtx;
10082 new_rtx = assign_stack_temp_for_type (ext_mode,
10083 GET_MODE_BITSIZE (ext_mode),
10084 0, type);
10085 emit_move_insn (new_rtx, op0);
10086 op0 = copy_rtx (new_rtx);
10087 PUT_MODE (op0, BLKmode);
10090 return op0;
10093 /* If the result is BLKmode, use that to access the object
10094 now as well. */
10095 if (mode == BLKmode)
10096 mode1 = BLKmode;
10098 /* Get a reference to just this component. */
10099 if (modifier == EXPAND_CONST_ADDRESS
10100 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10101 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10102 else
10103 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10105 if (op0 == orig_op0)
10106 op0 = copy_rtx (op0);
10108 /* If op0 is a temporary because of forcing to memory, pass only the
10109 type to set_mem_attributes so that the original expression is never
10110 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10111 if (mem_attrs_from_type)
10112 set_mem_attributes (op0, type, 0);
10113 else
10114 set_mem_attributes (op0, exp, 0);
10116 if (REG_P (XEXP (op0, 0)))
10117 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10119 MEM_VOLATILE_P (op0) |= volatilep;
10120 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10121 || modifier == EXPAND_CONST_ADDRESS
10122 || modifier == EXPAND_INITIALIZER)
10123 return op0;
10124 else if (target == 0)
10125 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10127 convert_move (target, op0, unsignedp);
10128 return target;
10131 case OBJ_TYPE_REF:
10132 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10134 case CALL_EXPR:
10135 /* All valid uses of __builtin_va_arg_pack () are removed during
10136 inlining. */
10137 if (CALL_EXPR_VA_ARG_PACK (exp))
10138 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10140 tree fndecl = get_callee_fndecl (exp), attr;
10142 if (fndecl
10143 && (attr = lookup_attribute ("error",
10144 DECL_ATTRIBUTES (fndecl))) != NULL)
10145 error ("%Kcall to %qs declared with attribute error: %s",
10146 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10147 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10148 if (fndecl
10149 && (attr = lookup_attribute ("warning",
10150 DECL_ATTRIBUTES (fndecl))) != NULL)
10151 warning_at (tree_nonartificial_location (exp),
10152 0, "%Kcall to %qs declared with attribute warning: %s",
10153 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10154 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10156 /* Check for a built-in function. */
10157 if (fndecl && DECL_BUILT_IN (fndecl))
10159 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10160 return expand_builtin (exp, target, subtarget, tmode, ignore);
10163 return expand_call (exp, target, ignore);
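/* Illustration (added for exposition, not part of the original sources).
   The attribute checks above are what turn a surviving call such as

       extern void bad (void) __attribute__ ((error ("do not call")));
       bad ();

   into a hard error at expansion time (and the "warning" attribute into
   a warning); recognized __builtin_* calls are then routed to
   expand_builtin, and everything else to expand_call.  */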
10165 case VIEW_CONVERT_EXPR:
10166 op0 = NULL_RTX;
10168 /* If we are converting to BLKmode, try to avoid an intermediate
10169 temporary by fetching an inner memory reference. */
10170 if (mode == BLKmode
10171 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10172 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10173 && handled_component_p (treeop0))
10175 enum machine_mode mode1;
10176 HOST_WIDE_INT bitsize, bitpos;
10177 tree offset;
10178 int unsignedp;
10179 int volatilep = 0;
10180 tree tem
10181 = get_inner_reference (treeop0, &bitsize, &bitpos,
10182 &offset, &mode1, &unsignedp, &volatilep,
10183 true);
10184 rtx orig_op0;
10186 /* ??? We should work harder and deal with non-zero offsets. */
10187 if (!offset
10188 && (bitpos % BITS_PER_UNIT) == 0
10189 && bitsize >= 0
10190 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
10192 /* See the normal_inner_ref case for the rationale. */
10193 orig_op0
10194 = expand_expr (tem,
10195 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10196 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10197 != INTEGER_CST)
10198 && modifier != EXPAND_STACK_PARM
10199 ? target : NULL_RTX),
10200 VOIDmode,
10201 (modifier == EXPAND_INITIALIZER
10202 || modifier == EXPAND_CONST_ADDRESS
10203 || modifier == EXPAND_STACK_PARM)
10204 ? modifier : EXPAND_NORMAL);
10206 if (MEM_P (orig_op0))
10208 op0 = orig_op0;
10210 /* Get a reference to just this component. */
10211 if (modifier == EXPAND_CONST_ADDRESS
10212 || modifier == EXPAND_SUM
10213 || modifier == EXPAND_INITIALIZER)
10214 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10215 else
10216 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10218 if (op0 == orig_op0)
10219 op0 = copy_rtx (op0);
10221 set_mem_attributes (op0, treeop0, 0);
10222 if (REG_P (XEXP (op0, 0)))
10223 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10225 MEM_VOLATILE_P (op0) |= volatilep;
10230 if (!op0)
10231 op0 = expand_expr (treeop0,
10232 NULL_RTX, VOIDmode, modifier);
10234 /* If the input and output modes are both the same, we are done. */
10235 if (mode == GET_MODE (op0))
10237 /* If neither mode is BLKmode, and both modes are the same size
10238 then we can use gen_lowpart. */
10239 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10240 && (GET_MODE_PRECISION (mode)
10241 == GET_MODE_PRECISION (GET_MODE (op0)))
10242 && !COMPLEX_MODE_P (GET_MODE (op0)))
10244 if (GET_CODE (op0) == SUBREG)
10245 op0 = force_reg (GET_MODE (op0), op0);
10246 temp = gen_lowpart_common (mode, op0);
10247 if (temp)
10248 op0 = temp;
10249 else
10251 if (!REG_P (op0) && !MEM_P (op0))
10252 op0 = force_reg (GET_MODE (op0), op0);
10253 op0 = gen_lowpart (mode, op0);
10256 /* If both types are integral, convert from one mode to the other. */
10257 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10258 op0 = convert_modes (mode, GET_MODE (op0), op0,
10259 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10260 /* As a last resort, spill op0 to memory, and reload it in a
10261 different mode. */
10262 else if (!MEM_P (op0))
10264 /* If the operand is not a MEM, force it into memory. Since we
10265 are going to be changing the mode of the MEM, don't call
10266 force_const_mem for constants because we don't allow pool
10267 constants to change mode. */
10268 tree inner_type = TREE_TYPE (treeop0);
10270 gcc_assert (!TREE_ADDRESSABLE (exp));
10272 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10273 target
10274 = assign_stack_temp_for_type
10275 (TYPE_MODE (inner_type),
10276 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
10278 emit_move_insn (target, op0);
10279 op0 = target;
10282 /* At this point, OP0 is in the correct mode. If the output type is
10283 such that the operand is known to be aligned, indicate that it is.
10284 Otherwise, we need only be concerned about alignment for non-BLKmode
10285 results. */
10286 if (MEM_P (op0))
10288 enum insn_code icode;
10290 op0 = copy_rtx (op0);
10292 if (TYPE_ALIGN_OK (type))
10293 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10294 else if (mode != BLKmode
10295 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode)
10296 /* If the target does have special handling for unaligned
10297 loads of mode then use them. */
10298 && ((icode = optab_handler (movmisalign_optab, mode))
10299 != CODE_FOR_nothing))
10301 rtx reg, insn;
10303 op0 = adjust_address (op0, mode, 0);
10304 /* We've already validated the memory, and we're creating a
10305 new pseudo destination. The predicates really can't
10306 fail. */
10307 reg = gen_reg_rtx (mode);
10309 /* Nor can the insn generator. */
10310 insn = GEN_FCN (icode) (reg, op0);
10311 emit_insn (insn);
10312 return reg;
10314 else if (STRICT_ALIGNMENT
10315 && mode != BLKmode
10316 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10318 tree inner_type = TREE_TYPE (treeop0);
10319 HOST_WIDE_INT temp_size
10320 = MAX (int_size_in_bytes (inner_type),
10321 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10322 rtx new_rtx
10323 = assign_stack_temp_for_type (mode, temp_size, 0, type);
10324 rtx new_with_op0_mode
10325 = adjust_address (new_rtx, GET_MODE (op0), 0);
10327 gcc_assert (!TREE_ADDRESSABLE (exp));
10329 if (GET_MODE (op0) == BLKmode)
10330 emit_block_move (new_with_op0_mode, op0,
10331 GEN_INT (GET_MODE_SIZE (mode)),
10332 (modifier == EXPAND_STACK_PARM
10333 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10334 else
10335 emit_move_insn (new_with_op0_mode, op0);
10337 op0 = new_rtx;
10340 op0 = adjust_address (op0, mode, 0);
10343 return op0;
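/* Illustration (added for exposition, not part of the original sources).
   A VIEW_CONVERT_EXPR reinterprets bits without any value conversion,
   e.g. viewing a float as an int (GIMPLE dumps print this as
   VIEW_CONVERT_EXPR<int>(f)).  When both modes have the same precision
   the code above simply takes a lowpart; when the types are integral it
   uses convert_modes; otherwise it spills the operand to a stack
   temporary and re-reads it in the new mode.  */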
10345 case MODIFY_EXPR:
10347 tree lhs = treeop0;
10348 tree rhs = treeop1;
10349 gcc_assert (ignore);
10351 /* Check for |= or &= of a bitfield of size one into another bitfield
10352 of size 1. In this case, (unless we need the result of the
10353 assignment) we can do this more efficiently with a
10354 test followed by an assignment, if necessary.
10356 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10357 things change so we do, this code should be enhanced to
10358 support it. */
10359 if (TREE_CODE (lhs) == COMPONENT_REF
10360 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10361 || TREE_CODE (rhs) == BIT_AND_EXPR)
10362 && TREE_OPERAND (rhs, 0) == lhs
10363 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10364 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10365 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10367 rtx label = gen_label_rtx ();
10368 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10369 do_jump (TREE_OPERAND (rhs, 1),
10370 value ? label : 0,
10371 value ? 0 : label, -1);
10372 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10373 MOVE_NONTEMPORAL (exp));
10374 do_pending_stack_adjust ();
10375 emit_label (label);
10376 return const0_rtx;
10379 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
10380 return const0_rtx;
10383 case ADDR_EXPR:
10384 return expand_expr_addr_expr (exp, target, tmode, modifier);
10386 case REALPART_EXPR:
10387 op0 = expand_normal (treeop0);
10388 return read_complex_part (op0, false);
10390 case IMAGPART_EXPR:
10391 op0 = expand_normal (treeop0);
10392 return read_complex_part (op0, true);
10394 case RETURN_EXPR:
10395 case LABEL_EXPR:
10396 case GOTO_EXPR:
10397 case SWITCH_EXPR:
10398 case ASM_EXPR:
10399 /* Expanded in cfgexpand.c. */
10400 gcc_unreachable ();
10402 case TRY_CATCH_EXPR:
10403 case CATCH_EXPR:
10404 case EH_FILTER_EXPR:
10405 case TRY_FINALLY_EXPR:
10406 /* Lowered by tree-eh.c. */
10407 gcc_unreachable ();
10409 case WITH_CLEANUP_EXPR:
10410 case CLEANUP_POINT_EXPR:
10411 case TARGET_EXPR:
10412 case CASE_LABEL_EXPR:
10413 case VA_ARG_EXPR:
10414 case BIND_EXPR:
10415 case INIT_EXPR:
10416 case CONJ_EXPR:
10417 case COMPOUND_EXPR:
10418 case PREINCREMENT_EXPR:
10419 case PREDECREMENT_EXPR:
10420 case POSTINCREMENT_EXPR:
10421 case POSTDECREMENT_EXPR:
10422 case LOOP_EXPR:
10423 case EXIT_EXPR:
10424 /* Lowered by gimplify.c. */
10425 gcc_unreachable ();
10427 case FDESC_EXPR:
10428 /* Function descriptors are not valid except for as
10429 initialization constants, and should not be expanded. */
10430 gcc_unreachable ();
10432 case WITH_SIZE_EXPR:
10433 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10434 have pulled out the size to use in whatever context it needed. */
10435 return expand_expr_real (treeop0, original_target, tmode,
10436 modifier, alt_rtl);
10438 case COMPOUND_LITERAL_EXPR:
10440 /* Initialize the anonymous variable declared in the compound
10441 literal, then return the variable. */
10442 tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
10444 /* Create RTL for this variable. */
10445 if (!DECL_RTL_SET_P (decl))
10447 if (DECL_HARD_REGISTER (decl))
10448 /* The user specified an assembler name for this variable.
10449 Set that up now. */
10450 rest_of_decl_compilation (decl, 0, 0);
10451 else
10452 expand_decl (decl);
10455 return expand_expr_real (decl, original_target, tmode,
10456 modifier, alt_rtl);
10459 default:
10460 return expand_expr_real_2 (&ops, target, tmode, modifier);
10464 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10465 signedness of TYPE), possibly returning the result in TARGET. */
10466 static rtx
10467 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10469 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10470 if (target && GET_MODE (target) != GET_MODE (exp))
10471 target = 0;
10472 /* For constant values, reduce using build_int_cst_type. */
10473 if (CONST_INT_P (exp))
10475 HOST_WIDE_INT value = INTVAL (exp);
10476 tree t = build_int_cst_type (type, value);
10477 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10479 else if (TYPE_UNSIGNED (type))
10481 rtx mask = immed_double_int_const (double_int_mask (prec),
10482 GET_MODE (exp));
10483 return expand_and (GET_MODE (exp), exp, mask, target);
10485 else
10487 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10488 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10489 exp, count, target, 0);
10490 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10491 exp, count, target, 0);
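/* Illustration (added for exposition, not part of the original sources).
   For a 3-bit unsigned type living in SImode, the code above masks with
   0x7; for a 3-bit signed type it shifts left by 29 and then
   arithmetically right by 29, so e.g. the SImode value 5 is reduced to
   -3, its correct 3-bit signed interpretation.  */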
10495 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10496 when applied to the address of EXP produces an address known to be
10497 aligned more than BIGGEST_ALIGNMENT. */
10499 static int
10500 is_aligning_offset (const_tree offset, const_tree exp)
10502 /* Strip off any conversions. */
10503 while (CONVERT_EXPR_P (offset))
10504 offset = TREE_OPERAND (offset, 0);
10506 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10507 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10508 if (TREE_CODE (offset) != BIT_AND_EXPR
10509 || !host_integerp (TREE_OPERAND (offset, 1), 1)
10510 || compare_tree_int (TREE_OPERAND (offset, 1),
10511 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10512 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
10513 return 0;
10515 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10516 It must be NEGATE_EXPR. Then strip any more conversions. */
10517 offset = TREE_OPERAND (offset, 0);
10518 while (CONVERT_EXPR_P (offset))
10519 offset = TREE_OPERAND (offset, 0);
10521 if (TREE_CODE (offset) != NEGATE_EXPR)
10522 return 0;
10524 offset = TREE_OPERAND (offset, 0);
10525 while (CONVERT_EXPR_P (offset))
10526 offset = TREE_OPERAND (offset, 0);
10528 /* This must now be the address of EXP. */
10529 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
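/* Illustration (added for exposition; a rough sketch).  The shape
   recognized above is the offset part of an address rounded up to an
   alignment boundary, roughly

       exp + ((- (sizetype) &exp) & (align - 1))

   i.e. a BIT_AND_EXPR whose first operand is a NEGATE_EXPR of EXP's
   address and whose second operand is a mask of the form 2**n - 1
   larger than BIGGEST_ALIGNMENT in bytes; adding such an offset can
   only make the address more aligned, so the result may be given
   BIGGEST_ALIGNMENT.  */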
10532 /* Return the tree node if ARG corresponds to a string constant, or zero
10533 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10534 in bytes within the string that ARG is accessing. The type of the
10535 offset will be `sizetype'. */
10537 tree
10538 string_constant (tree arg, tree *ptr_offset)
10540 tree array, offset, lower_bound;
10541 STRIP_NOPS (arg);
10543 if (TREE_CODE (arg) == ADDR_EXPR)
10545 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10547 *ptr_offset = size_zero_node;
10548 return TREE_OPERAND (arg, 0);
10550 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10552 array = TREE_OPERAND (arg, 0);
10553 offset = size_zero_node;
10555 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10557 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10558 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10559 if (TREE_CODE (array) != STRING_CST
10560 && TREE_CODE (array) != VAR_DECL)
10561 return 0;
10563 /* Check if the array has a nonzero lower bound. */
10564 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10565 if (!integer_zerop (lower_bound))
10567 /* If the offset and base aren't both constants, return 0. */
10568 if (TREE_CODE (lower_bound) != INTEGER_CST)
10569 return 0;
10570 if (TREE_CODE (offset) != INTEGER_CST)
10571 return 0;
10572 /* Adjust offset by the lower bound. */
10573 offset = size_diffop (fold_convert (sizetype, offset),
10574 fold_convert (sizetype, lower_bound));
10577 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10579 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10580 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10581 if (TREE_CODE (array) != ADDR_EXPR)
10582 return 0;
10583 array = TREE_OPERAND (array, 0);
10584 if (TREE_CODE (array) != STRING_CST
10585 && TREE_CODE (array) != VAR_DECL)
10586 return 0;
10588 else
10589 return 0;
10591 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10593 tree arg0 = TREE_OPERAND (arg, 0);
10594 tree arg1 = TREE_OPERAND (arg, 1);
10596 STRIP_NOPS (arg0);
10597 STRIP_NOPS (arg1);
10599 if (TREE_CODE (arg0) == ADDR_EXPR
10600 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10601 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10603 array = TREE_OPERAND (arg0, 0);
10604 offset = arg1;
10606 else if (TREE_CODE (arg1) == ADDR_EXPR
10607 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10608 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10610 array = TREE_OPERAND (arg1, 0);
10611 offset = arg0;
10613 else
10614 return 0;
10616 else
10617 return 0;
10619 if (TREE_CODE (array) == STRING_CST)
10621 *ptr_offset = fold_convert (sizetype, offset);
10622 return array;
10624 else if (TREE_CODE (array) == VAR_DECL
10625 || TREE_CODE (array) == CONST_DECL)
10627 int length;
10629 /* Variables initialized to string literals can be handled too. */
10630 if (!const_value_known_p (array)
10631 || !DECL_INITIAL (array)
10632 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
10633 return 0;
10635 /* Avoid const char foo[4] = "abcde"; */
10636 if (DECL_SIZE_UNIT (array) == NULL_TREE
10637 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10638 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
10639 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10640 return 0;
10642 /* If variable is bigger than the string literal, OFFSET must be constant
10643 and inside of the bounds of the string literal. */
10644 offset = fold_convert (sizetype, offset);
10645 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10646 && (! host_integerp (offset, 1)
10647 || compare_tree_int (offset, length) >= 0))
10648 return 0;
10650 *ptr_offset = offset;
10651 return DECL_INITIAL (array);
10654 return 0;
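/* Illustration (added for exposition, not part of the original sources).
   string_constant is what lets the strlen/strcpy style builtin expanders
   see through forms such as

       "hello" + 2
       &buf[3]          -- buf a VAR_DECL initialized to "hello"

   returning the underlying STRING_CST and setting *PTR_OFFSET to 2 or 3
   respectively, and rejecting offsets that may run past a shorter
   literal.  */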
10657 /* Generate code to calculate the result of OPS, an exploded
10658 comparison expression, using a store-flag instruction, and return
10659 an rtx for the result.
10661 If TARGET is nonzero, store the result there if convenient.
10663 Return zero if there is no suitable set-flag instruction
10664 available on this machine.
10666 Once expand_expr has been called on the arguments of the comparison,
10667 we are committed to doing the store flag, since it is not safe to
10668 re-evaluate the expression. We emit the store-flag insn by calling
10669 emit_store_flag, but only expand the arguments if we have a reason
10670 to believe that emit_store_flag will be successful. If we think that
10671 it will, but it isn't, we have to simulate the store-flag with a
10672 set/jump/set sequence. */
10674 static rtx
10675 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
10677 enum rtx_code code;
10678 tree arg0, arg1, type;
10679 tree tem;
10680 enum machine_mode operand_mode;
10681 int unsignedp;
10682 rtx op0, op1;
10683 rtx subtarget = target;
10684 location_t loc = ops->location;
10686 arg0 = ops->op0;
10687 arg1 = ops->op1;
10689 /* Don't crash if the comparison was erroneous. */
10690 if (arg0 == error_mark_node || arg1 == error_mark_node)
10691 return const0_rtx;
10693 type = TREE_TYPE (arg0);
10694 operand_mode = TYPE_MODE (type);
10695 unsignedp = TYPE_UNSIGNED (type);
10697 /* We won't bother with BLKmode store-flag operations because it would mean
10698 passing a lot of information to emit_store_flag. */
10699 if (operand_mode == BLKmode)
10700 return 0;
10702 /* We won't bother with store-flag operations involving function pointers
10703 when function pointers must be canonicalized before comparisons. */
10704 #ifdef HAVE_canonicalize_funcptr_for_compare
10705 if (HAVE_canonicalize_funcptr_for_compare
10706 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10707 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10708 == FUNCTION_TYPE))
10709 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10710 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10711 == FUNCTION_TYPE))))
10712 return 0;
10713 #endif
10715 STRIP_NOPS (arg0);
10716 STRIP_NOPS (arg1);
10718 /* For vector typed comparisons emit code to generate the desired
10719 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10720 expander for this. */
10721 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10723 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10724 tree if_true = constant_boolean_node (true, ops->type);
10725 tree if_false = constant_boolean_node (false, ops->type);
10726 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10740 /* Get the rtx comparison code to use. We know that OPS describes a comparison
10741 operation of some type. Some comparisons against 1 and -1 can be
10742 converted to comparisons with zero. Do so here so that the tests
10743 below will be aware that we have a comparison with zero. These
10744 tests will not catch constants in the first operand, but constants
10745 are rarely passed as the first operand. */
10747 switch (ops->code)
10749 case EQ_EXPR:
10750 code = EQ;
10751 break;
10752 case NE_EXPR:
10753 code = NE;
10754 break;
10755 case LT_EXPR:
10756 if (integer_onep (arg1))
10757 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10758 else
10759 code = unsignedp ? LTU : LT;
10760 break;
10761 case LE_EXPR:
10762 if (! unsignedp && integer_all_onesp (arg1))
10763 arg1 = integer_zero_node, code = LT;
10764 else
10765 code = unsignedp ? LEU : LE;
10766 break;
10767 case GT_EXPR:
10768 if (! unsignedp && integer_all_onesp (arg1))
10769 arg1 = integer_zero_node, code = GE;
10770 else
10771 code = unsignedp ? GTU : GT;
10772 break;
10773 case GE_EXPR:
10774 if (integer_onep (arg1))
10775 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10776 else
10777 code = unsignedp ? GEU : GE;
10778 break;
10780 case UNORDERED_EXPR:
10781 code = UNORDERED;
10782 break;
10783 case ORDERED_EXPR:
10784 code = ORDERED;
10785 break;
10786 case UNLT_EXPR:
10787 code = UNLT;
10788 break;
10789 case UNLE_EXPR:
10790 code = UNLE;
10791 break;
10792 case UNGT_EXPR:
10793 code = UNGT;
10794 break;
10795 case UNGE_EXPR:
10796 code = UNGE;
10797 break;
10798 case UNEQ_EXPR:
10799 code = UNEQ;
10800 break;
10801 case LTGT_EXPR:
10802 code = LTGT;
10803 break;
10805 default:
10806 gcc_unreachable ();
10809 /* Put a constant second. */
10810 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10811 || TREE_CODE (arg0) == FIXED_CST)
10813 tem = arg0; arg0 = arg1; arg1 = tem;
10814 code = swap_condition (code);
10817 /* If this is an equality or inequality test of a single bit, we can
10818 do this by shifting the bit being tested to the low-order bit and
10819 masking the result with the constant 1. If the condition was EQ,
10820 we xor it with 1. This does not require an scc insn and is faster
10821 than an scc insn even if we have it.
10823 The code to make this transformation was moved into fold_single_bit_test,
10824 so we just call into the folder and expand its result. */
10826 if ((code == NE || code == EQ)
10827 && integer_zerop (arg1)
10828 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
10830 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
10831 if (srcstmt
10832 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
10834 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
10835 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10836 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
10837 gimple_assign_rhs1 (srcstmt),
10838 gimple_assign_rhs2 (srcstmt));
10839 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
10840 if (temp)
10841 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
10845 if (! get_subtarget (target)
10846 || GET_MODE (subtarget) != operand_mode)
10847 subtarget = 0;
10849 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10851 if (target == 0)
10852 target = gen_reg_rtx (mode);
10854 /* Try a cstore if possible. */
10855 return emit_store_flag_force (target, code, op0, op1,
10856 operand_mode, unsignedp,
10857 (TYPE_PRECISION (ops->type) == 1
10858 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
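/* Illustration (added for exposition, not part of the original sources).
   For a test such as (x & 8) != 0, the single-bit path above defers to
   fold_single_bit_test, which produces roughly (x >> 3) & 1 -- a shift
   plus a mask rather than a comparison -- since that beats an scc
   instruction on most targets; all other comparisons are handed to
   emit_store_flag_force as a cstore.  */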
10862 /* Stubs in case we haven't got a casesi insn. */
10863 #ifndef HAVE_casesi
10864 # define HAVE_casesi 0
10865 # define gen_casesi(a, b, c, d, e) (0)
10866 # define CODE_FOR_casesi CODE_FOR_nothing
10867 #endif
10869 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10870 0 otherwise (i.e. if there is no casesi instruction). */
10872 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10873 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
10874 rtx fallback_label ATTRIBUTE_UNUSED)
10876 struct expand_operand ops[5];
10877 enum machine_mode index_mode = SImode;
10878 rtx op1, op2, index;
10880 if (! HAVE_casesi)
10881 return 0;
10883 /* Convert the index to SImode. */
10884 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10886 enum machine_mode omode = TYPE_MODE (index_type);
10887 rtx rangertx = expand_normal (range);
10889 /* We must handle the endpoints in the original mode. */
10890 index_expr = build2 (MINUS_EXPR, index_type,
10891 index_expr, minval);
10892 minval = integer_zero_node;
10893 index = expand_normal (index_expr);
10894 if (default_label)
10895 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10896 omode, 1, default_label);
10897 /* Now we can safely truncate. */
10898 index = convert_to_mode (index_mode, index, 0);
10900 else
10902 if (TYPE_MODE (index_type) != index_mode)
10904 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
10905 index_expr = fold_convert (index_type, index_expr);
10908 index = expand_normal (index_expr);
10911 do_pending_stack_adjust ();
10913 op1 = expand_normal (minval);
10914 op2 = expand_normal (range);
10916 create_input_operand (&ops[0], index, index_mode);
10917 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
10918 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
10919 create_fixed_operand (&ops[3], table_label);
10920 create_fixed_operand (&ops[4], (default_label
10921 ? default_label
10922 : fallback_label));
10923 expand_jump_insn (CODE_FOR_casesi, 5, ops);
10924 return 1;
10927 /* Attempt to generate a tablejump instruction; same concept. */
10928 #ifndef HAVE_tablejump
10929 #define HAVE_tablejump 0
10930 #define gen_tablejump(x, y) (0)
10931 #endif
10933 /* Subroutine of the next function.
10935 INDEX is the value being switched on, with the lowest value
10936 in the table already subtracted.
10937 MODE is its expected mode (needed if INDEX is constant).
10938 RANGE is the length of the jump table.
10939 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10941 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10942 index value is out of range. */
10944 static void
10945 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10946 rtx default_label)
10948 rtx temp, vector;
10950 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10951 cfun->cfg->max_jumptable_ents = INTVAL (range);
10953 /* Do an unsigned comparison (in the proper mode) between the index
10954 expression and the value which represents the length of the range.
10955 Since we just finished subtracting the lower bound of the range
10956 from the index expression, this comparison allows us to simultaneously
10957 check that the original index expression value is both greater than
10958 or equal to the minimum value of the range and less than or equal to
10959 the maximum value of the range. */
10961 if (default_label)
10962 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10963 default_label);
  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
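/* Sketch of the table read emitted above (added for clarity, not part of
   the original source).  Assuming a non-PIC target whose CASE_VECTOR_MODE
   is SImode, i.e. 4-byte table entries, the vector load is roughly

       (mem:SI (plus:Pmode (mult:Pmode index (const_int 4))
                           (label_ref table_label)))

   whose value is moved into TEMP before the tablejump; memory_address may
   first legitimize the address, e.g. by copying it to a register.  */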
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
                            fold_convert (index_type, index_expr),
                            fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_normal (range),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}
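/* Usage note (added for clarity, not part of the original source): the
   switch expander in stmt.c typically tries try_casesi first and falls
   back to try_tablejump when the target provides no casesi pattern;
   each returns 0 when its strategy is unavailable so the caller can
   move on to the next one.  */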
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  unsigned i;
  int units;
  tree elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else if (TREE_CODE (elt) == FIXED_CST)
        RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
                                                   inner);
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}
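/* Illustrative sketch (added for clarity, not part of the original
   source): a V4SImode VECTOR_CST such as { 1, 2, 3, 4 } becomes

       (const_vector:V4SI [(const_int 1) (const_int 2)
                           (const_int 3) (const_int 4)])

   while an all-zero constant takes the early exit and is returned as
   CONST0_RTX (V4SImode).  */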
/* Build a decl for a personality function given a language prefix.  */

tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
                                   long_long_unsigned_type_node,
                                   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
                     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
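/* Illustrative example (added for clarity, not part of the original
   source): the C++ front end passes LANG == "gxx", which yields a decl
   named "__gxx_personality_v0" under DWARF2/target unwinding and
   "__gxx_personality_sj0" under setjmp/longjmp exceptions.  */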
/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}
#include "gt-expr.h"