1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "rtl.h"
25 #include "alias.h"
26 #include "symtab.h"
27 #include "tree.h"
28 #include "fold-const.h"
29 #include "stringpool.h"
30 #include "stor-layout.h"
31 #include "attribs.h"
32 #include "varasm.h"
33 #include "flags.h"
34 #include "regs.h"
35 #include "hard-reg-set.h"
36 #include "except.h"
37 #include "function.h"
38 #include "insn-config.h"
39 #include "insn-attr.h"
40 #include "expmed.h"
41 #include "dojump.h"
42 #include "explow.h"
43 #include "calls.h"
44 #include "emit-rtl.h"
45 #include "stmt.h"
46 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
47 #include "expr.h"
48 #include "insn-codes.h"
49 #include "optabs.h"
50 #include "libfuncs.h"
51 #include "recog.h"
52 #include "reload.h"
53 #include "typeclass.h"
54 #include "toplev.h"
55 #include "langhooks.h"
56 #include "intl.h"
57 #include "tm_p.h"
58 #include "tree-iterator.h"
59 #include "predict.h"
60 #include "dominance.h"
61 #include "cfg.h"
62 #include "basic-block.h"
63 #include "tree-ssa-alias.h"
64 #include "internal-fn.h"
65 #include "gimple-expr.h"
66 #include "gimple.h"
67 #include "gimple-ssa.h"
68 #include "plugin-api.h"
69 #include "ipa-ref.h"
70 #include "cgraph.h"
71 #include "tree-ssanames.h"
72 #include "target.h"
73 #include "common/common-target.h"
74 #include "timevar.h"
75 #include "df.h"
76 #include "diagnostic.h"
77 #include "tree-ssa-live.h"
78 #include "tree-outof-ssa.h"
79 #include "target-globals.h"
80 #include "params.h"
81 #include "tree-ssa-address.h"
82 #include "cfgexpand.h"
83 #include "builtins.h"
84 #include "tree-chkp.h"
85 #include "rtl-chkp.h"
86 #include "ccmp.h"
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
97 /* This structure is used by move_by_pieces to describe the move to
98 be performed. */
99 struct move_by_pieces_d
101 rtx to;
102 rtx to_addr;
103 int autinc_to;
104 int explicit_inc_to;
105 rtx from;
106 rtx from_addr;
107 int autinc_from;
108 int explicit_inc_from;
109 unsigned HOST_WIDE_INT len;
110 HOST_WIDE_INT offset;
111 int reverse;
114 /* This structure is used by store_by_pieces to describe the clear to
115 be performed. */
117 struct store_by_pieces_d
119 rtx to;
120 rtx to_addr;
121 int autinc_to;
122 int explicit_inc_to;
123 unsigned HOST_WIDE_INT len;
124 HOST_WIDE_INT offset;
125 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
126 void *constfundata;
127 int reverse;
130 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
131 struct move_by_pieces_d *);
132 static bool block_move_libcall_safe_for_call_parm (void);
133 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
134 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
135 unsigned HOST_WIDE_INT);
136 static tree emit_block_move_libcall_fn (int);
137 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
138 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
139 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
140 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
141 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
142 struct store_by_pieces_d *);
143 static tree clear_storage_libcall_fn (int);
144 static rtx_insn *compress_float_constant (rtx, rtx);
145 static rtx get_subtarget (rtx);
146 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
147 HOST_WIDE_INT, machine_mode,
148 tree, int, alias_set_type);
149 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
150 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
151 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
152 machine_mode, tree, alias_set_type, bool);
154 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
156 static int is_aligning_offset (const_tree, const_tree);
157 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
158 static rtx do_store_flag (sepops, rtx, machine_mode);
159 #ifdef PUSH_ROUNDING
160 static void emit_single_push_insn (machine_mode, rtx, tree);
161 #endif
162 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
163 static rtx const_vector_from_tree (tree);
164 static tree tree_expr_size (const_tree);
165 static HOST_WIDE_INT int_expr_size (tree);
168 /* This is run to set up which modes can be used
169 directly in memory and to initialize the block move optab. It is run
170 at the beginning of compilation and when the target is reinitialized. */
172 void
173 init_expr_target (void)
175 rtx insn, pat;
176 machine_mode mode;
177 int num_clobbers;
178 rtx mem, mem1;
179 rtx reg;
181 /* Try indexing by frame ptr and try by stack ptr.
182 It is known that on the Convex the stack ptr isn't a valid index.
183 With luck, one or the other is valid on any machine. */
184 mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
185 mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);
187 /* A scratch register we can modify in-place below to avoid
188 useless RTL allocations. */
189 reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
191 insn = rtx_alloc (INSN);
192 pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
193 PATTERN (insn) = pat;
195 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
196 mode = (machine_mode) ((int) mode + 1))
198 int regno;
200 direct_load[(int) mode] = direct_store[(int) mode] = 0;
201 PUT_MODE (mem, mode);
202 PUT_MODE (mem1, mode);
204 /* See if there is some register that can be used in this mode and
205 directly loaded or stored from memory. */
207 if (mode != VOIDmode && mode != BLKmode)
208 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
209 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
210 regno++)
212 if (! HARD_REGNO_MODE_OK (regno, mode))
213 continue;
215 set_mode_and_regno (reg, mode, regno);
217 SET_SRC (pat) = mem;
218 SET_DEST (pat) = reg;
219 if (recog (pat, insn, &num_clobbers) >= 0)
220 direct_load[(int) mode] = 1;
222 SET_SRC (pat) = mem1;
223 SET_DEST (pat) = reg;
224 if (recog (pat, insn, &num_clobbers) >= 0)
225 direct_load[(int) mode] = 1;
227 SET_SRC (pat) = reg;
228 SET_DEST (pat) = mem;
229 if (recog (pat, insn, &num_clobbers) >= 0)
230 direct_store[(int) mode] = 1;
232 SET_SRC (pat) = reg;
233 SET_DEST (pat) = mem1;
234 if (recog (pat, insn, &num_clobbers) >= 0)
235 direct_store[(int) mode] = 1;
239 mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));
241 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
242 mode = GET_MODE_WIDER_MODE (mode))
244 machine_mode srcmode;
245 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
246 srcmode = GET_MODE_WIDER_MODE (srcmode))
248 enum insn_code ic;
250 ic = can_extend_p (mode, srcmode, 0);
251 if (ic == CODE_FOR_nothing)
252 continue;
254 PUT_MODE (mem, srcmode);
256 if (insn_operand_matches (ic, 1, mem))
257 float_extend_from_mem[mode][srcmode] = true;
262 /* This is run at the start of compiling a function. */
264 void
265 init_expr (void)
267 memset (&crtl->expr, 0, sizeof (crtl->expr));
270 /* Copy data from FROM to TO, where the machine modes are not the same.
271 Both modes may be integer, or both may be floating, or both may be
272 fixed-point.
273 UNSIGNEDP should be nonzero if FROM is an unsigned type.
274 This causes zero-extension instead of sign-extension. */
276 void
277 convert_move (rtx to, rtx from, int unsignedp)
279 machine_mode to_mode = GET_MODE (to);
280 machine_mode from_mode = GET_MODE (from);
281 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
282 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
283 enum insn_code code;
284 rtx libcall;
286 /* rtx code for making an equivalent value. */
287 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
288 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
291 gcc_assert (to_real == from_real);
292 gcc_assert (to_mode != BLKmode);
293 gcc_assert (from_mode != BLKmode);
295 /* If the source and destination are already the same, then there's
296 nothing to do. */
297 if (to == from)
298 return;
300 /* If FROM is a SUBREG that indicates that we have already done at least
301 the required extension, strip it. We don't handle such SUBREGs as
302 TO here. */
304 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
305 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
306 >= GET_MODE_PRECISION (to_mode))
307 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
308 from = gen_lowpart (to_mode, from), from_mode = to_mode;
310 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
312 if (to_mode == from_mode
313 || (from_mode == VOIDmode && CONSTANT_P (from)))
315 emit_move_insn (to, from);
316 return;
319 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
321 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
323 if (VECTOR_MODE_P (to_mode))
324 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
325 else
326 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
328 emit_move_insn (to, from);
329 return;
332 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
334 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
335 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
336 return;
339 if (to_real)
341 rtx value;
342 rtx_insn *insns;
343 convert_optab tab;
345 gcc_assert ((GET_MODE_PRECISION (from_mode)
346 != GET_MODE_PRECISION (to_mode))
347 || (DECIMAL_FLOAT_MODE_P (from_mode)
348 != DECIMAL_FLOAT_MODE_P (to_mode)));
350 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
351 /* Conversion between decimal float and binary float, same size. */
352 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
353 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
354 tab = sext_optab;
355 else
356 tab = trunc_optab;
358 /* Try converting directly if the insn is supported. */
360 code = convert_optab_handler (tab, to_mode, from_mode);
361 if (code != CODE_FOR_nothing)
363 emit_unop_insn (code, to, from,
364 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
365 return;
368 /* Otherwise use a libcall. */
369 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
371 /* Is this conversion implemented yet? */
372 gcc_assert (libcall);
374 start_sequence ();
375 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
376 1, from, from_mode);
377 insns = get_insns ();
378 end_sequence ();
379 emit_libcall_block (insns, to, value,
380 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
381 from)
382 : gen_rtx_FLOAT_EXTEND (to_mode, from));
383 return;
386 /* Handle pointer conversion. */ /* SPEE 900220. */
387 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
389 convert_optab ctab;
391 if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
392 ctab = trunc_optab;
393 else if (unsignedp)
394 ctab = zext_optab;
395 else
396 ctab = sext_optab;
398 if (convert_optab_handler (ctab, to_mode, from_mode)
399 != CODE_FOR_nothing)
401 emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
402 to, from, UNKNOWN);
403 return;
407 /* Targets are expected to provide conversion insns between PxImode and
408 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
409 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
411 machine_mode full_mode
412 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
414 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
415 != CODE_FOR_nothing);
417 if (full_mode != from_mode)
418 from = convert_to_mode (full_mode, from, unsignedp);
419 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
420 to, from, UNKNOWN);
421 return;
423 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
425 rtx new_from;
426 machine_mode full_mode
427 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
428 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
429 enum insn_code icode;
431 icode = convert_optab_handler (ctab, full_mode, from_mode);
432 gcc_assert (icode != CODE_FOR_nothing);
434 if (to_mode == full_mode)
436 emit_unop_insn (icode, to, from, UNKNOWN);
437 return;
440 new_from = gen_reg_rtx (full_mode);
441 emit_unop_insn (icode, new_from, from, UNKNOWN);
443 /* else proceed to integer conversions below. */
444 from_mode = full_mode;
445 from = new_from;
448 /* Make sure both are fixed-point modes or both are not. */
449 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
450 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
451 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
453 /* If we widen from_mode to to_mode and they are in the same class,
454 we won't saturate the result.
455 Otherwise, always saturate the result to play safe. */
456 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
457 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
458 expand_fixed_convert (to, from, 0, 0);
459 else
460 expand_fixed_convert (to, from, 0, 1);
461 return;
464 /* Now both modes are integers. */
466 /* Handle expanding beyond a word. */
467 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
468 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
470 rtx_insn *insns;
471 rtx lowpart;
472 rtx fill_value;
473 rtx lowfrom;
474 int i;
475 machine_mode lowpart_mode;
476 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
478 /* Try converting directly if the insn is supported. */
479 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
480 != CODE_FOR_nothing)
482 /* If FROM is a SUBREG, put it into a register. Do this
483 so that we always generate the same set of insns for
484 better cse'ing; if an intermediate assignment occurred,
485 we won't be doing the operation directly on the SUBREG. */
486 if (optimize > 0 && GET_CODE (from) == SUBREG)
487 from = force_reg (from_mode, from);
488 emit_unop_insn (code, to, from, equiv_code);
489 return;
491 /* Next, try converting via full word. */
492 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
493 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
494 != CODE_FOR_nothing))
496 rtx word_to = gen_reg_rtx (word_mode);
497 if (REG_P (to))
499 if (reg_overlap_mentioned_p (to, from))
500 from = force_reg (from_mode, from);
501 emit_clobber (to);
503 convert_move (word_to, from, unsignedp);
504 emit_unop_insn (code, to, word_to, equiv_code);
505 return;
508 /* No special multiword conversion insn; do it by hand. */
509 start_sequence ();
 511   /* Since we will turn this into a no conflict block, we must ensure that
 512      the source does not overlap the target, so force it into an isolated
 513      register when it might.  Likewise for any MEM input, since the
514 conversion sequence might require several references to it and we
515 must ensure we're getting the same value every time. */
517 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
518 from = force_reg (from_mode, from);
520 /* Get a copy of FROM widened to a word, if necessary. */
521 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
522 lowpart_mode = word_mode;
523 else
524 lowpart_mode = from_mode;
526 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
528 lowpart = gen_lowpart (lowpart_mode, to);
529 emit_move_insn (lowpart, lowfrom);
531 /* Compute the value to put in each remaining word. */
532 if (unsignedp)
533 fill_value = const0_rtx;
534 else
535 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
536 LT, lowfrom, const0_rtx,
537 lowpart_mode, 0, -1);
539 /* Fill the remaining words. */
540 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
542 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
543 rtx subword = operand_subword (to, index, 1, to_mode);
545 gcc_assert (subword);
547 if (fill_value != subword)
548 emit_move_insn (subword, fill_value);
551 insns = get_insns ();
552 end_sequence ();
554 emit_insn (insns);
555 return;
558 /* Truncating multi-word to a word or less. */
559 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
560 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
562 if (!((MEM_P (from)
563 && ! MEM_VOLATILE_P (from)
564 && direct_load[(int) to_mode]
565 && ! mode_dependent_address_p (XEXP (from, 0),
566 MEM_ADDR_SPACE (from)))
567 || REG_P (from)
568 || GET_CODE (from) == SUBREG))
569 from = force_reg (from_mode, from);
570 convert_move (to, gen_lowpart (word_mode, from), 0);
571 return;
574 /* Now follow all the conversions between integers
575 no more than a word long. */
577 /* For truncation, usually we can just refer to FROM in a narrower mode. */
578 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
579 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
581 if (!((MEM_P (from)
582 && ! MEM_VOLATILE_P (from)
583 && direct_load[(int) to_mode]
584 && ! mode_dependent_address_p (XEXP (from, 0),
585 MEM_ADDR_SPACE (from)))
586 || REG_P (from)
587 || GET_CODE (from) == SUBREG))
588 from = force_reg (from_mode, from);
589 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
590 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
591 from = copy_to_reg (from);
592 emit_move_insn (to, gen_lowpart (to_mode, from));
593 return;
596 /* Handle extension. */
597 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
599 /* Convert directly if that works. */
600 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
601 != CODE_FOR_nothing)
603 emit_unop_insn (code, to, from, equiv_code);
604 return;
606 else
608 machine_mode intermediate;
609 rtx tmp;
610 int shift_amount;
612 /* Search for a mode to convert via. */
613 for (intermediate = from_mode; intermediate != VOIDmode;
614 intermediate = GET_MODE_WIDER_MODE (intermediate))
615 if (((can_extend_p (to_mode, intermediate, unsignedp)
616 != CODE_FOR_nothing)
617 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
618 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
619 && (can_extend_p (intermediate, from_mode, unsignedp)
620 != CODE_FOR_nothing))
622 convert_move (to, convert_to_mode (intermediate, from,
623 unsignedp), unsignedp);
624 return;
627 /* No suitable intermediate mode.
628 Generate what we need with shifts. */
629 shift_amount = (GET_MODE_PRECISION (to_mode)
630 - GET_MODE_PRECISION (from_mode));
631 from = gen_lowpart (to_mode, force_reg (from_mode, from));
632 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
633 to, unsignedp);
634 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
635 to, unsignedp);
636 if (tmp != to)
637 emit_move_insn (to, tmp);
638 return;
642 /* Support special truncate insns for certain modes. */
643 if (convert_optab_handler (trunc_optab, to_mode,
644 from_mode) != CODE_FOR_nothing)
646 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
647 to, from, UNKNOWN);
648 return;
651 /* Handle truncation of volatile memrefs, and so on;
652 the things that couldn't be truncated directly,
653 and for which there was no special instruction.
655 ??? Code above formerly short-circuited this, for most integer
656 mode pairs, with a force_reg in from_mode followed by a recursive
657 call to this routine. Appears always to have been wrong. */
658 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
660 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
661 emit_move_insn (to, temp);
662 return;
665 /* Mode combination is not recognized. */
666 gcc_unreachable ();
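/* Illustrative sketch, not part of the original file: a typical caller
   widening a 32-bit pseudo into a 64-bit pseudo with zero extension.
   The names SRC and DST are hypothetical.  */
#if 0
  rtx src = gen_reg_rtx (SImode);
  rtx dst = gen_reg_rtx (DImode);
  convert_move (dst, src, /*unsignedp=*/1);  /* Emits a ZERO_EXTEND move.  */
#endif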
669 /* Return an rtx for a value that would result
670 from converting X to mode MODE.
671 Both X and MODE may be floating, or both integer.
672 UNSIGNEDP is nonzero if X is an unsigned value.
673 This can be done by referring to a part of X in place
674 or by copying to a new temporary with conversion. */
677 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
679 return convert_modes (mode, VOIDmode, x, unsignedp);
682 /* Return an rtx for a value that would result
683 from converting X from mode OLDMODE to mode MODE.
684 Both modes may be floating, or both integer.
685 UNSIGNEDP is nonzero if X is an unsigned value.
687 This can be done by referring to a part of X in place
688 or by copying to a new temporary with conversion.
690 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
693 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
695 rtx temp;
697 /* If FROM is a SUBREG that indicates that we have already done at least
698 the required extension, strip it. */
700 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
701 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
702 && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
703 x = gen_lowpart (mode, SUBREG_REG (x));
705 if (GET_MODE (x) != VOIDmode)
706 oldmode = GET_MODE (x);
708 if (mode == oldmode)
709 return x;
711 if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
713 /* If the caller did not tell us the old mode, then there is not
714 much to do with respect to canonicalization. We have to
715 assume that all the bits are significant. */
716 if (GET_MODE_CLASS (oldmode) != MODE_INT)
717 oldmode = MAX_MODE_INT;
718 wide_int w = wide_int::from (std::make_pair (x, oldmode),
719 GET_MODE_PRECISION (mode),
720 unsignedp ? UNSIGNED : SIGNED);
721 return immed_wide_int_const (w, mode);
724 /* We can do this with a gen_lowpart if both desired and current modes
725 are integer, and this is either a constant integer, a register, or a
726 non-volatile MEM. */
727 if (GET_MODE_CLASS (mode) == MODE_INT
728 && GET_MODE_CLASS (oldmode) == MODE_INT
729 && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
730 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
731 || (REG_P (x)
732 && (!HARD_REGISTER_P (x)
733 || HARD_REGNO_MODE_OK (REGNO (x), mode))
734 && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
736 return gen_lowpart (mode, x);
 738   /* Converting an integer constant into mode MODE is always equivalent to a
 739      subreg operation.  */
740 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
742 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
743 return simplify_gen_subreg (mode, x, oldmode, 0);
746 temp = gen_reg_rtx (mode);
747 convert_move (temp, x, unsignedp);
748 return temp;
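/* Illustrative sketch, not part of the original file: narrowing a pseudo
   with convert_modes.  Because integer truncation is usually a no-op,
   this typically reduces to a lowpart reference rather than new insns.
   The name WIDE is hypothetical.  */
#if 0
  rtx wide = gen_reg_rtx (SImode);
  rtx narrow = convert_modes (HImode, SImode, wide, /*unsignedp=*/0);
#endif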
751 /* Return the largest alignment we can use for doing a move (or store)
752 of MAX_PIECES. ALIGN is the largest alignment we could use. */
754 static unsigned int
755 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
757 machine_mode tmode;
759 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
760 if (align >= GET_MODE_ALIGNMENT (tmode))
761 align = GET_MODE_ALIGNMENT (tmode);
762 else
764 machine_mode tmode, xmode;
766 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
767 tmode != VOIDmode;
768 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
769 if (GET_MODE_SIZE (tmode) > max_pieces
770 || SLOW_UNALIGNED_ACCESS (tmode, align))
771 break;
773 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
776 return align;
 779 /* Return the widest integer mode that is narrower than SIZE bytes.
 780    If no such mode can be found, return VOIDmode.  */
782 static machine_mode
783 widest_int_mode_for_size (unsigned int size)
785 machine_mode tmode, mode = VOIDmode;
787 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
788 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
789 if (GET_MODE_SIZE (tmode) < size)
790 mode = tmode;
792 return mode;
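/* Illustrative worked example, not part of the original file: if the
   integer modes are QImode (1 byte), HImode (2), SImode (4) and DImode
   (8), widest_int_mode_for_size (5) returns SImode, the widest mode
   strictly narrower than 5 bytes.  Callers therefore pass a max_size
   that is one byte larger than the largest piece they will accept,
   e.g. MOVE_MAX_PIECES + 1.  */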
795 /* Determine whether the LEN bytes can be moved by using several move
796 instructions. Return nonzero if a call to move_by_pieces should
797 succeed. */
800 can_move_by_pieces (unsigned HOST_WIDE_INT len,
801 unsigned int align)
803 return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
804 optimize_insn_for_speed_p ());
807 /* Generate several move instructions to copy LEN bytes from block FROM to
808 block TO. (These are MEM rtx's with BLKmode).
810 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
811 used to push FROM to the stack.
813 ALIGN is maximum stack alignment we can assume.
815 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
816 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
817 stpcpy. */
820 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
821 unsigned int align, int endp)
823 struct move_by_pieces_d data;
824 machine_mode to_addr_mode;
825 machine_mode from_addr_mode = get_address_mode (from);
826 rtx to_addr, from_addr = XEXP (from, 0);
827 unsigned int max_size = MOVE_MAX_PIECES + 1;
828 enum insn_code icode;
830 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
832 data.offset = 0;
833 data.from_addr = from_addr;
834 if (to)
836 to_addr_mode = get_address_mode (to);
837 to_addr = XEXP (to, 0);
838 data.to = to;
839 data.autinc_to
840 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
841 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
842 data.reverse
843 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
845 else
847 to_addr_mode = VOIDmode;
848 to_addr = NULL_RTX;
849 data.to = NULL_RTX;
850 data.autinc_to = 1;
851 if (STACK_GROWS_DOWNWARD)
852 data.reverse = 1;
853 else
854 data.reverse = 0;
856 data.to_addr = to_addr;
857 data.from = from;
858 data.autinc_from
859 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
860 || GET_CODE (from_addr) == POST_INC
861 || GET_CODE (from_addr) == POST_DEC);
863 data.explicit_inc_from = 0;
864 data.explicit_inc_to = 0;
865 if (data.reverse) data.offset = len;
866 data.len = len;
868 /* If copying requires more than two move insns,
869 copy addresses to registers (to make displacements shorter)
870 and use post-increment if available. */
871 if (!(data.autinc_from && data.autinc_to)
872 && move_by_pieces_ninsns (len, align, max_size) > 2)
874 /* Find the mode of the largest move...
875 MODE might not be used depending on the definitions of the
876 USE_* macros below. */
877 machine_mode mode ATTRIBUTE_UNUSED
878 = widest_int_mode_for_size (max_size);
880 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
882 data.from_addr = copy_to_mode_reg (from_addr_mode,
883 plus_constant (from_addr_mode,
884 from_addr, len));
885 data.autinc_from = 1;
886 data.explicit_inc_from = -1;
888 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
890 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
891 data.autinc_from = 1;
892 data.explicit_inc_from = 1;
894 if (!data.autinc_from && CONSTANT_P (from_addr))
895 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
896 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
898 data.to_addr = copy_to_mode_reg (to_addr_mode,
899 plus_constant (to_addr_mode,
900 to_addr, len));
901 data.autinc_to = 1;
902 data.explicit_inc_to = -1;
904 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
906 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
907 data.autinc_to = 1;
908 data.explicit_inc_to = 1;
910 if (!data.autinc_to && CONSTANT_P (to_addr))
911 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
914 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
916 /* First move what we can in the largest integer mode, then go to
917 successively smaller modes. */
919 while (max_size > 1 && data.len > 0)
921 machine_mode mode = widest_int_mode_for_size (max_size);
923 if (mode == VOIDmode)
924 break;
926 icode = optab_handler (mov_optab, mode);
927 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
928 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
930 max_size = GET_MODE_SIZE (mode);
933 /* The code above should have handled everything. */
934 gcc_assert (!data.len);
936 if (endp)
938 rtx to1;
940 gcc_assert (!data.reverse);
941 if (data.autinc_to)
943 if (endp == 2)
945 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
946 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
947 else
948 data.to_addr = copy_to_mode_reg (to_addr_mode,
949 plus_constant (to_addr_mode,
950 data.to_addr,
951 -1));
953 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
954 data.offset);
956 else
958 if (endp == 2)
959 --data.offset;
960 to1 = adjust_address (data.to, QImode, data.offset);
962 return to1;
964 else
965 return data.to;
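/* Illustrative sketch, not part of the original file: this is how
   emit_block_move_hints below uses the by-pieces path when the size is
   a compile-time constant.  X, Y, SIZE and ALIGN stand for its
   arguments.  */
#if 0
  if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, /*endp=*/0);
#endif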
968 /* Return number of insns required to move L bytes by pieces.
969 ALIGN (in bits) is maximum alignment we can assume. */
971 unsigned HOST_WIDE_INT
972 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
973 unsigned int max_size)
975 unsigned HOST_WIDE_INT n_insns = 0;
977 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
979 while (max_size > 1 && l > 0)
981 machine_mode mode;
982 enum insn_code icode;
984 mode = widest_int_mode_for_size (max_size);
986 if (mode == VOIDmode)
987 break;
989 icode = optab_handler (mov_optab, mode);
990 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
991 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
993 max_size = GET_MODE_SIZE (mode);
996 gcc_assert (!l);
997 return n_insns;
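/* Illustrative worked example, not part of the original file: on a
   target where MOVE_MAX_PIECES is 8 and the operands are fully aligned,
   moving 11 bytes decomposes into one DImode move (8 bytes), one HImode
   move (2) and one QImode move (1), so move_by_pieces_ninsns returns 3.  */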
1000 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1001 with move instructions for mode MODE. GENFUN is the gen_... function
1002 to make a move insn for that mode. DATA has all the other info. */
1004 static void
1005 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1006 struct move_by_pieces_d *data)
1008 unsigned int size = GET_MODE_SIZE (mode);
1009 rtx to1 = NULL_RTX, from1;
1011 while (data->len >= size)
1013 if (data->reverse)
1014 data->offset -= size;
1016 if (data->to)
1018 if (data->autinc_to)
1019 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1020 data->offset);
1021 else
1022 to1 = adjust_address (data->to, mode, data->offset);
1025 if (data->autinc_from)
1026 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1027 data->offset);
1028 else
1029 from1 = adjust_address (data->from, mode, data->offset);
1031 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1032 emit_insn (gen_add2_insn (data->to_addr,
1033 gen_int_mode (-(HOST_WIDE_INT) size,
1034 GET_MODE (data->to_addr))));
1035 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1036 emit_insn (gen_add2_insn (data->from_addr,
1037 gen_int_mode (-(HOST_WIDE_INT) size,
1038 GET_MODE (data->from_addr))));
1040 if (data->to)
1041 emit_insn ((*genfun) (to1, from1));
1042 else
1044 #ifdef PUSH_ROUNDING
1045 emit_single_push_insn (mode, from1, NULL);
1046 #else
1047 gcc_unreachable ();
1048 #endif
1051 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1052 emit_insn (gen_add2_insn (data->to_addr,
1053 gen_int_mode (size,
1054 GET_MODE (data->to_addr))));
1055 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1056 emit_insn (gen_add2_insn (data->from_addr,
1057 gen_int_mode (size,
1058 GET_MODE (data->from_addr))));
1060 if (! data->reverse)
1061 data->offset += size;
1063 data->len -= size;
1067 /* Emit code to move a block Y to a block X. This may be done with
1068 string-move instructions, with multiple scalar move instructions,
1069 or with a library call.
1071 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1072 SIZE is an rtx that says how long they are.
1073 ALIGN is the maximum alignment we can assume they have.
1074 METHOD describes what kind of copy this is, and what mechanisms may be used.
 1075    MIN_SIZE is the minimal size of the block to move.
 1076    MAX_SIZE is the maximal size of the block to move; if it cannot be represented
 1077    in unsigned HOST_WIDE_INT, then it is a mask of all ones.
1079 Return the address of the new block, if memcpy is called and returns it,
1080 0 otherwise. */
1083 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1084 unsigned int expected_align, HOST_WIDE_INT expected_size,
1085 unsigned HOST_WIDE_INT min_size,
1086 unsigned HOST_WIDE_INT max_size,
1087 unsigned HOST_WIDE_INT probable_max_size)
1089 bool may_use_call;
1090 rtx retval = 0;
1091 unsigned int align;
1093 gcc_assert (size);
1094 if (CONST_INT_P (size)
1095 && INTVAL (size) == 0)
1096 return 0;
1098 switch (method)
1100 case BLOCK_OP_NORMAL:
1101 case BLOCK_OP_TAILCALL:
1102 may_use_call = true;
1103 break;
1105 case BLOCK_OP_CALL_PARM:
1106 may_use_call = block_move_libcall_safe_for_call_parm ();
1108 /* Make inhibit_defer_pop nonzero around the library call
1109 to force it to pop the arguments right away. */
1110 NO_DEFER_POP;
1111 break;
1113 case BLOCK_OP_NO_LIBCALL:
1114 may_use_call = false;
1115 break;
1117 default:
1118 gcc_unreachable ();
1121 gcc_assert (MEM_P (x) && MEM_P (y));
1122 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1123 gcc_assert (align >= BITS_PER_UNIT);
1125 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1126 block copy is more efficient for other large modes, e.g. DCmode. */
1127 x = adjust_address (x, BLKmode, 0);
1128 y = adjust_address (y, BLKmode, 0);
1130 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1131 can be incorrect is coming from __builtin_memcpy. */
1132 if (CONST_INT_P (size))
1134 x = shallow_copy_rtx (x);
1135 y = shallow_copy_rtx (y);
1136 set_mem_size (x, INTVAL (size));
1137 set_mem_size (y, INTVAL (size));
1140 if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
1141 move_by_pieces (x, y, INTVAL (size), align, 0);
1142 else if (emit_block_move_via_movmem (x, y, size, align,
1143 expected_align, expected_size,
1144 min_size, max_size, probable_max_size))
1146 else if (may_use_call
1147 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1148 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1150 /* Since x and y are passed to a libcall, mark the corresponding
1151 tree EXPR as addressable. */
1152 tree y_expr = MEM_EXPR (y);
1153 tree x_expr = MEM_EXPR (x);
1154 if (y_expr)
1155 mark_addressable (y_expr);
1156 if (x_expr)
1157 mark_addressable (x_expr);
1158 retval = emit_block_move_via_libcall (x, y, size,
1159 method == BLOCK_OP_TAILCALL);
1162 else
1163 emit_block_move_via_loop (x, y, size, align);
1165 if (method == BLOCK_OP_CALL_PARM)
1166 OK_DEFER_POP;
1168 return retval;
1172 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1174 unsigned HOST_WIDE_INT max, min = 0;
1175 if (GET_CODE (size) == CONST_INT)
1176 min = max = UINTVAL (size);
1177 else
1178 max = GET_MODE_MASK (GET_MODE (size));
1179 return emit_block_move_hints (x, y, size, method, 0, -1,
1180 min, max, max);
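/* Illustrative sketch, not part of the original file: copying NBYTES
   bytes between two BLKmode MEMs with the default strategy.  DST, SRC
   and NBYTES are hypothetical.  */
#if 0
  emit_block_move (dst, src, GEN_INT (nbytes), BLOCK_OP_NORMAL);
#endif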
1183 /* A subroutine of emit_block_move. Returns true if calling the
1184 block move libcall will not clobber any parameters which may have
1185 already been placed on the stack. */
1187 static bool
1188 block_move_libcall_safe_for_call_parm (void)
1190 #if defined (REG_PARM_STACK_SPACE)
1191 tree fn;
1192 #endif
1194 /* If arguments are pushed on the stack, then they're safe. */
1195 if (PUSH_ARGS)
1196 return true;
1198 /* If registers go on the stack anyway, any argument is sure to clobber
1199 an outgoing argument. */
1200 #if defined (REG_PARM_STACK_SPACE)
1201 fn = emit_block_move_libcall_fn (false);
1202 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1203 depend on its argument. */
1204 (void) fn;
1205 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1206 && REG_PARM_STACK_SPACE (fn) != 0)
1207 return false;
1208 #endif
1210 /* If any argument goes in memory, then it might clobber an outgoing
1211 argument. */
1213 CUMULATIVE_ARGS args_so_far_v;
1214 cumulative_args_t args_so_far;
1215 tree fn, arg;
1217 fn = emit_block_move_libcall_fn (false);
1218 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1219 args_so_far = pack_cumulative_args (&args_so_far_v);
1221 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1222 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1224 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1225 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1226 NULL_TREE, true);
1227 if (!tmp || !REG_P (tmp))
1228 return false;
1229 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1230 return false;
1231 targetm.calls.function_arg_advance (args_so_far, mode,
1232 NULL_TREE, true);
1235 return true;
1238 /* A subroutine of emit_block_move. Expand a movmem pattern;
1239 return true if successful. */
1241 static bool
1242 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1243 unsigned int expected_align, HOST_WIDE_INT expected_size,
1244 unsigned HOST_WIDE_INT min_size,
1245 unsigned HOST_WIDE_INT max_size,
1246 unsigned HOST_WIDE_INT probable_max_size)
1248 int save_volatile_ok = volatile_ok;
1249 machine_mode mode;
1251 if (expected_align < align)
1252 expected_align = align;
1253 if (expected_size != -1)
1255 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1256 expected_size = probable_max_size;
1257 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1258 expected_size = min_size;
1261 /* Since this is a move insn, we don't care about volatility. */
1262 volatile_ok = 1;
1264 /* Try the most limited insn first, because there's no point
1265 including more than one in the machine description unless
1266 the more limited one has some advantage. */
1268 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1269 mode = GET_MODE_WIDER_MODE (mode))
1271 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1273 if (code != CODE_FOR_nothing
1274 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1275 here because if SIZE is less than the mode mask, as it is
1276 returned by the macro, it will definitely be less than the
1277 actual mode mask. Since SIZE is within the Pmode address
1278 space, we limit MODE to Pmode. */
1279 && ((CONST_INT_P (size)
1280 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1281 <= (GET_MODE_MASK (mode) >> 1)))
1282 || max_size <= (GET_MODE_MASK (mode) >> 1)
1283 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1285 struct expand_operand ops[9];
1286 unsigned int nops;
1288 /* ??? When called via emit_block_move_for_call, it'd be
1289 nice if there were some way to inform the backend, so
1290 that it doesn't fail the expansion because it thinks
1291 emitting the libcall would be more efficient. */
1292 nops = insn_data[(int) code].n_generator_args;
1293 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1295 create_fixed_operand (&ops[0], x);
1296 create_fixed_operand (&ops[1], y);
1297 /* The check above guarantees that this size conversion is valid. */
1298 create_convert_operand_to (&ops[2], size, mode, true);
1299 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1300 if (nops >= 6)
1302 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1303 create_integer_operand (&ops[5], expected_size);
1305 if (nops >= 8)
1307 create_integer_operand (&ops[6], min_size);
 1308	      /* If we cannot represent the maximal size,
 1309		 make the parameter NULL.  */
1310 if ((HOST_WIDE_INT) max_size != -1)
1311 create_integer_operand (&ops[7], max_size);
1312 else
1313 create_fixed_operand (&ops[7], NULL);
1315 if (nops == 9)
 1317	      /* If we cannot represent the maximal size,
 1318		 make the parameter NULL.  */
1319 if ((HOST_WIDE_INT) probable_max_size != -1)
1320 create_integer_operand (&ops[8], probable_max_size);
1321 else
1322 create_fixed_operand (&ops[8], NULL);
1324 if (maybe_expand_insn (code, nops, ops))
1326 volatile_ok = save_volatile_ok;
1327 return true;
1332 volatile_ok = save_volatile_ok;
1333 return false;
1336 /* A subroutine of emit_block_move. Expand a call to memcpy.
1337 Return the return value from memcpy, 0 otherwise. */
1340 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1342 rtx dst_addr, src_addr;
1343 tree call_expr, fn, src_tree, dst_tree, size_tree;
1344 machine_mode size_mode;
1345 rtx retval;
1347 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1348 pseudos. We can then place those new pseudos into a VAR_DECL and
1349 use them later. */
1351 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1352 src_addr = copy_addr_to_reg (XEXP (src, 0));
1354 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1355 src_addr = convert_memory_address (ptr_mode, src_addr);
1357 dst_tree = make_tree (ptr_type_node, dst_addr);
1358 src_tree = make_tree (ptr_type_node, src_addr);
1360 size_mode = TYPE_MODE (sizetype);
1362 size = convert_to_mode (size_mode, size, 1);
1363 size = copy_to_mode_reg (size_mode, size);
1365 /* It is incorrect to use the libcall calling conventions to call
1366 memcpy in this context. This could be a user call to memcpy and
1367 the user may wish to examine the return value from memcpy. For
1368 targets where libcalls and normal calls have different conventions
1369 for returning pointers, we could end up generating incorrect code. */
1371 size_tree = make_tree (sizetype, size);
1373 fn = emit_block_move_libcall_fn (true);
1374 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1375 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1377 retval = expand_normal (call_expr);
1379 return retval;
1382 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1383 for the function we use for block copies. */
1385 static GTY(()) tree block_move_fn;
1387 void
1388 init_block_move_fn (const char *asmspec)
1390 if (!block_move_fn)
1392 tree args, fn, attrs, attr_args;
1394 fn = get_identifier ("memcpy");
1395 args = build_function_type_list (ptr_type_node, ptr_type_node,
1396 const_ptr_type_node, sizetype,
1397 NULL_TREE);
1399 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1400 DECL_EXTERNAL (fn) = 1;
1401 TREE_PUBLIC (fn) = 1;
1402 DECL_ARTIFICIAL (fn) = 1;
1403 TREE_NOTHROW (fn) = 1;
1404 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1405 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1407 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1408 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1410 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1412 block_move_fn = fn;
1415 if (asmspec)
1416 set_user_assembler_name (block_move_fn, asmspec);
1419 static tree
1420 emit_block_move_libcall_fn (int for_call)
1422 static bool emitted_extern;
1424 if (!block_move_fn)
1425 init_block_move_fn (NULL);
1427 if (for_call && !emitted_extern)
1429 emitted_extern = true;
1430 make_decl_rtl (block_move_fn);
1433 return block_move_fn;
1436 /* A subroutine of emit_block_move. Copy the data via an explicit
1437 loop. This is used only when libcalls are forbidden. */
1438 /* ??? It'd be nice to copy in hunks larger than QImode. */
1440 static void
1441 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1442 unsigned int align ATTRIBUTE_UNUSED)
1444 rtx_code_label *cmp_label, *top_label;
1445 rtx iter, x_addr, y_addr, tmp;
1446 machine_mode x_addr_mode = get_address_mode (x);
1447 machine_mode y_addr_mode = get_address_mode (y);
1448 machine_mode iter_mode;
1450 iter_mode = GET_MODE (size);
1451 if (iter_mode == VOIDmode)
1452 iter_mode = word_mode;
1454 top_label = gen_label_rtx ();
1455 cmp_label = gen_label_rtx ();
1456 iter = gen_reg_rtx (iter_mode);
1458 emit_move_insn (iter, const0_rtx);
1460 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1461 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1462 do_pending_stack_adjust ();
1464 emit_jump (cmp_label);
1465 emit_label (top_label);
1467 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1468 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1470 if (x_addr_mode != y_addr_mode)
1471 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1472 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1474 x = change_address (x, QImode, x_addr);
1475 y = change_address (y, QImode, y_addr);
1477 emit_move_insn (x, y);
1479 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1480 true, OPTAB_LIB_WIDEN);
1481 if (tmp != iter)
1482 emit_move_insn (iter, tmp);
1484 emit_label (cmp_label);
1486 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1487 true, top_label, REG_BR_PROB_BASE * 90 / 100);
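/* Illustrative sketch, not part of the original file: conceptually, the
   RTL emitted above is the following byte-copy loop, with the
   comparison placed at the bottom.  X, Y and SIZE stand for byte
   pointers and a count, not the rtx arguments.  */
#if 0
  iter = 0;
  goto cmp;
 top:
  x[iter] = y[iter];
  iter += 1;
 cmp:
  if (iter < size)
    goto top;
#endif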
1490 /* Copy all or part of a value X into registers starting at REGNO.
1491 The number of registers to be filled is NREGS. */
1493 void
1494 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
1496 int i;
1497 rtx pat;
1498 rtx_insn *last;
1500 if (nregs == 0)
1501 return;
1503 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1504 x = validize_mem (force_const_mem (mode, x));
1506 /* See if the machine can do this with a load multiple insn. */
1507 if (HAVE_load_multiple)
1509 last = get_last_insn ();
1510 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1511 GEN_INT (nregs));
1512 if (pat)
1514 emit_insn (pat);
1515 return;
1517 else
1518 delete_insns_since (last);
1521 for (i = 0; i < nregs; i++)
1522 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1523 operand_subword_force (x, i, mode));
1526 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1527 The number of registers to be filled is NREGS. */
1529 void
1530 move_block_from_reg (int regno, rtx x, int nregs)
1532 int i;
1534 if (nregs == 0)
1535 return;
1537 /* See if the machine can do this with a store multiple insn. */
1538 if (HAVE_store_multiple)
1540 rtx_insn *last = get_last_insn ();
1541 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1542 GEN_INT (nregs));
1543 if (pat)
1545 emit_insn (pat);
1546 return;
1548 else
1549 delete_insns_since (last);
1552 for (i = 0; i < nregs; i++)
1554 rtx tem = operand_subword (x, i, 1, BLKmode);
1556 gcc_assert (tem);
1558 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1562 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1563 ORIG, where ORIG is a non-consecutive group of registers represented by
1564 a PARALLEL. The clone is identical to the original except in that the
1565 original set of registers is replaced by a new set of pseudo registers.
1566 The new set has the same modes as the original set. */
1569 gen_group_rtx (rtx orig)
1571 int i, length;
1572 rtx *tmps;
1574 gcc_assert (GET_CODE (orig) == PARALLEL);
1576 length = XVECLEN (orig, 0);
1577 tmps = XALLOCAVEC (rtx, length);
1579 /* Skip a NULL entry in first slot. */
1580 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1582 if (i)
1583 tmps[0] = 0;
1585 for (; i < length; i++)
1587 machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1588 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1590 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1593 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
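/* Illustrative example, not part of the original file: a value spread
   over two hard registers might be described by a PARALLEL such as
     (parallel [(expr_list (reg:DI 0) (const_int 0))
                (expr_list (reg:DI 1) (const_int 8))])
   where each EXPR_LIST pairs a register with its byte offset in the
   value.  gen_group_rtx returns the same shape with each hard register
   replaced by a fresh pseudo of the same mode.  */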
1596 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1597 except that values are placed in TMPS[i], and must later be moved
1598 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1600 static void
1601 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1603 rtx src;
1604 int start, i;
1605 machine_mode m = GET_MODE (orig_src);
1607 gcc_assert (GET_CODE (dst) == PARALLEL);
1609 if (m != VOIDmode
1610 && !SCALAR_INT_MODE_P (m)
1611 && !MEM_P (orig_src)
1612 && GET_CODE (orig_src) != CONCAT)
1614 machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1615 if (imode == BLKmode)
1616 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1617 else
1618 src = gen_reg_rtx (imode);
1619 if (imode != BLKmode)
1620 src = gen_lowpart (GET_MODE (orig_src), src);
1621 emit_move_insn (src, orig_src);
1622 /* ...and back again. */
1623 if (imode != BLKmode)
1624 src = gen_lowpart (imode, src);
1625 emit_group_load_1 (tmps, dst, src, type, ssize);
1626 return;
1629 /* Check for a NULL entry, used to indicate that the parameter goes
1630 both on the stack and in registers. */
1631 if (XEXP (XVECEXP (dst, 0, 0), 0))
1632 start = 0;
1633 else
1634 start = 1;
1636 /* Process the pieces. */
1637 for (i = start; i < XVECLEN (dst, 0); i++)
1639 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1640 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1641 unsigned int bytelen = GET_MODE_SIZE (mode);
1642 int shift = 0;
1644 /* Handle trailing fragments that run over the size of the struct. */
1645 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1647 /* Arrange to shift the fragment to where it belongs.
1648 extract_bit_field loads to the lsb of the reg. */
1649 if (
1650 #ifdef BLOCK_REG_PADDING
1651 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1652 == (BYTES_BIG_ENDIAN ? upward : downward)
1653 #else
1654 BYTES_BIG_ENDIAN
1655 #endif
1657 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1658 bytelen = ssize - bytepos;
1659 gcc_assert (bytelen > 0);
1662 /* If we won't be loading directly from memory, protect the real source
1663 from strange tricks we might play; but make sure that the source can
1664 be loaded directly into the destination. */
1665 src = orig_src;
1666 if (!MEM_P (orig_src)
1667 && (!CONSTANT_P (orig_src)
1668 || (GET_MODE (orig_src) != mode
1669 && GET_MODE (orig_src) != VOIDmode)))
1671 if (GET_MODE (orig_src) == VOIDmode)
1672 src = gen_reg_rtx (mode);
1673 else
1674 src = gen_reg_rtx (GET_MODE (orig_src));
1676 emit_move_insn (src, orig_src);
1679 /* Optimize the access just a bit. */
1680 if (MEM_P (src)
1681 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1682 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1683 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1684 && bytelen == GET_MODE_SIZE (mode))
1686 tmps[i] = gen_reg_rtx (mode);
1687 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1689 else if (COMPLEX_MODE_P (mode)
1690 && GET_MODE (src) == mode
1691 && bytelen == GET_MODE_SIZE (mode))
1692 /* Let emit_move_complex do the bulk of the work. */
1693 tmps[i] = src;
1694 else if (GET_CODE (src) == CONCAT)
1696 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1697 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1699 if ((bytepos == 0 && bytelen == slen0)
1700 || (bytepos != 0 && bytepos + bytelen <= slen))
1702 /* The following assumes that the concatenated objects all
1703 have the same size. In this case, a simple calculation
1704 can be used to determine the object and the bit field
1705 to be extracted. */
1706 tmps[i] = XEXP (src, bytepos / slen0);
1707 if (! CONSTANT_P (tmps[i])
1708 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1709 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1710 (bytepos % slen0) * BITS_PER_UNIT,
1711 1, NULL_RTX, mode, mode);
1713 else
1715 rtx mem;
1717 gcc_assert (!bytepos);
1718 mem = assign_stack_temp (GET_MODE (src), slen);
1719 emit_move_insn (mem, src);
1720 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1721 0, 1, NULL_RTX, mode, mode);
1724 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1725 SIMD register, which is currently broken. While we get GCC
1726 to emit proper RTL for these cases, let's dump to memory. */
1727 else if (VECTOR_MODE_P (GET_MODE (dst))
1728 && REG_P (src))
1730 int slen = GET_MODE_SIZE (GET_MODE (src));
1731 rtx mem;
1733 mem = assign_stack_temp (GET_MODE (src), slen);
1734 emit_move_insn (mem, src);
1735 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1737 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1738 && XVECLEN (dst, 0) > 1)
1739 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1740 else if (CONSTANT_P (src))
1742 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1744 if (len == ssize)
1745 tmps[i] = src;
1746 else
1748 rtx first, second;
1750 /* TODO: const_wide_int can have sizes other than this... */
1751 gcc_assert (2 * len == ssize);
1752 split_double (src, &first, &second);
1753 if (i)
1754 tmps[i] = second;
1755 else
1756 tmps[i] = first;
1759 else if (REG_P (src) && GET_MODE (src) == mode)
1760 tmps[i] = src;
1761 else
1762 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1763 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1764 mode, mode);
1766 if (shift)
1767 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1768 shift, tmps[i], 0);
1772 /* Emit code to move a block SRC of type TYPE to a block DST,
1773 where DST is non-consecutive registers represented by a PARALLEL.
1774 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1775 if not known. */
1777 void
1778 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1780 rtx *tmps;
1781 int i;
1783 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1784 emit_group_load_1 (tmps, dst, src, type, ssize);
1786 /* Copy the extracted pieces into the proper (probable) hard regs. */
1787 for (i = 0; i < XVECLEN (dst, 0); i++)
1789 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1790 if (d == NULL)
1791 continue;
1792 emit_move_insn (d, tmps[i]);
1796 /* Similar, but load SRC into new pseudos in a format that looks like
1797 PARALLEL. This can later be fed to emit_group_move to get things
1798 in the right place. */
1801 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1803 rtvec vec;
1804 int i;
1806 vec = rtvec_alloc (XVECLEN (parallel, 0));
1807 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1809 /* Convert the vector to look just like the original PARALLEL, except
1810 with the computed values. */
1811 for (i = 0; i < XVECLEN (parallel, 0); i++)
1813 rtx e = XVECEXP (parallel, 0, i);
1814 rtx d = XEXP (e, 0);
1816 if (d)
1818 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1819 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1821 RTVEC_ELT (vec, i) = e;
1824 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1827 /* Emit code to move a block SRC to block DST, where SRC and DST are
1828 non-consecutive groups of registers, each represented by a PARALLEL. */
1830 void
1831 emit_group_move (rtx dst, rtx src)
1833 int i;
1835 gcc_assert (GET_CODE (src) == PARALLEL
1836 && GET_CODE (dst) == PARALLEL
1837 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1839 /* Skip first entry if NULL. */
1840 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1841 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1842 XEXP (XVECEXP (src, 0, i), 0));
1845 /* Move a group of registers represented by a PARALLEL into pseudos. */
1848 emit_group_move_into_temps (rtx src)
1850 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1851 int i;
1853 for (i = 0; i < XVECLEN (src, 0); i++)
1855 rtx e = XVECEXP (src, 0, i);
1856 rtx d = XEXP (e, 0);
1858 if (d)
1859 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1860 RTVEC_ELT (vec, i) = e;
1863 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1866 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1867 where SRC is non-consecutive registers represented by a PARALLEL.
1868 SSIZE represents the total size of block ORIG_DST, or -1 if not
1869 known. */
1871 void
1872 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1874 rtx *tmps, dst;
1875 int start, finish, i;
1876 machine_mode m = GET_MODE (orig_dst);
1878 gcc_assert (GET_CODE (src) == PARALLEL);
1880 if (!SCALAR_INT_MODE_P (m)
1881 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1883 machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1884 if (imode == BLKmode)
1885 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1886 else
1887 dst = gen_reg_rtx (imode);
1888 emit_group_store (dst, src, type, ssize);
1889 if (imode != BLKmode)
1890 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1891 emit_move_insn (orig_dst, dst);
1892 return;
1895 /* Check for a NULL entry, used to indicate that the parameter goes
1896 both on the stack and in registers. */
1897 if (XEXP (XVECEXP (src, 0, 0), 0))
1898 start = 0;
1899 else
1900 start = 1;
1901 finish = XVECLEN (src, 0);
1903 tmps = XALLOCAVEC (rtx, finish);
1905 /* Copy the (probable) hard regs into pseudos. */
1906 for (i = start; i < finish; i++)
1908 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1909 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1911 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1912 emit_move_insn (tmps[i], reg);
1914 else
1915 tmps[i] = reg;
1918 /* If we won't be storing directly into memory, protect the real destination
1919 from strange tricks we might play. */
1920 dst = orig_dst;
1921 if (GET_CODE (dst) == PARALLEL)
1923 rtx temp;
1925 /* We can get a PARALLEL dst if there is a conditional expression in
1926 a return statement. In that case, the dst and src are the same,
1927 so no action is necessary. */
1928 if (rtx_equal_p (dst, src))
1929 return;
1931 /* It is unclear if we can ever reach here, but we may as well handle
1932 it. Allocate a temporary, and split this into a store/load to/from
1933 the temporary. */
1934 temp = assign_stack_temp (GET_MODE (dst), ssize);
1935 emit_group_store (temp, src, type, ssize);
1936 emit_group_load (dst, temp, type, ssize);
1937 return;
1939 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1941 machine_mode outer = GET_MODE (dst);
1942 machine_mode inner;
1943 HOST_WIDE_INT bytepos;
1944 bool done = false;
1945 rtx temp;
1947 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1948 dst = gen_reg_rtx (outer);
1950 /* Make life a bit easier for combine. */
1951 /* If the first element of the vector is the low part
1952 of the destination mode, use a paradoxical subreg to
1953 initialize the destination. */
1954 if (start < finish)
1956 inner = GET_MODE (tmps[start]);
1957 bytepos = subreg_lowpart_offset (inner, outer);
1958 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1960 temp = simplify_gen_subreg (outer, tmps[start],
1961 inner, 0);
1962 if (temp)
1964 emit_move_insn (dst, temp);
1965 done = true;
1966 start++;
1971 /* If the first element wasn't the low part, try the last. */
1972 if (!done
1973 && start < finish - 1)
1975 inner = GET_MODE (tmps[finish - 1]);
1976 bytepos = subreg_lowpart_offset (inner, outer);
1977 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1979 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1980 inner, 0);
1981 if (temp)
1983 emit_move_insn (dst, temp);
1984 done = true;
1985 finish--;
1990 /* Otherwise, simply initialize the result to zero. */
1991 if (!done)
1992 emit_move_insn (dst, CONST0_RTX (outer));
1995 /* Process the pieces. */
1996 for (i = start; i < finish; i++)
1998 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1999 machine_mode mode = GET_MODE (tmps[i]);
2000 unsigned int bytelen = GET_MODE_SIZE (mode);
2001 unsigned int adj_bytelen;
2002 rtx dest = dst;
2004 /* Handle trailing fragments that run over the size of the struct. */
2005 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2006 adj_bytelen = ssize - bytepos;
2007 else
2008 adj_bytelen = bytelen;
2010 if (GET_CODE (dst) == CONCAT)
2012 if (bytepos + adj_bytelen
2013 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2014 dest = XEXP (dst, 0);
2015 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2017 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2018 dest = XEXP (dst, 1);
2020 else
2022 machine_mode dest_mode = GET_MODE (dest);
2023 machine_mode tmp_mode = GET_MODE (tmps[i]);
2025 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2027 if (GET_MODE_ALIGNMENT (dest_mode)
2028 >= GET_MODE_ALIGNMENT (tmp_mode))
2030 dest = assign_stack_temp (dest_mode,
2031 GET_MODE_SIZE (dest_mode));
2032 emit_move_insn (adjust_address (dest,
2033 tmp_mode,
2034 bytepos),
2035 tmps[i]);
2036 dst = dest;
2038 else
2040 dest = assign_stack_temp (tmp_mode,
2041 GET_MODE_SIZE (tmp_mode));
2042 emit_move_insn (dest, tmps[i]);
2043 dst = adjust_address (dest, dest_mode, bytepos);
2045 break;
2049 /* Handle trailing fragments that run over the size of the struct. */
2050 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2052 /* store_bit_field always takes its value from the lsb.
2053 Move the fragment to the lsb if it's not already there. */
2054 if (
2055 #ifdef BLOCK_REG_PADDING
2056 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2057 == (BYTES_BIG_ENDIAN ? upward : downward)
2058 #else
2059 BYTES_BIG_ENDIAN
2060 #endif
2063 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2064 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2065 shift, tmps[i], 0);
2068 /* Make sure not to write past the end of the struct. */
2069 store_bit_field (dest,
2070 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2071 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2072 VOIDmode, tmps[i]);
2075 /* Optimize the access just a bit. */
2076 else if (MEM_P (dest)
2077 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2078 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2079 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2080 && bytelen == GET_MODE_SIZE (mode))
2081 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2083 else
2084 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2085 0, 0, mode, tmps[i]);
2088 /* Copy from the pseudo into the (probable) hard reg. */
2089 if (orig_dst != dst)
2090 emit_move_insn (orig_dst, dst);
2093 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2094 of the value stored in X. */
2097 maybe_emit_group_store (rtx x, tree type)
2099 machine_mode mode = TYPE_MODE (type);
2100 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2101 if (GET_CODE (x) == PARALLEL)
2103 rtx result = gen_reg_rtx (mode);
2104 emit_group_store (result, x, type, int_size_in_bytes (type));
2105 return result;
2107 return x;
2110 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2112 This is used on targets that return BLKmode values in registers. */
2114 void
2115 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2117 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2118 rtx src = NULL, dst = NULL;
2119 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2120 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2121 machine_mode mode = GET_MODE (srcreg);
2122 machine_mode tmode = GET_MODE (target);
2123 machine_mode copy_mode;
2125 /* BLKmode registers created in the back-end shouldn't have survived. */
2126 gcc_assert (mode != BLKmode);
2128 /* If the structure doesn't take up a whole number of words, see whether
2129 SRCREG is padded on the left or on the right. If it's on the left,
2130 set PADDING_CORRECTION to the number of bits to skip.
2132 In most ABIs, the structure will be returned at the least significant end of
2133 the register, which translates to right padding on little-endian
2134 targets and left padding on big-endian targets. The opposite
2135 holds if the structure is returned at the most significant
2136 end of the register. */
2137 if (bytes % UNITS_PER_WORD != 0
2138 && (targetm.calls.return_in_msb (type)
2139 ? !BYTES_BIG_ENDIAN
2140 : BYTES_BIG_ENDIAN))
2141 padding_correction
2142 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2144 /* We can use a single move if we have an exact mode for the size. */
2145 else if (MEM_P (target)
2146 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2147 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2148 && bytes == GET_MODE_SIZE (mode))
2150 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2151 return;
2154 /* And if we additionally have the same mode for a register. */
2155 else if (REG_P (target)
2156 && GET_MODE (target) == mode
2157 && bytes == GET_MODE_SIZE (mode))
2159 emit_move_insn (target, srcreg);
2160 return;
2163 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2164 into a new pseudo which is a full word. */
2165 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2167 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2168 mode = word_mode;
2171 /* Copy the structure BITSIZE bits at a time. If the target lives in
2172 memory, take care of not reading/writing past its end by selecting
2173 a copy mode suited to BITSIZE. This should always be possible given
2174 how it is computed.
2176 If the target lives in register, make sure not to select a copy mode
2177 larger than the mode of the register.
2179 We could probably emit more efficient code for machines which do not use
2180 strict alignment, but it doesn't seem worth the effort at the current
2181 time. */
2183 copy_mode = word_mode;
2184 if (MEM_P (target))
2186 machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2187 if (mem_mode != BLKmode)
2188 copy_mode = mem_mode;
2190 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2191 copy_mode = tmode;
2193 for (bitpos = 0, xbitpos = padding_correction;
2194 bitpos < bytes * BITS_PER_UNIT;
2195 bitpos += bitsize, xbitpos += bitsize)
2197 /* We need a new source operand each time xbitpos is on a
2198 word boundary and when xbitpos == padding_correction
2199 (the first time through). */
2200 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2201 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2203 /* We need a new destination operand each time bitpos is on
2204 a word boundary. */
2205 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2206 dst = target;
2207 else if (bitpos % BITS_PER_WORD == 0)
2208 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2210 /* Use xbitpos for the source extraction (right justified) and
2211 bitpos for the destination store (left justified). */
2212 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2213 extract_bit_field (src, bitsize,
2214 xbitpos % BITS_PER_WORD, 1,
2215 NULL_RTX, copy_mode, copy_mode));
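
The padding-correction formula used above, BITS_PER_WORD - (bytes % UNITS_PER_WORD) * BITS_PER_UNIT, is easy to check numerically. Here is a minimal standalone sketch, assuming a 32-bit word; the helper name is made up for illustration.

#include <stdio.h>

#define UNITS_PER_WORD 4                      /* assumed 32-bit word */
#define BITS_PER_UNIT  8
#define BITS_PER_WORD  (UNITS_PER_WORD * BITS_PER_UNIT)

/* Hypothetical helper mirroring the formula above: number of
   left-padding bits to skip when a BYTES-sized value is justified
   toward the most significant end of its last word.  */
static unsigned
padding_correction (unsigned bytes)
{
  if (bytes % UNITS_PER_WORD == 0)
    return 0;
  return BITS_PER_WORD - (bytes % UNITS_PER_WORD) * BITS_PER_UNIT;
}

int
main (void)
{
  /* A 6-byte struct on a 32-bit target: the last word holds 2 bytes,
     so 16 bits of padding precede the data when it is left-padded.  */
  printf ("%u\n", padding_correction (6));   /* prints 16 */
  return 0;
}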
2219 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2220 register if it contains any data, otherwise return null.
2222 This is used on targets that return BLKmode values in registers. */
2225 copy_blkmode_to_reg (machine_mode mode, tree src)
2227 int i, n_regs;
2228 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2229 unsigned int bitsize;
2230 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2231 machine_mode dst_mode;
2233 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2235 x = expand_normal (src);
2237 bytes = int_size_in_bytes (TREE_TYPE (src));
2238 if (bytes == 0)
2239 return NULL_RTX;
2241 /* If the structure doesn't take up a whole number of words, see
2242 whether the register value should be padded on the left or on
2243 the right. Set PADDING_CORRECTION to the number of padding
2244 bits needed on the left side.
2246 In most ABIs, the structure will be returned at the least significant end of
2247 the register, which translates to right padding on little-endian
2248 targets and left padding on big-endian targets. The opposite
2249 holds if the structure is returned at the most significant
2250 end of the register. */
2251 if (bytes % UNITS_PER_WORD != 0
2252 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2253 ? !BYTES_BIG_ENDIAN
2254 : BYTES_BIG_ENDIAN))
2255 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2256 * BITS_PER_UNIT));
2258 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2259 dst_words = XALLOCAVEC (rtx, n_regs);
2260 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2262 /* Copy the structure BITSIZE bits at a time. */
2263 for (bitpos = 0, xbitpos = padding_correction;
2264 bitpos < bytes * BITS_PER_UNIT;
2265 bitpos += bitsize, xbitpos += bitsize)
2267 /* We need a new destination pseudo each time xbitpos is
2268 on a word boundary and when xbitpos == padding_correction
2269 (the first time through). */
2270 if (xbitpos % BITS_PER_WORD == 0
2271 || xbitpos == padding_correction)
2273 /* Generate an appropriate register. */
2274 dst_word = gen_reg_rtx (word_mode);
2275 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2277 /* Clear the destination before we move anything into it. */
2278 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2281 /* We need a new source operand each time bitpos is on a word
2282 boundary. */
2283 if (bitpos % BITS_PER_WORD == 0)
2284 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2286 /* Use bitpos for the source extraction (left justified) and
2287 xbitpos for the destination store (right justified). */
2288 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2289 0, 0, word_mode,
2290 extract_bit_field (src_word, bitsize,
2291 bitpos % BITS_PER_WORD, 1,
2292 NULL_RTX, word_mode, word_mode));
2295 if (mode == BLKmode)
2297 /* Find the smallest integer mode large enough to hold the
2298 entire structure. */
2299 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2300 mode != VOIDmode;
2301 mode = GET_MODE_WIDER_MODE (mode))
2302 /* Have we found a large enough mode? */
2303 if (GET_MODE_SIZE (mode) >= bytes)
2304 break;
2306 /* A suitable mode should have been found. */
2307 gcc_assert (mode != VOIDmode);
2310 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2311 dst_mode = word_mode;
2312 else
2313 dst_mode = mode;
2314 dst = gen_reg_rtx (dst_mode);
2316 for (i = 0; i < n_regs; i++)
2317 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2319 if (mode != dst_mode)
2320 dst = gen_lowpart (mode, dst);
2322 return dst;
2325 /* Add a USE expression for REG to the (possibly empty) list pointed
2326 to by CALL_FUSAGE. REG must denote a hard register. */
2328 void
2329 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2331 gcc_assert (REG_P (reg));
2333 if (!HARD_REGISTER_P (reg))
2334 return;
2336 *call_fusage
2337 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2340 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2341 to by CALL_FUSAGE. REG must denote a hard register. */
2343 void
2344 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2346 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2348 *call_fusage
2349 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2352 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2353 starting at REGNO. All of these registers must be hard registers. */
2355 void
2356 use_regs (rtx *call_fusage, int regno, int nregs)
2358 int i;
2360 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2362 for (i = 0; i < nregs; i++)
2363 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2366 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2367 PARALLEL REGS. This is for calls that pass values in multiple
2368 non-contiguous locations. The Irix 6 ABI has examples of this. */
2370 void
2371 use_group_regs (rtx *call_fusage, rtx regs)
2373 int i;
2375 for (i = 0; i < XVECLEN (regs, 0); i++)
2377 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2379 /* A NULL entry means the parameter goes both on the stack and in
2380 registers. This can also be a MEM for targets that pass values
2381 partially on the stack and partially in registers. */
2382 if (reg != 0 && REG_P (reg))
2383 use_reg (call_fusage, reg);
2387 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2388 assignment and the code of the expression on the RHS is CODE. Return
2389 NULL otherwise. */
2391 static gimple
2392 get_def_for_expr (tree name, enum tree_code code)
2394 gimple def_stmt;
2396 if (TREE_CODE (name) != SSA_NAME)
2397 return NULL;
2399 def_stmt = get_gimple_for_ssa_name (name);
2400 if (!def_stmt
2401 || gimple_assign_rhs_code (def_stmt) != code)
2402 return NULL;
2404 return def_stmt;
2407 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2408 assignment and the class of the expression on the RHS is CLASS. Return
2409 NULL otherwise. */
2411 static gimple
2412 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2414 gimple def_stmt;
2416 if (TREE_CODE (name) != SSA_NAME)
2417 return NULL;
2419 def_stmt = get_gimple_for_ssa_name (name);
2420 if (!def_stmt
2421 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2422 return NULL;
2424 return def_stmt;
2428 /* Determine whether the LEN bytes generated by CONSTFUN can be
2429 stored to memory using several move instructions. CONSTFUNDATA is
2430 a pointer which will be passed as argument in every CONSTFUN call.
2431 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2432 a memset operation and false if it's a copy of a constant string.
2433 Return nonzero if a call to store_by_pieces should succeed. */
2436 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2437 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2438 void *constfundata, unsigned int align, bool memsetp)
2440 unsigned HOST_WIDE_INT l;
2441 unsigned int max_size;
2442 HOST_WIDE_INT offset = 0;
2443 machine_mode mode;
2444 enum insn_code icode;
2445 int reverse;
2446 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2447 rtx cst ATTRIBUTE_UNUSED;
2449 if (len == 0)
2450 return 1;
2452 if (!targetm.use_by_pieces_infrastructure_p (len, align,
2453 memsetp
2454 ? SET_BY_PIECES
2455 : STORE_BY_PIECES,
2456 optimize_insn_for_speed_p ()))
2457 return 0;
2459 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2461 /* We would first store what we can in the largest integer mode, then go to
2462 successively smaller modes. */
2464 for (reverse = 0;
2465 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2466 reverse++)
2468 l = len;
2469 max_size = STORE_MAX_PIECES + 1;
2470 while (max_size > 1 && l > 0)
2472 mode = widest_int_mode_for_size (max_size);
2474 if (mode == VOIDmode)
2475 break;
2477 icode = optab_handler (mov_optab, mode);
2478 if (icode != CODE_FOR_nothing
2479 && align >= GET_MODE_ALIGNMENT (mode))
2481 unsigned int size = GET_MODE_SIZE (mode);
2483 while (l >= size)
2485 if (reverse)
2486 offset -= size;
2488 cst = (*constfun) (constfundata, offset, mode);
2489 if (!targetm.legitimate_constant_p (mode, cst))
2490 return 0;
2492 if (!reverse)
2493 offset += size;
2495 l -= size;
2499 max_size = GET_MODE_SIZE (mode);
2502 /* The code above should have handled everything. */
2503 gcc_assert (!l);
2506 return 1;
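
can_store_by_pieces and store_by_pieces_1 both walk from the widest usable integer mode downward, peeling off as many full-size pieces as fit before dropping to the next size. A minimal standalone sketch of that greedy decomposition follows; it assumes power-of-two piece sizes up to 8 bytes and deliberately omits the alignment checks and target hooks.

#include <stdio.h>

/* Greedy largest-first split of LEN bytes into power-of-two pieces,
   the same loop shape used by the *_by_pieces routines.  Alignment
   and target-hook checks are omitted for brevity.  */
static void
split_by_pieces (unsigned long len)
{
  unsigned long offset = 0;
  for (unsigned size = 8; size >= 1; size /= 2)
    while (len >= size)
      {
        printf ("store %u byte(s) at offset %lu\n", size, offset);
        offset += size;
        len -= size;
      }
}

int
main (void)
{
  split_by_pieces (13);   /* emits pieces of 8, 4 and 1 bytes */
  return 0;
}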
2509 /* Generate several move instructions to store LEN bytes generated by
2510 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2511 pointer which will be passed as argument in every CONSTFUN call.
2512 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2513 a memset operation and false if it's a copy of a constant string.
2514 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2515 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2516 stpcpy. */
2519 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2520 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2521 void *constfundata, unsigned int align, bool memsetp, int endp)
2523 machine_mode to_addr_mode = get_address_mode (to);
2524 struct store_by_pieces_d data;
2526 if (len == 0)
2528 gcc_assert (endp != 2);
2529 return to;
2532 gcc_assert (targetm.use_by_pieces_infrastructure_p
2533 (len, align,
2534 memsetp
2535 ? SET_BY_PIECES
2536 : STORE_BY_PIECES,
2537 optimize_insn_for_speed_p ()));
2539 data.constfun = constfun;
2540 data.constfundata = constfundata;
2541 data.len = len;
2542 data.to = to;
2543 store_by_pieces_1 (&data, align);
2544 if (endp)
2546 rtx to1;
2548 gcc_assert (!data.reverse);
2549 if (data.autinc_to)
2551 if (endp == 2)
2553 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2554 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2555 else
2556 data.to_addr = copy_to_mode_reg (to_addr_mode,
2557 plus_constant (to_addr_mode,
2558 data.to_addr,
2559 -1));
2561 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2562 data.offset);
2564 else
2566 if (endp == 2)
2567 --data.offset;
2568 to1 = adjust_address (data.to, QImode, data.offset);
2570 return to1;
2572 else
2573 return data.to;
2576 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2577 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2579 static void
2580 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2582 struct store_by_pieces_d data;
2584 if (len == 0)
2585 return;
2587 data.constfun = clear_by_pieces_1;
2588 data.constfundata = NULL;
2589 data.len = len;
2590 data.to = to;
2591 store_by_pieces_1 (&data, align);
2594 /* Callback routine for clear_by_pieces.
2595 Return const0_rtx unconditionally. */
2597 static rtx
2598 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2599 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2600 machine_mode mode ATTRIBUTE_UNUSED)
2602 return const0_rtx;
2605 /* Subroutine of clear_by_pieces and store_by_pieces.
2606 Generate several move instructions to store LEN bytes of block TO. (A MEM
2607 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2609 static void
2610 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2611 unsigned int align ATTRIBUTE_UNUSED)
2613 machine_mode to_addr_mode = get_address_mode (data->to);
2614 rtx to_addr = XEXP (data->to, 0);
2615 unsigned int max_size = STORE_MAX_PIECES + 1;
2616 enum insn_code icode;
2618 data->offset = 0;
2619 data->to_addr = to_addr;
2620 data->autinc_to
2621 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2622 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2624 data->explicit_inc_to = 0;
2625 data->reverse
2626 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2627 if (data->reverse)
2628 data->offset = data->len;
2630 /* If storing requires more than two move insns,
2631 copy addresses to registers (to make displacements shorter)
2632 and use post-increment if available. */
2633 if (!data->autinc_to
2634 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2636 /* Determine the main mode we'll be using.
2637 MODE might not be used depending on the definitions of the
2638 USE_* macros below. */
2639 machine_mode mode ATTRIBUTE_UNUSED
2640 = widest_int_mode_for_size (max_size);
2642 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2644 data->to_addr = copy_to_mode_reg (to_addr_mode,
2645 plus_constant (to_addr_mode,
2646 to_addr,
2647 data->len));
2648 data->autinc_to = 1;
2649 data->explicit_inc_to = -1;
2652 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2653 && ! data->autinc_to)
2655 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2656 data->autinc_to = 1;
2657 data->explicit_inc_to = 1;
2660 if ( !data->autinc_to && CONSTANT_P (to_addr))
2661 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2664 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2666 /* First store what we can in the largest integer mode, then go to
2667 successively smaller modes. */
2669 while (max_size > 1 && data->len > 0)
2671 machine_mode mode = widest_int_mode_for_size (max_size);
2673 if (mode == VOIDmode)
2674 break;
2676 icode = optab_handler (mov_optab, mode);
2677 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2678 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2680 max_size = GET_MODE_SIZE (mode);
2683 /* The code above should have handled everything. */
2684 gcc_assert (!data->len);
2687 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2688 with move instructions for mode MODE. GENFUN is the gen_... function
2689 to make a move insn for that mode. DATA has all the other info. */
2691 static void
2692 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2693 struct store_by_pieces_d *data)
2695 unsigned int size = GET_MODE_SIZE (mode);
2696 rtx to1, cst;
2698 while (data->len >= size)
2700 if (data->reverse)
2701 data->offset -= size;
2703 if (data->autinc_to)
2704 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2705 data->offset);
2706 else
2707 to1 = adjust_address (data->to, mode, data->offset);
2709 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2710 emit_insn (gen_add2_insn (data->to_addr,
2711 gen_int_mode (-(HOST_WIDE_INT) size,
2712 GET_MODE (data->to_addr))));
2714 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2715 emit_insn ((*genfun) (to1, cst));
2717 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2718 emit_insn (gen_add2_insn (data->to_addr,
2719 gen_int_mode (size,
2720 GET_MODE (data->to_addr))));
2722 if (! data->reverse)
2723 data->offset += size;
2725 data->len -= size;
2729 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2730 its length in bytes. */
2733 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2734 unsigned int expected_align, HOST_WIDE_INT expected_size,
2735 unsigned HOST_WIDE_INT min_size,
2736 unsigned HOST_WIDE_INT max_size,
2737 unsigned HOST_WIDE_INT probable_max_size)
2739 machine_mode mode = GET_MODE (object);
2740 unsigned int align;
2742 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2744 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2745 just move a zero. Otherwise, do this a piece at a time. */
2746 if (mode != BLKmode
2747 && CONST_INT_P (size)
2748 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2750 rtx zero = CONST0_RTX (mode);
2751 if (zero != NULL)
2753 emit_move_insn (object, zero);
2754 return NULL;
2757 if (COMPLEX_MODE_P (mode))
2759 zero = CONST0_RTX (GET_MODE_INNER (mode));
2760 if (zero != NULL)
2762 write_complex_part (object, zero, 0);
2763 write_complex_part (object, zero, 1);
2764 return NULL;
2769 if (size == const0_rtx)
2770 return NULL;
2772 align = MEM_ALIGN (object);
2774 if (CONST_INT_P (size)
2775 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2776 CLEAR_BY_PIECES,
2777 optimize_insn_for_speed_p ()))
2778 clear_by_pieces (object, INTVAL (size), align);
2779 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2780 expected_align, expected_size,
2781 min_size, max_size, probable_max_size))
2783 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2784 return set_storage_via_libcall (object, size, const0_rtx,
2785 method == BLOCK_OP_TAILCALL);
2786 else
2787 gcc_unreachable ();
2789 return NULL;
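
clear_storage_hints tries three strategies in order: clearing by pieces inline, a target setmem pattern, and finally a memset libcall. The standalone sketch below mirrors only the first and last steps of that cascade; the 16-byte threshold and the function name are made up (GCC asks the target via use_by_pieces_infrastructure_p instead).

#include <string.h>
#include <stdio.h>

/* Hypothetical illustration of the strategy cascade in
   clear_storage_hints: clear small fixed-size blocks with inline
   stores, fall back to memset otherwise.  */
static void
clear_block (void *p, size_t n)
{
  if (n <= 16)
    {
      unsigned char *q = p;
      for (size_t i = 0; i < n; i++)   /* "by pieces" stand-in */
        q[i] = 0;
    }
  else
    memset (p, 0, n);                  /* libcall fallback */
}

int
main (void)
{
  char buf[64] = "abc";
  clear_block (buf, sizeof buf);
  printf ("%d\n", buf[0]);             /* prints 0 */
  return 0;
}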
2793 clear_storage (rtx object, rtx size, enum block_op_methods method)
2795 unsigned HOST_WIDE_INT max, min = 0;
2796 if (GET_CODE (size) == CONST_INT)
2797 min = max = UINTVAL (size);
2798 else
2799 max = GET_MODE_MASK (GET_MODE (size));
2800 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
2804 /* A subroutine of clear_storage. Expand a call to memset.
2805 Return the return value of memset, 0 otherwise. */
2808 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2810 tree call_expr, fn, object_tree, size_tree, val_tree;
2811 machine_mode size_mode;
2812 rtx retval;
2814 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2815 place those new pseudos into a VAR_DECL and use them later. */
2817 object = copy_addr_to_reg (XEXP (object, 0));
2819 size_mode = TYPE_MODE (sizetype);
2820 size = convert_to_mode (size_mode, size, 1);
2821 size = copy_to_mode_reg (size_mode, size);
2823 /* It is incorrect to use the libcall calling conventions to call
2824 memset in this context. This could be a user call to memset and
2825 the user may wish to examine the return value from memset. For
2826 targets where libcalls and normal calls have different conventions
2827 for returning pointers, we could end up generating incorrect code. */
2829 object_tree = make_tree (ptr_type_node, object);
2830 if (!CONST_INT_P (val))
2831 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2832 size_tree = make_tree (sizetype, size);
2833 val_tree = make_tree (integer_type_node, val);
2835 fn = clear_storage_libcall_fn (true);
2836 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2837 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2839 retval = expand_normal (call_expr);
2841 return retval;
2844 /* A subroutine of set_storage_via_libcall. Create the tree node
2845 for the function we use for block clears. */
2847 tree block_clear_fn;
2849 void
2850 init_block_clear_fn (const char *asmspec)
2852 if (!block_clear_fn)
2854 tree fn, args;
2856 fn = get_identifier ("memset");
2857 args = build_function_type_list (ptr_type_node, ptr_type_node,
2858 integer_type_node, sizetype,
2859 NULL_TREE);
2861 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2862 DECL_EXTERNAL (fn) = 1;
2863 TREE_PUBLIC (fn) = 1;
2864 DECL_ARTIFICIAL (fn) = 1;
2865 TREE_NOTHROW (fn) = 1;
2866 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2867 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2869 block_clear_fn = fn;
2872 if (asmspec)
2873 set_user_assembler_name (block_clear_fn, asmspec);
2876 static tree
2877 clear_storage_libcall_fn (int for_call)
2879 static bool emitted_extern;
2881 if (!block_clear_fn)
2882 init_block_clear_fn (NULL);
2884 if (for_call && !emitted_extern)
2886 emitted_extern = true;
2887 make_decl_rtl (block_clear_fn);
2890 return block_clear_fn;
2893 /* Expand a setmem pattern; return true if successful. */
2895 bool
2896 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2897 unsigned int expected_align, HOST_WIDE_INT expected_size,
2898 unsigned HOST_WIDE_INT min_size,
2899 unsigned HOST_WIDE_INT max_size,
2900 unsigned HOST_WIDE_INT probable_max_size)
2902 /* Try the most limited insn first, because there's no point
2903 including more than one in the machine description unless
2904 the more limited one has some advantage. */
2906 machine_mode mode;
2908 if (expected_align < align)
2909 expected_align = align;
2910 if (expected_size != -1)
2912 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2913 expected_size = max_size;
2914 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2915 expected_size = min_size;
2918 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2919 mode = GET_MODE_WIDER_MODE (mode))
2921 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2923 if (code != CODE_FOR_nothing
2924 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2925 here because if SIZE is less than the mode mask, as it is
2926 returned by the macro, it will definitely be less than the
2927 actual mode mask. Since SIZE is within the Pmode address
2928 space, we limit MODE to Pmode. */
2929 && ((CONST_INT_P (size)
2930 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2931 <= (GET_MODE_MASK (mode) >> 1)))
2932 || max_size <= (GET_MODE_MASK (mode) >> 1)
2933 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2935 struct expand_operand ops[9];
2936 unsigned int nops;
2938 nops = insn_data[(int) code].n_generator_args;
2939 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2941 create_fixed_operand (&ops[0], object);
2942 /* The check above guarantees that this size conversion is valid. */
2943 create_convert_operand_to (&ops[1], size, mode, true);
2944 create_convert_operand_from (&ops[2], val, byte_mode, true);
2945 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2946 if (nops >= 6)
2948 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2949 create_integer_operand (&ops[5], expected_size);
2951 if (nops >= 8)
2953 create_integer_operand (&ops[6], min_size);
2954 /* If we cannot represent the maximal size,
2955 make parameter NULL. */
2956 if ((HOST_WIDE_INT) max_size != -1)
2957 create_integer_operand (&ops[7], max_size);
2958 else
2959 create_fixed_operand (&ops[7], NULL);
2961 if (nops == 9)
2963 /* If we cannot represent the maximal size,
2964 make parameter NULL. */
2965 if ((HOST_WIDE_INT) probable_max_size != -1)
2966 create_integer_operand (&ops[8], probable_max_size);
2967 else
2968 create_fixed_operand (&ops[8], NULL);
2970 if (maybe_expand_insn (code, nops, ops))
2971 return true;
2975 return false;
2979 /* Write to one of the components of the complex value CPLX. Write VAL to
2980 the real part if IMAG_P is false, and the imaginary part if it's true. */
2982 void
2983 write_complex_part (rtx cplx, rtx val, bool imag_p)
2985 machine_mode cmode;
2986 machine_mode imode;
2987 unsigned ibitsize;
2989 if (GET_CODE (cplx) == CONCAT)
2991 emit_move_insn (XEXP (cplx, imag_p), val);
2992 return;
2995 cmode = GET_MODE (cplx);
2996 imode = GET_MODE_INNER (cmode);
2997 ibitsize = GET_MODE_BITSIZE (imode);
2999 /* For MEMs simplify_gen_subreg may generate an invalid new address
3000 because, e.g., the original address is considered mode-dependent
3001 by the target, which restricts simplify_subreg from invoking
3002 adjust_address_nv. Instead of preparing fallback support for an
3003 invalid address, we call adjust_address_nv directly. */
3004 if (MEM_P (cplx))
3006 emit_move_insn (adjust_address_nv (cplx, imode,
3007 imag_p ? GET_MODE_SIZE (imode) : 0),
3008 val);
3009 return;
3012 /* If the sub-object is at least word sized, then we know that subregging
3013 will work. This special case is important, since store_bit_field
3014 wants to operate on integer modes, and there's rarely an OImode to
3015 correspond to TCmode. */
3016 if (ibitsize >= BITS_PER_WORD
3017 /* For hard regs we have exact predicates. Assume we can split
3018 the original object if it spans an even number of hard regs.
3019 This special case is important for SCmode on 64-bit platforms
3020 where the natural size of floating-point regs is 32-bit. */
3021 || (REG_P (cplx)
3022 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3023 && REG_NREGS (cplx) % 2 == 0))
3025 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3026 imag_p ? GET_MODE_SIZE (imode) : 0);
3027 if (part)
3029 emit_move_insn (part, val);
3030 return;
3032 else
3033 /* simplify_gen_subreg may fail for sub-word MEMs. */
3034 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3037 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
3040 /* Extract one of the components of the complex value CPLX. Extract the
3041 real part if IMAG_P is false, and the imaginary part if it's true. */
3043 static rtx
3044 read_complex_part (rtx cplx, bool imag_p)
3046 machine_mode cmode, imode;
3047 unsigned ibitsize;
3049 if (GET_CODE (cplx) == CONCAT)
3050 return XEXP (cplx, imag_p);
3052 cmode = GET_MODE (cplx);
3053 imode = GET_MODE_INNER (cmode);
3054 ibitsize = GET_MODE_BITSIZE (imode);
3056 /* Special case reads from complex constants that got spilled to memory. */
3057 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3059 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3060 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3062 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3063 if (CONSTANT_CLASS_P (part))
3064 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3068 /* For MEMs simplify_gen_subreg may generate an invalid new address
3069 because, e.g., the original address is considered mode-dependent
3070 by the target, which restricts simplify_subreg from invoking
3071 adjust_address_nv. Instead of preparing fallback support for an
3072 invalid address, we call adjust_address_nv directly. */
3073 if (MEM_P (cplx))
3074 return adjust_address_nv (cplx, imode,
3075 imag_p ? GET_MODE_SIZE (imode) : 0);
3077 /* If the sub-object is at least word sized, then we know that subregging
3078 will work. This special case is important, since extract_bit_field
3079 wants to operate on integer modes, and there's rarely an OImode to
3080 correspond to TCmode. */
3081 if (ibitsize >= BITS_PER_WORD
3082 /* For hard regs we have exact predicates. Assume we can split
3083 the original object if it spans an even number of hard regs.
3084 This special case is important for SCmode on 64-bit platforms
3085 where the natural size of floating-point regs is 32-bit. */
3086 || (REG_P (cplx)
3087 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3088 && REG_NREGS (cplx) % 2 == 0))
3090 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3091 imag_p ? GET_MODE_SIZE (imode) : 0);
3092 if (ret)
3093 return ret;
3094 else
3095 /* simplify_gen_subreg may fail for sub-word MEMs. */
3096 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3099 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3100 true, NULL_RTX, imode, imode);
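
Both write_complex_part and read_complex_part place the real part at byte offset 0 and the imaginary part at GET_MODE_SIZE (imode). A standalone sketch of that in-memory layout, using a plain pair of doubles as a stand-in for a DCmode value:

#include <stdio.h>
#include <string.h>

/* The real part lives at byte 0 and the imaginary part one
   inner-mode size later -- modeled here with a plain struct.  */
struct dcomplex { double re, im; };

int
main (void)
{
  struct dcomplex z = { 1.5, -2.5 };
  double part;

  /* Analogue of read_complex_part (..., imag_p = true): read at
     byte offset sizeof (double).  */
  memcpy (&part, (char *) &z + sizeof (double), sizeof part);
  printf ("%g\n", part);               /* prints -2.5 */
  return 0;
}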
3103 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3104 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3105 represented in NEW_MODE. If FORCE is true, this will never happen, as
3106 we'll force-create a SUBREG if needed. */
3108 static rtx
3109 emit_move_change_mode (machine_mode new_mode,
3110 machine_mode old_mode, rtx x, bool force)
3112 rtx ret;
3114 if (push_operand (x, GET_MODE (x)))
3116 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3117 MEM_COPY_ATTRIBUTES (ret, x);
3119 else if (MEM_P (x))
3121 /* We don't have to worry about changing the address since the
3122 size in bytes is supposed to be the same. */
3123 if (reload_in_progress)
3125 /* Copy the MEM to change the mode and move any
3126 substitutions from the old MEM to the new one. */
3127 ret = adjust_address_nv (x, new_mode, 0);
3128 copy_replacements (x, ret);
3130 else
3131 ret = adjust_address (x, new_mode, 0);
3133 else
3135 /* Note that we do want simplify_subreg's behavior of validating
3136 that the new mode is ok for a hard register. If we were to use
3137 simplify_gen_subreg, we would create the subreg, but would
3138 probably run into the target not being able to implement it. */
3139 /* Except, of course, when FORCE is true, when this is exactly what
3140 we want. Which is needed for CCmodes on some targets. */
3141 if (force)
3142 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3143 else
3144 ret = simplify_subreg (new_mode, x, old_mode, 0);
3147 return ret;
3150 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3151 an integer mode of the same size as MODE. Returns the instruction
3152 emitted, or NULL if such a move could not be generated. */
3154 static rtx_insn *
3155 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3157 machine_mode imode;
3158 enum insn_code code;
3160 /* There must exist a mode of the exact size we require. */
3161 imode = int_mode_for_mode (mode);
3162 if (imode == BLKmode)
3163 return NULL;
3165 /* The target must support moves in this mode. */
3166 code = optab_handler (mov_optab, imode);
3167 if (code == CODE_FOR_nothing)
3168 return NULL;
3170 x = emit_move_change_mode (imode, mode, x, force);
3171 if (x == NULL_RTX)
3172 return NULL;
3173 y = emit_move_change_mode (imode, mode, y, force);
3174 if (y == NULL_RTX)
3175 return NULL;
3176 return emit_insn (GEN_FCN (code) (x, y));
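
emit_move_via_integer moves a value whose mode has no move pattern by reinterpreting it in an integer mode of the same size. The plain-C analogue of that trick is copying the bit pattern through a same-sized integer, as in this minimal standalone sketch (SFmode stands in as a 32-bit float here by assumption):

#include <stdio.h>
#include <stdint.h>
#include <string.h>

/* Plain-C analogue of emit_move_via_integer: move a float-sized
   value through a same-sized integer, preserving the bit pattern.  */
int
main (void)
{
  float src = 3.5f, dst;
  uint32_t bits;                        /* the "integer mode" carrier */

  memcpy (&bits, &src, sizeof bits);    /* float -> integer bits */
  memcpy (&dst, &bits, sizeof dst);     /* integer bits -> float */

  printf ("%g\n", dst);                 /* prints 3.5 */
  return 0;
}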
3179 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3180 Return an equivalent MEM that does not use an auto-increment. */
3183 emit_move_resolve_push (machine_mode mode, rtx x)
3185 enum rtx_code code = GET_CODE (XEXP (x, 0));
3186 HOST_WIDE_INT adjust;
3187 rtx temp;
3189 adjust = GET_MODE_SIZE (mode);
3190 #ifdef PUSH_ROUNDING
3191 adjust = PUSH_ROUNDING (adjust);
3192 #endif
3193 if (code == PRE_DEC || code == POST_DEC)
3194 adjust = -adjust;
3195 else if (code == PRE_MODIFY || code == POST_MODIFY)
3197 rtx expr = XEXP (XEXP (x, 0), 1);
3198 HOST_WIDE_INT val;
3200 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3201 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3202 val = INTVAL (XEXP (expr, 1));
3203 if (GET_CODE (expr) == MINUS)
3204 val = -val;
3205 gcc_assert (adjust == val || adjust == -val);
3206 adjust = val;
3209 /* Do not use anti_adjust_stack, since we don't want to update
3210 stack_pointer_delta. */
3211 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3212 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3213 0, OPTAB_LIB_WIDEN);
3214 if (temp != stack_pointer_rtx)
3215 emit_move_insn (stack_pointer_rtx, temp);
3217 switch (code)
3219 case PRE_INC:
3220 case PRE_DEC:
3221 case PRE_MODIFY:
3222 temp = stack_pointer_rtx;
3223 break;
3224 case POST_INC:
3225 case POST_DEC:
3226 case POST_MODIFY:
3227 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3228 break;
3229 default:
3230 gcc_unreachable ();
3233 return replace_equiv_address (x, temp);
3236 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3237 X is known to satisfy push_operand, and MODE is known to be complex.
3238 Returns the last instruction emitted. */
3240 rtx_insn *
3241 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3243 machine_mode submode = GET_MODE_INNER (mode);
3244 bool imag_first;
3246 #ifdef PUSH_ROUNDING
3247 unsigned int submodesize = GET_MODE_SIZE (submode);
3249 /* In case we output to the stack, but the size is smaller than the
3250 machine can push exactly, we need to use move instructions. */
3251 if (PUSH_ROUNDING (submodesize) != submodesize)
3253 x = emit_move_resolve_push (mode, x);
3254 return emit_move_insn (x, y);
3256 #endif
3258 /* Note that the real part always precedes the imag part in memory
3259 regardless of machine's endianness. */
3260 switch (GET_CODE (XEXP (x, 0)))
3262 case PRE_DEC:
3263 case POST_DEC:
3264 imag_first = true;
3265 break;
3266 case PRE_INC:
3267 case POST_INC:
3268 imag_first = false;
3269 break;
3270 default:
3271 gcc_unreachable ();
3274 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3275 read_complex_part (y, imag_first));
3276 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3277 read_complex_part (y, !imag_first));
3280 /* A subroutine of emit_move_complex. Perform the move from Y to X
3281 via two moves of the parts. Returns the last instruction emitted. */
3283 rtx_insn *
3284 emit_move_complex_parts (rtx x, rtx y)
3286 /* Show the output dies here. This is necessary for SUBREGs
3287 of pseudos since we cannot track their lifetimes correctly;
3288 hard regs shouldn't appear here except as return values. */
3289 if (!reload_completed && !reload_in_progress
3290 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3291 emit_clobber (x);
3293 write_complex_part (x, read_complex_part (y, false), false);
3294 write_complex_part (x, read_complex_part (y, true), true);
3296 return get_last_insn ();
3299 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3300 MODE is known to be complex. Returns the last instruction emitted. */
3302 static rtx_insn *
3303 emit_move_complex (machine_mode mode, rtx x, rtx y)
3305 bool try_int;
3307 /* Need to take special care for pushes, to maintain proper ordering
3308 of the data, and possibly extra padding. */
3309 if (push_operand (x, mode))
3310 return emit_move_complex_push (mode, x, y);
3312 /* See if we can coerce the target into moving both values at once, except
3313 for floating point where we favor moving as parts if this is easy. */
3314 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3315 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3316 && !(REG_P (x)
3317 && HARD_REGISTER_P (x)
3318 && REG_NREGS (x) == 1)
3319 && !(REG_P (y)
3320 && HARD_REGISTER_P (y)
3321 && REG_NREGS (y) == 1))
3322 try_int = false;
3323 /* Not possible if the values are inherently not adjacent. */
3324 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3325 try_int = false;
3326 /* Is possible if both are registers (or subregs of registers). */
3327 else if (register_operand (x, mode) && register_operand (y, mode))
3328 try_int = true;
3329 /* If one of the operands is a memory, and alignment constraints
3330 are friendly enough, we may be able to do combined memory operations.
3331 We do not attempt this if Y is a constant because that combination is
3332 usually better with the by-parts thing below. */
3333 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3334 && (!STRICT_ALIGNMENT
3335 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3336 try_int = true;
3337 else
3338 try_int = false;
3340 if (try_int)
3342 rtx_insn *ret;
3344 /* For memory to memory moves, optimal behavior can be had with the
3345 existing block move logic. */
3346 if (MEM_P (x) && MEM_P (y))
3348 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3349 BLOCK_OP_NO_LIBCALL);
3350 return get_last_insn ();
3353 ret = emit_move_via_integer (mode, x, y, true);
3354 if (ret)
3355 return ret;
3358 return emit_move_complex_parts (x, y);
3361 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3362 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3364 static rtx_insn *
3365 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3367 rtx_insn *ret;
3369 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3370 if (mode != CCmode)
3372 enum insn_code code = optab_handler (mov_optab, CCmode);
3373 if (code != CODE_FOR_nothing)
3375 x = emit_move_change_mode (CCmode, mode, x, true);
3376 y = emit_move_change_mode (CCmode, mode, y, true);
3377 return emit_insn (GEN_FCN (code) (x, y));
3381 /* Otherwise, find the MODE_INT mode of the same width. */
3382 ret = emit_move_via_integer (mode, x, y, false);
3383 gcc_assert (ret != NULL);
3384 return ret;
3387 /* Return true if word I of OP lies entirely in the
3388 undefined bits of a paradoxical subreg. */
3390 static bool
3391 undefined_operand_subword_p (const_rtx op, int i)
3393 machine_mode innermode, innermostmode;
3394 int offset;
3395 if (GET_CODE (op) != SUBREG)
3396 return false;
3397 innermode = GET_MODE (op);
3398 innermostmode = GET_MODE (SUBREG_REG (op));
3399 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3400 /* The SUBREG_BYTE represents offset, as if the value were stored in
3401 memory, except for a paradoxical subreg where we define
3402 SUBREG_BYTE to be 0; undo this exception as in
3403 simplify_subreg. */
3404 if (SUBREG_BYTE (op) == 0
3405 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3407 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3408 if (WORDS_BIG_ENDIAN)
3409 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3410 if (BYTES_BIG_ENDIAN)
3411 offset += difference % UNITS_PER_WORD;
3413 if (offset >= GET_MODE_SIZE (innermostmode)
3414 || offset <= -GET_MODE_SIZE (word_mode))
3415 return true;
3416 return false;
3419 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3420 MODE is any multi-word or full-word mode that lacks a move_insn
3421 pattern. Note that you will get better code if you define such
3422 patterns, even if they must turn into multiple assembler instructions. */
3424 static rtx_insn *
3425 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3427 rtx_insn *last_insn = 0;
3428 rtx_insn *seq;
3429 rtx inner;
3430 bool need_clobber;
3431 int i;
3433 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3435 /* If X is a push on the stack, do the push now and replace
3436 X with a reference to the stack pointer. */
3437 if (push_operand (x, mode))
3438 x = emit_move_resolve_push (mode, x);
3440 /* If we are in reload, see if either operand is a MEM whose address
3441 is scheduled for replacement. */
3442 if (reload_in_progress && MEM_P (x)
3443 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3444 x = replace_equiv_address_nv (x, inner);
3445 if (reload_in_progress && MEM_P (y)
3446 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3447 y = replace_equiv_address_nv (y, inner);
3449 start_sequence ();
3451 need_clobber = false;
3452 for (i = 0;
3453 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3454 i++)
3456 rtx xpart = operand_subword (x, i, 1, mode);
3457 rtx ypart;
3459 /* Do not generate code for a move if it would come entirely
3460 from the undefined bits of a paradoxical subreg. */
3461 if (undefined_operand_subword_p (y, i))
3462 continue;
3464 ypart = operand_subword (y, i, 1, mode);
3466 /* If we can't get a part of Y, put Y into memory if it is a
3467 constant. Otherwise, force it into a register. Then we must
3468 be able to get a part of Y. */
3469 if (ypart == 0 && CONSTANT_P (y))
3471 y = use_anchored_address (force_const_mem (mode, y));
3472 ypart = operand_subword (y, i, 1, mode);
3474 else if (ypart == 0)
3475 ypart = operand_subword_force (y, i, mode);
3477 gcc_assert (xpart && ypart);
3479 need_clobber |= (GET_CODE (xpart) == SUBREG);
3481 last_insn = emit_move_insn (xpart, ypart);
3484 seq = get_insns ();
3485 end_sequence ();
3487 /* Show the output dies here. This is necessary for SUBREGs
3488 of pseudos since we cannot track their lifetimes correctly;
3489 hard regs shouldn't appear here except as return values.
3490 We never want to emit such a clobber after reload. */
3491 if (x != y
3492 && ! (reload_in_progress || reload_completed)
3493 && need_clobber != 0)
3494 emit_clobber (x);
3496 emit_insn (seq);
3498 return last_insn;
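
emit_move_multi_word copies a value one word at a time when the whole mode has no move pattern. A standalone sketch of the same word-by-word copy, assuming an 8-byte word and ignoring the paradoxical-subreg and reload special cases handled above:

#include <stdio.h>
#include <string.h>

#define UNITS_PER_WORD 8        /* assumed 64-bit word */

/* Plain-C analogue of emit_move_multi_word: copy SIZE bytes one
   word-sized chunk at a time, plus a possible tail.  */
static void
move_multi_word (void *dst, const void *src, size_t size)
{
  size_t i, nwords = size / UNITS_PER_WORD;
  for (i = 0; i < nwords; i++)
    memcpy ((char *) dst + i * UNITS_PER_WORD,
            (const char *) src + i * UNITS_PER_WORD, UNITS_PER_WORD);
  if (size % UNITS_PER_WORD)
    memcpy ((char *) dst + nwords * UNITS_PER_WORD,
            (const char *) src + nwords * UNITS_PER_WORD,
            size % UNITS_PER_WORD);
}

int
main (void)
{
  char a[24] = "multi-word move test";
  char b[24] = { 0 };
  move_multi_word (b, a, sizeof a);     /* three 8-byte chunks */
  printf ("%s\n", b);
  return 0;
}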
3501 /* Low level part of emit_move_insn.
3502 Called just like emit_move_insn, but assumes X and Y
3503 are basically valid. */
3505 rtx_insn *
3506 emit_move_insn_1 (rtx x, rtx y)
3508 machine_mode mode = GET_MODE (x);
3509 enum insn_code code;
3511 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3513 code = optab_handler (mov_optab, mode);
3514 if (code != CODE_FOR_nothing)
3515 return emit_insn (GEN_FCN (code) (x, y));
3517 /* Expand complex moves by moving real part and imag part. */
3518 if (COMPLEX_MODE_P (mode))
3519 return emit_move_complex (mode, x, y);
3521 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3522 || ALL_FIXED_POINT_MODE_P (mode))
3524 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3526 /* If we can't find an integer mode, use multi words. */
3527 if (result)
3528 return result;
3529 else
3530 return emit_move_multi_word (mode, x, y);
3533 if (GET_MODE_CLASS (mode) == MODE_CC)
3534 return emit_move_ccmode (mode, x, y);
3536 /* Try using a move pattern for the corresponding integer mode. This is
3537 only safe when simplify_subreg can convert MODE constants into integer
3538 constants. At present, it can only do this reliably if the value
3539 fits within a HOST_WIDE_INT. */
3540 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3542 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3544 if (ret)
3546 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3547 return ret;
3551 return emit_move_multi_word (mode, x, y);
3554 /* Generate code to copy Y into X.
3555 Both Y and X must have the same mode, except that
3556 Y can be a constant with VOIDmode.
3557 This mode cannot be BLKmode; use emit_block_move for that.
3559 Return the last instruction emitted. */
3561 rtx_insn *
3562 emit_move_insn (rtx x, rtx y)
3564 machine_mode mode = GET_MODE (x);
3565 rtx y_cst = NULL_RTX;
3566 rtx_insn *last_insn;
3567 rtx set;
3569 gcc_assert (mode != BLKmode
3570 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3572 if (CONSTANT_P (y))
3574 if (optimize
3575 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3576 && (last_insn = compress_float_constant (x, y)))
3577 return last_insn;
3579 y_cst = y;
3581 if (!targetm.legitimate_constant_p (mode, y))
3583 y = force_const_mem (mode, y);
3585 /* If the target's cannot_force_const_mem prevented the spill,
3586 assume that the target's move expanders will also take care
3587 of the non-legitimate constant. */
3588 if (!y)
3589 y = y_cst;
3590 else
3591 y = use_anchored_address (y);
3595 /* If X or Y are memory references, verify that their addresses are valid
3596 for the machine. */
3597 if (MEM_P (x)
3598 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3599 MEM_ADDR_SPACE (x))
3600 && ! push_operand (x, GET_MODE (x))))
3601 x = validize_mem (x);
3603 if (MEM_P (y)
3604 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3605 MEM_ADDR_SPACE (y)))
3606 y = validize_mem (y);
3608 gcc_assert (mode != BLKmode);
3610 last_insn = emit_move_insn_1 (x, y);
3612 if (y_cst && REG_P (x)
3613 && (set = single_set (last_insn)) != NULL_RTX
3614 && SET_DEST (set) == x
3615 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3616 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3618 return last_insn;
3621 /* Generate the body of an instruction to copy Y into X.
3622 It may be a list of insns, if one insn isn't enough. */
3624 rtx_insn *
3625 gen_move_insn (rtx x, rtx y)
3627 rtx_insn *seq;
3629 start_sequence ();
3630 emit_move_insn_1 (x, y);
3631 seq = get_insns ();
3632 end_sequence ();
3633 return seq;
3636 /* Same as above, but return rtx (used as a callback, which must have
3637 prototype compatible with other functions returning rtx). */
3640 gen_move_insn_uncast (rtx x, rtx y)
3642 return gen_move_insn (x, y);
3645 /* If Y is representable exactly in a narrower mode, and the target can
3646 perform the extension directly from constant or memory, then emit the
3647 move as an extension. */
3649 static rtx_insn *
3650 compress_float_constant (rtx x, rtx y)
3652 machine_mode dstmode = GET_MODE (x);
3653 machine_mode orig_srcmode = GET_MODE (y);
3654 machine_mode srcmode;
3655 REAL_VALUE_TYPE r;
3656 int oldcost, newcost;
3657 bool speed = optimize_insn_for_speed_p ();
3659 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3661 if (targetm.legitimate_constant_p (dstmode, y))
3662 oldcost = set_src_cost (y, speed);
3663 else
3664 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3666 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3667 srcmode != orig_srcmode;
3668 srcmode = GET_MODE_WIDER_MODE (srcmode))
3670 enum insn_code ic;
3671 rtx trunc_y;
3672 rtx_insn *last_insn;
3674 /* Skip if the target can't extend this way. */
3675 ic = can_extend_p (dstmode, srcmode, 0);
3676 if (ic == CODE_FOR_nothing)
3677 continue;
3679 /* Skip if the narrowed value isn't exact. */
3680 if (! exact_real_truncate (srcmode, &r))
3681 continue;
3683 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3685 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3687 /* Skip if the target needs extra instructions to perform
3688 the extension. */
3689 if (!insn_operand_matches (ic, 1, trunc_y))
3690 continue;
3691 /* This is valid, but may not be cheaper than the original. */
3692 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3693 speed);
3694 if (oldcost < newcost)
3695 continue;
3697 else if (float_extend_from_mem[dstmode][srcmode])
3699 trunc_y = force_const_mem (srcmode, trunc_y);
3700 /* This is valid, but may not be cheaper than the original. */
3701 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3702 speed);
3703 if (oldcost < newcost)
3704 continue;
3705 trunc_y = validize_mem (trunc_y);
3707 else
3708 continue;
3710 /* For CSE's benefit, force the compressed constant pool entry
3711 into a new pseudo. This constant may be used in different modes,
3712 and if not, combine will put things back together for us. */
3713 trunc_y = force_reg (srcmode, trunc_y);
3715 /* If x is a hard register, perform the extension into a pseudo,
3716 so that e.g. stack realignment code is aware of it. */
3717 rtx target = x;
3718 if (REG_P (x) && HARD_REGISTER_P (x))
3719 target = gen_reg_rtx (dstmode);
3721 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3722 last_insn = get_last_insn ();
3724 if (REG_P (target))
3725 set_unique_reg_note (last_insn, REG_EQUAL, y);
3727 if (target != x)
3728 return emit_move_insn (x, target);
3729 return last_insn;
3732 return NULL;
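
The key test in compress_float_constant is whether the constant survives truncation to the narrower mode exactly (exact_real_truncate). The standalone sketch below shows the same round-trip test in plain C for double versus float; it is an illustration of the idea, not the GCC routine.

#include <stdio.h>

/* Plain-C version of the "narrowed value is exact" test: a double
   constant may be emitted as a float extension only if converting
   it to float and back loses nothing.  */
static int
narrows_exactly (double d)
{
  return (double) (float) d == d;
}

int
main (void)
{
  printf ("%d\n", narrows_exactly (0.5));   /* 1: exact in float */
  printf ("%d\n", narrows_exactly (0.1));   /* 0: not exact      */
  return 0;
}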
3735 /* Pushing data onto the stack. */
3737 /* Push a block of length SIZE (perhaps variable)
3738 and return an rtx to address the beginning of the block.
3739 The value may be virtual_outgoing_args_rtx.
3741 EXTRA is the number of bytes of padding to push in addition to SIZE.
3742 BELOW nonzero means this padding comes at low addresses;
3743 otherwise, the padding comes at high addresses. */
3746 push_block (rtx size, int extra, int below)
3748 rtx temp;
3750 size = convert_modes (Pmode, ptr_mode, size, 1);
3751 if (CONSTANT_P (size))
3752 anti_adjust_stack (plus_constant (Pmode, size, extra));
3753 else if (REG_P (size) && extra == 0)
3754 anti_adjust_stack (size);
3755 else
3757 temp = copy_to_mode_reg (Pmode, size);
3758 if (extra != 0)
3759 temp = expand_binop (Pmode, add_optab, temp,
3760 gen_int_mode (extra, Pmode),
3761 temp, 0, OPTAB_LIB_WIDEN);
3762 anti_adjust_stack (temp);
3765 if (STACK_GROWS_DOWNWARD)
3767 temp = virtual_outgoing_args_rtx;
3768 if (extra != 0 && below)
3769 temp = plus_constant (Pmode, temp, extra);
3771 else
3773 if (CONST_INT_P (size))
3774 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3775 -INTVAL (size) - (below ? 0 : extra));
3776 else if (extra != 0 && !below)
3777 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3778 negate_rtx (Pmode, plus_constant (Pmode, size,
3779 extra)));
3780 else
3781 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3782 negate_rtx (Pmode, size));
3785 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3788 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3790 static rtx
3791 mem_autoinc_base (rtx mem)
3793 if (MEM_P (mem))
3795 rtx addr = XEXP (mem, 0);
3796 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3797 return XEXP (addr, 0);
3799 return NULL;
3802 /* A utility routine used here, in reload, and in try_split. The insns
3803 after PREV up to and including LAST are known to adjust the stack,
3804 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3805 placing notes as appropriate. PREV may be NULL, indicating the
3806 entire insn sequence prior to LAST should be scanned.
3808 The set of allowed stack pointer modifications is small:
3809 (1) One or more auto-inc style memory references (aka pushes),
3810 (2) One or more addition/subtraction with the SP as destination,
3811 (3) A single move insn with the SP as destination,
3812 (4) A call_pop insn,
3813 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3815 Insns in the sequence that do not modify the SP are ignored,
3816 except for noreturn calls.
3818 The return value is the amount of adjustment that can be trivially
3819 verified, via immediate operand or auto-inc. If the adjustment
3820 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
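/* For example, (set (reg sp) (plus (reg sp) (const_int -16))) yields -16,
   and a push such as (set (mem:SI (pre_dec (reg sp))) (reg:SI x)) yields
   -GET_MODE_SIZE (SImode), i.e. -4 on a typical 32-bit target. */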
3822 HOST_WIDE_INT
3823 find_args_size_adjust (rtx_insn *insn)
3825 rtx dest, set, pat;
3826 int i;
3828 pat = PATTERN (insn);
3829 set = NULL;
3831 /* Look for a call_pop pattern. */
3832 if (CALL_P (insn))
3834 /* We have to allow non-call_pop patterns for the case
3835 of emit_single_push_insn of a TLS address. */
3836 if (GET_CODE (pat) != PARALLEL)
3837 return 0;
3839 /* All call_pop have a stack pointer adjust in the parallel.
3840 The call itself is always first, and the stack adjust is
3841 usually last, so search from the end. */
3842 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3844 set = XVECEXP (pat, 0, i);
3845 if (GET_CODE (set) != SET)
3846 continue;
3847 dest = SET_DEST (set);
3848 if (dest == stack_pointer_rtx)
3849 break;
3851 /* We'd better have found the stack pointer adjust. */
3852 if (i == 0)
3853 return 0;
3854 /* Fall through to process the extracted SET and DEST
3855 as if it was a standalone insn. */
3857 else if (GET_CODE (pat) == SET)
3858 set = pat;
3859 else if ((set = single_set (insn)) != NULL)
3861 else if (GET_CODE (pat) == PARALLEL)
3863 /* ??? Some older ports use a parallel with a stack adjust
3864 and a store for a PUSH_ROUNDING pattern, rather than a
3865 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3866 /* ??? See h8300 and m68k, pushqi1. */
3867 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3869 set = XVECEXP (pat, 0, i);
3870 if (GET_CODE (set) != SET)
3871 continue;
3872 dest = SET_DEST (set);
3873 if (dest == stack_pointer_rtx)
3874 break;
3876 /* We do not expect an auto-inc of the sp in the parallel. */
3877 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3878 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3879 != stack_pointer_rtx);
3881 if (i < 0)
3882 return 0;
3884 else
3885 return 0;
3887 dest = SET_DEST (set);
3889 /* Look for direct modifications of the stack pointer. */
3890 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3892 /* Look for a trivial adjustment, otherwise assume nothing. */
3893 /* Note that the SPU restore_stack_block pattern refers to
3894 the stack pointer in V4SImode. Consider that non-trivial. */
3895 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3896 && GET_CODE (SET_SRC (set)) == PLUS
3897 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3898 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3899 return INTVAL (XEXP (SET_SRC (set), 1));
3900 /* ??? Reload can generate no-op moves, which will be cleaned
3901 up later. Recognize it and continue searching. */
3902 else if (rtx_equal_p (dest, SET_SRC (set)))
3903 return 0;
3904 else
3905 return HOST_WIDE_INT_MIN;
3907 else
3909 rtx mem, addr;
3911 /* Otherwise only think about autoinc patterns. */
3912 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3914 mem = dest;
3915 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3916 != stack_pointer_rtx);
3918 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3919 mem = SET_SRC (set);
3920 else
3921 return 0;
3923 addr = XEXP (mem, 0);
3924 switch (GET_CODE (addr))
3926 case PRE_INC:
3927 case POST_INC:
3928 return GET_MODE_SIZE (GET_MODE (mem));
3929 case PRE_DEC:
3930 case POST_DEC:
3931 return -GET_MODE_SIZE (GET_MODE (mem));
3932 case PRE_MODIFY:
3933 case POST_MODIFY:
3934 addr = XEXP (addr, 1);
3935 gcc_assert (GET_CODE (addr) == PLUS);
3936 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3937 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3938 return INTVAL (XEXP (addr, 1));
3939 default:
3940 gcc_unreachable ();
3946 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
3948 int args_size = end_args_size;
3949 bool saw_unknown = false;
3950 rtx_insn *insn;
3952 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3954 HOST_WIDE_INT this_delta;
3956 if (!NONDEBUG_INSN_P (insn))
3957 continue;
3959 this_delta = find_args_size_adjust (insn);
3960 if (this_delta == 0)
3962 if (!CALL_P (insn)
3963 || ACCUMULATE_OUTGOING_ARGS
3964 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3965 continue;
3968 gcc_assert (!saw_unknown);
3969 if (this_delta == HOST_WIDE_INT_MIN)
3970 saw_unknown = true;
3972 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3973 if (STACK_GROWS_DOWNWARD)
3974 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3976 args_size -= this_delta;
3979 return saw_unknown ? INT_MIN : args_size;
3982 #ifdef PUSH_ROUNDING
3983 /* Emit single push insn. */
3985 static void
3986 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
3988 rtx dest_addr;
3989 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3990 rtx dest;
3991 enum insn_code icode;
3993 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3994 /* If there is a push pattern, use it. Otherwise fall back to the old way
3995 of throwing a MEM representing the push operation at the move expander. */
3996 icode = optab_handler (push_optab, mode);
3997 if (icode != CODE_FOR_nothing)
3999 struct expand_operand ops[1];
4001 create_input_operand (&ops[0], x, mode);
4002 if (maybe_expand_insn (icode, 1, ops))
4003 return;
4005 if (GET_MODE_SIZE (mode) == rounded_size)
4006 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4007 /* If we are to pad downward, adjust the stack pointer first and
4008 then store X into the stack location using an offset. This is
4009 because emit_move_insn does not know how to pad; it does not have
4010 access to type. */
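/* For instance, pushing a single QImode byte when PUSH_ROUNDING rounds the
   slot up to 4 bytes on a downward-growing stack: the stack pointer is
   dropped by 4 first, and the byte is then stored at offset 3 so that the
   padding ends up at the lower addresses of the slot. */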
4011 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
4013 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4014 HOST_WIDE_INT offset;
4016 emit_move_insn (stack_pointer_rtx,
4017 expand_binop (Pmode,
4018 STACK_GROWS_DOWNWARD ? sub_optab
4019 : add_optab,
4020 stack_pointer_rtx,
4021 gen_int_mode (rounded_size, Pmode),
4022 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4024 offset = (HOST_WIDE_INT) padding_size;
4025 if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
4026 /* We have already decremented the stack pointer, so get the
4027 previous value. */
4028 offset += (HOST_WIDE_INT) rounded_size;
4030 if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
4031 /* We have already incremented the stack pointer, so get the
4032 previous value. */
4033 offset -= (HOST_WIDE_INT) rounded_size;
4035 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4036 gen_int_mode (offset, Pmode));
4038 else
4040 if (STACK_GROWS_DOWNWARD)
4041 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4042 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4043 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4044 Pmode));
4045 else
4046 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4047 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4048 gen_int_mode (rounded_size, Pmode));
4050 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4053 dest = gen_rtx_MEM (mode, dest_addr);
4055 if (type != 0)
4057 set_mem_attributes (dest, type, 1);
4059 if (cfun->tail_call_marked)
4060 /* Function incoming arguments may overlap with sibling call
4061 outgoing arguments and we cannot allow reordering of reads
4062 from function arguments with stores to outgoing arguments
4063 of sibling calls. */
4064 set_mem_alias_set (dest, 0);
4066 emit_move_insn (dest, x);
4069 /* Emit and annotate a single push insn. */
4071 static void
4072 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4074 int delta, old_delta = stack_pointer_delta;
4075 rtx_insn *prev = get_last_insn ();
4076 rtx_insn *last;
4078 emit_single_push_insn_1 (mode, x, type);
4080 last = get_last_insn ();
4082 /* Notice the common case where we emitted exactly one insn. */
4083 if (PREV_INSN (last) == prev)
4085 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4086 return;
4089 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4090 gcc_assert (delta == INT_MIN || delta == old_delta);
4092 #endif
4094 /* If reading SIZE bytes from X will end up reading from
4095 Y return the number of bytes that overlap. Return -1
4096 if there is no overlap or -2 if we can't determine
4097 (for example when X and Y have different base registers). */
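/* E.g. if Y is X + 4 and SIZE is 8, the last four bytes of the load come
   from Y, so the result is 4; if Y is X + 8 there is no overlap and the
   result is -1; if X and Y differ by something non-constant the result
   is -2. */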
4099 static int
4100 memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
4102 rtx tmp = plus_constant (Pmode, x, size);
4103 rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);
4105 if (!CONST_INT_P (sub))
4106 return -2;
4108 HOST_WIDE_INT val = INTVAL (sub);
4110 return IN_RANGE (val, 1, size) ? val : -1;
4113 /* Generate code to push X onto the stack, assuming it has mode MODE and
4114 type TYPE.
4115 MODE is redundant except when X is a CONST_INT (since they don't
4116 carry mode info).
4117 SIZE is an rtx for the size of data to be copied (in bytes),
4118 needed only if X is BLKmode.
4119 Return true if successful. May return false if asked to push a
4120 partial argument during a sibcall optimization (as specified by
4121 SIBCALL_P) and the incoming and outgoing pointers cannot be shown
4122 to not overlap.
4124 ALIGN (in bits) is maximum alignment we can assume.
4126 If PARTIAL and REG are both nonzero, then copy that many of the first
4127 bytes of X into registers starting with REG, and push the rest of X.
4128 The amount of space pushed is decreased by PARTIAL bytes.
4129 REG must be a hard register in this case.
4130 If REG is zero but PARTIAL is not, take all other actions for an
4131 argument partially in registers, but do not actually load any
4132 registers.
4134 EXTRA is the amount in bytes of extra space to leave next to this arg.
4135 This is ignored if an argument block has already been allocated.
4137 On a machine that lacks real push insns, ARGS_ADDR is the address of
4138 the bottom of the argument block for this call. We use indexing off there
4139 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4140 argument block has not been preallocated.
4142 ARGS_SO_FAR is the size of args previously pushed for this call.
4144 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4145 for arguments passed in registers. If nonzero, it will be the number
4146 of bytes required. */
4148 bool
4149 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4150 unsigned int align, int partial, rtx reg, int extra,
4151 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4152 rtx alignment_pad, bool sibcall_p)
4154 rtx xinner;
4155 enum direction stack_direction = STACK_GROWS_DOWNWARD ? downward : upward;
4157 /* Decide where to pad the argument: `downward' for below,
4158 `upward' for above, or `none' for don't pad it.
4159 Default is below for small data on big-endian machines; else above. */
4160 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4162 /* Invert direction if stack is post-decrement.
4163 FIXME: why? */
4164 if (STACK_PUSH_CODE == POST_DEC)
4165 if (where_pad != none)
4166 where_pad = (where_pad == downward ? upward : downward);
4168 xinner = x;
4170 int nregs = partial / UNITS_PER_WORD;
4171 rtx *tmp_regs = NULL;
4172 int overlapping = 0;
4174 if (mode == BLKmode
4175 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4177 /* Copy a block into the stack, entirely or partially. */
4179 rtx temp;
4180 int used;
4181 int offset;
4182 int skip;
4184 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4185 used = partial - offset;
4187 if (mode != BLKmode)
4189 /* A value is to be stored in an insufficiently aligned
4190 stack slot; copy via a suitably aligned slot if
4191 necessary. */
4192 size = GEN_INT (GET_MODE_SIZE (mode));
4193 if (!MEM_P (xinner))
4195 temp = assign_temp (type, 1, 1);
4196 emit_move_insn (temp, xinner);
4197 xinner = temp;
4201 gcc_assert (size);
4203 /* USED is now the # of bytes we need not copy to the stack
4204 because registers will take care of them. */
4206 if (partial != 0)
4207 xinner = adjust_address (xinner, BLKmode, used);
4209 /* If the partial register-part of the arg counts in its stack size,
4210 skip the part of stack space corresponding to the registers.
4211 Otherwise, start copying to the beginning of the stack space,
4212 by setting SKIP to 0. */
4213 skip = (reg_parm_stack_space == 0) ? 0 : used;
4215 #ifdef PUSH_ROUNDING
4216 /* Do it with several push insns if that doesn't take lots of insns
4217 and if there is no difficulty with push insns that skip bytes
4218 on the stack for alignment purposes. */
4219 if (args_addr == 0
4220 && PUSH_ARGS
4221 && CONST_INT_P (size)
4222 && skip == 0
4223 && MEM_ALIGN (xinner) >= align
4224 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4225 /* Here we avoid the case of a structure whose weak alignment
4226 forces many pushes of a small amount of data,
4227 and such small pushes do rounding that causes trouble. */
4228 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4229 || align >= BIGGEST_ALIGNMENT
4230 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4231 == (align / BITS_PER_UNIT)))
4232 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4234 /* Push padding now if padding above and stack grows down,
4235 or if padding below and stack grows up.
4236 But if space already allocated, this has already been done. */
4237 if (extra && args_addr == 0
4238 && where_pad != none && where_pad != stack_direction)
4239 anti_adjust_stack (GEN_INT (extra));
4241 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4243 else
4244 #endif /* PUSH_ROUNDING */
4246 rtx target;
4248 /* Otherwise make space on the stack and copy the data
4249 to the address of that space. */
4251 /* Deduct words put into registers from the size we must copy. */
4252 if (partial != 0)
4254 if (CONST_INT_P (size))
4255 size = GEN_INT (INTVAL (size) - used);
4256 else
4257 size = expand_binop (GET_MODE (size), sub_optab, size,
4258 gen_int_mode (used, GET_MODE (size)),
4259 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4262 /* Get the address of the stack space.
4263 In this case, we do not deal with EXTRA separately.
4264 A single stack adjust will do. */
4265 if (! args_addr)
4267 temp = push_block (size, extra, where_pad == downward);
4268 extra = 0;
4270 else if (CONST_INT_P (args_so_far))
4271 temp = memory_address (BLKmode,
4272 plus_constant (Pmode, args_addr,
4273 skip + INTVAL (args_so_far)));
4274 else
4275 temp = memory_address (BLKmode,
4276 plus_constant (Pmode,
4277 gen_rtx_PLUS (Pmode,
4278 args_addr,
4279 args_so_far),
4280 skip));
4282 if (!ACCUMULATE_OUTGOING_ARGS)
4284 /* If the source is referenced relative to the stack pointer,
4285 copy it to another register to stabilize it. We do not need
4286 to do this if we know that we won't be changing sp. */
4288 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4289 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4290 temp = copy_to_reg (temp);
4293 target = gen_rtx_MEM (BLKmode, temp);
4295 /* We do *not* set_mem_attributes here, because incoming arguments
4296 may overlap with sibling call outgoing arguments and we cannot
4297 allow reordering of reads from function arguments with stores
4298 to outgoing arguments of sibling calls. We do, however, want
4299 to record the alignment of the stack slot. */
4300 /* ALIGN may well be better aligned than TYPE, e.g. due to
4301 PARM_BOUNDARY. Assume the caller isn't lying. */
4302 set_mem_align (target, align);
4304 /* If part should go in registers and pushing to that part would
4305 overwrite some of the values that need to go into regs, load the
4306 overlapping values into temporary pseudos to be moved into the hard
4307 regs at the end after the stack pushing has completed.
4308 We cannot load them directly into the hard regs here because
4309 they can be clobbered by the block move expansions.
4310 See PR 65358. */
4312 if (partial > 0 && reg != 0 && mode == BLKmode
4313 && GET_CODE (reg) != PARALLEL)
4315 overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
4316 if (overlapping > 0)
4318 gcc_assert (overlapping % UNITS_PER_WORD == 0);
4319 overlapping /= UNITS_PER_WORD;
4321 tmp_regs = XALLOCAVEC (rtx, overlapping);
4323 for (int i = 0; i < overlapping; i++)
4324 tmp_regs[i] = gen_reg_rtx (word_mode);
4326 for (int i = 0; i < overlapping; i++)
4327 emit_move_insn (tmp_regs[i],
4328 operand_subword_force (target, i, mode));
4330 else if (overlapping == -1)
4331 overlapping = 0;
4332 /* Could not determine whether there is overlap.
4333 Fail the sibcall. */
4334 else
4336 overlapping = 0;
4337 if (sibcall_p)
4338 return false;
4341 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4344 else if (partial > 0)
4346 /* Scalar partly in registers. */
4348 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4349 int i;
4350 int not_stack;
4351 /* # bytes of start of argument
4352 that we must make space for but need not store. */
4353 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4354 int args_offset = INTVAL (args_so_far);
4355 int skip;
4357 /* Push padding now if padding above and stack grows down,
4358 or if padding below and stack grows up.
4359 But if space already allocated, this has already been done. */
4360 if (extra && args_addr == 0
4361 && where_pad != none && where_pad != stack_direction)
4362 anti_adjust_stack (GEN_INT (extra));
4364 /* If we make space by pushing it, we might as well push
4365 the real data. Otherwise, we can leave OFFSET nonzero
4366 and leave the space uninitialized. */
4367 if (args_addr == 0)
4368 offset = 0;
4370 /* Now NOT_STACK gets the number of words that we don't need to
4371 allocate on the stack. Convert OFFSET to words too. */
4372 not_stack = (partial - offset) / UNITS_PER_WORD;
4373 offset /= UNITS_PER_WORD;
4375 /* If the partial register-part of the arg counts in its stack size,
4376 skip the part of stack space corresponding to the registers.
4377 Otherwise, start copying to the beginning of the stack space,
4378 by setting SKIP to 0. */
4379 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4381 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4382 x = validize_mem (force_const_mem (mode, x));
4384 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4385 SUBREGs of such registers are not allowed. */
4386 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4387 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4388 x = copy_to_reg (x);
4390 /* Loop over all the words allocated on the stack for this arg. */
4391 /* We can do it by words, because any scalar bigger than a word
4392 has a size a multiple of a word. */
4393 for (i = size - 1; i >= not_stack; i--)
4394 if (i >= not_stack + offset)
4395 if (!emit_push_insn (operand_subword_force (x, i, mode),
4396 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4397 0, args_addr,
4398 GEN_INT (args_offset + ((i - not_stack + skip)
4399 * UNITS_PER_WORD)),
4400 reg_parm_stack_space, alignment_pad, sibcall_p))
4401 return false;
4403 else
4405 rtx addr;
4406 rtx dest;
4408 /* Push padding now if padding above and stack grows down,
4409 or if padding below and stack grows up.
4410 But if space already allocated, this has already been done. */
4411 if (extra && args_addr == 0
4412 && where_pad != none && where_pad != stack_direction)
4413 anti_adjust_stack (GEN_INT (extra));
4415 #ifdef PUSH_ROUNDING
4416 if (args_addr == 0 && PUSH_ARGS)
4417 emit_single_push_insn (mode, x, type);
4418 else
4419 #endif
4421 if (CONST_INT_P (args_so_far))
4422 addr
4423 = memory_address (mode,
4424 plus_constant (Pmode, args_addr,
4425 INTVAL (args_so_far)));
4426 else
4427 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4428 args_so_far));
4429 dest = gen_rtx_MEM (mode, addr);
4431 /* We do *not* set_mem_attributes here, because incoming arguments
4432 may overlap with sibling call outgoing arguments and we cannot
4433 allow reordering of reads from function arguments with stores
4434 to outgoing arguments of sibling calls. We do, however, want
4435 to record the alignment of the stack slot. */
4436 /* ALIGN may well be better aligned than TYPE, e.g. due to
4437 PARM_BOUNDARY. Assume the caller isn't lying. */
4438 set_mem_align (dest, align);
4440 emit_move_insn (dest, x);
4444 /* Move the partial arguments into the registers and any overlapping
4445 values that we moved into the pseudos in tmp_regs. */
4446 if (partial > 0 && reg != 0)
4448 /* Handle calls that pass values in multiple non-contiguous locations.
4449 The Irix 6 ABI has examples of this. */
4450 if (GET_CODE (reg) == PARALLEL)
4451 emit_group_load (reg, x, type, -1);
4452 else
4454 gcc_assert (partial % UNITS_PER_WORD == 0);
4455 move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);
4457 for (int i = 0; i < overlapping; i++)
4458 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
4459 + nregs - overlapping + i),
4460 tmp_regs[i]);
4465 if (extra && args_addr == 0 && where_pad == stack_direction)
4466 anti_adjust_stack (GEN_INT (extra));
4468 if (alignment_pad && args_addr == 0)
4469 anti_adjust_stack (alignment_pad);
4471 return true;
4474 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4475 operations. */
4477 static rtx
4478 get_subtarget (rtx x)
4480 return (optimize
4481 || x == 0
4482 /* Only registers can be subtargets. */
4483 || !REG_P (x)
4484 /* Don't use hard regs to avoid extending their life. */
4485 || REGNO (x) < FIRST_PSEUDO_REGISTER
4486 ? 0 : x);
4489 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4490 FIELD is a bitfield. Returns true if the optimization was successful,
4491 and there's nothing else to do. */
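/* For example, an increment of a bit-field that occupies the topmost bits
   of its word can be done as a plain addition on the containing word, and
   a 1-bit field can be flipped with xor, avoiding an extract/insert pair. */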
4493 static bool
4494 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4495 unsigned HOST_WIDE_INT bitpos,
4496 unsigned HOST_WIDE_INT bitregion_start,
4497 unsigned HOST_WIDE_INT bitregion_end,
4498 machine_mode mode1, rtx str_rtx,
4499 tree to, tree src)
4501 machine_mode str_mode = GET_MODE (str_rtx);
4502 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4503 tree op0, op1;
4504 rtx value, result;
4505 optab binop;
4506 gimple srcstmt;
4507 enum tree_code code;
4509 if (mode1 != VOIDmode
4510 || bitsize >= BITS_PER_WORD
4511 || str_bitsize > BITS_PER_WORD
4512 || TREE_SIDE_EFFECTS (to)
4513 || TREE_THIS_VOLATILE (to))
4514 return false;
4516 STRIP_NOPS (src);
4517 if (TREE_CODE (src) != SSA_NAME)
4518 return false;
4519 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4520 return false;
4522 srcstmt = get_gimple_for_ssa_name (src);
4523 if (!srcstmt
4524 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4525 return false;
4527 code = gimple_assign_rhs_code (srcstmt);
4529 op0 = gimple_assign_rhs1 (srcstmt);
4531 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4532 to find its initialization. Hopefully the initialization will
4533 be from a bitfield load. */
4534 if (TREE_CODE (op0) == SSA_NAME)
4536 gimple op0stmt = get_gimple_for_ssa_name (op0);
4538 /* We want to eventually have OP0 be the same as TO, which
4539 should be a bitfield. */
4540 if (!op0stmt
4541 || !is_gimple_assign (op0stmt)
4542 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4543 return false;
4544 op0 = gimple_assign_rhs1 (op0stmt);
4547 op1 = gimple_assign_rhs2 (srcstmt);
4549 if (!operand_equal_p (to, op0, 0))
4550 return false;
4552 if (MEM_P (str_rtx))
4554 unsigned HOST_WIDE_INT offset1;
4556 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4557 str_mode = word_mode;
4558 str_mode = get_best_mode (bitsize, bitpos,
4559 bitregion_start, bitregion_end,
4560 MEM_ALIGN (str_rtx), str_mode, 0);
4561 if (str_mode == VOIDmode)
4562 return false;
4563 str_bitsize = GET_MODE_BITSIZE (str_mode);
4565 offset1 = bitpos;
4566 bitpos %= str_bitsize;
4567 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4568 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4570 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4571 return false;
4573 /* If the bit field covers the whole REG/MEM, store_field
4574 will likely generate better code. */
4575 if (bitsize >= str_bitsize)
4576 return false;
4578 /* We can't handle fields split across multiple entities. */
4579 if (bitpos + bitsize > str_bitsize)
4580 return false;
4582 if (BYTES_BIG_ENDIAN)
4583 bitpos = str_bitsize - bitpos - bitsize;
4585 switch (code)
4587 case PLUS_EXPR:
4588 case MINUS_EXPR:
4589 /* For now, just optimize the case of the topmost bitfield,
4590 where we don't need to do any masking, and also
4591 1-bit bitfields, where xor can be used.
4592 We might win by one instruction for the other bitfields
4593 too if insv/extv instructions aren't used, so that
4594 can be added later. */
4595 if (bitpos + bitsize != str_bitsize
4596 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4597 break;
4599 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4600 value = convert_modes (str_mode,
4601 TYPE_MODE (TREE_TYPE (op1)), value,
4602 TYPE_UNSIGNED (TREE_TYPE (op1)));
4604 /* We may be accessing data outside the field, which means
4605 we can alias adjacent data. */
4606 if (MEM_P (str_rtx))
4608 str_rtx = shallow_copy_rtx (str_rtx);
4609 set_mem_alias_set (str_rtx, 0);
4610 set_mem_expr (str_rtx, 0);
4613 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4614 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4616 value = expand_and (str_mode, value, const1_rtx, NULL);
4617 binop = xor_optab;
4619 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4620 result = expand_binop (str_mode, binop, str_rtx,
4621 value, str_rtx, 1, OPTAB_WIDEN);
4622 if (result != str_rtx)
4623 emit_move_insn (str_rtx, result);
4624 return true;
4626 case BIT_IOR_EXPR:
4627 case BIT_XOR_EXPR:
4628 if (TREE_CODE (op1) != INTEGER_CST)
4629 break;
4630 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4631 value = convert_modes (str_mode,
4632 TYPE_MODE (TREE_TYPE (op1)), value,
4633 TYPE_UNSIGNED (TREE_TYPE (op1)));
4635 /* We may be accessing data outside the field, which means
4636 we can alias adjacent data. */
4637 if (MEM_P (str_rtx))
4639 str_rtx = shallow_copy_rtx (str_rtx);
4640 set_mem_alias_set (str_rtx, 0);
4641 set_mem_expr (str_rtx, 0);
4644 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4645 if (bitpos + bitsize != str_bitsize)
4647 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4648 str_mode);
4649 value = expand_and (str_mode, value, mask, NULL_RTX);
4651 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4652 result = expand_binop (str_mode, binop, str_rtx,
4653 value, str_rtx, 1, OPTAB_WIDEN);
4654 if (result != str_rtx)
4655 emit_move_insn (str_rtx, result);
4656 return true;
4658 default:
4659 break;
4662 return false;
4665 /* In the C++ memory model, consecutive bit fields in a structure are
4666 considered one memory location.
4668 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4669 returns the bit range of consecutive bits in which this COMPONENT_REF
4670 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4671 and *OFFSET may be adjusted in the process.
4673 If the access does not need to be restricted, 0 is returned in both
4674 *BITSTART and *BITEND. */
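/* For instance, two adjacent bit-fields that share a
   DECL_BIT_FIELD_REPRESENTATIVE form a single memory location: a store to
   one of them may read-modify-write bits belonging to the other, but must
   not touch the members outside the representative. */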
4676 static void
4677 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4678 unsigned HOST_WIDE_INT *bitend,
4679 tree exp,
4680 HOST_WIDE_INT *bitpos,
4681 tree *offset)
4683 HOST_WIDE_INT bitoffset;
4684 tree field, repr;
4686 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4688 field = TREE_OPERAND (exp, 1);
4689 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4690 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4691 need to limit the range we can access. */
4692 if (!repr)
4694 *bitstart = *bitend = 0;
4695 return;
4698 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4699 part of a larger bit field, then the representative does not serve any
4700 useful purpose. This can occur in Ada. */
4701 if (handled_component_p (TREE_OPERAND (exp, 0)))
4703 machine_mode rmode;
4704 HOST_WIDE_INT rbitsize, rbitpos;
4705 tree roffset;
4706 int unsignedp;
4707 int volatilep = 0;
4708 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4709 &roffset, &rmode, &unsignedp, &volatilep, false);
4710 if ((rbitpos % BITS_PER_UNIT) != 0)
4712 *bitstart = *bitend = 0;
4713 return;
4717 /* Compute the adjustment to bitpos from the offset of the field
4718 relative to the representative. DECL_FIELD_OFFSET of field and
4719 repr are the same by construction if they are not constants,
4720 see finish_bitfield_layout. */
4721 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4722 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4723 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4724 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4725 else
4726 bitoffset = 0;
4727 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4728 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4730 /* If the adjustment is larger than bitpos, we would have a negative bit
4731 position for the lower bound and this may wreak havoc later. Adjust
4732 offset and bitpos to make the lower bound non-negative in that case. */
4733 if (bitoffset > *bitpos)
4735 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4736 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4738 *bitpos += adjust;
4739 if (*offset == NULL_TREE)
4740 *offset = size_int (-adjust / BITS_PER_UNIT);
4741 else
4742 *offset
4743 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4744 *bitstart = 0;
4746 else
4747 *bitstart = *bitpos - bitoffset;
4749 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4752 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4753 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4754 DECL_RTL was not set yet, return NORTL. */
4756 static inline bool
4757 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4759 if (TREE_CODE (addr) != ADDR_EXPR)
4760 return false;
4762 tree base = TREE_OPERAND (addr, 0);
4764 if (!DECL_P (base)
4765 || TREE_ADDRESSABLE (base)
4766 || DECL_MODE (base) == BLKmode)
4767 return false;
4769 if (!DECL_RTL_SET_P (base))
4770 return nortl;
4772 return (!MEM_P (DECL_RTL (base)));
4775 /* Returns true if the MEM_REF REF refers to an object that does not
4776 reside in memory and has non-BLKmode. */
4778 static inline bool
4779 mem_ref_refers_to_non_mem_p (tree ref)
4781 tree base = TREE_OPERAND (ref, 0);
4782 return addr_expr_of_non_mem_decl_p_1 (base, false);
4785 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4786 is true, try generating a nontemporal store. */
4788 void
4789 expand_assignment (tree to, tree from, bool nontemporal)
4791 rtx to_rtx = 0;
4792 rtx result;
4793 machine_mode mode;
4794 unsigned int align;
4795 enum insn_code icode;
4797 /* Don't crash if the lhs of the assignment was erroneous. */
4798 if (TREE_CODE (to) == ERROR_MARK)
4800 expand_normal (from);
4801 return;
4804 /* Optimize away no-op moves without side-effects. */
4805 if (operand_equal_p (to, from, 0))
4806 return;
4808 /* Handle misaligned stores. */
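/* E.g. a store through an int pointer that is known to be only byte
   aligned on a strict-alignment target: if the backend provides a
   movmisalign<mode> pattern it is used, otherwise the value is written
   with store_bit_field. */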
4809 mode = TYPE_MODE (TREE_TYPE (to));
4810 if ((TREE_CODE (to) == MEM_REF
4811 || TREE_CODE (to) == TARGET_MEM_REF)
4812 && mode != BLKmode
4813 && !mem_ref_refers_to_non_mem_p (to)
4814 && ((align = get_object_alignment (to))
4815 < GET_MODE_ALIGNMENT (mode))
4816 && (((icode = optab_handler (movmisalign_optab, mode))
4817 != CODE_FOR_nothing)
4818 || SLOW_UNALIGNED_ACCESS (mode, align)))
4820 rtx reg, mem;
4822 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4823 reg = force_not_mem (reg);
4824 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4826 if (icode != CODE_FOR_nothing)
4828 struct expand_operand ops[2];
4830 create_fixed_operand (&ops[0], mem);
4831 create_input_operand (&ops[1], reg, mode);
4832 /* The movmisalign<mode> pattern cannot fail, else the assignment
4833 would silently be omitted. */
4834 expand_insn (icode, 2, ops);
4836 else
4837 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
4838 return;
4841 /* Assignment of a structure component needs special treatment
4842 if the structure component's rtx is not simply a MEM.
4843 Assignment of an array element at a constant index, and assignment of
4844 an array element in an unaligned packed structure field, has the same
4845 problem. Same for (partially) storing into a non-memory object. */
4846 if (handled_component_p (to)
4847 || (TREE_CODE (to) == MEM_REF
4848 && mem_ref_refers_to_non_mem_p (to))
4849 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4851 machine_mode mode1;
4852 HOST_WIDE_INT bitsize, bitpos;
4853 unsigned HOST_WIDE_INT bitregion_start = 0;
4854 unsigned HOST_WIDE_INT bitregion_end = 0;
4855 tree offset;
4856 int unsignedp;
4857 int volatilep = 0;
4858 tree tem;
4860 push_temp_slots ();
4861 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4862 &unsignedp, &volatilep, true);
4864 /* Make sure bitpos is not negative, it can wreak havoc later. */
4865 if (bitpos < 0)
4867 gcc_assert (offset == NULL_TREE);
4868 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4869 ? 3 : exact_log2 (BITS_PER_UNIT)));
4870 bitpos &= BITS_PER_UNIT - 1;
4873 if (TREE_CODE (to) == COMPONENT_REF
4874 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4875 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4876 /* The C++ memory model naturally applies to byte-aligned fields.
4877 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4878 BITSIZE are not byte-aligned, there is no need to limit the range
4879 we can access. This can occur with packed structures in Ada. */
4880 else if (bitsize > 0
4881 && bitsize % BITS_PER_UNIT == 0
4882 && bitpos % BITS_PER_UNIT == 0)
4884 bitregion_start = bitpos;
4885 bitregion_end = bitpos + bitsize - 1;
4888 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4890 /* If the field has a mode, we want to access it in the
4891 field's mode, not the computed mode.
4892 If a MEM has VOIDmode (external with incomplete type),
4893 use BLKmode for it instead. */
4894 if (MEM_P (to_rtx))
4896 if (mode1 != VOIDmode)
4897 to_rtx = adjust_address (to_rtx, mode1, 0);
4898 else if (GET_MODE (to_rtx) == VOIDmode)
4899 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4902 if (offset != 0)
4904 machine_mode address_mode;
4905 rtx offset_rtx;
4907 if (!MEM_P (to_rtx))
4909 /* We can get constant negative offsets into arrays with broken
4910 user code. Translate this to a trap instead of ICEing. */
4911 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4912 expand_builtin_trap ();
4913 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4916 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4917 address_mode = get_address_mode (to_rtx);
4918 if (GET_MODE (offset_rtx) != address_mode)
4920 /* We cannot be sure that the RTL in offset_rtx is valid outside
4921 of a memory address context, so force it into a register
4922 before attempting to convert it to the desired mode. */
4923 offset_rtx = force_operand (offset_rtx, NULL_RTX);
4924 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4927 /* If we have an expression in OFFSET_RTX and a non-zero
4928 byte offset in BITPOS, adding the byte offset before the
4929 OFFSET_RTX results in better intermediate code, which makes
4930 later rtl optimization passes perform better.
4932 We prefer intermediate code like this:
4934 r124:DI=r123:DI+0x18
4935 [r124:DI]=r121:DI
4937 ... instead of ...
4939 r124:DI=r123:DI+0x10
4940 [r124:DI+0x8]=r121:DI
4942 This is only done for aligned data values, as these can
4943 be expected to result in single move instructions. */
4944 if (mode1 != VOIDmode
4945 && bitpos != 0
4946 && bitsize > 0
4947 && (bitpos % bitsize) == 0
4948 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4949 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4951 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4952 bitregion_start = 0;
4953 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4954 bitregion_end -= bitpos;
4955 bitpos = 0;
4958 to_rtx = offset_address (to_rtx, offset_rtx,
4959 highest_pow2_factor_for_target (to,
4960 offset));
4963 /* No action is needed if the target is not a memory and the field
4964 lies completely outside that target. This can occur if the source
4965 code contains an out-of-bounds access to a small array. */
4966 if (!MEM_P (to_rtx)
4967 && GET_MODE (to_rtx) != BLKmode
4968 && (unsigned HOST_WIDE_INT) bitpos
4969 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4971 expand_normal (from);
4972 result = NULL;
4974 /* Handle expand_expr of a complex value returning a CONCAT. */
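/* This happens e.g. when assigning to __real__ or __imag__ of a complex
   variable held in a register pair; BITPOS selects which half of the
   CONCAT receives the store. */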
4975 else if (GET_CODE (to_rtx) == CONCAT)
4977 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4978 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4979 && bitpos == 0
4980 && bitsize == mode_bitsize)
4981 result = store_expr (from, to_rtx, false, nontemporal);
4982 else if (bitsize == mode_bitsize / 2
4983 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4984 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4985 nontemporal);
4986 else if (bitpos + bitsize <= mode_bitsize / 2)
4987 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4988 bitregion_start, bitregion_end,
4989 mode1, from,
4990 get_alias_set (to), nontemporal);
4991 else if (bitpos >= mode_bitsize / 2)
4992 result = store_field (XEXP (to_rtx, 1), bitsize,
4993 bitpos - mode_bitsize / 2,
4994 bitregion_start, bitregion_end,
4995 mode1, from,
4996 get_alias_set (to), nontemporal);
4997 else if (bitpos == 0 && bitsize == mode_bitsize)
4999 rtx from_rtx;
5000 result = expand_normal (from);
5001 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
5002 TYPE_MODE (TREE_TYPE (from)), 0);
5003 emit_move_insn (XEXP (to_rtx, 0),
5004 read_complex_part (from_rtx, false));
5005 emit_move_insn (XEXP (to_rtx, 1),
5006 read_complex_part (from_rtx, true));
5008 else
5010 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
5011 GET_MODE_SIZE (GET_MODE (to_rtx)));
5012 write_complex_part (temp, XEXP (to_rtx, 0), false);
5013 write_complex_part (temp, XEXP (to_rtx, 1), true);
5014 result = store_field (temp, bitsize, bitpos,
5015 bitregion_start, bitregion_end,
5016 mode1, from,
5017 get_alias_set (to), nontemporal);
5018 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
5019 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
5022 else
5024 if (MEM_P (to_rtx))
5026 /* If the field is at offset zero, we could have been given the
5027 DECL_RTX of the parent struct. Don't munge it. */
5028 to_rtx = shallow_copy_rtx (to_rtx);
5029 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
5030 if (volatilep)
5031 MEM_VOLATILE_P (to_rtx) = 1;
5034 if (optimize_bitfield_assignment_op (bitsize, bitpos,
5035 bitregion_start, bitregion_end,
5036 mode1,
5037 to_rtx, to, from))
5038 result = NULL;
5039 else
5040 result = store_field (to_rtx, bitsize, bitpos,
5041 bitregion_start, bitregion_end,
5042 mode1, from,
5043 get_alias_set (to), nontemporal);
5046 if (result)
5047 preserve_temp_slots (result);
5048 pop_temp_slots ();
5049 return;
5052 /* If the rhs is a function call and its value is not an aggregate,
5053 call the function before we start to compute the lhs.
5054 This is needed for correct code for cases such as
5055 val = setjmp (buf) on machines where reference to val
5056 requires loading up part of an address in a separate insn.
5058 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5059 since it might be a promoted variable where the zero- or sign- extension
5060 needs to be done. Handling this in the normal way is safe because no
5061 computation is done before the call. The same is true for SSA names. */
5062 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5063 && COMPLETE_TYPE_P (TREE_TYPE (from))
5064 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5065 && ! (((TREE_CODE (to) == VAR_DECL
5066 || TREE_CODE (to) == PARM_DECL
5067 || TREE_CODE (to) == RESULT_DECL)
5068 && REG_P (DECL_RTL (to)))
5069 || TREE_CODE (to) == SSA_NAME))
5071 rtx value;
5072 rtx bounds;
5074 push_temp_slots ();
5075 value = expand_normal (from);
5077 /* Split value and bounds to store them separately. */
5078 chkp_split_slot (value, &value, &bounds);
5080 if (to_rtx == 0)
5081 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5083 /* Handle calls that return values in multiple non-contiguous locations.
5084 The Irix 6 ABI has examples of this. */
5085 if (GET_CODE (to_rtx) == PARALLEL)
5087 if (GET_CODE (value) == PARALLEL)
5088 emit_group_move (to_rtx, value);
5089 else
5090 emit_group_load (to_rtx, value, TREE_TYPE (from),
5091 int_size_in_bytes (TREE_TYPE (from)));
5093 else if (GET_CODE (value) == PARALLEL)
5094 emit_group_store (to_rtx, value, TREE_TYPE (from),
5095 int_size_in_bytes (TREE_TYPE (from)));
5096 else if (GET_MODE (to_rtx) == BLKmode)
5098 /* Handle calls that return BLKmode values in registers. */
5099 if (REG_P (value))
5100 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5101 else
5102 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5104 else
5106 if (POINTER_TYPE_P (TREE_TYPE (to)))
5107 value = convert_memory_address_addr_space
5108 (GET_MODE (to_rtx), value,
5109 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5111 emit_move_insn (to_rtx, value);
5114 /* Store bounds if required. */
5115 if (bounds
5116 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5118 gcc_assert (MEM_P (to_rtx));
5119 chkp_emit_bounds_store (bounds, value, to_rtx);
5122 preserve_temp_slots (to_rtx);
5123 pop_temp_slots ();
5124 return;
5127 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5128 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5130 /* Don't move directly into a return register. */
5131 if (TREE_CODE (to) == RESULT_DECL
5132 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5134 rtx temp;
5136 push_temp_slots ();
5138 /* If the source is itself a return value, it still is in a pseudo at
5139 this point so we can move it back to the return register directly. */
5140 if (REG_P (to_rtx)
5141 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5142 && TREE_CODE (from) != CALL_EXPR)
5143 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5144 else
5145 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5147 /* Handle calls that return values in multiple non-contiguous locations.
5148 The Irix 6 ABI has examples of this. */
5149 if (GET_CODE (to_rtx) == PARALLEL)
5151 if (GET_CODE (temp) == PARALLEL)
5152 emit_group_move (to_rtx, temp);
5153 else
5154 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5155 int_size_in_bytes (TREE_TYPE (from)));
5157 else if (temp)
5158 emit_move_insn (to_rtx, temp);
5160 preserve_temp_slots (to_rtx);
5161 pop_temp_slots ();
5162 return;
5165 /* In case we are returning the contents of an object which overlaps
5166 the place the value is being stored, use a safe function when copying
5167 a value through a pointer into a structure value return block. */
5168 if (TREE_CODE (to) == RESULT_DECL
5169 && TREE_CODE (from) == INDIRECT_REF
5170 && ADDR_SPACE_GENERIC_P
5171 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5172 && refs_may_alias_p (to, from)
5173 && cfun->returns_struct
5174 && !cfun->returns_pcc_struct)
5176 rtx from_rtx, size;
5178 push_temp_slots ();
5179 size = expr_size (from);
5180 from_rtx = expand_normal (from);
5182 emit_library_call (memmove_libfunc, LCT_NORMAL,
5183 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5184 XEXP (from_rtx, 0), Pmode,
5185 convert_to_mode (TYPE_MODE (sizetype),
5186 size, TYPE_UNSIGNED (sizetype)),
5187 TYPE_MODE (sizetype));
5189 preserve_temp_slots (to_rtx);
5190 pop_temp_slots ();
5191 return;
5194 /* Compute FROM and store the value in the rtx we got. */
5196 push_temp_slots ();
5197 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, to);
5198 preserve_temp_slots (result);
5199 pop_temp_slots ();
5200 return;
5203 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5204 succeeded, false otherwise. */
5206 bool
5207 emit_storent_insn (rtx to, rtx from)
5209 struct expand_operand ops[2];
5210 machine_mode mode = GET_MODE (to);
5211 enum insn_code code = optab_handler (storent_optab, mode);
5213 if (code == CODE_FOR_nothing)
5214 return false;
5216 create_fixed_operand (&ops[0], to);
5217 create_input_operand (&ops[1], from, mode);
5218 return maybe_expand_insn (code, 2, ops);
5221 /* Generate code for computing expression EXP,
5222 and storing the value into TARGET.
5224 If the mode is BLKmode then we may return TARGET itself.
5225 It turns out that in BLKmode it doesn't cause a problem,
5226 because C has no operators that could combine two different
5227 assignments into the same BLKmode object with different values
5228 with no sequence point. Will other languages need this to
5229 be more thorough?
5231 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5232 stack, and block moves may need to be treated specially.
5234 If NONTEMPORAL is true, try using a nontemporal store instruction.
5236 If BTARGET is not NULL then computed bounds of EXP are
5237 associated with BTARGET. */
5240 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5241 bool nontemporal, tree btarget)
5243 rtx temp;
5244 rtx alt_rtl = NULL_RTX;
5245 location_t loc = curr_insn_location ();
5247 if (VOID_TYPE_P (TREE_TYPE (exp)))
5249 /* C++ can generate ?: expressions with a throw expression in one
5250 branch and an rvalue in the other. Here, we resolve attempts to
5251 store the throw expression's nonexistent result. */
5252 gcc_assert (!call_param_p);
5253 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5254 return NULL_RTX;
5256 if (TREE_CODE (exp) == COMPOUND_EXPR)
5258 /* Perform first part of compound expression, then assign from second
5259 part. */
5260 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5261 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5262 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5263 call_param_p, nontemporal, btarget);
5265 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5267 /* For conditional expression, get safe form of the target. Then
5268 test the condition, doing the appropriate assignment on either
5269 side. This avoids the creation of unnecessary temporaries.
5270 For non-BLKmode, it is more efficient not to do this. */
5272 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5274 do_pending_stack_adjust ();
5275 NO_DEFER_POP;
5276 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5277 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5278 nontemporal, btarget);
5279 emit_jump_insn (gen_jump (lab2));
5280 emit_barrier ();
5281 emit_label (lab1);
5282 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5283 nontemporal, btarget);
5284 emit_label (lab2);
5285 OK_DEFER_POP;
5287 return NULL_RTX;
5289 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5290 /* If this is a scalar in a register that is stored in a wider mode
5291 than the declared mode, compute the result into its declared mode
5292 and then convert to the wider mode. Our value is the computed
5293 expression. */
5295 rtx inner_target = 0;
5297 /* We can do the conversion inside EXP, which will often result
5298 in some optimizations. Do the conversion in two steps: first
5299 change the signedness, if needed, then the extend. But don't
5300 do this if the type of EXP is a subtype of something else
5301 since then the conversion might involve more than just
5302 converting modes. */
5303 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5304 && TREE_TYPE (TREE_TYPE (exp)) == 0
5305 && GET_MODE_PRECISION (GET_MODE (target))
5306 == TYPE_PRECISION (TREE_TYPE (exp)))
5308 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5309 TYPE_UNSIGNED (TREE_TYPE (exp))))
5311 /* Some types, e.g. Fortran's logical*4, won't have a signed
5312 version, so use the mode instead. */
5313 tree ntype
5314 = (signed_or_unsigned_type_for
5315 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5316 if (ntype == NULL)
5317 ntype = lang_hooks.types.type_for_mode
5318 (TYPE_MODE (TREE_TYPE (exp)),
5319 SUBREG_PROMOTED_SIGN (target));
5321 exp = fold_convert_loc (loc, ntype, exp);
5324 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5325 (GET_MODE (SUBREG_REG (target)),
5326 SUBREG_PROMOTED_SIGN (target)),
5327 exp);
5329 inner_target = SUBREG_REG (target);
5332 temp = expand_expr (exp, inner_target, VOIDmode,
5333 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5335 /* Handle bounds returned by call. */
5336 if (TREE_CODE (exp) == CALL_EXPR)
5338 rtx bounds;
5339 chkp_split_slot (temp, &temp, &bounds);
5340 if (bounds && btarget)
5342 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5343 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5344 chkp_set_rtl_bounds (btarget, tmp);
5348 /* If TEMP is a VOIDmode constant, use convert_modes to make
5349 sure that we properly convert it. */
5350 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5352 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5353 temp, SUBREG_PROMOTED_SIGN (target));
5354 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5355 GET_MODE (target), temp,
5356 SUBREG_PROMOTED_SIGN (target));
5359 convert_move (SUBREG_REG (target), temp,
5360 SUBREG_PROMOTED_SIGN (target));
5362 return NULL_RTX;
5364 else if ((TREE_CODE (exp) == STRING_CST
5365 || (TREE_CODE (exp) == MEM_REF
5366 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5367 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5368 == STRING_CST
5369 && integer_zerop (TREE_OPERAND (exp, 1))))
5370 && !nontemporal && !call_param_p
5371 && MEM_P (target))
5373 /* Optimize initialization of an array with a STRING_CST. */
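/* For example, given char buf[100] = "hi"; the string bytes are written
   with store_by_pieces and the remaining bytes of the array are cleared
   below. */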
5374 HOST_WIDE_INT exp_len, str_copy_len;
5375 rtx dest_mem;
5376 tree str = TREE_CODE (exp) == STRING_CST
5377 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5379 exp_len = int_expr_size (exp);
5380 if (exp_len <= 0)
5381 goto normal_expr;
5383 if (TREE_STRING_LENGTH (str) <= 0)
5384 goto normal_expr;
5386 str_copy_len = strlen (TREE_STRING_POINTER (str));
5387 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5388 goto normal_expr;
5390 str_copy_len = TREE_STRING_LENGTH (str);
5391 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5392 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5394 str_copy_len += STORE_MAX_PIECES - 1;
5395 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5397 str_copy_len = MIN (str_copy_len, exp_len);
5398 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5399 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5400 MEM_ALIGN (target), false))
5401 goto normal_expr;
5403 dest_mem = target;
5405 dest_mem = store_by_pieces (dest_mem,
5406 str_copy_len, builtin_strncpy_read_str,
5407 CONST_CAST (char *,
5408 TREE_STRING_POINTER (str)),
5409 MEM_ALIGN (target), false,
5410 exp_len > str_copy_len ? 1 : 0);
5411 if (exp_len > str_copy_len)
5412 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5413 GEN_INT (exp_len - str_copy_len),
5414 BLOCK_OP_NORMAL);
5415 return NULL_RTX;
5417 else
5419 rtx tmp_target;
5421 normal_expr:
5422 /* If we want to use a nontemporal store, force the value to
5423 register first. */
5424 tmp_target = nontemporal ? NULL_RTX : target;
5425 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5426 (call_param_p
5427 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5428 &alt_rtl, false);
5430 /* Handle bounds returned by call. */
5431 if (TREE_CODE (exp) == CALL_EXPR)
5433 rtx bounds;
5434 chkp_split_slot (temp, &temp, &bounds);
5435 if (bounds && btarget)
5437 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5438 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5439 chkp_set_rtl_bounds (btarget, tmp);
5444 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5445 the same as that of TARGET, adjust the constant. This is needed, for
5446 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5447 only a word-sized value. */
5448 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5449 && TREE_CODE (exp) != ERROR_MARK
5450 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5451 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5452 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5454 /* If value was not generated in the target, store it there.
5455 Convert the value to TARGET's type first if necessary and emit the
5456 pending incrementations that have been queued when expanding EXP.
5457 Note that we cannot emit the whole queue blindly because this will
5458 effectively disable the POST_INC optimization later.
5460 If TEMP and TARGET compare equal according to rtx_equal_p, but
5461 one or both of them are volatile memory refs, we have to distinguish
5462 two cases:
5463 - expand_expr has used TARGET. In this case, we must not generate
5464 another copy. This can be detected by TARGET being equal according
5465 to == .
5466 - expand_expr has not used TARGET - that means that the source just
5467 happens to have the same RTX form. Since temp will have been created
5468 by expand_expr, it will compare unequal according to == .
5469 We must generate a copy in this case, to reach the correct number
5470 of volatile memory references. */
5472 if ((! rtx_equal_p (temp, target)
5473 || (temp != target && (side_effects_p (temp)
5474 || side_effects_p (target))))
5475 && TREE_CODE (exp) != ERROR_MARK
5476 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5477 but TARGET is not valid memory reference, TEMP will differ
5478 from TARGET although it is really the same location. */
5479 && !(alt_rtl
5480 && rtx_equal_p (alt_rtl, target)
5481 && !side_effects_p (alt_rtl)
5482 && !side_effects_p (target))
5483 /* If there's nothing to copy, don't bother. Don't call
5484 expr_size unless necessary, because some front-ends (C++)
5485 expr_size-hook must not be given objects that are not
5486 supposed to be bit-copied or bit-initialized. */
5487 && expr_size (exp) != const0_rtx)
5489 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5491 if (GET_MODE (target) == BLKmode)
5493 /* Handle calls that return BLKmode values in registers. */
5494 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5495 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5496 else
5497 store_bit_field (target,
5498 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5499 0, 0, 0, GET_MODE (temp), temp);
5501 else
5502 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5505 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5507 /* Handle copying a string constant into an array. The string
5508 constant may be shorter than the array. So copy just the string's
5509 actual length, and clear the rest. First get the size of the data
5510 type of the string, which is actually the size of the target. */
5511 rtx size = expr_size (exp);
5513 if (CONST_INT_P (size)
5514 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5515 emit_block_move (target, temp, size,
5516 (call_param_p
5517 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5518 else
5520 machine_mode pointer_mode
5521 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5522 machine_mode address_mode = get_address_mode (target);
5524 /* Compute the size of the data to copy from the string. */
5525 tree copy_size
5526 = size_binop_loc (loc, MIN_EXPR,
5527 make_tree (sizetype, size),
5528 size_int (TREE_STRING_LENGTH (exp)));
5529 rtx copy_size_rtx
5530 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5531 (call_param_p
5532 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5533 rtx_code_label *label = 0;
5535 /* Copy that much. */
5536 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5537 TYPE_UNSIGNED (sizetype));
5538 emit_block_move (target, temp, copy_size_rtx,
5539 (call_param_p
5540 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5542 /* Figure out how much is left in TARGET that we have to clear.
5543 Do all calculations in pointer_mode. */
5544 if (CONST_INT_P (copy_size_rtx))
5546 size = plus_constant (address_mode, size,
5547 -INTVAL (copy_size_rtx));
5548 target = adjust_address (target, BLKmode,
5549 INTVAL (copy_size_rtx));
5551 else
5553 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5554 copy_size_rtx, NULL_RTX, 0,
5555 OPTAB_LIB_WIDEN);
5557 if (GET_MODE (copy_size_rtx) != address_mode)
5558 copy_size_rtx = convert_to_mode (address_mode,
5559 copy_size_rtx,
5560 TYPE_UNSIGNED (sizetype));
5562 target = offset_address (target, copy_size_rtx,
5563 highest_pow2_factor (copy_size));
5564 label = gen_label_rtx ();
5565 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5566 GET_MODE (size), 0, label);
5569 if (size != const0_rtx)
5570 clear_storage (target, size, BLOCK_OP_NORMAL);
5572 if (label)
5573 emit_label (label);
5576 /* Handle calls that return values in multiple non-contiguous locations.
5577 The Irix 6 ABI has examples of this. */
5578 else if (GET_CODE (target) == PARALLEL)
5580 if (GET_CODE (temp) == PARALLEL)
5581 emit_group_move (target, temp);
5582 else
5583 emit_group_load (target, temp, TREE_TYPE (exp),
5584 int_size_in_bytes (TREE_TYPE (exp)));
5586 else if (GET_CODE (temp) == PARALLEL)
5587 emit_group_store (target, temp, TREE_TYPE (exp),
5588 int_size_in_bytes (TREE_TYPE (exp)));
5589 else if (GET_MODE (temp) == BLKmode)
5590 emit_block_move (target, temp, expr_size (exp),
5591 (call_param_p
5592 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5593 /* If we emit a nontemporal store, there is nothing else to do. */
5594 else if (nontemporal && emit_storent_insn (target, temp))
5596 else
5598 temp = force_operand (temp, target);
5599 if (temp != target)
5600 emit_move_insn (target, temp);
5604 return NULL_RTX;
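/* Illustrative note (added for exposition, not part of the original code):
   the STRING_CST branch above is what handles an initialization such as

       char buf[16] = "abc";

   when it reaches the expander in this form: the four string bytes
   (including the terminating NUL) are copied with emit_block_move and the
   remaining twelve bytes are cleared with clear_storage; the MIN_EXPR,
   runtime subtraction and label are only needed when the sizes are not
   compile-time constants.  */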
5607 /* Same as store_expr_with_bounds but ignoring bounds of EXP. */
5609 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5611 return store_expr_with_bounds (exp, target, call_param_p, nontemporal, NULL);
5614 /* Return true if field F of structure TYPE is a flexible array. */
5616 static bool
5617 flexible_array_member_p (const_tree f, const_tree type)
5619 const_tree tf;
5621 tf = TREE_TYPE (f);
5622 return (DECL_CHAIN (f) == NULL
5623 && TREE_CODE (tf) == ARRAY_TYPE
5624 && TYPE_DOMAIN (tf)
5625 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5626 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5627 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5628 && int_size_in_bytes (type) >= 0);
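/* Illustration (a sketch added for exposition; the type is hypothetical):
   the shape of field this predicate accepts is the classic C flexible
   array member, e.g.

       struct msg
       {
         int len;
         char data[];
       };

   DATA is the last field, its ARRAY_TYPE has a zero lower bound and no
   TYPE_MAX_VALUE, and the enclosing struct itself still has a known,
   non-negative size, which is what the int_size_in_bytes check above
   verifies.  */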
5631 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5632 must have in order for it to completely initialize a value of type TYPE.
5633 Return -1 if the number isn't known.
5635 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5637 static HOST_WIDE_INT
5638 count_type_elements (const_tree type, bool for_ctor_p)
5640 switch (TREE_CODE (type))
5642 case ARRAY_TYPE:
5644 tree nelts;
5646 nelts = array_type_nelts (type);
5647 if (nelts && tree_fits_uhwi_p (nelts))
5649 unsigned HOST_WIDE_INT n;
5651 n = tree_to_uhwi (nelts) + 1;
5652 if (n == 0 || for_ctor_p)
5653 return n;
5654 else
5655 return n * count_type_elements (TREE_TYPE (type), false);
5657 return for_ctor_p ? -1 : 1;
5660 case RECORD_TYPE:
5662 unsigned HOST_WIDE_INT n;
5663 tree f;
5665 n = 0;
5666 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5667 if (TREE_CODE (f) == FIELD_DECL)
5669 if (!for_ctor_p)
5670 n += count_type_elements (TREE_TYPE (f), false);
5671 else if (!flexible_array_member_p (f, type))
5672 /* Don't count flexible arrays, which are not supposed
5673 to be initialized. */
5674 n += 1;
5677 return n;
5680 case UNION_TYPE:
5681 case QUAL_UNION_TYPE:
5683 tree f;
5684 HOST_WIDE_INT n, m;
5686 gcc_assert (!for_ctor_p);
5687 /* Estimate the number of scalars in each field and pick the
5688 maximum. Other estimates would do instead; the idea is simply
5689 to make sure that the estimate is not sensitive to the ordering
5690 of the fields. */
5691 n = 1;
5692 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5693 if (TREE_CODE (f) == FIELD_DECL)
5695 m = count_type_elements (TREE_TYPE (f), false);
5696 /* If the field doesn't span the whole union, add an extra
5697 scalar for the rest. */
5698 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5699 TYPE_SIZE (type)) != 1)
5700 m++;
5701 if (n < m)
5702 n = m;
5704 return n;
5707 case COMPLEX_TYPE:
5708 return 2;
5710 case VECTOR_TYPE:
5711 return TYPE_VECTOR_SUBPARTS (type);
5713 case INTEGER_TYPE:
5714 case REAL_TYPE:
5715 case FIXED_POINT_TYPE:
5716 case ENUMERAL_TYPE:
5717 case BOOLEAN_TYPE:
5718 case POINTER_TYPE:
5719 case OFFSET_TYPE:
5720 case REFERENCE_TYPE:
5721 case NULLPTR_TYPE:
5722 return 1;
5724 case ERROR_MARK:
5725 return 0;
5727 case VOID_TYPE:
5728 case METHOD_TYPE:
5729 case FUNCTION_TYPE:
5730 case LANG_TYPE:
5731 default:
5732 gcc_unreachable ();
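/* Worked example (added for exposition; the type is hypothetical):

       struct s { int a; int b[4]; _Complex double c; };

   With FOR_CTOR_P false the estimate of scalars is
   1 (a) + 4 * 1 (b) + 2 (c) = 7; with FOR_CTOR_P true the result is 3,
   the number of top-level elements a CONSTRUCTOR must supply for the
   initialization to be complete.  */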
5736 /* Helper for categorize_ctor_elements. Identical interface. */
5738 static bool
5739 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5740 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5742 unsigned HOST_WIDE_INT idx;
5743 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5744 tree value, purpose, elt_type;
5746 /* Whether CTOR is a valid constant initializer, in accordance with what
5747 initializer_constant_valid_p does. If inferred from the constructor
5748 elements, true until proven otherwise. */
5749 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5750 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5752 nz_elts = 0;
5753 init_elts = 0;
5754 num_fields = 0;
5755 elt_type = NULL_TREE;
5757 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5759 HOST_WIDE_INT mult = 1;
5761 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5763 tree lo_index = TREE_OPERAND (purpose, 0);
5764 tree hi_index = TREE_OPERAND (purpose, 1);
5766 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5767 mult = (tree_to_uhwi (hi_index)
5768 - tree_to_uhwi (lo_index) + 1);
5770 num_fields += mult;
5771 elt_type = TREE_TYPE (value);
5773 switch (TREE_CODE (value))
5775 case CONSTRUCTOR:
5777 HOST_WIDE_INT nz = 0, ic = 0;
5779 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5780 p_complete);
5782 nz_elts += mult * nz;
5783 init_elts += mult * ic;
5785 if (const_from_elts_p && const_p)
5786 const_p = const_elt_p;
5788 break;
5790 case INTEGER_CST:
5791 case REAL_CST:
5792 case FIXED_CST:
5793 if (!initializer_zerop (value))
5794 nz_elts += mult;
5795 init_elts += mult;
5796 break;
5798 case STRING_CST:
5799 nz_elts += mult * TREE_STRING_LENGTH (value);
5800 init_elts += mult * TREE_STRING_LENGTH (value);
5801 break;
5803 case COMPLEX_CST:
5804 if (!initializer_zerop (TREE_REALPART (value)))
5805 nz_elts += mult;
5806 if (!initializer_zerop (TREE_IMAGPART (value)))
5807 nz_elts += mult;
5808 init_elts += mult;
5809 break;
5811 case VECTOR_CST:
5813 unsigned i;
5814 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5816 tree v = VECTOR_CST_ELT (value, i);
5817 if (!initializer_zerop (v))
5818 nz_elts += mult;
5819 init_elts += mult;
5822 break;
5824 default:
5826 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5827 nz_elts += mult * tc;
5828 init_elts += mult * tc;
5830 if (const_from_elts_p && const_p)
5831 const_p = initializer_constant_valid_p (value, elt_type)
5832 != NULL_TREE;
5834 break;
5838 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5839 num_fields, elt_type))
5840 *p_complete = false;
5842 *p_nz_elts += nz_elts;
5843 *p_init_elts += init_elts;
5845 return const_p;
5848 /* Examine CTOR to discover:
5849 * how many scalar fields are set to nonzero values,
5850 and place it in *P_NZ_ELTS;
5851 * how many scalar fields in total are in CTOR,
5852 and place it in *P_INIT_ELTS.
5853 * whether the constructor is complete -- in the sense that every
5854 meaningful byte is explicitly given a value --
5855 and place it in *P_COMPLETE.
5857 Return whether or not CTOR is a valid static constant initializer, the same
5858 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5860 bool
5861 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5862 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5864 *p_nz_elts = 0;
5865 *p_init_elts = 0;
5866 *p_complete = true;
5868 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
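/* Usage sketch (the variable names are hypothetical): callers use this to
   decide how to expand an aggregate initializer.

       HOST_WIDE_INT nz_elts, init_elts;
       bool complete_p;
       bool const_p
         = categorize_ctor_elements (ctor, &nz_elts, &init_elts, &complete_p);

   CONST_P says whether CTOR is a valid static constant initializer, the
   two counts give the density of nonzero scalars, and COMPLETE_P says
   whether any implicit zero-filling is still needed; mostly_zeros_p and
   all_zeros_p below are thin wrappers around exactly this call.  */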
5871 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5872 of which had type LAST_TYPE. Each element was itself a complete
5873 initializer, in the sense that every meaningful byte was explicitly
5874 given a value. Return true if the same is true for the constructor
5875 as a whole. */
5877 bool
5878 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5879 const_tree last_type)
5881 if (TREE_CODE (type) == UNION_TYPE
5882 || TREE_CODE (type) == QUAL_UNION_TYPE)
5884 if (num_elts == 0)
5885 return false;
5887 gcc_assert (num_elts == 1 && last_type);
5889 /* ??? We could look at each element of the union, and find the
5890 largest element. Which would avoid comparing the size of the
5891 initialized element against any tail padding in the union.
5892 Doesn't seem worth the effort... */
5893 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5896 return count_type_elements (type, true) == num_elts;
5899 /* Return 1 if EXP contains mostly (3/4) zeros. */
5901 static int
5902 mostly_zeros_p (const_tree exp)
5904 if (TREE_CODE (exp) == CONSTRUCTOR)
5906 HOST_WIDE_INT nz_elts, init_elts;
5907 bool complete_p;
5909 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5910 return !complete_p || nz_elts < init_elts / 4;
5913 return initializer_zerop (exp);
5916 /* Return 1 if EXP contains all zeros. */
5918 static int
5919 all_zeros_p (const_tree exp)
5921 if (TREE_CODE (exp) == CONSTRUCTOR)
5923 HOST_WIDE_INT nz_elts, init_elts;
5924 bool complete_p;
5926 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5927 return nz_elts == 0;
5930 return initializer_zerop (exp);
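/* Numeric example (added for exposition): if a complete CONSTRUCTOR is
   categorized as init_elts == 8 with nz_elts == 1, then mostly_zeros_p
   returns true (1 < 8 / 4) and the expander will prefer to clear the
   whole object and then store only the nonzero element, while all_zeros_p
   returns false because nz_elts is not 0.  */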
5933 /* Helper function for store_constructor.
5934 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5935 CLEARED is as for store_constructor.
5936 ALIAS_SET is the alias set to use for any stores.
5938 This provides a recursive shortcut back to store_constructor when it isn't
5939 necessary to go through store_field. This is so that we can pass through
5940 the cleared field to let store_constructor know that we may not have to
5941 clear a substructure if the outer structure has already been cleared. */
5943 static void
5944 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5945 HOST_WIDE_INT bitpos, machine_mode mode,
5946 tree exp, int cleared, alias_set_type alias_set)
5948 if (TREE_CODE (exp) == CONSTRUCTOR
5949 /* We can only call store_constructor recursively if the size and
5950 bit position are on a byte boundary. */
5951 && bitpos % BITS_PER_UNIT == 0
5952 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5953 /* If we have a nonzero bitpos for a register target, then we just
5954 let store_field do the bitfield handling. This is unlikely to
5955 generate unnecessary clear instructions anyway. */
5956 && (bitpos == 0 || MEM_P (target)))
5958 if (MEM_P (target))
5959 target
5960 = adjust_address (target,
5961 GET_MODE (target) == BLKmode
5962 || 0 != (bitpos
5963 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5964 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5967 /* Update the alias set, if required. */
5968 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5969 && MEM_ALIAS_SET (target) != 0)
5971 target = copy_rtx (target);
5972 set_mem_alias_set (target, alias_set);
5975 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5977 else
5978 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
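/* In short (a paraphrase; the arguments below are hypothetical): a nested
   CONSTRUCTOR whose position and size are byte-aligned recurses directly,
   e.g.

       store_constructor_field (mem, 32, 0, SImode, inner_ctor, 1, aset);

   forwards to store_constructor with a 4-byte SIZE and CLEARED still set,
   so the inner aggregate can skip clearing itself; anything else falls
   back to store_field.  */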
5982 /* Returns the number of FIELD_DECLs in TYPE. */
5984 static int
5985 fields_length (const_tree type)
5987 tree t = TYPE_FIELDS (type);
5988 int count = 0;
5990 for (; t; t = DECL_CHAIN (t))
5991 if (TREE_CODE (t) == FIELD_DECL)
5992 ++count;
5994 return count;
5998 /* Store the value of constructor EXP into the rtx TARGET.
5999 TARGET is either a REG or a MEM; we know it cannot conflict, since
6000 safe_from_p has been called.
6001 CLEARED is true if TARGET is known to have been zero'd.
6002 SIZE is the number of bytes of TARGET we are allowed to modify: this
6003 may not be the same as the size of EXP if we are assigning to a field
6004 which has been packed to exclude padding bits. */
6006 static void
6007 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
6009 tree type = TREE_TYPE (exp);
6010 #ifdef WORD_REGISTER_OPERATIONS
6011 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
6012 #endif
6014 switch (TREE_CODE (type))
6016 case RECORD_TYPE:
6017 case UNION_TYPE:
6018 case QUAL_UNION_TYPE:
6020 unsigned HOST_WIDE_INT idx;
6021 tree field, value;
6023 /* If size is zero or the target is already cleared, do nothing. */
6024 if (size == 0 || cleared)
6025 cleared = 1;
6026 /* We either clear the aggregate or indicate the value is dead. */
6027 else if ((TREE_CODE (type) == UNION_TYPE
6028 || TREE_CODE (type) == QUAL_UNION_TYPE)
6029 && ! CONSTRUCTOR_ELTS (exp))
6030 /* If the constructor is empty, clear the union. */
6032 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6033 cleared = 1;
6036 /* If we are building a static constructor into a register,
6037 set the initial value as zero so we can fold the value into
6038 a constant. But if more than one register is involved,
6039 this probably loses. */
6040 else if (REG_P (target) && TREE_STATIC (exp)
6041 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
6043 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6044 cleared = 1;
6047 /* If the constructor has fewer fields than the structure or
6048 if we are initializing the structure to mostly zeros, clear
6049 the whole structure first. Don't do this if TARGET is a
6050 register whose mode size isn't equal to SIZE since
6051 clear_storage can't handle this case. */
6052 else if (size > 0
6053 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
6054 != fields_length (type))
6055 || mostly_zeros_p (exp))
6056 && (!REG_P (target)
6057 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
6058 == size)))
6060 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6061 cleared = 1;
6064 if (REG_P (target) && !cleared)
6065 emit_clobber (target);
6067 /* Store each element of the constructor into the
6068 corresponding field of TARGET. */
6069 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6071 machine_mode mode;
6072 HOST_WIDE_INT bitsize;
6073 HOST_WIDE_INT bitpos = 0;
6074 tree offset;
6075 rtx to_rtx = target;
6077 /* Just ignore missing fields. We cleared the whole
6078 structure, above, if any fields are missing. */
6079 if (field == 0)
6080 continue;
6082 if (cleared && initializer_zerop (value))
6083 continue;
6085 if (tree_fits_uhwi_p (DECL_SIZE (field)))
6086 bitsize = tree_to_uhwi (DECL_SIZE (field));
6087 else
6088 bitsize = -1;
6090 mode = DECL_MODE (field);
6091 if (DECL_BIT_FIELD (field))
6092 mode = VOIDmode;
6094 offset = DECL_FIELD_OFFSET (field);
6095 if (tree_fits_shwi_p (offset)
6096 && tree_fits_shwi_p (bit_position (field)))
6098 bitpos = int_bit_position (field);
6099 offset = 0;
6101 else
6102 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
6104 if (offset)
6106 machine_mode address_mode;
6107 rtx offset_rtx;
6109 offset
6110 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
6111 make_tree (TREE_TYPE (exp),
6112 target));
6114 offset_rtx = expand_normal (offset);
6115 gcc_assert (MEM_P (to_rtx));
6117 address_mode = get_address_mode (to_rtx);
6118 if (GET_MODE (offset_rtx) != address_mode)
6119 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
6121 to_rtx = offset_address (to_rtx, offset_rtx,
6122 highest_pow2_factor (offset));
6125 #ifdef WORD_REGISTER_OPERATIONS
6126 /* If this initializes a field that is smaller than a
6127 word, at the start of a word, try to widen it to a full
6128 word. This special case allows us to output C++ member
6129 function initializations in a form that the optimizers
6130 can understand. */
6131 if (REG_P (target)
6132 && bitsize < BITS_PER_WORD
6133 && bitpos % BITS_PER_WORD == 0
6134 && GET_MODE_CLASS (mode) == MODE_INT
6135 && TREE_CODE (value) == INTEGER_CST
6136 && exp_size >= 0
6137 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6139 tree type = TREE_TYPE (value);
6141 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6143 type = lang_hooks.types.type_for_mode
6144 (word_mode, TYPE_UNSIGNED (type));
6145 value = fold_convert (type, value);
6148 if (BYTES_BIG_ENDIAN)
6149 value
6150 = fold_build2 (LSHIFT_EXPR, type, value,
6151 build_int_cst (type,
6152 BITS_PER_WORD - bitsize));
6153 bitsize = BITS_PER_WORD;
6154 mode = word_mode;
6156 #endif
6158 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6159 && DECL_NONADDRESSABLE_P (field))
6161 to_rtx = copy_rtx (to_rtx);
6162 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6165 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6166 value, cleared,
6167 get_alias_set (TREE_TYPE (field)));
6169 break;
6171 case ARRAY_TYPE:
6173 tree value, index;
6174 unsigned HOST_WIDE_INT i;
6175 int need_to_clear;
6176 tree domain;
6177 tree elttype = TREE_TYPE (type);
6178 int const_bounds_p;
6179 HOST_WIDE_INT minelt = 0;
6180 HOST_WIDE_INT maxelt = 0;
6182 domain = TYPE_DOMAIN (type);
6183 const_bounds_p = (TYPE_MIN_VALUE (domain)
6184 && TYPE_MAX_VALUE (domain)
6185 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6186 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6188 /* If we have constant bounds for the range of the type, get them. */
6189 if (const_bounds_p)
6191 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6192 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6195 /* If the constructor has fewer elements than the array, clear
6196 the whole array first. Similarly if this is a static
6197 constructor of a non-BLKmode object. */
6198 if (cleared)
6199 need_to_clear = 0;
6200 else if (REG_P (target) && TREE_STATIC (exp))
6201 need_to_clear = 1;
6202 else
6204 unsigned HOST_WIDE_INT idx;
6205 tree index, value;
6206 HOST_WIDE_INT count = 0, zero_count = 0;
6207 need_to_clear = ! const_bounds_p;
6209 /* This loop is a more accurate version of the loop in
6210 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6211 is also needed to check for missing elements. */
6212 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6214 HOST_WIDE_INT this_node_count;
6216 if (need_to_clear)
6217 break;
6219 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6221 tree lo_index = TREE_OPERAND (index, 0);
6222 tree hi_index = TREE_OPERAND (index, 1);
6224 if (! tree_fits_uhwi_p (lo_index)
6225 || ! tree_fits_uhwi_p (hi_index))
6227 need_to_clear = 1;
6228 break;
6231 this_node_count = (tree_to_uhwi (hi_index)
6232 - tree_to_uhwi (lo_index) + 1);
6234 else
6235 this_node_count = 1;
6237 count += this_node_count;
6238 if (mostly_zeros_p (value))
6239 zero_count += this_node_count;
6242 /* Clear the entire array first if there are any missing
6243 elements, or if the incidence of zero elements is >=
6244 75%. */
6245 if (! need_to_clear
6246 && (count < maxelt - minelt + 1
6247 || 4 * zero_count >= 3 * count))
6248 need_to_clear = 1;
6251 if (need_to_clear && size > 0)
6253 if (REG_P (target))
6254 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6255 else
6256 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6257 cleared = 1;
6260 if (!cleared && REG_P (target))
6261 /* Inform later passes that the old value is dead. */
6262 emit_clobber (target);
6264 /* Store each element of the constructor into the
6265 corresponding element of TARGET, determined by counting the
6266 elements. */
6267 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6269 machine_mode mode;
6270 HOST_WIDE_INT bitsize;
6271 HOST_WIDE_INT bitpos;
6272 rtx xtarget = target;
6274 if (cleared && initializer_zerop (value))
6275 continue;
6277 mode = TYPE_MODE (elttype);
6278 if (mode == BLKmode)
6279 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6280 ? tree_to_uhwi (TYPE_SIZE (elttype))
6281 : -1);
6282 else
6283 bitsize = GET_MODE_BITSIZE (mode);
6285 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6287 tree lo_index = TREE_OPERAND (index, 0);
6288 tree hi_index = TREE_OPERAND (index, 1);
6289 rtx index_r, pos_rtx;
6290 HOST_WIDE_INT lo, hi, count;
6291 tree position;
6293 /* If the range is constant and "small", unroll the loop. */
6294 if (const_bounds_p
6295 && tree_fits_shwi_p (lo_index)
6296 && tree_fits_shwi_p (hi_index)
6297 && (lo = tree_to_shwi (lo_index),
6298 hi = tree_to_shwi (hi_index),
6299 count = hi - lo + 1,
6300 (!MEM_P (target)
6301 || count <= 2
6302 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6303 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6304 <= 40 * 8)))))
6306 lo -= minelt; hi -= minelt;
6307 for (; lo <= hi; lo++)
6309 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6311 if (MEM_P (target)
6312 && !MEM_KEEP_ALIAS_SET_P (target)
6313 && TREE_CODE (type) == ARRAY_TYPE
6314 && TYPE_NONALIASED_COMPONENT (type))
6316 target = copy_rtx (target);
6317 MEM_KEEP_ALIAS_SET_P (target) = 1;
6320 store_constructor_field
6321 (target, bitsize, bitpos, mode, value, cleared,
6322 get_alias_set (elttype));
6325 else
6327 rtx_code_label *loop_start = gen_label_rtx ();
6328 rtx_code_label *loop_end = gen_label_rtx ();
6329 tree exit_cond;
6331 expand_normal (hi_index);
6333 index = build_decl (EXPR_LOCATION (exp),
6334 VAR_DECL, NULL_TREE, domain);
6335 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6336 SET_DECL_RTL (index, index_r);
6337 store_expr (lo_index, index_r, 0, false);
6339 /* Build the head of the loop. */
6340 do_pending_stack_adjust ();
6341 emit_label (loop_start);
6343 /* Assign value to element index. */
6344 position =
6345 fold_convert (ssizetype,
6346 fold_build2 (MINUS_EXPR,
6347 TREE_TYPE (index),
6348 index,
6349 TYPE_MIN_VALUE (domain)));
6351 position =
6352 size_binop (MULT_EXPR, position,
6353 fold_convert (ssizetype,
6354 TYPE_SIZE_UNIT (elttype)));
6356 pos_rtx = expand_normal (position);
6357 xtarget = offset_address (target, pos_rtx,
6358 highest_pow2_factor (position));
6359 xtarget = adjust_address (xtarget, mode, 0);
6360 if (TREE_CODE (value) == CONSTRUCTOR)
6361 store_constructor (value, xtarget, cleared,
6362 bitsize / BITS_PER_UNIT);
6363 else
6364 store_expr (value, xtarget, 0, false);
6366 /* Generate a conditional jump to exit the loop. */
6367 exit_cond = build2 (LT_EXPR, integer_type_node,
6368 index, hi_index);
6369 jumpif (exit_cond, loop_end, -1);
6371 /* Update the loop counter, and jump to the head of
6372 the loop. */
6373 expand_assignment (index,
6374 build2 (PLUS_EXPR, TREE_TYPE (index),
6375 index, integer_one_node),
6376 false);
6378 emit_jump (loop_start);
6380 /* Build the end of the loop. */
6381 emit_label (loop_end);
6384 else if ((index != 0 && ! tree_fits_shwi_p (index))
6385 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6387 tree position;
6389 if (index == 0)
6390 index = ssize_int (1);
6392 if (minelt)
6393 index = fold_convert (ssizetype,
6394 fold_build2 (MINUS_EXPR,
6395 TREE_TYPE (index),
6396 index,
6397 TYPE_MIN_VALUE (domain)));
6399 position =
6400 size_binop (MULT_EXPR, index,
6401 fold_convert (ssizetype,
6402 TYPE_SIZE_UNIT (elttype)));
6403 xtarget = offset_address (target,
6404 expand_normal (position),
6405 highest_pow2_factor (position));
6406 xtarget = adjust_address (xtarget, mode, 0);
6407 store_expr (value, xtarget, 0, false);
6409 else
6411 if (index != 0)
6412 bitpos = ((tree_to_shwi (index) - minelt)
6413 * tree_to_uhwi (TYPE_SIZE (elttype)));
6414 else
6415 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6417 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6418 && TREE_CODE (type) == ARRAY_TYPE
6419 && TYPE_NONALIASED_COMPONENT (type))
6421 target = copy_rtx (target);
6422 MEM_KEEP_ALIAS_SET_P (target) = 1;
6424 store_constructor_field (target, bitsize, bitpos, mode, value,
6425 cleared, get_alias_set (elttype));
6428 break;
6431 case VECTOR_TYPE:
6433 unsigned HOST_WIDE_INT idx;
6434 constructor_elt *ce;
6435 int i;
6436 int need_to_clear;
6437 int icode = CODE_FOR_nothing;
6438 tree elttype = TREE_TYPE (type);
6439 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6440 machine_mode eltmode = TYPE_MODE (elttype);
6441 HOST_WIDE_INT bitsize;
6442 HOST_WIDE_INT bitpos;
6443 rtvec vector = NULL;
6444 unsigned n_elts;
6445 alias_set_type alias;
6447 gcc_assert (eltmode != BLKmode);
6449 n_elts = TYPE_VECTOR_SUBPARTS (type);
6450 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6452 machine_mode mode = GET_MODE (target);
6454 icode = (int) optab_handler (vec_init_optab, mode);
6455 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6456 if (icode != CODE_FOR_nothing)
6458 tree value;
6460 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6461 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6463 icode = CODE_FOR_nothing;
6464 break;
6467 if (icode != CODE_FOR_nothing)
6469 unsigned int i;
6471 vector = rtvec_alloc (n_elts);
6472 for (i = 0; i < n_elts; i++)
6473 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6477 /* If the constructor has fewer elements than the vector,
6478 clear the whole vector first. Similarly if this is a static
6479 constructor of a non-BLKmode object. */
6480 if (cleared)
6481 need_to_clear = 0;
6482 else if (REG_P (target) && TREE_STATIC (exp))
6483 need_to_clear = 1;
6484 else
6486 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6487 tree value;
6489 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6491 int n_elts_here = tree_to_uhwi
6492 (int_const_binop (TRUNC_DIV_EXPR,
6493 TYPE_SIZE (TREE_TYPE (value)),
6494 TYPE_SIZE (elttype)));
6496 count += n_elts_here;
6497 if (mostly_zeros_p (value))
6498 zero_count += n_elts_here;
6501 /* Clear the entire vector first if there are any missing elements,
6502 or if the incidence of zero elements is >= 75%. */
6503 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6506 if (need_to_clear && size > 0 && !vector)
6508 if (REG_P (target))
6509 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6510 else
6511 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6512 cleared = 1;
6515 /* Inform later passes that the old value is dead. */
6516 if (!cleared && !vector && REG_P (target))
6517 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6519 if (MEM_P (target))
6520 alias = MEM_ALIAS_SET (target);
6521 else
6522 alias = get_alias_set (elttype);
6524 /* Store each element of the constructor into the corresponding
6525 element of TARGET, determined by counting the elements. */
6526 for (idx = 0, i = 0;
6527 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6528 idx++, i += bitsize / elt_size)
6530 HOST_WIDE_INT eltpos;
6531 tree value = ce->value;
6533 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6534 if (cleared && initializer_zerop (value))
6535 continue;
6537 if (ce->index)
6538 eltpos = tree_to_uhwi (ce->index);
6539 else
6540 eltpos = i;
6542 if (vector)
6544 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6545 elements. */
6546 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6547 RTVEC_ELT (vector, eltpos)
6548 = expand_normal (value);
6550 else
6552 machine_mode value_mode =
6553 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6554 ? TYPE_MODE (TREE_TYPE (value))
6555 : eltmode;
6556 bitpos = eltpos * elt_size;
6557 store_constructor_field (target, bitsize, bitpos, value_mode,
6558 value, cleared, alias);
6562 if (vector)
6563 emit_insn (GEN_FCN (icode)
6564 (target,
6565 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6566 break;
6569 default:
6570 gcc_unreachable ();
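/* End-to-end sketch (illustrative; assumes the CONSTRUCTOR reaches the
   expander with all of its elements explicit): for a memory target and a
   record with eight int fields of which only the last is nonzero, the
   RECORD_TYPE case sees mostly_zeros_p, clears the whole object with
   clear_storage, and then emits a single store_constructor_field for the
   one nonzero field instead of eight member stores.  */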
6574 /* Store the value of EXP (an expression tree)
6575 into a subfield of TARGET which has mode MODE and occupies
6576 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6577 If MODE is VOIDmode, it means that we are storing into a bit-field.
6579 BITREGION_START is bitpos of the first bitfield in this region.
6580 BITREGION_END is the bitpos of the ending bitfield in this region.
6581 These two fields are 0, if the C++ memory model does not apply,
6582 or we are not interested in keeping track of bitfield regions.
6584 Always return const0_rtx unless we have something particular to
6585 return.
6587 ALIAS_SET is the alias set for the destination. This value will
6588 (in general) be different from that for TARGET, since TARGET is a
6589 reference to the containing structure.
6591 If NONTEMPORAL is true, try generating a nontemporal store. */
6593 static rtx
6594 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6595 unsigned HOST_WIDE_INT bitregion_start,
6596 unsigned HOST_WIDE_INT bitregion_end,
6597 machine_mode mode, tree exp,
6598 alias_set_type alias_set, bool nontemporal)
6600 if (TREE_CODE (exp) == ERROR_MARK)
6601 return const0_rtx;
6603 /* If we have nothing to store, do nothing unless the expression has
6604 side-effects. */
6605 if (bitsize == 0)
6606 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6608 if (GET_CODE (target) == CONCAT)
6610 /* We're storing into a struct containing a single __complex. */
6612 gcc_assert (!bitpos);
6613 return store_expr (exp, target, 0, nontemporal);
6616 /* If the structure is in a register or if the component
6617 is a bit field, we cannot use addressing to access it.
6618 Use bit-field techniques or SUBREG to store in it. */
6620 if (mode == VOIDmode
6621 || (mode != BLKmode && ! direct_store[(int) mode]
6622 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6623 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6624 || REG_P (target)
6625 || GET_CODE (target) == SUBREG
6626 /* If the field isn't aligned enough to store as an ordinary memref,
6627 store it as a bit field. */
6628 || (mode != BLKmode
6629 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6630 || bitpos % GET_MODE_ALIGNMENT (mode))
6631 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6632 || (bitpos % BITS_PER_UNIT != 0)))
6633 || (bitsize >= 0 && mode != BLKmode
6634 && GET_MODE_BITSIZE (mode) > bitsize)
6635 /* If the RHS and field are a constant size and the size of the
6636 RHS isn't the same size as the bitfield, we must use bitfield
6637 operations. */
6638 || (bitsize >= 0
6639 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6640 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6641 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6642 decl we must use bitfield operations. */
6643 || (bitsize >= 0
6644 && TREE_CODE (exp) == MEM_REF
6645 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6646 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6647 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6648 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6650 rtx temp;
6651 gimple nop_def;
6653 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6654 implies a mask operation. If the precision is the same size as
6655 the field we're storing into, that mask is redundant. This is
6656 particularly common with bit field assignments generated by the
6657 C front end. */
6658 nop_def = get_def_for_expr (exp, NOP_EXPR);
6659 if (nop_def)
6661 tree type = TREE_TYPE (exp);
6662 if (INTEGRAL_TYPE_P (type)
6663 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6664 && bitsize == TYPE_PRECISION (type))
6666 tree op = gimple_assign_rhs1 (nop_def);
6667 type = TREE_TYPE (op);
6668 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6669 exp = op;
6673 temp = expand_normal (exp);
6675 /* If BITSIZE is narrower than the size of the type of EXP
6676 we will be narrowing TEMP. Normally, what's wanted are the
6677 low-order bits. However, if EXP's type is a record and this is
6678 a big-endian machine, we want the upper BITSIZE bits.
6679 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6680 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6681 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6682 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6683 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6684 NULL_RTX, 1);
6686 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6687 if (mode != VOIDmode && mode != BLKmode
6688 && mode != TYPE_MODE (TREE_TYPE (exp)))
6689 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6691 /* If TEMP is not a PARALLEL (see below) and its mode and that of TARGET
6692 are both BLKmode, both must be in memory and BITPOS must be aligned
6693 on a byte boundary. If so, we simply do a block copy. Likewise for
6694 a BLKmode-like TARGET. */
6695 if (GET_CODE (temp) != PARALLEL
6696 && GET_MODE (temp) == BLKmode
6697 && (GET_MODE (target) == BLKmode
6698 || (MEM_P (target)
6699 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6700 && (bitpos % BITS_PER_UNIT) == 0
6701 && (bitsize % BITS_PER_UNIT) == 0)))
6703 gcc_assert (MEM_P (target) && MEM_P (temp)
6704 && (bitpos % BITS_PER_UNIT) == 0);
6706 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6707 emit_block_move (target, temp,
6708 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6709 / BITS_PER_UNIT),
6710 BLOCK_OP_NORMAL);
6712 return const0_rtx;
6715 /* Handle calls that return values in multiple non-contiguous locations.
6716 The Irix 6 ABI has examples of this. */
6717 if (GET_CODE (temp) == PARALLEL)
6719 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6720 rtx temp_target;
6721 if (mode == BLKmode || mode == VOIDmode)
6722 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6723 temp_target = gen_reg_rtx (mode);
6724 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6725 temp = temp_target;
6727 else if (mode == BLKmode)
6729 /* Handle calls that return BLKmode values in registers. */
6730 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6732 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6733 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6734 temp = temp_target;
6736 else
6738 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6739 rtx temp_target;
6740 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6741 temp_target = gen_reg_rtx (mode);
6742 temp_target
6743 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6744 temp_target, mode, mode);
6745 temp = temp_target;
6749 /* Store the value in the bitfield. */
6750 store_bit_field (target, bitsize, bitpos,
6751 bitregion_start, bitregion_end,
6752 mode, temp);
6754 return const0_rtx;
6756 else
6758 /* Now build a reference to just the desired component. */
6759 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6761 if (to_rtx == target)
6762 to_rtx = copy_rtx (to_rtx);
6764 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6765 set_mem_alias_set (to_rtx, alias_set);
6767 return store_expr (exp, to_rtx, 0, nontemporal);
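/* Worked sketch (names are hypothetical): an assignment to a bit-field
   member such as

       struct s { unsigned int f : 3; } *p;
       p->f = v;

   reaches this function with BITSIZE == 3 and MODE == VOIDmode, so the
   first branch expands V, strips a conversion whose implied masking is
   redundant, and finishes with store_bit_field; an ordinary aligned
   member instead takes the final branch and becomes a plain store via
   adjust_address and store_expr.  */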
6771 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6772 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6773 codes and find the ultimate containing object, which we return.
6775 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6776 bit position, and *PUNSIGNEDP to the signedness of the field.
6777 If the position of the field is variable, we store a tree
6778 giving the variable offset (in units) in *POFFSET.
6779 This offset is in addition to the bit position.
6780 If the position is not variable, we store 0 in *POFFSET.
6782 If any of the extraction expressions is volatile,
6783 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6785 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6786 Otherwise, it is a mode that can be used to access the field.
6788 If the field describes a variable-sized object, *PMODE is set to
6789 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6790 this case, but the address of the object can be found.
6792 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6793 look through nodes that serve as markers of a greater alignment than
6794 the one that can be deduced from the expression. These nodes make it
6795 possible for front-ends to prevent temporaries from being created by
6796 the middle-end on alignment considerations. For that purpose, the
6797 normal operating mode at high-level is to always pass FALSE so that
6798 the ultimate containing object is really returned; moreover, the
6799 associated predicate handled_component_p will always return TRUE
6800 on these nodes, thus indicating that they are essentially handled
6801 by get_inner_reference. TRUE should only be passed when the caller
6802 is scanning the expression in order to build another representation
6803 and specifically knows how to handle these nodes; as such, this is
6804 the normal operating mode in the RTL expanders. */
6806 tree
6807 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6808 HOST_WIDE_INT *pbitpos, tree *poffset,
6809 machine_mode *pmode, int *punsignedp,
6810 int *pvolatilep, bool keep_aligning)
6812 tree size_tree = 0;
6813 machine_mode mode = VOIDmode;
6814 bool blkmode_bitfield = false;
6815 tree offset = size_zero_node;
6816 offset_int bit_offset = 0;
6818 /* First get the mode, signedness, and size. We do this from just the
6819 outermost expression. */
6820 *pbitsize = -1;
6821 if (TREE_CODE (exp) == COMPONENT_REF)
6823 tree field = TREE_OPERAND (exp, 1);
6824 size_tree = DECL_SIZE (field);
6825 if (flag_strict_volatile_bitfields > 0
6826 && TREE_THIS_VOLATILE (exp)
6827 && DECL_BIT_FIELD_TYPE (field)
6828 && DECL_MODE (field) != BLKmode)
6829 /* Volatile bitfields should be accessed in the mode of the
6830 field's type, not the mode computed based on the bit
6831 size. */
6832 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6833 else if (!DECL_BIT_FIELD (field))
6834 mode = DECL_MODE (field);
6835 else if (DECL_MODE (field) == BLKmode)
6836 blkmode_bitfield = true;
6838 *punsignedp = DECL_UNSIGNED (field);
6840 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6842 size_tree = TREE_OPERAND (exp, 1);
6843 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6844 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6846 /* For vector types, with the correct size of access, use the mode of
6847 the inner type. */
6848 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6849 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6850 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6851 mode = TYPE_MODE (TREE_TYPE (exp));
6853 else
6855 mode = TYPE_MODE (TREE_TYPE (exp));
6856 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6858 if (mode == BLKmode)
6859 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6860 else
6861 *pbitsize = GET_MODE_BITSIZE (mode);
6864 if (size_tree != 0)
6866 if (! tree_fits_uhwi_p (size_tree))
6867 mode = BLKmode, *pbitsize = -1;
6868 else
6869 *pbitsize = tree_to_uhwi (size_tree);
6872 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6873 and find the ultimate containing object. */
6874 while (1)
6876 switch (TREE_CODE (exp))
6878 case BIT_FIELD_REF:
6879 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6880 break;
6882 case COMPONENT_REF:
6884 tree field = TREE_OPERAND (exp, 1);
6885 tree this_offset = component_ref_field_offset (exp);
6887 /* If this field hasn't been filled in yet, don't go past it.
6888 This should only happen when folding expressions made during
6889 type construction. */
6890 if (this_offset == 0)
6891 break;
6893 offset = size_binop (PLUS_EXPR, offset, this_offset);
6894 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6896 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6898 break;
6900 case ARRAY_REF:
6901 case ARRAY_RANGE_REF:
6903 tree index = TREE_OPERAND (exp, 1);
6904 tree low_bound = array_ref_low_bound (exp);
6905 tree unit_size = array_ref_element_size (exp);
6907 /* We assume all arrays have sizes that are a multiple of a byte.
6908 First subtract the lower bound, if any, in the type of the
6909 index, then convert to sizetype and multiply by the size of
6910 the array element. */
6911 if (! integer_zerop (low_bound))
6912 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6913 index, low_bound);
6915 offset = size_binop (PLUS_EXPR, offset,
6916 size_binop (MULT_EXPR,
6917 fold_convert (sizetype, index),
6918 unit_size));
6920 break;
6922 case REALPART_EXPR:
6923 break;
6925 case IMAGPART_EXPR:
6926 bit_offset += *pbitsize;
6927 break;
6929 case VIEW_CONVERT_EXPR:
6930 if (keep_aligning && STRICT_ALIGNMENT
6931 && (TYPE_ALIGN (TREE_TYPE (exp))
6932 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6933 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6934 < BIGGEST_ALIGNMENT)
6935 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6936 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6937 goto done;
6938 break;
6940 case MEM_REF:
6941 /* Hand back the decl for MEM[&decl, off]. */
6942 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6944 tree off = TREE_OPERAND (exp, 1);
6945 if (!integer_zerop (off))
6947 offset_int boff, coff = mem_ref_offset (exp);
6948 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6949 bit_offset += boff;
6951 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6953 goto done;
6955 default:
6956 goto done;
6959 /* If any reference in the chain is volatile, the effect is volatile. */
6960 if (TREE_THIS_VOLATILE (exp))
6961 *pvolatilep = 1;
6963 exp = TREE_OPERAND (exp, 0);
6965 done:
6967 /* If OFFSET is constant, see if we can return the whole thing as a
6968 constant bit position. Make sure to handle overflow during
6969 this conversion. */
6970 if (TREE_CODE (offset) == INTEGER_CST)
6972 offset_int tem = wi::sext (wi::to_offset (offset),
6973 TYPE_PRECISION (sizetype));
6974 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
6975 tem += bit_offset;
6976 if (wi::fits_shwi_p (tem))
6978 *pbitpos = tem.to_shwi ();
6979 *poffset = offset = NULL_TREE;
6983 /* Otherwise, split it up. */
6984 if (offset)
6986 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6987 if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
6989 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
6990 offset_int tem = bit_offset.and_not (mask);
6991 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6992 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6993 bit_offset -= tem;
6994 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
6995 offset = size_binop (PLUS_EXPR, offset,
6996 wide_int_to_tree (sizetype, tem));
6999 *pbitpos = bit_offset.to_shwi ();
7000 *poffset = offset;
7003 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
7004 if (mode == VOIDmode
7005 && blkmode_bitfield
7006 && (*pbitpos % BITS_PER_UNIT) == 0
7007 && (*pbitsize % BITS_PER_UNIT) == 0)
7008 *pmode = BLKmode;
7009 else
7010 *pmode = mode;
7012 return exp;
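/* Usage sketch (the variable names are hypothetical):

       HOST_WIDE_INT bitsize, bitpos;
       tree offset;
       machine_mode mode;
       int unsignedp, volatilep = 0;
       tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                        &mode, &unsignedp, &volatilep,
                                        false);

   For a reference whose position is known at compile time, such as a
   bit-field member X.F, OFFSET comes back NULL_TREE and BITPOS/BITSIZE
   give the constant bit range within BASE; when the position involves a
   variable array index, the variable part is returned in OFFSET and
   BITPOS only holds the constant remainder.  */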
7015 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7017 static unsigned HOST_WIDE_INT
7018 target_align (const_tree target)
7020 /* We might have a chain of nested references with intermediate misaligning
7021 bitfield components, so we need to recurse to find out. */
7023 unsigned HOST_WIDE_INT this_align, outer_align;
7025 switch (TREE_CODE (target))
7027 case BIT_FIELD_REF:
7028 return 1;
7030 case COMPONENT_REF:
7031 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7032 outer_align = target_align (TREE_OPERAND (target, 0));
7033 return MIN (this_align, outer_align);
7035 case ARRAY_REF:
7036 case ARRAY_RANGE_REF:
7037 this_align = TYPE_ALIGN (TREE_TYPE (target));
7038 outer_align = target_align (TREE_OPERAND (target, 0));
7039 return MIN (this_align, outer_align);
7041 CASE_CONVERT:
7042 case NON_LVALUE_EXPR:
7043 case VIEW_CONVERT_EXPR:
7044 this_align = TYPE_ALIGN (TREE_TYPE (target));
7045 outer_align = target_align (TREE_OPERAND (target, 0));
7046 return MAX (this_align, outer_align);
7048 default:
7049 return TYPE_ALIGN (TREE_TYPE (target));
7054 /* Given an rtx VALUE that may contain additions and multiplications, return
7055 an equivalent value that just refers to a register, memory, or constant.
7056 This is done by generating instructions to perform the arithmetic and
7057 returning a pseudo-register containing the value.
7059 The returned value may be a REG, SUBREG, MEM or constant. */
7062 force_operand (rtx value, rtx target)
7064 rtx op1, op2;
7065 /* Use subtarget as the target for operand 0 of a binary operation. */
7066 rtx subtarget = get_subtarget (target);
7067 enum rtx_code code = GET_CODE (value);
7069 /* Check for subreg applied to an expression produced by loop optimizer. */
7070 if (code == SUBREG
7071 && !REG_P (SUBREG_REG (value))
7072 && !MEM_P (SUBREG_REG (value)))
7074 value
7075 = simplify_gen_subreg (GET_MODE (value),
7076 force_reg (GET_MODE (SUBREG_REG (value)),
7077 force_operand (SUBREG_REG (value),
7078 NULL_RTX)),
7079 GET_MODE (SUBREG_REG (value)),
7080 SUBREG_BYTE (value));
7081 code = GET_CODE (value);
7084 /* Check for a PIC address load. */
7085 if ((code == PLUS || code == MINUS)
7086 && XEXP (value, 0) == pic_offset_table_rtx
7087 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7088 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7089 || GET_CODE (XEXP (value, 1)) == CONST))
7091 if (!subtarget)
7092 subtarget = gen_reg_rtx (GET_MODE (value));
7093 emit_move_insn (subtarget, value);
7094 return subtarget;
7097 if (ARITHMETIC_P (value))
7099 op2 = XEXP (value, 1);
7100 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7101 subtarget = 0;
7102 if (code == MINUS && CONST_INT_P (op2))
7104 code = PLUS;
7105 op2 = negate_rtx (GET_MODE (value), op2);
7108 /* Check for an addition with OP2 a constant integer and our first
7109 operand a PLUS of a virtual register and something else. In that
7110 case, we want to emit the sum of the virtual register and the
7111 constant first and then add the other value. This allows virtual
7112 register instantiation to simply modify the constant rather than
7113 creating another one around this addition. */
7114 if (code == PLUS && CONST_INT_P (op2)
7115 && GET_CODE (XEXP (value, 0)) == PLUS
7116 && REG_P (XEXP (XEXP (value, 0), 0))
7117 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7118 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7120 rtx temp = expand_simple_binop (GET_MODE (value), code,
7121 XEXP (XEXP (value, 0), 0), op2,
7122 subtarget, 0, OPTAB_LIB_WIDEN);
7123 return expand_simple_binop (GET_MODE (value), code, temp,
7124 force_operand (XEXP (XEXP (value,
7125 0), 1), 0),
7126 target, 0, OPTAB_LIB_WIDEN);
7129 op1 = force_operand (XEXP (value, 0), subtarget);
7130 op2 = force_operand (op2, NULL_RTX);
7131 switch (code)
7133 case MULT:
7134 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7135 case DIV:
7136 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7137 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7138 target, 1, OPTAB_LIB_WIDEN);
7139 else
7140 return expand_divmod (0,
7141 FLOAT_MODE_P (GET_MODE (value))
7142 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7143 GET_MODE (value), op1, op2, target, 0);
7144 case MOD:
7145 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7146 target, 0);
7147 case UDIV:
7148 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7149 target, 1);
7150 case UMOD:
7151 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7152 target, 1);
7153 case ASHIFTRT:
7154 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7155 target, 0, OPTAB_LIB_WIDEN);
7156 default:
7157 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7158 target, 1, OPTAB_LIB_WIDEN);
7161 if (UNARY_P (value))
7163 if (!target)
7164 target = gen_reg_rtx (GET_MODE (value));
7165 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7166 switch (code)
7168 case ZERO_EXTEND:
7169 case SIGN_EXTEND:
7170 case TRUNCATE:
7171 case FLOAT_EXTEND:
7172 case FLOAT_TRUNCATE:
7173 convert_move (target, op1, code == ZERO_EXTEND);
7174 return target;
7176 case FIX:
7177 case UNSIGNED_FIX:
7178 expand_fix (target, op1, code == UNSIGNED_FIX);
7179 return target;
7181 case FLOAT:
7182 case UNSIGNED_FLOAT:
7183 expand_float (target, op1, code == UNSIGNED_FLOAT);
7184 return target;
7186 default:
7187 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7191 #ifdef INSN_SCHEDULING
7192 /* On machines that have insn scheduling, we want all memory references to be
7193 explicit, so we need to deal with such paradoxical SUBREGs. */
7194 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7195 value
7196 = simplify_gen_subreg (GET_MODE (value),
7197 force_reg (GET_MODE (SUBREG_REG (value)),
7198 force_operand (SUBREG_REG (value),
7199 NULL_RTX)),
7200 GET_MODE (SUBREG_REG (value)),
7201 SUBREG_BYTE (value));
7202 #endif
7204 return value;
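/* Example (illustrative RTL; the pseudo register numbers are made up):
   given an address-like value such as

       (plus:SI (mult:SI (reg:SI 100) (const_int 4)) (reg:SI 101))

   force_operand emits the multiply and the addition as real insns and
   returns a pseudo holding the result, so callers can flatten arithmetic
   produced under EXPAND_SUM into a plain register, memory, or constant.  */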
7207 /* Subroutine of expand_expr: return nonzero iff there is no way that
7208 EXP can reference X, which is being modified. TOP_P is nonzero if this
7209 call is going to be used to determine whether we need a temporary
7210 for EXP, as opposed to a recursive call to this function.
7212 It is always safe for this routine to return zero since it merely
7213 searches for optimization opportunities. */
7216 safe_from_p (const_rtx x, tree exp, int top_p)
7218 rtx exp_rtl = 0;
7219 int i, nops;
7221 if (x == 0
7222 /* If EXP has varying size, we MUST use a target since we currently
7223 have no way of allocating temporaries of variable size
7224 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7225 So we assume here that something at a higher level has prevented a
7226 clash. This is somewhat bogus, but the best we can do. Only
7227 do this when X is BLKmode and when we are at the top level. */
7228 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7229 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7230 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7231 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7232 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7233 != INTEGER_CST)
7234 && GET_MODE (x) == BLKmode)
7235 /* If X is in the outgoing argument area, it is always safe. */
7236 || (MEM_P (x)
7237 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7238 || (GET_CODE (XEXP (x, 0)) == PLUS
7239 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7240 return 1;
7242 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7243 find the underlying pseudo. */
7244 if (GET_CODE (x) == SUBREG)
7246 x = SUBREG_REG (x);
7247 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7248 return 0;
7251 /* Now look at our tree code and possibly recurse. */
7252 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7254 case tcc_declaration:
7255 exp_rtl = DECL_RTL_IF_SET (exp);
7256 break;
7258 case tcc_constant:
7259 return 1;
7261 case tcc_exceptional:
7262 if (TREE_CODE (exp) == TREE_LIST)
7264 while (1)
7266 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7267 return 0;
7268 exp = TREE_CHAIN (exp);
7269 if (!exp)
7270 return 1;
7271 if (TREE_CODE (exp) != TREE_LIST)
7272 return safe_from_p (x, exp, 0);
7275 else if (TREE_CODE (exp) == CONSTRUCTOR)
7277 constructor_elt *ce;
7278 unsigned HOST_WIDE_INT idx;
7280 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7281 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7282 || !safe_from_p (x, ce->value, 0))
7283 return 0;
7284 return 1;
7286 else if (TREE_CODE (exp) == ERROR_MARK)
7287 return 1; /* An already-visited SAVE_EXPR? */
7288 else
7289 return 0;
7291 case tcc_statement:
7292 /* The only case we look at here is the DECL_INITIAL inside a
7293 DECL_EXPR. */
7294 return (TREE_CODE (exp) != DECL_EXPR
7295 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7296 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7297 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7299 case tcc_binary:
7300 case tcc_comparison:
7301 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7302 return 0;
7303 /* Fall through. */
7305 case tcc_unary:
7306 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7308 case tcc_expression:
7309 case tcc_reference:
7310 case tcc_vl_exp:
7311 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7312 the expression. If it is set, we conflict iff we are that rtx or
7313 both are in memory. Otherwise, we check all operands of the
7314 expression recursively. */
7316 switch (TREE_CODE (exp))
7318 case ADDR_EXPR:
7319 /* If the operand is static or we are static, we can't conflict.
7320 Likewise if we don't conflict with the operand at all. */
7321 if (staticp (TREE_OPERAND (exp, 0))
7322 || TREE_STATIC (exp)
7323 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7324 return 1;
7326 /* Otherwise, the only way this can conflict is if we are taking
7327 the address of a DECL whose address is part of X, which is
7328 very rare. */
7329 exp = TREE_OPERAND (exp, 0);
7330 if (DECL_P (exp))
7332 if (!DECL_RTL_SET_P (exp)
7333 || !MEM_P (DECL_RTL (exp)))
7334 return 0;
7335 else
7336 exp_rtl = XEXP (DECL_RTL (exp), 0);
7338 break;
7340 case MEM_REF:
7341 if (MEM_P (x)
7342 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7343 get_alias_set (exp)))
7344 return 0;
7345 break;
7347 case CALL_EXPR:
7348 /* Assume that the call will clobber all hard registers and
7349 all of memory. */
7350 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7351 || MEM_P (x))
7352 return 0;
7353 break;
7355 case WITH_CLEANUP_EXPR:
7356 case CLEANUP_POINT_EXPR:
7357 /* Lowered by gimplify.c. */
7358 gcc_unreachable ();
7360 case SAVE_EXPR:
7361 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7363 default:
7364 break;
7367 /* If we have an rtx, we do not need to scan our operands. */
7368 if (exp_rtl)
7369 break;
7371 nops = TREE_OPERAND_LENGTH (exp);
7372 for (i = 0; i < nops; i++)
7373 if (TREE_OPERAND (exp, i) != 0
7374 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7375 return 0;
7377 break;
7379 case tcc_type:
7380 /* Should never get a type here. */
7381 gcc_unreachable ();
7384 /* If we have an rtl, find any enclosed object. Then see if we conflict
7385 with it. */
7386 if (exp_rtl)
7388 if (GET_CODE (exp_rtl) == SUBREG)
7390 exp_rtl = SUBREG_REG (exp_rtl);
7391 if (REG_P (exp_rtl)
7392 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7393 return 0;
7396 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7397 are memory and they conflict. */
7398 return ! (rtx_equal_p (x, exp_rtl)
7399 || (MEM_P (x) && MEM_P (exp_rtl)
7400 && true_dependence (exp_rtl, VOIDmode, x)));
7403 /* If we reach here, it is safe. */
7404 return 1;
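/* Typical use (a sketch of the pattern used just below): before reusing a
   proposed TARGET while expanding a binary expression, the expander asks

       if (! safe_from_p (target, exp1, 1))
         target = 0;

   and simply abandons the target when EXP1 might reference it, which is
   exactly what expand_operands does.  */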
7408 /* Return the highest power of two that EXP is known to be a multiple of.
7409 This is used in updating alignment of MEMs in array references. */
7411 unsigned HOST_WIDE_INT
7412 highest_pow2_factor (const_tree exp)
7414 unsigned HOST_WIDE_INT ret;
7415 int trailing_zeros = tree_ctz (exp);
7416 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7417 return BIGGEST_ALIGNMENT;
7418 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7419 if (ret > BIGGEST_ALIGNMENT)
7420 return BIGGEST_ALIGNMENT;
7421 return ret;
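/* Examples (added for exposition): for EXP == i * 12, with nothing known
   about I, the two trailing zero bits contributed by the constant give a
   result of 4, and a constant such as 48 gives 16; the result is capped at
   BIGGEST_ALIGNMENT, which is also returned outright when tree_ctz reports
   at least HOST_BITS_PER_WIDE_INT trailing zeros.  */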
7424 /* Similar, except that the alignment requirements of TARGET are
7425 taken into account. Assume it is at least as aligned as its
7426 type, unless it is a COMPONENT_REF in which case the layout of
7427 the structure gives the alignment. */
7429 static unsigned HOST_WIDE_INT
7430 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7432 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7433 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7435 return MAX (factor, talign);
7438 /* Convert the tree comparison code TCODE to the rtl one where the
7439 signedness is UNSIGNEDP. */
7441 static enum rtx_code
7442 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7444 enum rtx_code code;
7445 switch (tcode)
7447 case EQ_EXPR:
7448 code = EQ;
7449 break;
7450 case NE_EXPR:
7451 code = NE;
7452 break;
7453 case LT_EXPR:
7454 code = unsignedp ? LTU : LT;
7455 break;
7456 case LE_EXPR:
7457 code = unsignedp ? LEU : LE;
7458 break;
7459 case GT_EXPR:
7460 code = unsignedp ? GTU : GT;
7461 break;
7462 case GE_EXPR:
7463 code = unsignedp ? GEU : GE;
7464 break;
7465 case UNORDERED_EXPR:
7466 code = UNORDERED;
7467 break;
7468 case ORDERED_EXPR:
7469 code = ORDERED;
7470 break;
7471 case UNLT_EXPR:
7472 code = UNLT;
7473 break;
7474 case UNLE_EXPR:
7475 code = UNLE;
7476 break;
7477 case UNGT_EXPR:
7478 code = UNGT;
7479 break;
7480 case UNGE_EXPR:
7481 code = UNGE;
7482 break;
7483 case UNEQ_EXPR:
7484 code = UNEQ;
7485 break;
7486 case LTGT_EXPR:
7487 code = LTGT;
7488 break;
7490 default:
7491 gcc_unreachable ();
7493 return code;
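/* Illustrative use of the mapping above (a sketch of what the callers
   below do):

     enum rtx_code rcode = convert_tree_comp_to_rtx (LT_EXPR, 1);

   yields LTU, while the same call with UNSIGNEDP == 0 yields LT; the
   IEEE-style codes such as UNLE_EXPR map unconditionally.  */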
7496 /* Subroutine of expand_expr. Expand the two operands of a binary
7497 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7498 The value may be stored in TARGET if TARGET is nonzero. The
7499 MODIFIER argument is as documented by expand_expr. */
7501 void
7502 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7503 enum expand_modifier modifier)
7505 if (! safe_from_p (target, exp1, 1))
7506 target = 0;
7507 if (operand_equal_p (exp0, exp1, 0))
7509 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7510 *op1 = copy_rtx (*op0);
7512 else
7514 /* If we need to preserve evaluation order, copy exp0 into its own
7515 temporary variable so that it can't be clobbered by exp1. */
7516 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7517 exp0 = save_expr (exp0);
7518 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7519 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
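/* Typical call pattern, as used by the binary-operator cases further
   down in this file (shown here only as an illustration):

     expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
     temp = expand_binop (mode, this_optab, op0, op1, target,
                          unsignedp, OPTAB_LIB_WIDEN);  */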
7524 /* Return a MEM that contains constant EXP. DEFER is as for
7525 output_constant_def and MODIFIER is as for expand_expr. */
7527 static rtx
7528 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7530 rtx mem;
7532 mem = output_constant_def (exp, defer);
7533 if (modifier != EXPAND_INITIALIZER)
7534 mem = use_anchored_address (mem);
7535 return mem;
7538 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7539 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7541 static rtx
7542 expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
7543 enum expand_modifier modifier, addr_space_t as)
7545 rtx result, subtarget;
7546 tree inner, offset;
7547 HOST_WIDE_INT bitsize, bitpos;
7548 int volatilep, unsignedp;
7549 machine_mode mode1;
7551 /* If we are taking the address of a constant and are at the top level,
7552 we have to use output_constant_def since we can't call force_const_mem
7553 at top level. */
7554 /* ??? This should be considered a front-end bug. We should not be
7555 generating ADDR_EXPR of something that isn't an LVALUE. The only
7556 exception here is STRING_CST. */
7557 if (CONSTANT_CLASS_P (exp))
7559 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7560 if (modifier < EXPAND_SUM)
7561 result = force_operand (result, target);
7562 return result;
7565 /* Everything must be something allowed by is_gimple_addressable. */
7566 switch (TREE_CODE (exp))
7568 case INDIRECT_REF:
7569 /* This case will happen via recursion for &a->b. */
7570 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7572 case MEM_REF:
7574 tree tem = TREE_OPERAND (exp, 0);
7575 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7576 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7577 return expand_expr (tem, target, tmode, modifier);
7580 case CONST_DECL:
7581 /* Expand the initializer like constants above. */
7582 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7583 0, modifier), 0);
7584 if (modifier < EXPAND_SUM)
7585 result = force_operand (result, target);
7586 return result;
7588 case REALPART_EXPR:
7589 /* The real part of the complex number is always first, therefore
7590 the address is the same as the address of the parent object. */
7591 offset = 0;
7592 bitpos = 0;
7593 inner = TREE_OPERAND (exp, 0);
7594 break;
7596 case IMAGPART_EXPR:
7597 /* The imaginary part of the complex number is always second.
7598 The expression is therefore always offset by the size of the
7599 scalar type. */
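/* For instance (illustration only): for a "_Complex double" operand,
   TYPE_MODE (TREE_TYPE (exp)) is DFmode, so BITPOS below becomes 64 and
   the address ultimately produced is offset by 8 bytes from the start of
   the object, assuming a 64-bit double and 8-bit units.  */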
7600 offset = 0;
7601 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7602 inner = TREE_OPERAND (exp, 0);
7603 break;
7605 case COMPOUND_LITERAL_EXPR:
7606 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7607 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7608 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7609 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7610 the initializers aren't gimplified. */
7611 if (COMPOUND_LITERAL_EXPR_DECL (exp)
7612 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7613 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7614 target, tmode, modifier, as);
7615 /* FALLTHRU */
7616 default:
7617 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7618 expand_expr, as that can have various side effects; LABEL_DECLs for
7619 example, may not have their DECL_RTL set yet. Expand the rtl of
7620 CONSTRUCTORs too, which should yield a memory reference for the
7621 constructor's contents. Assume language specific tree nodes can
7622 be expanded in some interesting way. */
7623 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7624 if (DECL_P (exp)
7625 || TREE_CODE (exp) == CONSTRUCTOR
7626 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7628 result = expand_expr (exp, target, tmode,
7629 modifier == EXPAND_INITIALIZER
7630 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7632 /* If the DECL isn't in memory, then the DECL wasn't properly
7633 marked TREE_ADDRESSABLE, which will be either a front-end
7634 or a tree optimizer bug. */
7636 if (TREE_ADDRESSABLE (exp)
7637 && ! MEM_P (result)
7638 && ! targetm.calls.allocate_stack_slots_for_args ())
7640 error ("local frame unavailable (naked function?)");
7641 return result;
7643 else
7644 gcc_assert (MEM_P (result));
7645 result = XEXP (result, 0);
7647 /* ??? Is this needed anymore? */
7648 if (DECL_P (exp))
7649 TREE_USED (exp) = 1;
7651 if (modifier != EXPAND_INITIALIZER
7652 && modifier != EXPAND_CONST_ADDRESS
7653 && modifier != EXPAND_SUM)
7654 result = force_operand (result, target);
7655 return result;
7658 /* Pass FALSE as the last argument to get_inner_reference although
7659 we are expanding to RTL. The rationale is that we know how to
7660 handle "aligning nodes" here: we can just bypass them because
7661 they won't change the final object whose address will be returned
7662 (they actually exist only for that purpose). */
7663 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7664 &mode1, &unsignedp, &volatilep, false);
7665 break;
7668 /* We must have made progress. */
7669 gcc_assert (inner != exp);
7671 subtarget = offset || bitpos ? NULL_RTX : target;
7672 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7673 inner alignment, force the inner to be sufficiently aligned. */
7674 if (CONSTANT_CLASS_P (inner)
7675 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7677 inner = copy_node (inner);
7678 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7679 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7680 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7682 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7684 if (offset)
7686 rtx tmp;
7688 if (modifier != EXPAND_NORMAL)
7689 result = force_operand (result, NULL);
7690 tmp = expand_expr (offset, NULL_RTX, tmode,
7691 modifier == EXPAND_INITIALIZER
7692 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7694 /* expand_expr is allowed to return an object in a mode other
7695 than TMODE. If it did, we need to convert. */
7696 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7697 tmp = convert_modes (tmode, GET_MODE (tmp),
7698 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7699 result = convert_memory_address_addr_space (tmode, result, as);
7700 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7702 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7703 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7704 else
7706 subtarget = bitpos ? NULL_RTX : target;
7707 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7708 1, OPTAB_LIB_WIDEN);
7712 if (bitpos)
7714 /* Someone beforehand should have rejected taking the address
7715 of such an object. */
7716 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7718 result = convert_memory_address_addr_space (tmode, result, as);
7719 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7720 if (modifier < EXPAND_SUM)
7721 result = force_operand (result, target);
7724 return result;
7727 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7728 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7730 static rtx
7731 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7732 enum expand_modifier modifier)
7734 addr_space_t as = ADDR_SPACE_GENERIC;
7735 machine_mode address_mode = Pmode;
7736 machine_mode pointer_mode = ptr_mode;
7737 machine_mode rmode;
7738 rtx result;
7740 /* Target mode of VOIDmode says "whatever's natural". */
7741 if (tmode == VOIDmode)
7742 tmode = TYPE_MODE (TREE_TYPE (exp));
7744 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7746 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7747 address_mode = targetm.addr_space.address_mode (as);
7748 pointer_mode = targetm.addr_space.pointer_mode (as);
7751 /* We can get called with some Weird Things if the user does silliness
7752 like "(short) &a". In that case, convert_memory_address won't do
7753 the right thing, so ignore the given target mode. */
7754 if (tmode != address_mode && tmode != pointer_mode)
7755 tmode = address_mode;
7757 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7758 tmode, modifier, as);
7760 /* Despite what expand_expr claims about ignoring TMODE when it is not
7761 strictly convenient, things break if we don't honor it. Note
7762 that combined with the above, we only do this for pointer modes. */
7763 rmode = GET_MODE (result);
7764 if (rmode == VOIDmode)
7765 rmode = tmode;
7766 if (rmode != tmode)
7767 result = convert_memory_address_addr_space (tmode, result, as);
7769 return result;
7772 /* Generate code for computing CONSTRUCTOR EXP.
7773 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7774 is TRUE, instead of creating a temporary variable in memory,
7775 NULL is returned and the caller needs to handle it differently. */
7777 static rtx
7778 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7779 bool avoid_temp_mem)
7781 tree type = TREE_TYPE (exp);
7782 machine_mode mode = TYPE_MODE (type);
7784 /* Try to avoid creating a temporary at all. This is possible
7785 if all of the initializer is zero.
7786 FIXME: try to handle all [0..255] initializers we can handle
7787 with memset. */
7788 if (TREE_STATIC (exp)
7789 && !TREE_ADDRESSABLE (exp)
7790 && target != 0 && mode == BLKmode
7791 && all_zeros_p (exp))
7793 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7794 return target;
7797 /* All elts simple constants => refer to a constant in memory. But
7798 if this is a non-BLKmode mode, let it store a field at a time
7799 since that should make a CONST_INT, CONST_WIDE_INT or
7800 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7801 use, it is best to store directly into the target unless the type
7802 is large enough that memcpy will be used. If we are making an
7803 initializer and all operands are constant, put it in memory as
7804 well.
7806 FIXME: Avoid trying to fill vector constructors piece-meal.
7807 Output them with output_constant_def below unless we're sure
7808 they're zeros. This should go away when vector initializers
7809 are treated like VECTOR_CST instead of arrays. */
7810 if ((TREE_STATIC (exp)
7811 && ((mode == BLKmode
7812 && ! (target != 0 && safe_from_p (target, exp, 1)))
7813 || TREE_ADDRESSABLE (exp)
7814 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7815 && (! can_move_by_pieces
7816 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7817 TYPE_ALIGN (type)))
7818 && ! mostly_zeros_p (exp))))
7819 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7820 && TREE_CONSTANT (exp)))
7822 rtx constructor;
7824 if (avoid_temp_mem)
7825 return NULL_RTX;
7827 constructor = expand_expr_constant (exp, 1, modifier);
7829 if (modifier != EXPAND_CONST_ADDRESS
7830 && modifier != EXPAND_INITIALIZER
7831 && modifier != EXPAND_SUM)
7832 constructor = validize_mem (constructor);
7834 return constructor;
7837 /* Handle calls that pass values in multiple non-contiguous
7838 locations. The Irix 6 ABI has examples of this. */
7839 if (target == 0 || ! safe_from_p (target, exp, 1)
7840 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7842 if (avoid_temp_mem)
7843 return NULL_RTX;
7845 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7848 store_constructor (exp, target, 0, int_expr_size (exp));
7849 return target;
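/* Rough illustration of the all_zeros_p shortcut above: for a local

     struct { char buf[64]; } s = { 0 };

   whose initializer is entirely zero and whose aggregate type is BLKmode,
   the constructor can be expanded as a single clear_storage of
   expr_size (exp) bytes into TARGET, rather than being built up element
   by element (the exact path taken depends on the target and on how the
   front end built the CONSTRUCTOR).  */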
7853 /* expand_expr: generate code for computing expression EXP.
7854 An rtx for the computed value is returned. The value is never null.
7855 In the case of a void EXP, const0_rtx is returned.
7857 The value may be stored in TARGET if TARGET is nonzero.
7858 TARGET is just a suggestion; callers must assume that
7859 the rtx returned may not be the same as TARGET.
7861 If TARGET is CONST0_RTX, it means that the value will be ignored.
7863 If TMODE is not VOIDmode, it suggests generating the
7864 result in mode TMODE. But this is done only when convenient.
7865 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7866 TMODE is just a suggestion; callers must assume that
7867 the rtx returned may not have mode TMODE.
7869 Note that TARGET may have neither TMODE nor MODE. In that case, it
7870 probably will not be used.
7872 If MODIFIER is EXPAND_SUM then when EXP is an addition
7873 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7874 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7875 products as above, or REG or MEM, or constant.
7876 Ordinarily in such cases we would output mul or add instructions
7877 and then return a pseudo reg containing the sum.
7879 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7880 it also marks a label as absolutely required (it can't be dead).
7881 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7882 This is used for outputting expressions used in initializers.
7884 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7885 with a constant address even if that address is not normally legitimate.
7886 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7888 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7889 a call parameter. Such targets require special care as we haven't yet
7890 marked TARGET so that it's safe from being trashed by libcalls. We
7891 don't want to use TARGET for anything but the final result;
7892 Intermediate values must go elsewhere. Additionally, calls to
7893 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7895 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7896 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7897 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7898 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7899 recursively.
7901 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7902 In this case, we don't adjust a returned MEM rtx that wouldn't be
7903 sufficiently aligned for its mode; instead, it's up to the caller
7904 to deal with it afterwards. This is used to make sure that unaligned
7905 base objects for which out-of-bounds accesses are supported, for
7906 example record types with trailing arrays, aren't realigned behind
7907 the back of the caller.
7908 The normal operating mode is to pass FALSE for this parameter. */
7910 rtx
7911 expand_expr_real (tree exp, rtx target, machine_mode tmode,
7912 enum expand_modifier modifier, rtx *alt_rtl,
7913 bool inner_reference_p)
7915 rtx ret;
7917 /* Handle ERROR_MARK before anybody tries to access its type. */
7918 if (TREE_CODE (exp) == ERROR_MARK
7919 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7921 ret = CONST0_RTX (tmode);
7922 return ret ? ret : const0_rtx;
7925 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7926 inner_reference_p);
7927 return ret;
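/* Most callers reach this function through the thin inline wrappers
   declared in expr.h; for instance expand_normal (exp) is essentially

     expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   which forwards here with ALT_RTL == NULL and INNER_REFERENCE_P
   == false.  */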
7930 /* Try to expand the conditional expression which is represented by
7931 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7932 return the rtl reg which represents the result. Otherwise return
7933 NULL_RTX. */
7935 static rtx
7936 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7937 tree treeop1 ATTRIBUTE_UNUSED,
7938 tree treeop2 ATTRIBUTE_UNUSED)
7940 rtx insn;
7941 rtx op00, op01, op1, op2;
7942 enum rtx_code comparison_code;
7943 machine_mode comparison_mode;
7944 gimple srcstmt;
7945 rtx temp;
7946 tree type = TREE_TYPE (treeop1);
7947 int unsignedp = TYPE_UNSIGNED (type);
7948 machine_mode mode = TYPE_MODE (type);
7949 machine_mode orig_mode = mode;
7951 /* If we cannot do a conditional move on the mode, try doing it
7952 with the promoted mode. */
7953 if (!can_conditionally_move_p (mode))
7955 mode = promote_mode (type, mode, &unsignedp);
7956 if (!can_conditionally_move_p (mode))
7957 return NULL_RTX;
7958 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7960 else
7961 temp = assign_temp (type, 0, 1);
7963 start_sequence ();
7964 expand_operands (treeop1, treeop2,
7965 temp, &op1, &op2, EXPAND_NORMAL);
7967 if (TREE_CODE (treeop0) == SSA_NAME
7968 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7970 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7971 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7972 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7973 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7974 comparison_mode = TYPE_MODE (type);
7975 unsignedp = TYPE_UNSIGNED (type);
7976 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7978 else if (COMPARISON_CLASS_P (treeop0))
7980 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7981 enum tree_code cmpcode = TREE_CODE (treeop0);
7982 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7983 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7984 unsignedp = TYPE_UNSIGNED (type);
7985 comparison_mode = TYPE_MODE (type);
7986 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7988 else
7990 op00 = expand_normal (treeop0);
7991 op01 = const0_rtx;
7992 comparison_code = NE;
7993 comparison_mode = GET_MODE (op00);
7994 if (comparison_mode == VOIDmode)
7995 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7998 if (GET_MODE (op1) != mode)
7999 op1 = gen_lowpart (mode, op1);
8001 if (GET_MODE (op2) != mode)
8002 op2 = gen_lowpart (mode, op2);
8004 /* Try to emit the conditional move. */
8005 insn = emit_conditional_move (temp, comparison_code,
8006 op00, op01, comparison_mode,
8007 op1, op2, mode,
8008 unsignedp);
8010 /* If we could do the conditional move, emit the sequence,
8011 and return. */
8012 if (insn)
8014 rtx_insn *seq = get_insns ();
8015 end_sequence ();
8016 emit_insn (seq);
8017 return convert_modes (orig_mode, mode, temp, 0);
8020 /* Otherwise discard the sequence and fall back to code with
8021 branches. */
8022 end_sequence ();
8023 return NULL_RTX;
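/* Illustration of what the routine above tries to achieve, target
   permitting: for something like

     x = a < b ? c : d;

   it expands the comparison once and emits a single conditional-move
   sequence into a temporary, instead of the branch-based fallback that
   the COND_EXPR case of expand_expr_real_2 uses otherwise.  */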
8026 rtx
8027 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8028 enum expand_modifier modifier)
8030 rtx op0, op1, op2, temp;
8031 rtx_code_label *lab;
8032 tree type;
8033 int unsignedp;
8034 machine_mode mode;
8035 enum tree_code code = ops->code;
8036 optab this_optab;
8037 rtx subtarget, original_target;
8038 int ignore;
8039 bool reduce_bit_field;
8040 location_t loc = ops->location;
8041 tree treeop0, treeop1, treeop2;
8042 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8043 ? reduce_to_bit_field_precision ((expr), \
8044 target, \
8045 type) \
8046 : (expr))
8048 type = ops->type;
8049 mode = TYPE_MODE (type);
8050 unsignedp = TYPE_UNSIGNED (type);
8052 treeop0 = ops->op0;
8053 treeop1 = ops->op1;
8054 treeop2 = ops->op2;
8056 /* We should be called only on simple (binary or unary) expressions,
8057 exactly those that are valid in gimple expressions that aren't
8058 GIMPLE_SINGLE_RHS (or invalid). */
8059 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8060 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8061 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8063 ignore = (target == const0_rtx
8064 || ((CONVERT_EXPR_CODE_P (code)
8065 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8066 && TREE_CODE (type) == VOID_TYPE));
8068 /* We should be called only if we need the result. */
8069 gcc_assert (!ignore);
8071 /* An operation in what may be a bit-field type needs the
8072 result to be reduced to the precision of the bit-field type,
8073 which is narrower than that of the type's mode. */
8074 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8075 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8077 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8078 target = 0;
8080 /* Use subtarget as the target for operand 0 of a binary operation. */
8081 subtarget = get_subtarget (target);
8082 original_target = target;
8084 switch (code)
8086 case NON_LVALUE_EXPR:
8087 case PAREN_EXPR:
8088 CASE_CONVERT:
8089 if (treeop0 == error_mark_node)
8090 return const0_rtx;
8092 if (TREE_CODE (type) == UNION_TYPE)
8094 tree valtype = TREE_TYPE (treeop0);
8096 /* If both input and output are BLKmode, this conversion isn't doing
8097 anything except possibly changing memory attribute. */
8098 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8100 rtx result = expand_expr (treeop0, target, tmode,
8101 modifier);
8103 result = copy_rtx (result);
8104 set_mem_attributes (result, type, 0);
8105 return result;
8108 if (target == 0)
8110 if (TYPE_MODE (type) != BLKmode)
8111 target = gen_reg_rtx (TYPE_MODE (type));
8112 else
8113 target = assign_temp (type, 1, 1);
8116 if (MEM_P (target))
8117 /* Store data into beginning of memory target. */
8118 store_expr (treeop0,
8119 adjust_address (target, TYPE_MODE (valtype), 0),
8120 modifier == EXPAND_STACK_PARM,
8121 false);
8123 else
8125 gcc_assert (REG_P (target));
8127 /* Store this field into a union of the proper type. */
8128 store_field (target,
8129 MIN ((int_size_in_bytes (TREE_TYPE
8130 (treeop0))
8131 * BITS_PER_UNIT),
8132 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8133 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8136 /* Return the entire union. */
8137 return target;
8140 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8142 op0 = expand_expr (treeop0, target, VOIDmode,
8143 modifier);
8145 /* If the signedness of the conversion differs and OP0 is
8146 a promoted SUBREG, clear that indication since we now
8147 have to do the proper extension. */
8148 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8149 && GET_CODE (op0) == SUBREG)
8150 SUBREG_PROMOTED_VAR_P (op0) = 0;
8152 return REDUCE_BIT_FIELD (op0);
8155 op0 = expand_expr (treeop0, NULL_RTX, mode,
8156 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8157 if (GET_MODE (op0) == mode)
8160 /* If OP0 is a constant, just convert it into the proper mode. */
8161 else if (CONSTANT_P (op0))
8163 tree inner_type = TREE_TYPE (treeop0);
8164 machine_mode inner_mode = GET_MODE (op0);
8166 if (inner_mode == VOIDmode)
8167 inner_mode = TYPE_MODE (inner_type);
8169 if (modifier == EXPAND_INITIALIZER)
8170 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8171 subreg_lowpart_offset (mode,
8172 inner_mode));
8173 else
8174 op0 = convert_modes (mode, inner_mode, op0,
8175 TYPE_UNSIGNED (inner_type));
8178 else if (modifier == EXPAND_INITIALIZER)
8179 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8181 else if (target == 0)
8182 op0 = convert_to_mode (mode, op0,
8183 TYPE_UNSIGNED (TREE_TYPE
8184 (treeop0)));
8185 else
8187 convert_move (target, op0,
8188 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8189 op0 = target;
8192 return REDUCE_BIT_FIELD (op0);
8194 case ADDR_SPACE_CONVERT_EXPR:
8196 tree treeop0_type = TREE_TYPE (treeop0);
8197 addr_space_t as_to;
8198 addr_space_t as_from;
8200 gcc_assert (POINTER_TYPE_P (type));
8201 gcc_assert (POINTER_TYPE_P (treeop0_type));
8203 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8204 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8206 /* Conversions between pointers to the same address space should
8207 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8208 gcc_assert (as_to != as_from);
8210 /* Ask target code to handle conversion between pointers
8211 to overlapping address spaces. */
8212 if (targetm.addr_space.subset_p (as_to, as_from)
8213 || targetm.addr_space.subset_p (as_from, as_to))
8215 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8216 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8217 gcc_assert (op0);
8218 return op0;
8221 /* For disjoint address spaces, converting anything but
8222 a null pointer invokes undefined behaviour. We simply
8223 always return a null pointer here. */
8224 return CONST0_RTX (mode);
8227 case POINTER_PLUS_EXPR:
8228 /* Even though the sizetype mode and the pointer's mode can be different
8229 expand is able to handle this correctly and get the correct result out
8230 of the PLUS_EXPR code. */
8231 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8232 if sizetype precision is smaller than pointer precision. */
8233 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8234 treeop1 = fold_convert_loc (loc, type,
8235 fold_convert_loc (loc, ssizetype,
8236 treeop1));
8237 /* If sizetype precision is larger than pointer precision, truncate the
8238 offset to have matching modes. */
8239 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8240 treeop1 = fold_convert_loc (loc, type, treeop1);
8242 case PLUS_EXPR:
8243 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8244 something else, make sure we add the register to the constant and
8245 then to the other thing. This case can occur during strength
8246 reduction and doing it this way will produce better code if the
8247 frame pointer or argument pointer is eliminated.
8249 fold-const.c will ensure that the constant is always in the inner
8250 PLUS_EXPR, so the only case we need to do anything about is if
8251 sp, ap, or fp is our second argument, in which case we must swap
8252 the innermost first argument and our second argument. */
8254 if (TREE_CODE (treeop0) == PLUS_EXPR
8255 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8256 && TREE_CODE (treeop1) == VAR_DECL
8257 && (DECL_RTL (treeop1) == frame_pointer_rtx
8258 || DECL_RTL (treeop1) == stack_pointer_rtx
8259 || DECL_RTL (treeop1) == arg_pointer_rtx))
8261 gcc_unreachable ();
8264 /* If the result is to be ptr_mode and we are adding an integer to
8265 something, we might be forming a constant. So try to use
8266 plus_constant. If it produces a sum and we can't accept it,
8267 use force_operand. This allows P = &ARR[const] to generate
8268 efficient code on machines where a SYMBOL_REF is not a valid
8269 address.
8271 If this is an EXPAND_SUM call, always return the sum. */
8272 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8273 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8275 if (modifier == EXPAND_STACK_PARM)
8276 target = 0;
8277 if (TREE_CODE (treeop0) == INTEGER_CST
8278 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8279 && TREE_CONSTANT (treeop1))
8281 rtx constant_part;
8282 HOST_WIDE_INT wc;
8283 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8285 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8286 EXPAND_SUM);
8287 /* Use wi::shwi to ensure that the constant is
8288 truncated according to the mode of OP1, then sign extended
8289 to a HOST_WIDE_INT. Using the constant directly can result
8290 in non-canonical RTL in a 64x32 cross compile. */
8291 wc = TREE_INT_CST_LOW (treeop0);
8292 constant_part =
8293 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8294 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8295 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8296 op1 = force_operand (op1, target);
8297 return REDUCE_BIT_FIELD (op1);
8300 else if (TREE_CODE (treeop1) == INTEGER_CST
8301 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8302 && TREE_CONSTANT (treeop0))
8304 rtx constant_part;
8305 HOST_WIDE_INT wc;
8306 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8308 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8309 (modifier == EXPAND_INITIALIZER
8310 ? EXPAND_INITIALIZER : EXPAND_SUM));
8311 if (! CONSTANT_P (op0))
8313 op1 = expand_expr (treeop1, NULL_RTX,
8314 VOIDmode, modifier);
8315 /* Return a PLUS if modifier says it's OK. */
8316 if (modifier == EXPAND_SUM
8317 || modifier == EXPAND_INITIALIZER)
8318 return simplify_gen_binary (PLUS, mode, op0, op1);
8319 goto binop2;
8321 /* Use wi::shwi to ensure that the constant is
8322 truncated according to the mode of OP1, then sign extended
8323 to a HOST_WIDE_INT. Using the constant directly can result
8324 in non-canonical RTL in a 64x32 cross compile. */
8325 wc = TREE_INT_CST_LOW (treeop1);
8326 constant_part
8327 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8328 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8329 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8330 op0 = force_operand (op0, target);
8331 return REDUCE_BIT_FIELD (op0);
8335 /* Use TER to expand pointer addition of a negated value
8336 as pointer subtraction. */
8337 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8338 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8339 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8340 && TREE_CODE (treeop1) == SSA_NAME
8341 && TYPE_MODE (TREE_TYPE (treeop0))
8342 == TYPE_MODE (TREE_TYPE (treeop1)))
8344 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8345 if (def)
8347 treeop1 = gimple_assign_rhs1 (def);
8348 code = MINUS_EXPR;
8349 goto do_minus;
8353 /* No sense saving up arithmetic to be done
8354 if it's all in the wrong mode to form part of an address.
8355 And force_operand won't know whether to sign-extend or
8356 zero-extend. */
8357 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8358 || mode != ptr_mode)
8360 expand_operands (treeop0, treeop1,
8361 subtarget, &op0, &op1, EXPAND_NORMAL);
8362 if (op0 == const0_rtx)
8363 return op1;
8364 if (op1 == const0_rtx)
8365 return op0;
8366 goto binop2;
8369 expand_operands (treeop0, treeop1,
8370 subtarget, &op0, &op1, modifier);
8371 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
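/* As an example of the EXPAND_SUM behaviour documented above (schematic
   only, modes omitted): expanding the address computation for
   P = &ARR[10], with 4-byte array elements, can hand back the symbolic
   form

     (plus (symbol_ref ("arr")) (const_int 40))

   instead of forcing the sum into a register, leaving the caller free to
   fold the constant into an addressing mode.  */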
8373 case MINUS_EXPR:
8374 do_minus:
8375 /* For initializers, we are allowed to return a MINUS of two
8376 symbolic constants. Here we handle all cases when both operands
8377 are constant. */
8378 /* Handle difference of two symbolic constants,
8379 for the sake of an initializer. */
8380 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8381 && really_constant_p (treeop0)
8382 && really_constant_p (treeop1))
8384 expand_operands (treeop0, treeop1,
8385 NULL_RTX, &op0, &op1, modifier);
8387 /* If the last operand is a CONST_INT, use plus_constant of
8388 the negated constant. Else make the MINUS. */
8389 if (CONST_INT_P (op1))
8390 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8391 -INTVAL (op1)));
8392 else
8393 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8396 /* No sense saving up arithmetic to be done
8397 if it's all in the wrong mode to form part of an address.
8398 And force_operand won't know whether to sign-extend or
8399 zero-extend. */
8400 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8401 || mode != ptr_mode)
8402 goto binop;
8404 expand_operands (treeop0, treeop1,
8405 subtarget, &op0, &op1, modifier);
8407 /* Convert A - const to A + (-const). */
8408 if (CONST_INT_P (op1))
8410 op1 = negate_rtx (mode, op1);
8411 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8414 goto binop2;
8416 case WIDEN_MULT_PLUS_EXPR:
8417 case WIDEN_MULT_MINUS_EXPR:
8418 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8419 op2 = expand_normal (treeop2);
8420 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8421 target, unsignedp);
8422 return target;
8424 case WIDEN_MULT_EXPR:
8425 /* If first operand is constant, swap them.
8426 Thus the following special case checks need only
8427 check the second operand. */
8428 if (TREE_CODE (treeop0) == INTEGER_CST)
8429 std::swap (treeop0, treeop1);
8431 /* First, check if we have a multiplication of one signed and one
8432 unsigned operand. */
8433 if (TREE_CODE (treeop1) != INTEGER_CST
8434 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8435 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8437 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8438 this_optab = usmul_widen_optab;
8439 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8440 != CODE_FOR_nothing)
8442 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8443 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8444 EXPAND_NORMAL);
8445 else
8446 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8447 EXPAND_NORMAL);
8448 /* op0 and op1 might still be constant, despite the above
8449 != INTEGER_CST check. Handle it. */
8450 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8452 op0 = convert_modes (innermode, mode, op0, true);
8453 op1 = convert_modes (innermode, mode, op1, false);
8454 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8455 target, unsignedp));
8457 goto binop3;
8460 /* Check for a multiplication with matching signedness. */
8461 else if ((TREE_CODE (treeop1) == INTEGER_CST
8462 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8463 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8464 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8466 tree op0type = TREE_TYPE (treeop0);
8467 machine_mode innermode = TYPE_MODE (op0type);
8468 bool zextend_p = TYPE_UNSIGNED (op0type);
8469 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8470 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8472 if (TREE_CODE (treeop0) != INTEGER_CST)
8474 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8475 != CODE_FOR_nothing)
8477 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8478 EXPAND_NORMAL);
8479 /* op0 and op1 might still be constant, despite the above
8480 != INTEGER_CST check. Handle it. */
8481 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8483 widen_mult_const:
8484 op0 = convert_modes (innermode, mode, op0, zextend_p);
8485 op1
8486 = convert_modes (innermode, mode, op1,
8487 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8488 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8489 target,
8490 unsignedp));
8492 temp = expand_widening_mult (mode, op0, op1, target,
8493 unsignedp, this_optab);
8494 return REDUCE_BIT_FIELD (temp);
8496 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8497 != CODE_FOR_nothing
8498 && innermode == word_mode)
8500 rtx htem, hipart;
8501 op0 = expand_normal (treeop0);
8502 if (TREE_CODE (treeop1) == INTEGER_CST)
8503 op1 = convert_modes (innermode, mode,
8504 expand_normal (treeop1),
8505 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8506 else
8507 op1 = expand_normal (treeop1);
8508 /* op0 and op1 might still be constant, despite the above
8509 != INTEGER_CST check. Handle it. */
8510 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8511 goto widen_mult_const;
8512 temp = expand_binop (mode, other_optab, op0, op1, target,
8513 unsignedp, OPTAB_LIB_WIDEN);
8514 hipart = gen_highpart (innermode, temp);
8515 htem = expand_mult_highpart_adjust (innermode, hipart,
8516 op0, op1, hipart,
8517 zextend_p);
8518 if (htem != hipart)
8519 emit_move_insn (hipart, htem);
8520 return REDUCE_BIT_FIELD (temp);
8524 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8525 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8526 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8527 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8529 case FMA_EXPR:
8531 optab opt = fma_optab;
8532 gimple def0, def2;
8534 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8535 call. */
8536 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8538 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8539 tree call_expr;
8541 gcc_assert (fn != NULL_TREE);
8542 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8543 return expand_builtin (call_expr, target, subtarget, mode, false);
8546 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8547 /* The multiplication is commutative - look at its 2nd operand
8548 if the first isn't fed by a negate. */
8549 if (!def0)
8551 def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8552 /* Swap operands if the 2nd operand is fed by a negate. */
8553 if (def0)
8554 std::swap (treeop0, treeop1);
8556 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8558 op0 = op2 = NULL;
8560 if (def0 && def2
8561 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8563 opt = fnms_optab;
8564 op0 = expand_normal (gimple_assign_rhs1 (def0));
8565 op2 = expand_normal (gimple_assign_rhs1 (def2));
8567 else if (def0
8568 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8570 opt = fnma_optab;
8571 op0 = expand_normal (gimple_assign_rhs1 (def0));
8573 else if (def2
8574 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8576 opt = fms_optab;
8577 op2 = expand_normal (gimple_assign_rhs1 (def2));
8580 if (op0 == NULL)
8581 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8582 if (op2 == NULL)
8583 op2 = expand_normal (treeop2);
8584 op1 = expand_normal (treeop1);
8586 return expand_ternary_op (TYPE_MODE (type), opt,
8587 op0, op1, op2, target, 0);
8590 case MULT_EXPR:
8591 /* If this is a fixed-point operation, then we cannot use the code
8592 below because "expand_mult" doesn't support sat/no-sat fixed-point
8593 multiplications. */
8594 if (ALL_FIXED_POINT_MODE_P (mode))
8595 goto binop;
8597 /* If first operand is constant, swap them.
8598 Thus the following special case checks need only
8599 check the second operand. */
8600 if (TREE_CODE (treeop0) == INTEGER_CST)
8601 std::swap (treeop0, treeop1);
8603 /* Attempt to return something suitable for generating an
8604 indexed address, for machines that support that. */
8606 if (modifier == EXPAND_SUM && mode == ptr_mode
8607 && tree_fits_shwi_p (treeop1))
8609 tree exp1 = treeop1;
8611 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8612 EXPAND_SUM);
8614 if (!REG_P (op0))
8615 op0 = force_operand (op0, NULL_RTX);
8616 if (!REG_P (op0))
8617 op0 = copy_to_mode_reg (mode, op0);
8619 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8620 gen_int_mode (tree_to_shwi (exp1),
8621 TYPE_MODE (TREE_TYPE (exp1)))));
8624 if (modifier == EXPAND_STACK_PARM)
8625 target = 0;
8627 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8628 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
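/* Sketch of the EXPAND_SUM special case above: when expanding an index
   scaled by a small constant, say i * 4 as part of an address, the code
   can return

     (mult (reg) (const_int 4))

   so that the surrounding address computation may combine it into an
   indexed addressing mode, rather than emitting the multiply up front.  */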
8630 case TRUNC_DIV_EXPR:
8631 case FLOOR_DIV_EXPR:
8632 case CEIL_DIV_EXPR:
8633 case ROUND_DIV_EXPR:
8634 case EXACT_DIV_EXPR:
8635 /* If this is a fixed-point operation, then we cannot use the code
8636 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8637 divisions. */
8638 if (ALL_FIXED_POINT_MODE_P (mode))
8639 goto binop;
8641 if (modifier == EXPAND_STACK_PARM)
8642 target = 0;
8643 /* Possible optimization: compute the dividend with EXPAND_SUM
8644 then if the divisor is constant can optimize the case
8645 where some terms of the dividend have coeffs divisible by it. */
8646 expand_operands (treeop0, treeop1,
8647 subtarget, &op0, &op1, EXPAND_NORMAL);
8648 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8650 case RDIV_EXPR:
8651 goto binop;
8653 case MULT_HIGHPART_EXPR:
8654 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8655 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8656 gcc_assert (temp);
8657 return temp;
8659 case TRUNC_MOD_EXPR:
8660 case FLOOR_MOD_EXPR:
8661 case CEIL_MOD_EXPR:
8662 case ROUND_MOD_EXPR:
8663 if (modifier == EXPAND_STACK_PARM)
8664 target = 0;
8665 expand_operands (treeop0, treeop1,
8666 subtarget, &op0, &op1, EXPAND_NORMAL);
8667 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8669 case FIXED_CONVERT_EXPR:
8670 op0 = expand_normal (treeop0);
8671 if (target == 0 || modifier == EXPAND_STACK_PARM)
8672 target = gen_reg_rtx (mode);
8674 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8675 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8676 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8677 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8678 else
8679 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8680 return target;
8682 case FIX_TRUNC_EXPR:
8683 op0 = expand_normal (treeop0);
8684 if (target == 0 || modifier == EXPAND_STACK_PARM)
8685 target = gen_reg_rtx (mode);
8686 expand_fix (target, op0, unsignedp);
8687 return target;
8689 case FLOAT_EXPR:
8690 op0 = expand_normal (treeop0);
8691 if (target == 0 || modifier == EXPAND_STACK_PARM)
8692 target = gen_reg_rtx (mode);
8693 /* expand_float can't figure out what to do if FROM has VOIDmode.
8694 So give it the correct mode. With -O, cse will optimize this. */
8695 if (GET_MODE (op0) == VOIDmode)
8696 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8697 op0);
8698 expand_float (target, op0,
8699 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8700 return target;
8702 case NEGATE_EXPR:
8703 op0 = expand_expr (treeop0, subtarget,
8704 VOIDmode, EXPAND_NORMAL);
8705 if (modifier == EXPAND_STACK_PARM)
8706 target = 0;
8707 temp = expand_unop (mode,
8708 optab_for_tree_code (NEGATE_EXPR, type,
8709 optab_default),
8710 op0, target, 0);
8711 gcc_assert (temp);
8712 return REDUCE_BIT_FIELD (temp);
8714 case ABS_EXPR:
8715 op0 = expand_expr (treeop0, subtarget,
8716 VOIDmode, EXPAND_NORMAL);
8717 if (modifier == EXPAND_STACK_PARM)
8718 target = 0;
8720 /* ABS_EXPR is not valid for complex arguments. */
8721 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8722 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8724 /* Unsigned abs is simply the operand. Testing here means we don't
8725 risk generating incorrect code below. */
8726 if (TYPE_UNSIGNED (type))
8727 return op0;
8729 return expand_abs (mode, op0, target, unsignedp,
8730 safe_from_p (target, treeop0, 1));
8732 case MAX_EXPR:
8733 case MIN_EXPR:
8734 target = original_target;
8735 if (target == 0
8736 || modifier == EXPAND_STACK_PARM
8737 || (MEM_P (target) && MEM_VOLATILE_P (target))
8738 || GET_MODE (target) != mode
8739 || (REG_P (target)
8740 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8741 target = gen_reg_rtx (mode);
8742 expand_operands (treeop0, treeop1,
8743 target, &op0, &op1, EXPAND_NORMAL);
8745 /* First try to do it with a special MIN or MAX instruction.
8746 If that does not win, use a conditional jump to select the proper
8747 value. */
8748 this_optab = optab_for_tree_code (code, type, optab_default);
8749 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8750 OPTAB_WIDEN);
8751 if (temp != 0)
8752 return temp;
8754 /* At this point, a MEM target is no longer useful; we will get better
8755 code without it. */
8757 if (! REG_P (target))
8758 target = gen_reg_rtx (mode);
8760 /* If op1 was placed in target, swap op0 and op1. */
8761 if (target != op0 && target == op1)
8762 std::swap (op0, op1);
8764 /* We generate better code and avoid problems with op1 mentioning
8765 target by forcing op1 into a pseudo if it isn't a constant. */
8766 if (! CONSTANT_P (op1))
8767 op1 = force_reg (mode, op1);
8770 enum rtx_code comparison_code;
8771 rtx cmpop1 = op1;
8773 if (code == MAX_EXPR)
8774 comparison_code = unsignedp ? GEU : GE;
8775 else
8776 comparison_code = unsignedp ? LEU : LE;
8778 /* Canonicalize to comparisons against 0. */
8779 if (op1 == const1_rtx)
8781 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8782 or (a != 0 ? a : 1) for unsigned.
8783 For MIN we are safe converting (a <= 1 ? a : 1)
8784 into (a <= 0 ? a : 1) */
8785 cmpop1 = const0_rtx;
8786 if (code == MAX_EXPR)
8787 comparison_code = unsignedp ? NE : GT;
8789 if (op1 == constm1_rtx && !unsignedp)
8791 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8792 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8793 cmpop1 = const0_rtx;
8794 if (code == MIN_EXPR)
8795 comparison_code = LT;
8798 /* Use a conditional move if possible. */
8799 if (can_conditionally_move_p (mode))
8801 rtx insn;
8803 start_sequence ();
8805 /* Try to emit the conditional move. */
8806 insn = emit_conditional_move (target, comparison_code,
8807 op0, cmpop1, mode,
8808 op0, op1, mode,
8809 unsignedp);
8811 /* If we could do the conditional move, emit the sequence,
8812 and return. */
8813 if (insn)
8815 rtx_insn *seq = get_insns ();
8816 end_sequence ();
8817 emit_insn (seq);
8818 return target;
8821 /* Otherwise discard the sequence and fall back to code with
8822 branches. */
8823 end_sequence ();
8826 if (target != op0)
8827 emit_move_insn (target, op0);
8829 lab = gen_label_rtx ();
8830 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8831 unsignedp, mode, NULL_RTX, NULL, lab,
8832 -1);
8834 emit_move_insn (target, op1);
8835 emit_label (lab);
8836 return target;
8838 case BIT_NOT_EXPR:
8839 op0 = expand_expr (treeop0, subtarget,
8840 VOIDmode, EXPAND_NORMAL);
8841 if (modifier == EXPAND_STACK_PARM)
8842 target = 0;
8843 /* In case we have to reduce the result to bitfield precision
8844 for unsigned bitfield expand this as XOR with a proper constant
8845 instead. */
8846 if (reduce_bit_field && TYPE_UNSIGNED (type))
8848 wide_int mask = wi::mask (TYPE_PRECISION (type),
8849 false, GET_MODE_PRECISION (mode));
8851 temp = expand_binop (mode, xor_optab, op0,
8852 immed_wide_int_const (mask, mode),
8853 target, 1, OPTAB_LIB_WIDEN);
8855 else
8856 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8857 gcc_assert (temp);
8858 return temp;
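/* Example of the reduce_bit_field handling above (illustrative only): for
   an unsigned bit-field type of precision 3, wi::mask produces 7, so the
   BIT_NOT_EXPR is emitted as

     temp = op0 ^ 7;

   which keeps the result within the 3-bit range without a separate
   truncation step.  */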
8860 /* ??? Can optimize bitwise operations with one arg constant.
8861 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8862 and (a bitwise1 b) bitwise2 b (etc)
8863 but that is probably not worth while. */
8865 case BIT_AND_EXPR:
8866 case BIT_IOR_EXPR:
8867 case BIT_XOR_EXPR:
8868 goto binop;
8870 case LROTATE_EXPR:
8871 case RROTATE_EXPR:
8872 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8873 || (GET_MODE_PRECISION (TYPE_MODE (type))
8874 == TYPE_PRECISION (type)));
8875 /* fall through */
8877 case LSHIFT_EXPR:
8878 case RSHIFT_EXPR:
8879 /* If this is a fixed-point operation, then we cannot use the code
8880 below because "expand_shift" doesn't support sat/no-sat fixed-point
8881 shifts. */
8882 if (ALL_FIXED_POINT_MODE_P (mode))
8883 goto binop;
8885 if (! safe_from_p (subtarget, treeop1, 1))
8886 subtarget = 0;
8887 if (modifier == EXPAND_STACK_PARM)
8888 target = 0;
8889 op0 = expand_expr (treeop0, subtarget,
8890 VOIDmode, EXPAND_NORMAL);
8891 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8892 unsignedp);
8893 if (code == LSHIFT_EXPR)
8894 temp = REDUCE_BIT_FIELD (temp);
8895 return temp;
8897 /* Could determine the answer when only additive constants differ. Also,
8898 the addition of one can be handled by changing the condition. */
8899 case LT_EXPR:
8900 case LE_EXPR:
8901 case GT_EXPR:
8902 case GE_EXPR:
8903 case EQ_EXPR:
8904 case NE_EXPR:
8905 case UNORDERED_EXPR:
8906 case ORDERED_EXPR:
8907 case UNLT_EXPR:
8908 case UNLE_EXPR:
8909 case UNGT_EXPR:
8910 case UNGE_EXPR:
8911 case UNEQ_EXPR:
8912 case LTGT_EXPR:
8914 temp = do_store_flag (ops,
8915 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8916 tmode != VOIDmode ? tmode : mode);
8917 if (temp)
8918 return temp;
8920 /* Use a compare and a jump for BLKmode comparisons, or for function
8921 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8923 if ((target == 0
8924 || modifier == EXPAND_STACK_PARM
8925 || ! safe_from_p (target, treeop0, 1)
8926 || ! safe_from_p (target, treeop1, 1)
8927 /* Make sure we don't have a hard reg (such as function's return
8928 value) live across basic blocks, if not optimizing. */
8929 || (!optimize && REG_P (target)
8930 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8931 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8933 emit_move_insn (target, const0_rtx);
8935 rtx_code_label *lab1 = gen_label_rtx ();
8936 jumpifnot_1 (code, treeop0, treeop1, lab1, -1);
8938 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8939 emit_move_insn (target, constm1_rtx);
8940 else
8941 emit_move_insn (target, const1_rtx);
8943 emit_label (lab1);
8944 return target;
8946 case COMPLEX_EXPR:
8947 /* Get the rtx code of the operands. */
8948 op0 = expand_normal (treeop0);
8949 op1 = expand_normal (treeop1);
8951 if (!target)
8952 target = gen_reg_rtx (TYPE_MODE (type));
8953 else
8954 /* If target overlaps with op1, then either we need to force
8955 op1 into a pseudo (if target also overlaps with op0),
8956 or write the complex parts in reverse order. */
8957 switch (GET_CODE (target))
8959 case CONCAT:
8960 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8962 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8964 complex_expr_force_op1:
8965 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8966 emit_move_insn (temp, op1);
8967 op1 = temp;
8968 break;
8970 complex_expr_swap_order:
8971 /* Move the imaginary (op1) and real (op0) parts to their
8972 location. */
8973 write_complex_part (target, op1, true);
8974 write_complex_part (target, op0, false);
8976 return target;
8978 break;
8979 case MEM:
8980 temp = adjust_address_nv (target,
8981 GET_MODE_INNER (GET_MODE (target)), 0);
8982 if (reg_overlap_mentioned_p (temp, op1))
8984 machine_mode imode = GET_MODE_INNER (GET_MODE (target));
8985 temp = adjust_address_nv (target, imode,
8986 GET_MODE_SIZE (imode));
8987 if (reg_overlap_mentioned_p (temp, op0))
8988 goto complex_expr_force_op1;
8989 goto complex_expr_swap_order;
8991 break;
8992 default:
8993 if (reg_overlap_mentioned_p (target, op1))
8995 if (reg_overlap_mentioned_p (target, op0))
8996 goto complex_expr_force_op1;
8997 goto complex_expr_swap_order;
8999 break;
9002 /* Move the real (op0) and imaginary (op1) parts to their location. */
9003 write_complex_part (target, op0, false);
9004 write_complex_part (target, op1, true);
9006 return target;
9008 case WIDEN_SUM_EXPR:
9010 tree oprnd0 = treeop0;
9011 tree oprnd1 = treeop1;
9013 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9014 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9015 target, unsignedp);
9016 return target;
9019 case REDUC_MAX_EXPR:
9020 case REDUC_MIN_EXPR:
9021 case REDUC_PLUS_EXPR:
9023 op0 = expand_normal (treeop0);
9024 this_optab = optab_for_tree_code (code, type, optab_default);
9025 machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
9027 if (optab_handler (this_optab, vec_mode) != CODE_FOR_nothing)
9029 struct expand_operand ops[2];
9030 enum insn_code icode = optab_handler (this_optab, vec_mode);
9032 create_output_operand (&ops[0], target, mode);
9033 create_input_operand (&ops[1], op0, vec_mode);
9034 if (maybe_expand_insn (icode, 2, ops))
9036 target = ops[0].value;
9037 if (GET_MODE (target) != mode)
9038 return gen_lowpart (tmode, target);
9039 return target;
9042 /* Fall back to optab with vector result, and then extract scalar. */
9043 this_optab = scalar_reduc_to_vector (this_optab, type);
9044 temp = expand_unop (vec_mode, this_optab, op0, NULL_RTX, unsignedp);
9045 gcc_assert (temp);
9046 /* The tree code produces a scalar result, but (somewhat by convention)
9047 the optab produces a vector with the result in element 0 if
9048 little-endian, or element N-1 if big-endian. So pull the scalar
9049 result out of that element. */
9050 int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
9051 int bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (vec_mode));
9052 temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
9053 target, mode, mode);
9054 gcc_assert (temp);
9055 return temp;
9058 case VEC_UNPACK_HI_EXPR:
9059 case VEC_UNPACK_LO_EXPR:
9061 op0 = expand_normal (treeop0);
9062 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9063 target, unsignedp);
9064 gcc_assert (temp);
9065 return temp;
9068 case VEC_UNPACK_FLOAT_HI_EXPR:
9069 case VEC_UNPACK_FLOAT_LO_EXPR:
9071 op0 = expand_normal (treeop0);
9072 /* The signedness is determined from input operand. */
9073 temp = expand_widen_pattern_expr
9074 (ops, op0, NULL_RTX, NULL_RTX,
9075 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9077 gcc_assert (temp);
9078 return temp;
9081 case VEC_WIDEN_MULT_HI_EXPR:
9082 case VEC_WIDEN_MULT_LO_EXPR:
9083 case VEC_WIDEN_MULT_EVEN_EXPR:
9084 case VEC_WIDEN_MULT_ODD_EXPR:
9085 case VEC_WIDEN_LSHIFT_HI_EXPR:
9086 case VEC_WIDEN_LSHIFT_LO_EXPR:
9087 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9088 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9089 target, unsignedp);
9090 gcc_assert (target);
9091 return target;
9093 case VEC_PACK_TRUNC_EXPR:
9094 case VEC_PACK_SAT_EXPR:
9095 case VEC_PACK_FIX_TRUNC_EXPR:
9096 mode = TYPE_MODE (TREE_TYPE (treeop0));
9097 goto binop;
9099 case VEC_PERM_EXPR:
9100 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9101 op2 = expand_normal (treeop2);
9103 /* Careful here: if the target doesn't support integral vector modes,
9104 a constant selection vector could wind up smooshed into a normal
9105 integral constant. */
9106 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9108 tree sel_type = TREE_TYPE (treeop2);
9109 machine_mode vmode
9110 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9111 TYPE_VECTOR_SUBPARTS (sel_type));
9112 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9113 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9114 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9116 else
9117 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9119 temp = expand_vec_perm (mode, op0, op1, op2, target);
9120 gcc_assert (temp);
9121 return temp;
9123 case DOT_PROD_EXPR:
9125 tree oprnd0 = treeop0;
9126 tree oprnd1 = treeop1;
9127 tree oprnd2 = treeop2;
9128 rtx op2;
9130 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9131 op2 = expand_normal (oprnd2);
9132 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9133 target, unsignedp);
9134 return target;
9137 case SAD_EXPR:
9139 tree oprnd0 = treeop0;
9140 tree oprnd1 = treeop1;
9141 tree oprnd2 = treeop2;
9142 rtx op2;
9144 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9145 op2 = expand_normal (oprnd2);
9146 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9147 target, unsignedp);
9148 return target;
9151 case REALIGN_LOAD_EXPR:
9153 tree oprnd0 = treeop0;
9154 tree oprnd1 = treeop1;
9155 tree oprnd2 = treeop2;
9156 rtx op2;
9158 this_optab = optab_for_tree_code (code, type, optab_default);
9159 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9160 op2 = expand_normal (oprnd2);
9161 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9162 target, unsignedp);
9163 gcc_assert (temp);
9164 return temp;
9167 case COND_EXPR:
9169 /* A COND_EXPR with its type being VOID_TYPE represents a
9170 conditional jump and is handled in
9171 expand_gimple_cond_expr. */
9172 gcc_assert (!VOID_TYPE_P (type));
9174 /* Note that COND_EXPRs whose type is a structure or union
9175 are required to be constructed to contain assignments of
9176 a temporary variable, so that we can evaluate them here
9177 for side effect only. If type is void, we must do likewise. */
9179 gcc_assert (!TREE_ADDRESSABLE (type)
9180 && !ignore
9181 && TREE_TYPE (treeop1) != void_type_node
9182 && TREE_TYPE (treeop2) != void_type_node);
9184 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9185 if (temp)
9186 return temp;
9188 /* If we are not to produce a result, we have no target. Otherwise,
9189 if a target was specified use it; it will not be used as an
9190 intermediate target unless it is safe. If no target, use a
9191 temporary. */
9193 if (modifier != EXPAND_STACK_PARM
9194 && original_target
9195 && safe_from_p (original_target, treeop0, 1)
9196 && GET_MODE (original_target) == mode
9197 && !MEM_P (original_target))
9198 temp = original_target;
9199 else
9200 temp = assign_temp (type, 0, 1);
9202 do_pending_stack_adjust ();
9203 NO_DEFER_POP;
9204 rtx_code_label *lab0 = gen_label_rtx ();
9205 rtx_code_label *lab1 = gen_label_rtx ();
9206 jumpifnot (treeop0, lab0, -1);
9207 store_expr (treeop1, temp,
9208 modifier == EXPAND_STACK_PARM,
9209 false);
9211 emit_jump_insn (gen_jump (lab1));
9212 emit_barrier ();
9213 emit_label (lab0);
9214 store_expr (treeop2, temp,
9215 modifier == EXPAND_STACK_PARM,
9216 false);
9218 emit_label (lab1);
9219 OK_DEFER_POP;
9220 return temp;
9223 case VEC_COND_EXPR:
9224 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9225 return target;
9227 default:
9228 gcc_unreachable ();
9231 /* Here to do an ordinary binary operator. */
9232 binop:
9233 expand_operands (treeop0, treeop1,
9234 subtarget, &op0, &op1, EXPAND_NORMAL);
9235 binop2:
9236 this_optab = optab_for_tree_code (code, type, optab_default);
9237 binop3:
9238 if (modifier == EXPAND_STACK_PARM)
9239 target = 0;
9240 temp = expand_binop (mode, this_optab, op0, op1, target,
9241 unsignedp, OPTAB_LIB_WIDEN);
9242 gcc_assert (temp);
9243 /* Bitwise operations do not need bitfield reduction as we expect their
9244 operands to be properly truncated. */
9245 if (code == BIT_XOR_EXPR
9246 || code == BIT_AND_EXPR
9247 || code == BIT_IOR_EXPR)
9248 return temp;
9249 return REDUCE_BIT_FIELD (temp);
9251 #undef REDUCE_BIT_FIELD
9254 /* Return TRUE if expression STMT is suitable for replacement.
9255 Never consider memory loads as replaceable, because those don't ever lead
9256 into constant expressions. */
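/* For example, a statement such as  _1 = a_2 + 3  may be considered
   replaceable, whereas a load such as  _1 = *p_2  is rejected by the
   check below, for the reason given above.  */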
9258 static bool
9259 stmt_is_replaceable_p (gimple stmt)
9261 if (ssa_is_replaceable_p (stmt))
9263 /* Don't move around loads. */
9264 if (!gimple_assign_single_p (stmt)
9265 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9266 return true;
9268 return false;
9272 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9273 enum expand_modifier modifier, rtx *alt_rtl,
9274 bool inner_reference_p)
9276 rtx op0, op1, temp, decl_rtl;
9277 tree type;
9278 int unsignedp;
9279 machine_mode mode;
9280 enum tree_code code = TREE_CODE (exp);
9281 rtx subtarget, original_target;
9282 int ignore;
9283 tree context;
9284 bool reduce_bit_field;
9285 location_t loc = EXPR_LOCATION (exp);
9286 struct separate_ops ops;
9287 tree treeop0, treeop1, treeop2;
9288 tree ssa_name = NULL_TREE;
9289 gimple g;
9291 type = TREE_TYPE (exp);
9292 mode = TYPE_MODE (type);
9293 unsignedp = TYPE_UNSIGNED (type);
9295 treeop0 = treeop1 = treeop2 = NULL_TREE;
9296 if (!VL_EXP_CLASS_P (exp))
9297 switch (TREE_CODE_LENGTH (code))
9299 default:
9300 case 3: treeop2 = TREE_OPERAND (exp, 2);
9301 case 2: treeop1 = TREE_OPERAND (exp, 1);
9302 case 1: treeop0 = TREE_OPERAND (exp, 0);
9303 case 0: break;
9305 ops.code = code;
9306 ops.type = type;
9307 ops.op0 = treeop0;
9308 ops.op1 = treeop1;
9309 ops.op2 = treeop2;
9310 ops.location = loc;
9312 ignore = (target == const0_rtx
9313 || ((CONVERT_EXPR_CODE_P (code)
9314 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9315 && TREE_CODE (type) == VOID_TYPE));
9317 /* An operation in what may be a bit-field type needs the
9318 result to be reduced to the precision of the bit-field type,
9319 which is narrower than that of the type's mode. */
9320 reduce_bit_field = (!ignore
9321 && INTEGRAL_TYPE_P (type)
9322 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9324 /* If we are going to ignore this result, we need only do something
9325 if there is a side-effect somewhere in the expression. If there
9326 is, short-circuit the most common cases here. Note that we must
9327 not call expand_expr with anything but const0_rtx in case this
9328 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
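/* For instance, an ignored expression with no side effects, such as
   (void) (x + 1), simply yields const0_rtx below, while something like
   (void) (x + f ()) still expands its operands for their side effects
   so that the call to f is not lost.  */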
9330 if (ignore)
9332 if (! TREE_SIDE_EFFECTS (exp))
9333 return const0_rtx;
9335 /* Ensure we reference a volatile object even if value is ignored, but
9336 don't do this if all we are doing is taking its address. */
9337 if (TREE_THIS_VOLATILE (exp)
9338 && TREE_CODE (exp) != FUNCTION_DECL
9339 && mode != VOIDmode && mode != BLKmode
9340 && modifier != EXPAND_CONST_ADDRESS)
9342 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9343 if (MEM_P (temp))
9344 copy_to_reg (temp);
9345 return const0_rtx;
9348 if (TREE_CODE_CLASS (code) == tcc_unary
9349 || code == BIT_FIELD_REF
9350 || code == COMPONENT_REF
9351 || code == INDIRECT_REF)
9352 return expand_expr (treeop0, const0_rtx, VOIDmode,
9353 modifier);
9355 else if (TREE_CODE_CLASS (code) == tcc_binary
9356 || TREE_CODE_CLASS (code) == tcc_comparison
9357 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9359 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9360 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9361 return const0_rtx;
9364 target = 0;
9367 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9368 target = 0;
9370 /* Use subtarget as the target for operand 0 of a binary operation. */
9371 subtarget = get_subtarget (target);
9372 original_target = target;
9374 switch (code)
9376 case LABEL_DECL:
9378 tree function = decl_function_context (exp);
9380 temp = label_rtx (exp);
9381 temp = gen_rtx_LABEL_REF (Pmode, temp);
9383 if (function != current_function_decl
9384 && function != 0)
9385 LABEL_REF_NONLOCAL_P (temp) = 1;
9387 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9388 return temp;
9391 case SSA_NAME:
9392 /* ??? ivopts calls expander, without any preparation from
9393 out-of-ssa. So fake instructions as if this was an access to the
9394 base variable. This unnecessarily allocates a pseudo, see how we can
9395 reuse it, if partition base vars have it set already. */
9396 if (!currently_expanding_to_rtl)
9398 tree var = SSA_NAME_VAR (exp);
9399 if (var && DECL_RTL_SET_P (var))
9400 return DECL_RTL (var);
9401 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9402 LAST_VIRTUAL_REGISTER + 1);
9405 g = get_gimple_for_ssa_name (exp);
9406 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9407 if (g == NULL
9408 && modifier == EXPAND_INITIALIZER
9409 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9410 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9411 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9412 g = SSA_NAME_DEF_STMT (exp);
9413 if (g)
9415 rtx r;
9416 ops.code = gimple_assign_rhs_code (g);
9417 switch (get_gimple_rhs_class (ops.code))
9419 case GIMPLE_TERNARY_RHS:
9420 ops.op2 = gimple_assign_rhs3 (g);
9421 /* Fallthru */
9422 case GIMPLE_BINARY_RHS:
9423 ops.op1 = gimple_assign_rhs2 (g);
9425 /* Try to expand conditional compare. */
9426 if (targetm.gen_ccmp_first)
9428 gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9429 r = expand_ccmp_expr (g);
9430 if (r)
9431 break;
9433 /* Fallthru */
9434 case GIMPLE_UNARY_RHS:
9435 ops.op0 = gimple_assign_rhs1 (g);
9436 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9437 ops.location = gimple_location (g);
9438 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9439 break;
9440 case GIMPLE_SINGLE_RHS:
9442 location_t saved_loc = curr_insn_location ();
9443 set_curr_insn_location (gimple_location (g));
9444 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9445 tmode, modifier, NULL, inner_reference_p);
9446 set_curr_insn_location (saved_loc);
9447 break;
9449 default:
9450 gcc_unreachable ();
9452 if (REG_P (r) && !REG_EXPR (r))
9453 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9454 return r;
9457 ssa_name = exp;
9458 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9459 exp = SSA_NAME_VAR (ssa_name);
9460 goto expand_decl_rtl;
9462 case PARM_DECL:
9463 case VAR_DECL:
9464 /* If a static var's type was incomplete when the decl was written,
9465 but the type is complete now, lay out the decl now. */
9466 if (DECL_SIZE (exp) == 0
9467 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9468 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9469 layout_decl (exp, 0);
9471 /* ... fall through ... */
9473 case FUNCTION_DECL:
9474 case RESULT_DECL:
9475 decl_rtl = DECL_RTL (exp);
9476 expand_decl_rtl:
9477 gcc_assert (decl_rtl);
9478 decl_rtl = copy_rtx (decl_rtl);
9479 /* Record writes to register variables. */
9480 if (modifier == EXPAND_WRITE
9481 && REG_P (decl_rtl)
9482 && HARD_REGISTER_P (decl_rtl))
9483 add_to_hard_reg_set (&crtl->asm_clobbers,
9484 GET_MODE (decl_rtl), REGNO (decl_rtl));
9486 /* Ensure variable marked as used even if it doesn't go through
9487 a parser. If it hasn't been used yet, write out an external
9488 definition. */
9489 TREE_USED (exp) = 1;
9491 /* Show we haven't gotten RTL for this yet. */
9492 temp = 0;
9494 /* Variables inherited from containing functions should have
9495 been lowered by this point. */
9496 context = decl_function_context (exp);
9497 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9498 || context == current_function_decl
9499 || TREE_STATIC (exp)
9500 || DECL_EXTERNAL (exp)
9501 /* ??? C++ creates functions that are not TREE_STATIC. */
9502 || TREE_CODE (exp) == FUNCTION_DECL);
9504 /* This is the case of an array whose size is to be determined
9505 from its initializer, while the initializer is still being parsed.
9506 ??? We aren't parsing while expanding anymore. */
9508 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9509 temp = validize_mem (decl_rtl);
9511 /* If DECL_RTL is memory, we are in the normal case; if the
9512 address is not valid, get the address into a register. */
9514 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9516 if (alt_rtl)
9517 *alt_rtl = decl_rtl;
9518 decl_rtl = use_anchored_address (decl_rtl);
9519 if (modifier != EXPAND_CONST_ADDRESS
9520 && modifier != EXPAND_SUM
9521 && !memory_address_addr_space_p (DECL_MODE (exp),
9522 XEXP (decl_rtl, 0),
9523 MEM_ADDR_SPACE (decl_rtl)))
9524 temp = replace_equiv_address (decl_rtl,
9525 copy_rtx (XEXP (decl_rtl, 0)));
9528 /* If we got something, return it. But first, set the alignment
9529 if the address is a register. */
9530 if (temp != 0)
9532 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9533 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9535 return temp;
9538 /* If the mode of DECL_RTL does not match that of the decl,
9539 there are two cases: we are dealing with a BLKmode value
9540 that is returned in a register, or we are dealing with
9541 a promoted value. In the latter case, return a SUBREG
9542 of the wanted mode, but mark it so that we know that it
9543 was already extended. */
9544 if (REG_P (decl_rtl)
9545 && DECL_MODE (exp) != BLKmode
9546 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9548 machine_mode pmode;
9550 /* Get the signedness to be used for this variable. Ensure we get
9551 the same mode we got when the variable was declared. */
9552 if (code == SSA_NAME
9553 && (g = SSA_NAME_DEF_STMT (ssa_name))
9554 && gimple_code (g) == GIMPLE_CALL
9555 && !gimple_call_internal_p (g))
9556 pmode = promote_function_mode (type, mode, &unsignedp,
9557 gimple_call_fntype (g),
9559 else
9560 pmode = promote_decl_mode (exp, &unsignedp);
9561 gcc_assert (GET_MODE (decl_rtl) == pmode);
9563 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9564 SUBREG_PROMOTED_VAR_P (temp) = 1;
9565 SUBREG_PROMOTED_SET (temp, unsignedp);
9566 return temp;
9569 return decl_rtl;
9571 case INTEGER_CST:
9572 /* Given that TYPE_PRECISION (type) is not always equal to
9573 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9574 the former to the latter according to the signedness of the
9575 type. */
9576 temp = immed_wide_int_const (wide_int::from
9577 (exp,
9578 GET_MODE_PRECISION (TYPE_MODE (type)),
9579 TYPE_SIGN (type)),
9580 TYPE_MODE (type));
9581 return temp;
9583 case VECTOR_CST:
9585 tree tmp = NULL_TREE;
9586 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9587 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9588 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9589 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9590 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9591 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9592 return const_vector_from_tree (exp);
9593 if (GET_MODE_CLASS (mode) == MODE_INT)
9595 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9596 if (type_for_mode)
9597 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9599 if (!tmp)
9601 vec<constructor_elt, va_gc> *v;
9602 unsigned i;
9603 vec_alloc (v, VECTOR_CST_NELTS (exp));
9604 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9605 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9606 tmp = build_constructor (type, v);
9608 return expand_expr (tmp, ignore ? const0_rtx : target,
9609 tmode, modifier);
9612 case CONST_DECL:
9613 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9615 case REAL_CST:
9616 /* If optimized, generate immediate CONST_DOUBLE
9617 which will be turned into memory by reload if necessary.
9619 We used to force a register so that loop.c could see it. But
9620 this does not allow gen_* patterns to perform optimizations with
9621 the constants. It also produces two insns in cases like "x = 1.0;".
9622 On most machines, floating-point constants are not permitted in
9623 many insns, so we'd end up copying it to a register in any case.
9625 Now, we do the copying in expand_binop, if appropriate. */
9626 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9627 TYPE_MODE (TREE_TYPE (exp)));
9629 case FIXED_CST:
9630 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9631 TYPE_MODE (TREE_TYPE (exp)));
9633 case COMPLEX_CST:
9634 /* Handle evaluating a complex constant in a CONCAT target. */
9635 if (original_target && GET_CODE (original_target) == CONCAT)
9637 machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9638 rtx rtarg, itarg;
9640 rtarg = XEXP (original_target, 0);
9641 itarg = XEXP (original_target, 1);
9643 /* Move the real and imaginary parts separately. */
9644 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9645 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9647 if (op0 != rtarg)
9648 emit_move_insn (rtarg, op0);
9649 if (op1 != itarg)
9650 emit_move_insn (itarg, op1);
9652 return original_target;
9655 /* ... fall through ... */
9657 case STRING_CST:
9658 temp = expand_expr_constant (exp, 1, modifier);
9660 /* temp contains a constant address.
9661 On RISC machines where a constant address isn't valid,
9662 make some insns to get that address into a register. */
9663 if (modifier != EXPAND_CONST_ADDRESS
9664 && modifier != EXPAND_INITIALIZER
9665 && modifier != EXPAND_SUM
9666 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9667 MEM_ADDR_SPACE (temp)))
9668 return replace_equiv_address (temp,
9669 copy_rtx (XEXP (temp, 0)));
9670 return temp;
9672 case SAVE_EXPR:
9674 tree val = treeop0;
9675 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9676 inner_reference_p);
9678 if (!SAVE_EXPR_RESOLVED_P (exp))
9680 /* We can indeed still hit this case, typically via builtin
9681 expanders calling save_expr immediately before expanding
9682 something. Assume this means that we only have to deal
9683 with non-BLKmode values. */
9684 gcc_assert (GET_MODE (ret) != BLKmode);
9686 val = build_decl (curr_insn_location (),
9687 VAR_DECL, NULL, TREE_TYPE (exp));
9688 DECL_ARTIFICIAL (val) = 1;
9689 DECL_IGNORED_P (val) = 1;
9690 treeop0 = val;
9691 TREE_OPERAND (exp, 0) = treeop0;
9692 SAVE_EXPR_RESOLVED_P (exp) = 1;
9694 if (!CONSTANT_P (ret))
9695 ret = copy_to_reg (ret);
9696 SET_DECL_RTL (val, ret);
9699 return ret;
9703 case CONSTRUCTOR:
9704 /* If we don't need the result, just ensure we evaluate any
9705 subexpressions. */
9706 if (ignore)
9708 unsigned HOST_WIDE_INT idx;
9709 tree value;
9711 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9712 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9714 return const0_rtx;
9717 return expand_constructor (exp, target, modifier, false);
9719 case TARGET_MEM_REF:
9721 addr_space_t as
9722 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9723 enum insn_code icode;
9724 unsigned int align;
9726 op0 = addr_for_mem_ref (exp, as, true);
9727 op0 = memory_address_addr_space (mode, op0, as);
9728 temp = gen_rtx_MEM (mode, op0);
9729 set_mem_attributes (temp, exp, 0);
9730 set_mem_addr_space (temp, as);
9731 align = get_object_alignment (exp);
9732 if (modifier != EXPAND_WRITE
9733 && modifier != EXPAND_MEMORY
9734 && mode != BLKmode
9735 && align < GET_MODE_ALIGNMENT (mode)
9736 /* If the target does not have special handling for unaligned
9737 loads of mode then it can use regular moves for them. */
9738 && ((icode = optab_handler (movmisalign_optab, mode))
9739 != CODE_FOR_nothing))
9741 struct expand_operand ops[2];
9743 /* We've already validated the memory, and we're creating a
9744 new pseudo destination. The predicates really can't fail,
9745 nor can the generator. */
9746 create_output_operand (&ops[0], NULL_RTX, mode);
9747 create_fixed_operand (&ops[1], temp);
9748 expand_insn (icode, 2, ops);
9749 temp = ops[0].value;
9751 return temp;
9754 case MEM_REF:
9756 addr_space_t as
9757 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9758 machine_mode address_mode;
9759 tree base = TREE_OPERAND (exp, 0);
9760 gimple def_stmt;
9761 enum insn_code icode;
9762 unsigned align;
9763 /* Handle expansion of non-aliased memory with non-BLKmode. That
9764 might end up in a register. */
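/* For example, a load such as  MEM[(int *) &d]  from a scalar D that
   was never given a stack slot is handled here: with a zero offset and
   a size matching D's mode it is rewritten as a VIEW_CONVERT_EXPR of D;
   otherwise it is either spilled to a stack temporary (BLKmode) or
   accessed through a BIT_FIELD_REF.  */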
9765 if (mem_ref_refers_to_non_mem_p (exp))
9767 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9768 base = TREE_OPERAND (base, 0);
9769 if (offset == 0
9770 && tree_fits_uhwi_p (TYPE_SIZE (type))
9771 && (GET_MODE_BITSIZE (DECL_MODE (base))
9772 == tree_to_uhwi (TYPE_SIZE (type))))
9773 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9774 target, tmode, modifier);
9775 if (TYPE_MODE (type) == BLKmode)
9777 temp = assign_stack_temp (DECL_MODE (base),
9778 GET_MODE_SIZE (DECL_MODE (base)));
9779 store_expr (base, temp, 0, false);
9780 temp = adjust_address (temp, BLKmode, offset);
9781 set_mem_size (temp, int_size_in_bytes (type));
9782 return temp;
9784 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9785 bitsize_int (offset * BITS_PER_UNIT));
9786 return expand_expr (exp, target, tmode, modifier);
9788 address_mode = targetm.addr_space.address_mode (as);
9789 base = TREE_OPERAND (exp, 0);
9790 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9792 tree mask = gimple_assign_rhs2 (def_stmt);
9793 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9794 gimple_assign_rhs1 (def_stmt), mask);
9795 TREE_OPERAND (exp, 0) = base;
9797 align = get_object_alignment (exp);
9798 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9799 op0 = memory_address_addr_space (mode, op0, as);
9800 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9802 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9803 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9804 op0 = memory_address_addr_space (mode, op0, as);
9806 temp = gen_rtx_MEM (mode, op0);
9807 set_mem_attributes (temp, exp, 0);
9808 set_mem_addr_space (temp, as);
9809 if (TREE_THIS_VOLATILE (exp))
9810 MEM_VOLATILE_P (temp) = 1;
9811 if (modifier != EXPAND_WRITE
9812 && modifier != EXPAND_MEMORY
9813 && !inner_reference_p
9814 && mode != BLKmode
9815 && align < GET_MODE_ALIGNMENT (mode))
9817 if ((icode = optab_handler (movmisalign_optab, mode))
9818 != CODE_FOR_nothing)
9820 struct expand_operand ops[2];
9822 /* We've already validated the memory, and we're creating a
9823 new pseudo destination. The predicates really can't fail,
9824 nor can the generator. */
9825 create_output_operand (&ops[0], NULL_RTX, mode);
9826 create_fixed_operand (&ops[1], temp);
9827 expand_insn (icode, 2, ops);
9828 temp = ops[0].value;
9830 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9831 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9832 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9833 (modifier == EXPAND_STACK_PARM
9834 ? NULL_RTX : target),
9835 mode, mode);
9837 return temp;
9840 case ARRAY_REF:
9843 tree array = treeop0;
9844 tree index = treeop1;
9845 tree init;
9847 /* Fold an expression like: "foo"[2].
9848 This is not done in fold so it won't happen inside &.
9849 Don't fold if this is for wide characters since it's too
9850 difficult to do correctly and this is a very rare case. */
9852 if (modifier != EXPAND_CONST_ADDRESS
9853 && modifier != EXPAND_INITIALIZER
9854 && modifier != EXPAND_MEMORY)
9856 tree t = fold_read_from_constant_string (exp);
9858 if (t)
9859 return expand_expr (t, target, tmode, modifier);
9862 /* If this is a constant index into a constant array,
9863 just get the value from the array. Handle both cases: when
9864 we have an explicit constructor and when our operand is a variable
9865 that was declared const. */
9867 if (modifier != EXPAND_CONST_ADDRESS
9868 && modifier != EXPAND_INITIALIZER
9869 && modifier != EXPAND_MEMORY
9870 && TREE_CODE (array) == CONSTRUCTOR
9871 && ! TREE_SIDE_EFFECTS (array)
9872 && TREE_CODE (index) == INTEGER_CST)
9874 unsigned HOST_WIDE_INT ix;
9875 tree field, value;
9877 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9878 field, value)
9879 if (tree_int_cst_equal (field, index))
9881 if (!TREE_SIDE_EFFECTS (value))
9882 return expand_expr (fold (value), target, tmode, modifier);
9883 break;
9887 else if (optimize >= 1
9888 && modifier != EXPAND_CONST_ADDRESS
9889 && modifier != EXPAND_INITIALIZER
9890 && modifier != EXPAND_MEMORY
9891 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9892 && TREE_CODE (index) == INTEGER_CST
9893 && (TREE_CODE (array) == VAR_DECL
9894 || TREE_CODE (array) == CONST_DECL)
9895 && (init = ctor_for_folding (array)) != error_mark_node)
9897 if (init == NULL_TREE)
9899 tree value = build_zero_cst (type);
9900 if (TREE_CODE (value) == CONSTRUCTOR)
9902 /* If VALUE is a CONSTRUCTOR, this optimization is only
9903 useful if this doesn't store the CONSTRUCTOR into
9904 memory. If it does, it is more efficient to just
9905 load the data from the array directly. */
9906 rtx ret = expand_constructor (value, target,
9907 modifier, true);
9908 if (ret == NULL_RTX)
9909 value = NULL_TREE;
9912 if (value)
9913 return expand_expr (value, target, tmode, modifier);
9915 else if (TREE_CODE (init) == CONSTRUCTOR)
9917 unsigned HOST_WIDE_INT ix;
9918 tree field, value;
9920 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9921 field, value)
9922 if (tree_int_cst_equal (field, index))
9924 if (TREE_SIDE_EFFECTS (value))
9925 break;
9927 if (TREE_CODE (value) == CONSTRUCTOR)
9929 /* If VALUE is a CONSTRUCTOR, this
9930 optimization is only useful if
9931 this doesn't store the CONSTRUCTOR
9932 into memory. If it does, it is more
9933 efficient to just load the data from
9934 the array directly. */
9935 rtx ret = expand_constructor (value, target,
9936 modifier, true);
9937 if (ret == NULL_RTX)
9938 break;
9941 return
9942 expand_expr (fold (value), target, tmode, modifier);
9945 else if (TREE_CODE (init) == STRING_CST)
9947 tree low_bound = array_ref_low_bound (exp);
9948 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
9950 /* Optimize the special case of a zero lower bound.
9952 We convert the lower bound to sizetype to avoid problems
9953 with constant folding. E.g. suppose the lower bound is
9954 1 and its mode is QI. Without the conversion
9955 (ARRAY + (INDEX - (unsigned char)1))
9956 becomes
9957 (ARRAY + (-(unsigned char)1) + INDEX)
9958 which becomes
9959 (ARRAY + 255 + INDEX). Oops! */
9960 if (!integer_zerop (low_bound))
9961 index1 = size_diffop_loc (loc, index1,
9962 fold_convert_loc (loc, sizetype,
9963 low_bound));
9965 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
9967 tree type = TREE_TYPE (TREE_TYPE (init));
9968 machine_mode mode = TYPE_MODE (type);
9970 if (GET_MODE_CLASS (mode) == MODE_INT
9971 && GET_MODE_SIZE (mode) == 1)
9972 return gen_int_mode (TREE_STRING_POINTER (init)
9973 [TREE_INT_CST_LOW (index1)],
9974 mode);
9979 goto normal_inner_ref;
9981 case COMPONENT_REF:
9982 /* If the operand is a CONSTRUCTOR, we can just extract the
9983 appropriate field if it is present. */
9984 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9986 unsigned HOST_WIDE_INT idx;
9987 tree field, value;
9989 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9990 idx, field, value)
9991 if (field == treeop1
9992 /* We can normally use the value of the field in the
9993 CONSTRUCTOR. However, if this is a bitfield in
9994 an integral mode that we can fit in a HOST_WIDE_INT,
9995 we must mask only the number of bits in the bitfield,
9996 since this is done implicitly by the constructor. If
9997 the bitfield does not meet either of those conditions,
9998 we can't do this optimization. */
9999 && (! DECL_BIT_FIELD (field)
10000 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
10001 && (GET_MODE_PRECISION (DECL_MODE (field))
10002 <= HOST_BITS_PER_WIDE_INT))))
10004 if (DECL_BIT_FIELD (field)
10005 && modifier == EXPAND_STACK_PARM)
10006 target = 0;
10007 op0 = expand_expr (value, target, tmode, modifier);
10008 if (DECL_BIT_FIELD (field))
10010 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10011 machine_mode imode = TYPE_MODE (TREE_TYPE (field));
10013 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10015 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
10016 imode);
10017 op0 = expand_and (imode, op0, op1, target);
10019 else
10021 int count = GET_MODE_PRECISION (imode) - bitsize;
10023 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10024 target, 0);
10025 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10026 target, 0);
10030 return op0;
10033 goto normal_inner_ref;
10035 case BIT_FIELD_REF:
10036 case ARRAY_RANGE_REF:
10037 normal_inner_ref:
10039 machine_mode mode1, mode2;
10040 HOST_WIDE_INT bitsize, bitpos;
10041 tree offset;
10042 int volatilep = 0, must_force_mem;
10043 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
10044 &mode1, &unsignedp, &volatilep, true);
10045 rtx orig_op0, memloc;
10046 bool clear_mem_expr = false;
10048 /* If we got back the original object, something is wrong. Perhaps
10049 we are evaluating an expression too early. In any event, don't
10050 infinitely recurse. */
10051 gcc_assert (tem != exp);
10053 /* If TEM's type is a union of variable size, pass TARGET to the inner
10054 computation, since it will need a temporary and TARGET is known
10055 to be able to serve as one. This occurs in unchecked conversion in Ada. */
10056 orig_op0 = op0
10057 = expand_expr_real (tem,
10058 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10059 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10060 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10061 != INTEGER_CST)
10062 && modifier != EXPAND_STACK_PARM
10063 ? target : NULL_RTX),
10064 VOIDmode,
10065 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10066 NULL, true);
10068 /* If the field has a mode, we want to access it in the
10069 field's mode, not the computed mode.
10070 If a MEM has VOIDmode (external with incomplete type),
10071 use BLKmode for it instead. */
10072 if (MEM_P (op0))
10074 if (mode1 != VOIDmode)
10075 op0 = adjust_address (op0, mode1, 0);
10076 else if (GET_MODE (op0) == VOIDmode)
10077 op0 = adjust_address (op0, BLKmode, 0);
10080 mode2
10081 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10083 /* If we have either an offset, a BLKmode result, or a reference
10084 outside the underlying object, we must force it to memory.
10085 Such a case can occur in Ada if we have unchecked conversion
10086 of an expression from a scalar type to an aggregate type or
10087 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10088 passed a partially uninitialized object or a view-conversion
10089 to a larger size. */
10090 must_force_mem = (offset
10091 || mode1 == BLKmode
10092 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10094 /* Handle CONCAT first. */
10095 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10097 if (bitpos == 0
10098 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10099 return op0;
10100 if (bitpos == 0
10101 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10102 && bitsize)
10104 op0 = XEXP (op0, 0);
10105 mode2 = GET_MODE (op0);
10107 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10108 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10109 && bitpos
10110 && bitsize)
10112 op0 = XEXP (op0, 1);
10113 bitpos = 0;
10114 mode2 = GET_MODE (op0);
10116 else
10117 /* Otherwise force into memory. */
10118 must_force_mem = 1;
10121 /* If this is a constant, put it in a register if it is a legitimate
10122 constant and we don't need a memory reference. */
10123 if (CONSTANT_P (op0)
10124 && mode2 != BLKmode
10125 && targetm.legitimate_constant_p (mode2, op0)
10126 && !must_force_mem)
10127 op0 = force_reg (mode2, op0);
10129 /* Otherwise, if this is a constant, try to force it to the constant
10130 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10131 is a legitimate constant. */
10132 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10133 op0 = validize_mem (memloc);
10135 /* Otherwise, if this is a constant or the object is not in memory
10136 and needs to be, put it there. */
10137 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10139 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10140 emit_move_insn (memloc, op0);
10141 op0 = memloc;
10142 clear_mem_expr = true;
10145 if (offset)
10147 machine_mode address_mode;
10148 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10149 EXPAND_SUM);
10151 gcc_assert (MEM_P (op0));
10153 address_mode = get_address_mode (op0);
10154 if (GET_MODE (offset_rtx) != address_mode)
10156 /* We cannot be sure that the RTL in offset_rtx is valid outside
10157 of a memory address context, so force it into a register
10158 before attempting to convert it to the desired mode. */
10159 offset_rtx = force_operand (offset_rtx, NULL_RTX);
10160 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10163 /* See the comment in expand_assignment for the rationale. */
10164 if (mode1 != VOIDmode
10165 && bitpos != 0
10166 && bitsize > 0
10167 && (bitpos % bitsize) == 0
10168 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10169 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10171 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10172 bitpos = 0;
10175 op0 = offset_address (op0, offset_rtx,
10176 highest_pow2_factor (offset));
10179 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10180 record its alignment as BIGGEST_ALIGNMENT. */
10181 if (MEM_P (op0) && bitpos == 0 && offset != 0
10182 && is_aligning_offset (offset, tem))
10183 set_mem_align (op0, BIGGEST_ALIGNMENT);
10185 /* Don't forget about volatility even if this is a bitfield. */
10186 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10188 if (op0 == orig_op0)
10189 op0 = copy_rtx (op0);
10191 MEM_VOLATILE_P (op0) = 1;
10194 /* In cases where an aligned union has an unaligned object
10195 as a field, we might be extracting a BLKmode value from
10196 an integer-mode (e.g., SImode) object. Handle this case
10197 by doing the extract into an object as wide as the field
10198 (which we know to be the width of a basic mode), then
10199 storing into memory, and changing the mode to BLKmode. */
10200 if (mode1 == VOIDmode
10201 || REG_P (op0) || GET_CODE (op0) == SUBREG
10202 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10203 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10204 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10205 && modifier != EXPAND_CONST_ADDRESS
10206 && modifier != EXPAND_INITIALIZER
10207 && modifier != EXPAND_MEMORY)
10208 /* If the bitfield is volatile and the bitsize
10209 is narrower than the access size of the bitfield,
10210 we need to extract bitfields from the access. */
10211 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10212 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10213 && mode1 != BLKmode
10214 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10215 /* If the field isn't aligned enough to fetch as a memref,
10216 fetch it as a bit field. */
10217 || (mode1 != BLKmode
10218 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10219 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10220 || (MEM_P (op0)
10221 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10222 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10223 && modifier != EXPAND_MEMORY
10224 && ((modifier == EXPAND_CONST_ADDRESS
10225 || modifier == EXPAND_INITIALIZER)
10226 ? STRICT_ALIGNMENT
10227 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10228 || (bitpos % BITS_PER_UNIT != 0)))
10229 /* If the type and the field are a constant size and the
10230 size of the type isn't the same size as the bitfield,
10231 we must use bitfield operations. */
10232 || (bitsize >= 0
10233 && TYPE_SIZE (TREE_TYPE (exp))
10234 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10235 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10236 bitsize)))
10238 machine_mode ext_mode = mode;
10240 if (ext_mode == BLKmode
10241 && ! (target != 0 && MEM_P (op0)
10242 && MEM_P (target)
10243 && bitpos % BITS_PER_UNIT == 0))
10244 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10246 if (ext_mode == BLKmode)
10248 if (target == 0)
10249 target = assign_temp (type, 1, 1);
10251 /* ??? Unlike the similar test a few lines below, this one is
10252 very likely obsolete. */
10253 if (bitsize == 0)
10254 return target;
10256 /* In this case, BITPOS must start at a byte boundary and
10257 TARGET, if specified, must be a MEM. */
10258 gcc_assert (MEM_P (op0)
10259 && (!target || MEM_P (target))
10260 && !(bitpos % BITS_PER_UNIT));
10262 emit_block_move (target,
10263 adjust_address (op0, VOIDmode,
10264 bitpos / BITS_PER_UNIT),
10265 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10266 / BITS_PER_UNIT),
10267 (modifier == EXPAND_STACK_PARM
10268 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10270 return target;
10273 /* If we have nothing to extract, the result will be 0 for targets
10274 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10275 return 0 for the sake of consistency, as reading a zero-sized
10276 bitfield is valid in Ada and the value is fully specified. */
10277 if (bitsize == 0)
10278 return const0_rtx;
10280 op0 = validize_mem (op0);
10282 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10283 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10285 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10286 (modifier == EXPAND_STACK_PARM
10287 ? NULL_RTX : target),
10288 ext_mode, ext_mode);
10290 /* If the result is a record type and BITSIZE is narrower than
10291 the mode of OP0, an integral mode, and this is a big endian
10292 machine, we must put the field into the high-order bits. */
10293 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10294 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10295 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10296 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10297 GET_MODE_BITSIZE (GET_MODE (op0))
10298 - bitsize, op0, 1);
10300 /* If the result type is BLKmode, store the data into a temporary
10301 of the appropriate type, but with the mode corresponding to the
10302 mode for the data we have (op0's mode). */
10303 if (mode == BLKmode)
10305 rtx new_rtx
10306 = assign_stack_temp_for_type (ext_mode,
10307 GET_MODE_BITSIZE (ext_mode),
10308 type);
10309 emit_move_insn (new_rtx, op0);
10310 op0 = copy_rtx (new_rtx);
10311 PUT_MODE (op0, BLKmode);
10314 return op0;
10317 /* If the result is BLKmode, use that to access the object
10318 now as well. */
10319 if (mode == BLKmode)
10320 mode1 = BLKmode;
10322 /* Get a reference to just this component. */
10323 if (modifier == EXPAND_CONST_ADDRESS
10324 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10325 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10326 else
10327 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10329 if (op0 == orig_op0)
10330 op0 = copy_rtx (op0);
10332 set_mem_attributes (op0, exp, 0);
10334 if (REG_P (XEXP (op0, 0)))
10335 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10337 /* If op0 is a temporary because the original expression was forced
10338 to memory, clear MEM_EXPR so that the original expression cannot
10339 be marked as addressable through MEM_EXPR of the temporary. */
10340 if (clear_mem_expr)
10341 set_mem_expr (op0, NULL_TREE);
10343 MEM_VOLATILE_P (op0) |= volatilep;
10344 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10345 || modifier == EXPAND_CONST_ADDRESS
10346 || modifier == EXPAND_INITIALIZER)
10347 return op0;
10349 if (target == 0)
10350 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10352 convert_move (target, op0, unsignedp);
10353 return target;
10356 case OBJ_TYPE_REF:
10357 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10359 case CALL_EXPR:
10360 /* All valid uses of __builtin_va_arg_pack () are removed during
10361 inlining. */
10362 if (CALL_EXPR_VA_ARG_PACK (exp))
10363 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10365 tree fndecl = get_callee_fndecl (exp), attr;
10367 if (fndecl
10368 && (attr = lookup_attribute ("error",
10369 DECL_ATTRIBUTES (fndecl))) != NULL)
10370 error ("%Kcall to %qs declared with attribute error: %s",
10371 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10372 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10373 if (fndecl
10374 && (attr = lookup_attribute ("warning",
10375 DECL_ATTRIBUTES (fndecl))) != NULL)
10376 warning_at (tree_nonartificial_location (exp),
10377 0, "%Kcall to %qs declared with attribute warning: %s",
10378 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10379 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10381 /* Check for a built-in function. */
10382 if (fndecl && DECL_BUILT_IN (fndecl))
10384 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10385 if (CALL_WITH_BOUNDS_P (exp))
10386 return expand_builtin_with_bounds (exp, target, subtarget,
10387 tmode, ignore);
10388 else
10389 return expand_builtin (exp, target, subtarget, tmode, ignore);
10392 return expand_call (exp, target, ignore);
10394 case VIEW_CONVERT_EXPR:
10395 op0 = NULL_RTX;
10397 /* If we are converting to BLKmode, try to avoid an intermediate
10398 temporary by fetching an inner memory reference. */
10399 if (mode == BLKmode
10400 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10401 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10402 && handled_component_p (treeop0))
10404 machine_mode mode1;
10405 HOST_WIDE_INT bitsize, bitpos;
10406 tree offset;
10407 int unsignedp;
10408 int volatilep = 0;
10409 tree tem
10410 = get_inner_reference (treeop0, &bitsize, &bitpos,
10411 &offset, &mode1, &unsignedp, &volatilep,
10412 true);
10413 rtx orig_op0;
10415 /* ??? We should work harder and deal with non-zero offsets. */
10416 if (!offset
10417 && (bitpos % BITS_PER_UNIT) == 0
10418 && bitsize >= 0
10419 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10421 /* See the normal_inner_ref case for the rationale. */
10422 orig_op0
10423 = expand_expr_real (tem,
10424 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10425 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10426 != INTEGER_CST)
10427 && modifier != EXPAND_STACK_PARM
10428 ? target : NULL_RTX),
10429 VOIDmode,
10430 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10431 NULL, true);
10433 if (MEM_P (orig_op0))
10435 op0 = orig_op0;
10437 /* Get a reference to just this component. */
10438 if (modifier == EXPAND_CONST_ADDRESS
10439 || modifier == EXPAND_SUM
10440 || modifier == EXPAND_INITIALIZER)
10441 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10442 else
10443 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10445 if (op0 == orig_op0)
10446 op0 = copy_rtx (op0);
10448 set_mem_attributes (op0, treeop0, 0);
10449 if (REG_P (XEXP (op0, 0)))
10450 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10452 MEM_VOLATILE_P (op0) |= volatilep;
10457 if (!op0)
10458 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10459 NULL, inner_reference_p);
10461 /* If the input and output modes are both the same, we are done. */
10462 if (mode == GET_MODE (op0))
10464 /* If neither mode is BLKmode, and both modes are the same size
10465 then we can use gen_lowpart. */
10466 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10467 && (GET_MODE_PRECISION (mode)
10468 == GET_MODE_PRECISION (GET_MODE (op0)))
10469 && !COMPLEX_MODE_P (GET_MODE (op0)))
10471 if (GET_CODE (op0) == SUBREG)
10472 op0 = force_reg (GET_MODE (op0), op0);
10473 temp = gen_lowpart_common (mode, op0);
10474 if (temp)
10475 op0 = temp;
10476 else
10478 if (!REG_P (op0) && !MEM_P (op0))
10479 op0 = force_reg (GET_MODE (op0), op0);
10480 op0 = gen_lowpart (mode, op0);
10483 /* If both types are integral, convert from one mode to the other. */
10484 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10485 op0 = convert_modes (mode, GET_MODE (op0), op0,
10486 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10487 /* If the output type is a bit-field type, do an extraction. */
10488 else if (reduce_bit_field)
10489 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10490 TYPE_UNSIGNED (type), NULL_RTX,
10491 mode, mode);
10492 /* As a last resort, spill op0 to memory, and reload it in a
10493 different mode. */
10494 else if (!MEM_P (op0))
10496 /* If the operand is not a MEM, force it into memory. Since we
10497 are going to be changing the mode of the MEM, don't call
10498 force_const_mem for constants because we don't allow pool
10499 constants to change mode. */
10500 tree inner_type = TREE_TYPE (treeop0);
10502 gcc_assert (!TREE_ADDRESSABLE (exp));
10504 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10505 target
10506 = assign_stack_temp_for_type
10507 (TYPE_MODE (inner_type),
10508 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10510 emit_move_insn (target, op0);
10511 op0 = target;
10514 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10515 output type is such that the operand is known to be aligned, indicate
10516 that it is. Otherwise, we need only be concerned about alignment for
10517 non-BLKmode results. */
10518 if (MEM_P (op0))
10520 enum insn_code icode;
10522 if (TYPE_ALIGN_OK (type))
10524 /* ??? Copying the MEM without substantially changing it might
10525 run afoul of the code handling volatile memory references in
10526 store_expr, which assumes that TARGET is returned unmodified
10527 if it has been used. */
10528 op0 = copy_rtx (op0);
10529 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10531 else if (modifier != EXPAND_WRITE
10532 && modifier != EXPAND_MEMORY
10533 && !inner_reference_p
10534 && mode != BLKmode
10535 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10537 /* If the target does have special handling for unaligned
10538 loads of mode then use them. */
10539 if ((icode = optab_handler (movmisalign_optab, mode))
10540 != CODE_FOR_nothing)
10542 rtx reg;
10544 op0 = adjust_address (op0, mode, 0);
10545 /* We've already validated the memory, and we're creating a
10546 new pseudo destination. The predicates really can't
10547 fail. */
10548 reg = gen_reg_rtx (mode);
10550 /* Nor can the insn generator. */
10551 rtx_insn *insn = GEN_FCN (icode) (reg, op0);
10552 emit_insn (insn);
10553 return reg;
10555 else if (STRICT_ALIGNMENT)
10557 tree inner_type = TREE_TYPE (treeop0);
10558 HOST_WIDE_INT temp_size
10559 = MAX (int_size_in_bytes (inner_type),
10560 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10561 rtx new_rtx
10562 = assign_stack_temp_for_type (mode, temp_size, type);
10563 rtx new_with_op0_mode
10564 = adjust_address (new_rtx, GET_MODE (op0), 0);
10566 gcc_assert (!TREE_ADDRESSABLE (exp));
10568 if (GET_MODE (op0) == BLKmode)
10569 emit_block_move (new_with_op0_mode, op0,
10570 GEN_INT (GET_MODE_SIZE (mode)),
10571 (modifier == EXPAND_STACK_PARM
10572 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10573 else
10574 emit_move_insn (new_with_op0_mode, op0);
10576 op0 = new_rtx;
10580 op0 = adjust_address (op0, mode, 0);
10583 return op0;
10585 case MODIFY_EXPR:
10587 tree lhs = treeop0;
10588 tree rhs = treeop1;
10589 gcc_assert (ignore);
10591 /* Check for |= or &= of a bitfield of size 1 into another bitfield
10592 of size 1. In this case, (unless we need the result of the
10593 assignment) we can do this more efficiently with a
10594 test followed by an assignment, if necessary.
10596 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10597 things change so we do, this code should be enhanced to
10598 support it. */
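/* As an illustration, for one-bit fields a and b the assignment
   s.a |= s.b  is expanded below roughly as  if (s.b) s.a = 1;  and
   s.a &= s.b  as  if (!s.b) s.a = 0;  avoiding a read-modify-write
   of the destination bitfield.  */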
10599 if (TREE_CODE (lhs) == COMPONENT_REF
10600 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10601 || TREE_CODE (rhs) == BIT_AND_EXPR)
10602 && TREE_OPERAND (rhs, 0) == lhs
10603 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10604 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10605 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10607 rtx_code_label *label = gen_label_rtx ();
10608 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10609 do_jump (TREE_OPERAND (rhs, 1),
10610 value ? label : 0,
10611 value ? 0 : label, -1);
10612 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10613 false);
10614 do_pending_stack_adjust ();
10615 emit_label (label);
10616 return const0_rtx;
10619 expand_assignment (lhs, rhs, false);
10620 return const0_rtx;
10623 case ADDR_EXPR:
10624 return expand_expr_addr_expr (exp, target, tmode, modifier);
10626 case REALPART_EXPR:
10627 op0 = expand_normal (treeop0);
10628 return read_complex_part (op0, false);
10630 case IMAGPART_EXPR:
10631 op0 = expand_normal (treeop0);
10632 return read_complex_part (op0, true);
10634 case RETURN_EXPR:
10635 case LABEL_EXPR:
10636 case GOTO_EXPR:
10637 case SWITCH_EXPR:
10638 case ASM_EXPR:
10639 /* Expanded in cfgexpand.c. */
10640 gcc_unreachable ();
10642 case TRY_CATCH_EXPR:
10643 case CATCH_EXPR:
10644 case EH_FILTER_EXPR:
10645 case TRY_FINALLY_EXPR:
10646 /* Lowered by tree-eh.c. */
10647 gcc_unreachable ();
10649 case WITH_CLEANUP_EXPR:
10650 case CLEANUP_POINT_EXPR:
10651 case TARGET_EXPR:
10652 case CASE_LABEL_EXPR:
10653 case VA_ARG_EXPR:
10654 case BIND_EXPR:
10655 case INIT_EXPR:
10656 case CONJ_EXPR:
10657 case COMPOUND_EXPR:
10658 case PREINCREMENT_EXPR:
10659 case PREDECREMENT_EXPR:
10660 case POSTINCREMENT_EXPR:
10661 case POSTDECREMENT_EXPR:
10662 case LOOP_EXPR:
10663 case EXIT_EXPR:
10664 case COMPOUND_LITERAL_EXPR:
10665 /* Lowered by gimplify.c. */
10666 gcc_unreachable ();
10668 case FDESC_EXPR:
10669 /* Function descriptors are not valid except as
10670 initialization constants, and should not be expanded. */
10671 gcc_unreachable ();
10673 case WITH_SIZE_EXPR:
10674 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10675 have pulled out the size to use in whatever context it needed. */
10676 return expand_expr_real (treeop0, original_target, tmode,
10677 modifier, alt_rtl, inner_reference_p);
10679 default:
10680 return expand_expr_real_2 (&ops, target, tmode, modifier);
10684 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10685 signedness of TYPE), possibly returning the result in TARGET. */
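/* As a worked example, reducing an SImode value to a 3-bit unsigned
   bit-field type masks it with 7, while reducing it to a 3-bit signed
   type shifts it left and then arithmetically right by
   GET_MODE_PRECISION (SImode) - 3 bits, as done below.  */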
10686 static rtx
10687 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10689 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10690 if (target && GET_MODE (target) != GET_MODE (exp))
10691 target = 0;
10692 /* For constant values, reduce using build_int_cst_type. */
10693 if (CONST_INT_P (exp))
10695 HOST_WIDE_INT value = INTVAL (exp);
10696 tree t = build_int_cst_type (type, value);
10697 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10699 else if (TYPE_UNSIGNED (type))
10701 machine_mode mode = GET_MODE (exp);
10702 rtx mask = immed_wide_int_const
10703 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10704 return expand_and (mode, exp, mask, target);
10706 else
10708 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10709 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10710 exp, count, target, 0);
10711 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10712 exp, count, target, 0);
10716 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10717 when applied to the address of EXP produces an address known to be
10718 aligned more than BIGGEST_ALIGNMENT. */
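/* Such an OFFSET typically comes from over-aligning code of the form

       (char *) &exp + ((- (intptr_t) &exp) & (ALIGN - 1))

   where ALIGN - 1 is one less than a power of 2 and larger than
   BIGGEST_ALIGNMENT / BITS_PER_UNIT; the checks below match the
   BIT_AND_EXPR / NEGATE_EXPR / ADDR_EXPR shape of that offset.  */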
10720 static int
10721 is_aligning_offset (const_tree offset, const_tree exp)
10723 /* Strip off any conversions. */
10724 while (CONVERT_EXPR_P (offset))
10725 offset = TREE_OPERAND (offset, 0);
10727 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10728 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10729 if (TREE_CODE (offset) != BIT_AND_EXPR
10730 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10731 || compare_tree_int (TREE_OPERAND (offset, 1),
10732 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10733 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10734 return 0;
10736 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10737 It must be NEGATE_EXPR. Then strip any more conversions. */
10738 offset = TREE_OPERAND (offset, 0);
10739 while (CONVERT_EXPR_P (offset))
10740 offset = TREE_OPERAND (offset, 0);
10742 if (TREE_CODE (offset) != NEGATE_EXPR)
10743 return 0;
10745 offset = TREE_OPERAND (offset, 0);
10746 while (CONVERT_EXPR_P (offset))
10747 offset = TREE_OPERAND (offset, 0);
10749 /* This must now be the address of EXP. */
10750 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10753 /* Return the tree node if ARG corresponds to a string constant, or zero
10754 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10755 in bytes within the string that ARG is accessing. The type of the
10756 offset will be `sizetype'. */
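/* For example, for  "hello" + 2  (or &"hello"[2]) this returns the
   STRING_CST for "hello" and sets *PTR_OFFSET to (sizetype) 2; a
   VAR_DECL initialized from a string literal is handled similarly at
   the end of the function via ctor_for_folding.  */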
10758 tree
10759 string_constant (tree arg, tree *ptr_offset)
10761 tree array, offset, lower_bound;
10762 STRIP_NOPS (arg);
10764 if (TREE_CODE (arg) == ADDR_EXPR)
10766 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10768 *ptr_offset = size_zero_node;
10769 return TREE_OPERAND (arg, 0);
10771 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10773 array = TREE_OPERAND (arg, 0);
10774 offset = size_zero_node;
10776 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10778 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10779 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10780 if (TREE_CODE (array) != STRING_CST
10781 && TREE_CODE (array) != VAR_DECL)
10782 return 0;
10784 /* Check if the array has a nonzero lower bound. */
10785 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10786 if (!integer_zerop (lower_bound))
10788 /* If the offset and base aren't both constants, return 0. */
10789 if (TREE_CODE (lower_bound) != INTEGER_CST)
10790 return 0;
10791 if (TREE_CODE (offset) != INTEGER_CST)
10792 return 0;
10793 /* Adjust offset by the lower bound. */
10794 offset = size_diffop (fold_convert (sizetype, offset),
10795 fold_convert (sizetype, lower_bound));
10798 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10800 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10801 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10802 if (TREE_CODE (array) != ADDR_EXPR)
10803 return 0;
10804 array = TREE_OPERAND (array, 0);
10805 if (TREE_CODE (array) != STRING_CST
10806 && TREE_CODE (array) != VAR_DECL)
10807 return 0;
10809 else
10810 return 0;
10812 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10814 tree arg0 = TREE_OPERAND (arg, 0);
10815 tree arg1 = TREE_OPERAND (arg, 1);
10817 STRIP_NOPS (arg0);
10818 STRIP_NOPS (arg1);
10820 if (TREE_CODE (arg0) == ADDR_EXPR
10821 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10822 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10824 array = TREE_OPERAND (arg0, 0);
10825 offset = arg1;
10827 else if (TREE_CODE (arg1) == ADDR_EXPR
10828 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10829 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10831 array = TREE_OPERAND (arg1, 0);
10832 offset = arg0;
10834 else
10835 return 0;
10837 else
10838 return 0;
10840 if (TREE_CODE (array) == STRING_CST)
10842 *ptr_offset = fold_convert (sizetype, offset);
10843 return array;
10845 else if (TREE_CODE (array) == VAR_DECL
10846 || TREE_CODE (array) == CONST_DECL)
10848 int length;
10849 tree init = ctor_for_folding (array);
10851 /* Variables initialized to string literals can be handled too. */
10852 if (init == error_mark_node
10853 || !init
10854 || TREE_CODE (init) != STRING_CST)
10855 return 0;
10857 /* Avoid const char foo[4] = "abcde"; */
10858 if (DECL_SIZE_UNIT (array) == NULL_TREE
10859 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10860 || (length = TREE_STRING_LENGTH (init)) <= 0
10861 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10862 return 0;
10864 /* If the variable is bigger than the string literal, OFFSET must be constant
10865 and within the bounds of the string literal. */
10866 offset = fold_convert (sizetype, offset);
10867 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10868 && (! tree_fits_uhwi_p (offset)
10869 || compare_tree_int (offset, length) >= 0))
10870 return 0;
10872 *ptr_offset = offset;
10873 return init;
10876 return 0;
10879 /* Generate code to calculate OPS, an exploded expression
10880 using a store-flag instruction and return an rtx for the result.
10881 OPS reflects a comparison.
10883 If TARGET is nonzero, store the result there if convenient.
10885 Return zero if there is no suitable set-flag instruction
10886 available on this machine.
10888 Once expand_expr has been called on the arguments of the comparison,
10889 we are committed to doing the store flag, since it is not safe to
10890 re-evaluate the expression. We emit the store-flag insn by calling
10891 emit_store_flag, but only expand the arguments if we have a reason
10892 to believe that emit_store_flag will be successful. If we think that
10893 it will, but it isn't, we have to simulate the store-flag with a
10894 set/jump/set sequence. */
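/* For instance, an expression such as  a < b  may be expanded by
   emit_store_flag_force into a short sequence that leaves 0 or 1
   (or -1 for signed one-bit types, per the last argument passed
   below) directly in TARGET, instead of a compare followed by a
   conditional jump.  */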
10896 static rtx
10897 do_store_flag (sepops ops, rtx target, machine_mode mode)
10899 enum rtx_code code;
10900 tree arg0, arg1, type;
10901 machine_mode operand_mode;
10902 int unsignedp;
10903 rtx op0, op1;
10904 rtx subtarget = target;
10905 location_t loc = ops->location;
10907 arg0 = ops->op0;
10908 arg1 = ops->op1;
10910 /* Don't crash if the comparison was erroneous. */
10911 if (arg0 == error_mark_node || arg1 == error_mark_node)
10912 return const0_rtx;
10914 type = TREE_TYPE (arg0);
10915 operand_mode = TYPE_MODE (type);
10916 unsignedp = TYPE_UNSIGNED (type);
10918 /* We won't bother with BLKmode store-flag operations because it would mean
10919 passing a lot of information to emit_store_flag. */
10920 if (operand_mode == BLKmode)
10921 return 0;
10923 /* We won't bother with store-flag operations involving function pointers
10924 when function pointers must be canonicalized before comparisons. */
10925 #ifdef HAVE_canonicalize_funcptr_for_compare
10926 if (HAVE_canonicalize_funcptr_for_compare
10927 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10928 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10929 == FUNCTION_TYPE))
10930 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10931 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10932 == FUNCTION_TYPE))))
10933 return 0;
10934 #endif
10936 STRIP_NOPS (arg0);
10937 STRIP_NOPS (arg1);
10939 /* For vector typed comparisons emit code to generate the desired
10940 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10941 expander for this. */
10942 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10944 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10945 tree if_true = constant_boolean_node (true, ops->type);
10946 tree if_false = constant_boolean_node (false, ops->type);
10947 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10950 /* Get the rtx comparison code to use. We know that EXP is a comparison
10951 operation of some type. Some comparisons against 1 and -1 can be
10952 converted to comparisons with zero. Do so here so that the tests
10953 below will be aware that we have a comparison with zero. These
10954 tests will not catch constants in the first operand, but constants
10955 are rarely passed as the first operand. */
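/* For example, a signed  x < 1  becomes  x <= 0  (code LE), a signed
   x > -1  becomes  x >= 0  (code GE), and an unsigned  x >= 1  becomes
   x > 0  (code GTU), so the single-bit test below only needs to
   recognize comparisons against zero.  */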
10957 switch (ops->code)
10959 case EQ_EXPR:
10960 code = EQ;
10961 break;
10962 case NE_EXPR:
10963 code = NE;
10964 break;
10965 case LT_EXPR:
10966 if (integer_onep (arg1))
10967 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10968 else
10969 code = unsignedp ? LTU : LT;
10970 break;
10971 case LE_EXPR:
10972 if (! unsignedp && integer_all_onesp (arg1))
10973 arg1 = integer_zero_node, code = LT;
10974 else
10975 code = unsignedp ? LEU : LE;
10976 break;
10977 case GT_EXPR:
10978 if (! unsignedp && integer_all_onesp (arg1))
10979 arg1 = integer_zero_node, code = GE;
10980 else
10981 code = unsignedp ? GTU : GT;
10982 break;
10983 case GE_EXPR:
10984 if (integer_onep (arg1))
10985 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10986 else
10987 code = unsignedp ? GEU : GE;
10988 break;
10990 case UNORDERED_EXPR:
10991 code = UNORDERED;
10992 break;
10993 case ORDERED_EXPR:
10994 code = ORDERED;
10995 break;
10996 case UNLT_EXPR:
10997 code = UNLT;
10998 break;
10999 case UNLE_EXPR:
11000 code = UNLE;
11001 break;
11002 case UNGT_EXPR:
11003 code = UNGT;
11004 break;
11005 case UNGE_EXPR:
11006 code = UNGE;
11007 break;
11008 case UNEQ_EXPR:
11009 code = UNEQ;
11010 break;
11011 case LTGT_EXPR:
11012 code = LTGT;
11013 break;
11015 default:
11016 gcc_unreachable ();
11017 }
11019 /* Put a constant second. */
11020 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11021 || TREE_CODE (arg0) == FIXED_CST)
11022 {
11023 std::swap (arg0, arg1);
11024 code = swap_condition (code);
11025 }
11027 /* If this is an equality or inequality test of a single bit, we can
11028 do this by shifting the bit being tested to the low-order bit and
11029 masking the result with the constant 1. If the condition was EQ,
11030 we xor it with 1. This does not require an scc insn and is faster
11031 than an scc insn even when one is available.
11033 The code to make this transformation was moved into fold_single_bit_test,
11034 so we just call into the folder and expand its result. */
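/* For example, "(x & 8) != 0" expands roughly to "(x >> 3) & 1", and
   "(x & 8) == 0" to "((x >> 3) & 1) ^ 1". */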
11036 if ((code == NE || code == EQ)
11037 && integer_zerop (arg1)
11038 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11039 {
11040 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11041 if (srcstmt
11042 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11043 {
11044 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11045 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11046 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11047 gimple_assign_rhs1 (srcstmt),
11048 gimple_assign_rhs2 (srcstmt));
11049 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11050 if (temp)
11051 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11052 }
11053 }
11055 if (! get_subtarget (target)
11056 || GET_MODE (subtarget) != operand_mode)
11057 subtarget = 0;
11059 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11061 if (target == 0)
11062 target = gen_reg_rtx (mode);
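/* The last argument below is the value to store when the comparison
   holds: -1 for a signed one-bit result type, whose true value is all
   ones, and 1 otherwise. */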
11064 /* Try a cstore if possible. */
11065 return emit_store_flag_force (target, code, op0, op1,
11066 operand_mode, unsignedp,
11067 (TYPE_PRECISION (ops->type) == 1
11068 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11069 }
11072 /* Stubs in case we haven't got a casesi insn. */
11073 #ifndef HAVE_casesi
11074 # define HAVE_casesi 0
11075 # define gen_casesi(a, b, c, d, e) (0)
11076 # define CODE_FOR_casesi CODE_FOR_nothing
11077 #endif
11079 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11080 0 otherwise (i.e. if there is no casesi instruction).
11082 DEFAULT_PROBABILITY is the probability of jumping to the default
11083 label. */
11084 int
11085 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11086 rtx table_label, rtx default_label, rtx fallback_label,
11087 int default_probability)
11088 {
11089 struct expand_operand ops[5];
11090 machine_mode index_mode = SImode;
11091 rtx op1, op2, index;
11093 if (! HAVE_casesi)
11094 return 0;
11096 /* Convert the index to SImode. */
11097 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11098 {
11099 machine_mode omode = TYPE_MODE (index_type);
11100 rtx rangertx = expand_normal (range);
11102 /* We must handle the endpoints in the original mode. */
11103 index_expr = build2 (MINUS_EXPR, index_type,
11104 index_expr, minval);
11105 minval = integer_zero_node;
11106 index = expand_normal (index_expr);
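/* The range check happens here, while the index is still in its
   original wider mode: out-of-range values branch to the default label,
   so the truncation below cannot discard significant bits. */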
11107 if (default_label)
11108 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11109 omode, 1, default_label,
11110 default_probability);
11111 /* Now we can safely truncate. */
11112 index = convert_to_mode (index_mode, index, 0);
11113 }
11114 else
11115 {
11116 if (TYPE_MODE (index_type) != index_mode)
11117 {
11118 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11119 index_expr = fold_convert (index_type, index_expr);
11120 }
11122 index = expand_normal (index_expr);
11123 }
11125 do_pending_stack_adjust ();
11127 op1 = expand_normal (minval);
11128 op2 = expand_normal (range);
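/* The casesi pattern takes five operands: the index (0), the lower
   bound of the case values (1), the range, i.e. max - min (2), the
   label preceding the dispatch table (3), and the label to jump to when
   the index is out of range (4). */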
11130 create_input_operand (&ops[0], index, index_mode);
11131 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11132 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11133 create_fixed_operand (&ops[3], table_label);
11134 create_fixed_operand (&ops[4], (default_label
11135 ? default_label
11136 : fallback_label));
11137 expand_jump_insn (CODE_FOR_casesi, 5, ops);
11138 return 1;
11139 }
11141 /* Attempt to generate a tablejump instruction; same concept as try_casesi. */
11142 /* Subroutine of the next function.
11144 INDEX is the value being switched on, with the lowest value
11145 in the table already subtracted.
11146 MODE is its expected mode (needed if INDEX is constant).
11147 RANGE is the length of the jump table.
11148 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11150 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11151 index value is out of range.
11152 DEFAULT_PROBABILITY is the probability of jumping to
11153 the default label. */
11155 static void
11156 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11157 rtx default_label, int default_probability)
11158 {
11159 rtx temp, vector;
11161 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11162 cfun->cfg->max_jumptable_ents = INTVAL (range);
11164 /* Do an unsigned comparison (in the proper mode) between the index
11165 expression and the value which represents the length of the range.
11166 Since we just finished subtracting the lower bound of the range
11167 from the index expression, this comparison allows us to simultaneously
11168 check that the original index expression value is both greater than
11169 or equal to the minimum value of the range and less than or equal to
11170 the maximum value of the range. */
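/* For example, for a switch covering case values 10 .. 15, INDEX holds
   x - 10 and RANGE holds the highest adjusted index, 5; an original
   value of 3 wraps around to a huge unsigned number, so the single GTU
   test below rejects it along with anything above 15. */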
11172 if (default_label)
11173 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11174 default_label, default_probability);
11177 /* If index is in range, it must fit in Pmode.
11178 Convert to Pmode so we can index with it. */
11179 if (mode != Pmode)
11180 index = convert_to_mode (Pmode, index, 1);
11182 /* Don't let a MEM slip through, because then INDEX that comes
11183 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11184 and break_out_memory_refs will go to work on it and mess it up. */
11185 #ifdef PIC_CASE_VECTOR_ADDRESS
11186 if (flag_pic && !REG_P (index))
11187 index = copy_to_mode_reg (Pmode, index);
11188 #endif
11190 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11191 GET_MODE_SIZE, because this indicates how large insns are. The other
11192 uses should all be Pmode, because they are addresses. This code
11193 could fail if addresses and insns are not the same size. */
11194 index = simplify_gen_binary (MULT, Pmode, index,
11195 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11196 Pmode));
11197 index = simplify_gen_binary (PLUS, Pmode, index,
11198 gen_rtx_LABEL_REF (Pmode, table_label));
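/* INDEX is now the byte address of the selected dispatch-table entry:
   TABLE_LABEL plus the adjusted index times the size of one entry. */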
11200 #ifdef PIC_CASE_VECTOR_ADDRESS
11201 if (flag_pic)
11202 index = PIC_CASE_VECTOR_ADDRESS (index);
11203 else
11204 #endif
11205 index = memory_address (CASE_VECTOR_MODE, index);
11206 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11207 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11208 convert_move (temp, vector, 0);
11210 emit_jump_insn (gen_tablejump (temp, table_label));
11212 /* If we are generating PIC code or if the table is PC-relative, the
11213 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11214 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11215 emit_barrier ();
11216 }
11218 int
11219 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11220 rtx table_label, rtx default_label, int default_probability)
11221 {
11222 rtx index;
11224 if (! HAVE_tablejump)
11225 return 0;
11227 index_expr = fold_build2 (MINUS_EXPR, index_type,
11228 fold_convert (index_type, index_expr),
11229 fold_convert (index_type, minval));
11230 index = expand_normal (index_expr);
11231 do_pending_stack_adjust ();
11233 do_tablejump (index, TYPE_MODE (index_type),
11234 convert_modes (TYPE_MODE (index_type),
11235 TYPE_MODE (TREE_TYPE (range)),
11236 expand_normal (range),
11237 TYPE_UNSIGNED (TREE_TYPE (range))),
11238 table_label, default_label, default_probability);
11239 return 1;
11240 }
11242 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
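/* For example, a V4SI VECTOR_CST of {1, 2, 3, 4} becomes a
   (const_vector:V4SI ...) whose four elements are the corresponding
   const_int rtxes. */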
11243 static rtx
11244 const_vector_from_tree (tree exp)
11245 {
11246 rtvec v;
11247 unsigned i;
11248 int units;
11249 tree elt;
11250 machine_mode inner, mode;
11252 mode = TYPE_MODE (TREE_TYPE (exp));
11254 if (initializer_zerop (exp))
11255 return CONST0_RTX (mode);
11257 units = GET_MODE_NUNITS (mode);
11258 inner = GET_MODE_INNER (mode);
11260 v = rtvec_alloc (units);
11262 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11263 {
11264 elt = VECTOR_CST_ELT (exp, i);
11266 if (TREE_CODE (elt) == REAL_CST)
11267 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11268 inner);
11269 else if (TREE_CODE (elt) == FIXED_CST)
11270 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11271 inner);
11272 else
11273 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11274 }
11276 return gen_rtx_CONST_VECTOR (mode, v);
11277 }
11279 /* Build a decl for a personality function given a language prefix. */
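/* For example, the C++ front end passes the prefix "gxx", which yields
   __gxx_personality_v0 with DWARF unwind info and __gxx_personality_sj0
   with SJLJ exceptions. */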
11281 tree
11282 build_personality_function (const char *lang)
11283 {
11284 const char *unwind_and_version;
11285 tree decl, type;
11286 char *name;
11288 switch (targetm_common.except_unwind_info (&global_options))
11289 {
11290 case UI_NONE:
11291 return NULL;
11292 case UI_SJLJ:
11293 unwind_and_version = "_sj0";
11294 break;
11295 case UI_DWARF2:
11296 case UI_TARGET:
11297 unwind_and_version = "_v0";
11298 break;
11299 case UI_SEH:
11300 unwind_and_version = "_seh0";
11301 break;
11302 default:
11303 gcc_unreachable ();
11304 }
11306 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11308 type = build_function_type_list (integer_type_node, integer_type_node,
11309 long_long_unsigned_type_node,
11310 ptr_type_node, ptr_type_node, NULL_TREE);
11311 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11312 get_identifier (name), type);
11313 DECL_ARTIFICIAL (decl) = 1;
11314 DECL_EXTERNAL (decl) = 1;
11315 TREE_PUBLIC (decl) = 1;
11317 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11318 are the flags assigned by targetm.encode_section_info. */
11319 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11321 return decl;
11322 }
11324 /* Extracts the personality function of DECL and returns the corresponding
11325 libfunc. */
11327 rtx
11328 get_personality_function (tree decl)
11329 {
11330 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11331 enum eh_personality_kind pk;
11333 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11334 if (pk == eh_personality_none)
11335 return NULL;
11337 if (!personality
11338 && pk == eh_personality_any)
11339 personality = lang_hooks.eh_personality ();
11341 if (pk == eh_personality_lang)
11342 gcc_assert (personality != NULL_TREE);
11344 return XEXP (DECL_RTL (personality), 0);
11345 }
11347 /* Returns a tree for the size of EXP in bytes. */
11349 static tree
11350 tree_expr_size (const_tree exp)
11351 {
11352 if (DECL_P (exp)
11353 && DECL_SIZE_UNIT (exp) != 0)
11354 return DECL_SIZE_UNIT (exp);
11355 else
11356 return size_in_bytes (TREE_TYPE (exp));
11357 }
11359 /* Return an rtx for the size in bytes of the value of EXP. */
11361 rtx
11362 expr_size (tree exp)
11363 {
11364 tree size;
11366 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11367 size = TREE_OPERAND (exp, 1);
11368 else
11369 {
11370 size = tree_expr_size (exp);
11371 gcc_assert (size);
11372 gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
11373 }
11375 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
11376 }
11378 /* Return a wide integer for the size in bytes of the value of EXP, or -1
11379 if the size can vary or is larger than an integer. */
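/* For example, a C99 variable-length array has no compile-time constant
   size, so tree_fits_shwi_p fails below and -1 is returned. */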
11381 static HOST_WIDE_INT
11382 int_expr_size (tree exp)
11383 {
11384 tree size;
11386 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11387 size = TREE_OPERAND (exp, 1);
11388 else
11389 {
11390 size = tree_expr_size (exp);
11391 gcc_assert (size);
11392 }
11394 if (size == 0 || !tree_fits_shwi_p (size))
11395 return -1;
11397 return tree_to_shwi (size);
11398 }
11400 #include "gt-expr.h"