1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "tree.h"
25 #include "gimple.h"
26 #include "rtl.h"
27 #include "df.h"
28 #include "ssa.h"
29 #include "alias.h"
30 #include "fold-const.h"
31 #include "stor-layout.h"
32 #include "attribs.h"
33 #include "varasm.h"
34 #include "flags.h"
35 #include "regs.h"
36 #include "except.h"
37 #include "insn-config.h"
38 #include "insn-attr.h"
39 #include "expmed.h"
40 #include "dojump.h"
41 #include "explow.h"
42 #include "calls.h"
43 #include "emit-rtl.h"
44 #include "stmt.h"
45 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
46 #include "expr.h"
47 #include "insn-codes.h"
48 #include "optabs.h"
49 #include "libfuncs.h"
50 #include "recog.h"
51 #include "reload.h"
52 #include "typeclass.h"
53 #include "toplev.h"
54 #include "langhooks.h"
55 #include "intl.h"
56 #include "tm_p.h"
57 #include "tree-iterator.h"
58 #include "internal-fn.h"
59 #include "cgraph.h"
60 #include "target.h"
61 #include "common/common-target.h"
62 #include "timevar.h"
63 #include "diagnostic.h"
64 #include "tree-ssa-live.h"
65 #include "tree-outof-ssa.h"
66 #include "target-globals.h"
67 #include "params.h"
68 #include "tree-ssa-address.h"
69 #include "cfgexpand.h"
70 #include "builtins.h"
71 #include "tree-chkp.h"
72 #include "rtl-chkp.h"
73 #include "ccmp.h"
76 /* If this is nonzero, we do not bother generating VOLATILE
77 around volatile memory references, and we are willing to
78 output indirect addresses. If cse is to follow, we reject
79 indirect addresses so a useful potential cse is generated;
80 if it is used only once, instruction combination will produce
81 the same indirect address eventually. */
82 int cse_not_expected;
84 /* This structure is used by move_by_pieces to describe the move to
85 be performed. */
86 struct move_by_pieces_d
88 rtx to;
89 rtx to_addr;
90 int autinc_to;
91 int explicit_inc_to;
92 rtx from;
93 rtx from_addr;
94 int autinc_from;
95 int explicit_inc_from;
96 unsigned HOST_WIDE_INT len;
97 HOST_WIDE_INT offset;
98 int reverse;
101 /* This structure is used by store_by_pieces to describe the clear to
102 be performed. */
104 struct store_by_pieces_d
106 rtx to;
107 rtx to_addr;
108 int autinc_to;
109 int explicit_inc_to;
110 unsigned HOST_WIDE_INT len;
111 HOST_WIDE_INT offset;
112 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
113 void *constfundata;
114 int reverse;
117 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
118 struct move_by_pieces_d *);
119 static bool block_move_libcall_safe_for_call_parm (void);
120 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
121 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
122 unsigned HOST_WIDE_INT);
123 static tree emit_block_move_libcall_fn (int);
124 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
125 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
126 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
127 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
128 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
129 struct store_by_pieces_d *);
130 static tree clear_storage_libcall_fn (int);
131 static rtx_insn *compress_float_constant (rtx, rtx);
132 static rtx get_subtarget (rtx);
133 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
134 HOST_WIDE_INT, machine_mode,
135 tree, int, alias_set_type);
136 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
137 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
138 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
139 machine_mode, tree, alias_set_type, bool);
141 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
143 static int is_aligning_offset (const_tree, const_tree);
144 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
145 static rtx do_store_flag (sepops, rtx, machine_mode);
146 #ifdef PUSH_ROUNDING
147 static void emit_single_push_insn (machine_mode, rtx, tree);
148 #endif
149 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
150 static rtx const_vector_from_tree (tree);
151 static tree tree_expr_size (const_tree);
152 static HOST_WIDE_INT int_expr_size (tree);
155 /* This is run to set up which modes can be used
156 directly in memory and to initialize the block move optab. It is run
157 at the beginning of compilation and when the target is reinitialized. */
159 void
160 init_expr_target (void)
162 rtx insn, pat;
163 machine_mode mode;
164 int num_clobbers;
165 rtx mem, mem1;
166 rtx reg;
168 /* Try indexing by frame ptr and try by stack ptr.
169 It is known that on the Convex the stack ptr isn't a valid index.
170 With luck, one or the other is valid on any machine. */
171 mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
172 mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);
174 /* A scratch register we can modify in-place below to avoid
175 useless RTL allocations. */
176 reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
178 insn = rtx_alloc (INSN);
179 pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
180 PATTERN (insn) = pat;
182 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
183 mode = (machine_mode) ((int) mode + 1))
185 int regno;
187 direct_load[(int) mode] = direct_store[(int) mode] = 0;
188 PUT_MODE (mem, mode);
189 PUT_MODE (mem1, mode);
191 /* See if there is some register that can be used in this mode and
192 directly loaded or stored from memory. */
194 if (mode != VOIDmode && mode != BLKmode)
195 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
196 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
197 regno++)
199 if (! HARD_REGNO_MODE_OK (regno, mode))
200 continue;
202 set_mode_and_regno (reg, mode, regno);
204 SET_SRC (pat) = mem;
205 SET_DEST (pat) = reg;
206 if (recog (pat, insn, &num_clobbers) >= 0)
207 direct_load[(int) mode] = 1;
209 SET_SRC (pat) = mem1;
210 SET_DEST (pat) = reg;
211 if (recog (pat, insn, &num_clobbers) >= 0)
212 direct_load[(int) mode] = 1;
214 SET_SRC (pat) = reg;
215 SET_DEST (pat) = mem;
216 if (recog (pat, insn, &num_clobbers) >= 0)
217 direct_store[(int) mode] = 1;
219 SET_SRC (pat) = reg;
220 SET_DEST (pat) = mem1;
221 if (recog (pat, insn, &num_clobbers) >= 0)
222 direct_store[(int) mode] = 1;
226 mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));
228 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
229 mode = GET_MODE_WIDER_MODE (mode))
231 machine_mode srcmode;
232 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
233 srcmode = GET_MODE_WIDER_MODE (srcmode))
235 enum insn_code ic;
237 ic = can_extend_p (mode, srcmode, 0);
238 if (ic == CODE_FOR_nothing)
239 continue;
241 PUT_MODE (mem, srcmode);
243 if (insn_operand_matches (ic, 1, mem))
244 float_extend_from_mem[mode][srcmode] = true;
249 /* This is run at the start of compiling a function. */
251 void
252 init_expr (void)
254 memset (&crtl->expr, 0, sizeof (crtl->expr));
257 /* Copy data from FROM to TO, where the machine modes are not the same.
258 Both modes may be integer, or both may be floating, or both may be
259 fixed-point.
260 UNSIGNEDP should be nonzero if FROM is an unsigned type.
261 This causes zero-extension instead of sign-extension. */
263 void
264 convert_move (rtx to, rtx from, int unsignedp)
266 machine_mode to_mode = GET_MODE (to);
267 machine_mode from_mode = GET_MODE (from);
268 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
269 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
270 enum insn_code code;
271 rtx libcall;
273 /* rtx code for making an equivalent value. */
274 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
275 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
278 gcc_assert (to_real == from_real);
279 gcc_assert (to_mode != BLKmode);
280 gcc_assert (from_mode != BLKmode);
282 /* If the source and destination are already the same, then there's
283 nothing to do. */
284 if (to == from)
285 return;
287 /* If FROM is a SUBREG that indicates that we have already done at least
288 the required extension, strip it. We don't handle such SUBREGs as
289 TO here. */
291 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
292 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
293 >= GET_MODE_PRECISION (to_mode))
294 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
295 from = gen_lowpart (to_mode, from), from_mode = to_mode;
297 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
299 if (to_mode == from_mode
300 || (from_mode == VOIDmode && CONSTANT_P (from)))
302 emit_move_insn (to, from);
303 return;
306 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
308 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
310 if (VECTOR_MODE_P (to_mode))
311 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
312 else
313 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
315 emit_move_insn (to, from);
316 return;
319 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
321 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
322 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
323 return;
326 if (to_real)
328 rtx value;
329 rtx_insn *insns;
330 convert_optab tab;
332 gcc_assert ((GET_MODE_PRECISION (from_mode)
333 != GET_MODE_PRECISION (to_mode))
334 || (DECIMAL_FLOAT_MODE_P (from_mode)
335 != DECIMAL_FLOAT_MODE_P (to_mode)));
337 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
338 /* Conversion between decimal float and binary float, same size. */
339 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
340 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
341 tab = sext_optab;
342 else
343 tab = trunc_optab;
345 /* Try converting directly if the insn is supported. */
347 code = convert_optab_handler (tab, to_mode, from_mode);
348 if (code != CODE_FOR_nothing)
350 emit_unop_insn (code, to, from,
351 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
352 return;
355 /* Otherwise use a libcall. */
356 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
358 /* Is this conversion implemented yet? */
359 gcc_assert (libcall);
361 start_sequence ();
362 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
363 1, from, from_mode);
364 insns = get_insns ();
365 end_sequence ();
366 emit_libcall_block (insns, to, value,
367 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
368 from)
369 : gen_rtx_FLOAT_EXTEND (to_mode, from));
370 return;
373 /* Handle pointer conversion. */ /* SPEE 900220. */
374 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
376 convert_optab ctab;
378 if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
379 ctab = trunc_optab;
380 else if (unsignedp)
381 ctab = zext_optab;
382 else
383 ctab = sext_optab;
385 if (convert_optab_handler (ctab, to_mode, from_mode)
386 != CODE_FOR_nothing)
388 emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
389 to, from, UNKNOWN);
390 return;
394 /* Targets are expected to provide conversion insns between PxImode and
395 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
396 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
398 machine_mode full_mode
399 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
401 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
402 != CODE_FOR_nothing);
404 if (full_mode != from_mode)
405 from = convert_to_mode (full_mode, from, unsignedp);
406 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
407 to, from, UNKNOWN);
408 return;
410 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
412 rtx new_from;
413 machine_mode full_mode
414 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
415 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
416 enum insn_code icode;
418 icode = convert_optab_handler (ctab, full_mode, from_mode);
419 gcc_assert (icode != CODE_FOR_nothing);
421 if (to_mode == full_mode)
423 emit_unop_insn (icode, to, from, UNKNOWN);
424 return;
427 new_from = gen_reg_rtx (full_mode);
428 emit_unop_insn (icode, new_from, from, UNKNOWN);
430 /* else proceed to integer conversions below. */
431 from_mode = full_mode;
432 from = new_from;
435 /* Make sure both are fixed-point modes or both are not. */
436 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
437 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
438 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
440 /* If we widen from_mode to to_mode and they are in the same class,
441 we won't saturate the result.
442 Otherwise, always saturate the result to play safe. */
443 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
444 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
445 expand_fixed_convert (to, from, 0, 0);
446 else
447 expand_fixed_convert (to, from, 0, 1);
448 return;
451 /* Now both modes are integers. */
453 /* Handle expanding beyond a word. */
454 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
455 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
457 rtx_insn *insns;
458 rtx lowpart;
459 rtx fill_value;
460 rtx lowfrom;
461 int i;
462 machine_mode lowpart_mode;
463 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
465 /* Try converting directly if the insn is supported. */
466 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
467 != CODE_FOR_nothing)
469 /* If FROM is a SUBREG, put it into a register. Do this
470 so that we always generate the same set of insns for
471 better cse'ing; if an intermediate assignment occurred,
472 we won't be doing the operation directly on the SUBREG. */
473 if (optimize > 0 && GET_CODE (from) == SUBREG)
474 from = force_reg (from_mode, from);
475 emit_unop_insn (code, to, from, equiv_code);
476 return;
478 /* Next, try converting via full word. */
479 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
480 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
481 != CODE_FOR_nothing))
483 rtx word_to = gen_reg_rtx (word_mode);
484 if (REG_P (to))
486 if (reg_overlap_mentioned_p (to, from))
487 from = force_reg (from_mode, from);
488 emit_clobber (to);
490 convert_move (word_to, from, unsignedp);
491 emit_unop_insn (code, to, word_to, equiv_code);
492 return;
495 /* No special multiword conversion insn; do it by hand. */
496 start_sequence ();
498 /* Since we will turn this into a no conflict block, we must ensure that
499 the source does not overlap the target, so force it into an isolated
500 register when it might. Likewise for any MEM input, since the
501 conversion sequence might require several references to it and we
502 must ensure we're getting the same value every time. */
504 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
505 from = force_reg (from_mode, from);
507 /* Get a copy of FROM widened to a word, if necessary. */
508 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
509 lowpart_mode = word_mode;
510 else
511 lowpart_mode = from_mode;
513 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
515 lowpart = gen_lowpart (lowpart_mode, to);
516 emit_move_insn (lowpart, lowfrom);
518 /* Compute the value to put in each remaining word. */
519 if (unsignedp)
520 fill_value = const0_rtx;
521 else
522 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
523 LT, lowfrom, const0_rtx,
524 lowpart_mode, 0, -1);
526 /* Fill the remaining words. */
527 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
529 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
530 rtx subword = operand_subword (to, index, 1, to_mode);
532 gcc_assert (subword);
534 if (fill_value != subword)
535 emit_move_insn (subword, fill_value);
538 insns = get_insns ();
539 end_sequence ();
541 emit_insn (insns);
542 return;
545 /* Truncating multi-word to a word or less. */
546 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
547 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
549 if (!((MEM_P (from)
550 && ! MEM_VOLATILE_P (from)
551 && direct_load[(int) to_mode]
552 && ! mode_dependent_address_p (XEXP (from, 0),
553 MEM_ADDR_SPACE (from)))
554 || REG_P (from)
555 || GET_CODE (from) == SUBREG))
556 from = force_reg (from_mode, from);
557 convert_move (to, gen_lowpart (word_mode, from), 0);
558 return;
561 /* Now follow all the conversions between integers
562 no more than a word long. */
564 /* For truncation, usually we can just refer to FROM in a narrower mode. */
565 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
566 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
568 if (!((MEM_P (from)
569 && ! MEM_VOLATILE_P (from)
570 && direct_load[(int) to_mode]
571 && ! mode_dependent_address_p (XEXP (from, 0),
572 MEM_ADDR_SPACE (from)))
573 || REG_P (from)
574 || GET_CODE (from) == SUBREG))
575 from = force_reg (from_mode, from);
576 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
577 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
578 from = copy_to_reg (from);
579 emit_move_insn (to, gen_lowpart (to_mode, from));
580 return;
583 /* Handle extension. */
584 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
586 /* Convert directly if that works. */
587 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
588 != CODE_FOR_nothing)
590 emit_unop_insn (code, to, from, equiv_code);
591 return;
593 else
595 machine_mode intermediate;
596 rtx tmp;
597 int shift_amount;
599 /* Search for a mode to convert via. */
600 for (intermediate = from_mode; intermediate != VOIDmode;
601 intermediate = GET_MODE_WIDER_MODE (intermediate))
602 if (((can_extend_p (to_mode, intermediate, unsignedp)
603 != CODE_FOR_nothing)
604 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
605 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
606 && (can_extend_p (intermediate, from_mode, unsignedp)
607 != CODE_FOR_nothing))
609 convert_move (to, convert_to_mode (intermediate, from,
610 unsignedp), unsignedp);
611 return;
614 /* No suitable intermediate mode.
615 Generate what we need with shifts. */
616 shift_amount = (GET_MODE_PRECISION (to_mode)
617 - GET_MODE_PRECISION (from_mode));
618 from = gen_lowpart (to_mode, force_reg (from_mode, from));
619 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
620 to, unsignedp);
621 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
622 to, unsignedp);
623 if (tmp != to)
624 emit_move_insn (to, tmp);
625 return;
629 /* Support special truncate insns for certain modes. */
630 if (convert_optab_handler (trunc_optab, to_mode,
631 from_mode) != CODE_FOR_nothing)
633 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
634 to, from, UNKNOWN);
635 return;
638 /* Handle truncation of volatile memrefs, and so on;
639 the things that couldn't be truncated directly,
640 and for which there was no special instruction.
642 ??? Code above formerly short-circuited this, for most integer
643 mode pairs, with a force_reg in from_mode followed by a recursive
644 call to this routine. Appears always to have been wrong. */
645 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
647 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
648 emit_move_insn (to, temp);
649 return;
652 /* Mode combination is not recognized. */
653 gcc_unreachable ();
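/* Illustrative sketch, not part of the original file: a hypothetical
   caller that zero-extends a QImode value into a fresh SImode pseudo
   via convert_move.  The helper name expand_widen_example is invented
   for illustration only.  */

static rtx
expand_widen_example (rtx byte_val)
{
  /* BYTE_VAL is assumed to already have QImode.  */
  rtx wide = gen_reg_rtx (SImode);

  /* Nonzero UNSIGNEDP requests zero-extension rather than
     sign-extension.  */
  convert_move (wide, byte_val, 1);
  return wide;
}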
656 /* Return an rtx for a value that would result
657 from converting X to mode MODE.
658 Both X and MODE may be floating, or both integer.
659 UNSIGNEDP is nonzero if X is an unsigned value.
660 This can be done by referring to a part of X in place
661 or by copying to a new temporary with conversion. */
664 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
666 return convert_modes (mode, VOIDmode, x, unsignedp);
669 /* Return an rtx for a value that would result
670 from converting X from mode OLDMODE to mode MODE.
671 Both modes may be floating, or both integer.
672 UNSIGNEDP is nonzero if X is an unsigned value.
674 This can be done by referring to a part of X in place
675 or by copying to a new temporary with conversion.
677 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
680 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
682 rtx temp;
684 /* If FROM is a SUBREG that indicates that we have already done at least
685 the required extension, strip it. */
687 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
688 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
689 && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
690 x = gen_lowpart (mode, SUBREG_REG (x));
692 if (GET_MODE (x) != VOIDmode)
693 oldmode = GET_MODE (x);
695 if (mode == oldmode)
696 return x;
698 if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
700 /* If the caller did not tell us the old mode, then there is not
701 much to do with respect to canonicalization. We have to
702 assume that all the bits are significant. */
703 if (GET_MODE_CLASS (oldmode) != MODE_INT)
704 oldmode = MAX_MODE_INT;
705 wide_int w = wide_int::from (std::make_pair (x, oldmode),
706 GET_MODE_PRECISION (mode),
707 unsignedp ? UNSIGNED : SIGNED);
708 return immed_wide_int_const (w, mode);
711 /* We can do this with a gen_lowpart if both desired and current modes
712 are integer, and this is either a constant integer, a register, or a
713 non-volatile MEM. */
714 if (GET_MODE_CLASS (mode) == MODE_INT
715 && GET_MODE_CLASS (oldmode) == MODE_INT
716 && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
717 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
718 || (REG_P (x)
719 && (!HARD_REGISTER_P (x)
720 || HARD_REGNO_MODE_OK (REGNO (x), mode))
721 && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
723 return gen_lowpart (mode, x);
725 /* Converting from an integer constant into MODE is always equivalent to a
726 subreg operation. */
727 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
729 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
730 return simplify_gen_subreg (mode, x, oldmode, 0);
733 temp = gen_reg_rtx (mode);
734 convert_move (temp, x, unsignedp);
735 return temp;
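/* Illustrative sketch, not part of the original file: convert_modes can
   be handed a VOIDmode CONST_INT together with the mode it is meant to
   have.  Here the HImode constant 0x1234 is truncated to QImode, which
   yields the canonical QImode constant 0x34.  The helper name is a
   placeholder for illustration only.  */

static rtx
narrow_constant_example (void)
{
  return convert_modes (QImode, HImode, GEN_INT (0x1234), 1);
}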
738 /* Return the largest alignment we can use for doing a move (or store)
739 of MAX_PIECES. ALIGN is the largest alignment we could use. */
741 static unsigned int
742 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
744 machine_mode tmode;
746 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
747 if (align >= GET_MODE_ALIGNMENT (tmode))
748 align = GET_MODE_ALIGNMENT (tmode);
749 else
751 machine_mode tmode, xmode;
753 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
754 tmode != VOIDmode;
755 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
756 if (GET_MODE_SIZE (tmode) > max_pieces
757 || SLOW_UNALIGNED_ACCESS (tmode, align))
758 break;
760 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
763 return align;
766 /* Return the widest integer mode narrower than SIZE bytes. If no such mode
767 can be found, return VOIDmode. */
769 static machine_mode
770 widest_int_mode_for_size (unsigned int size)
772 machine_mode tmode, mode = VOIDmode;
774 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
775 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
776 if (GET_MODE_SIZE (tmode) < size)
777 mode = tmode;
779 return mode;
782 /* Determine whether the LEN bytes can be moved by using several move
783 instructions. Return nonzero if a call to move_by_pieces should
784 succeed. */
787 can_move_by_pieces (unsigned HOST_WIDE_INT len,
788 unsigned int align)
790 return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
791 optimize_insn_for_speed_p ());
794 /* Generate several move instructions to copy LEN bytes from block FROM to
795 block TO. (These are MEM rtx's with BLKmode).
797 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
798 used to push FROM to the stack.
800 ALIGN is maximum stack alignment we can assume.
802 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
803 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
804 stpcpy. */
807 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
808 unsigned int align, int endp)
810 struct move_by_pieces_d data;
811 machine_mode to_addr_mode;
812 machine_mode from_addr_mode = get_address_mode (from);
813 rtx to_addr, from_addr = XEXP (from, 0);
814 unsigned int max_size = MOVE_MAX_PIECES + 1;
815 enum insn_code icode;
817 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
819 data.offset = 0;
820 data.from_addr = from_addr;
821 if (to)
823 to_addr_mode = get_address_mode (to);
824 to_addr = XEXP (to, 0);
825 data.to = to;
826 data.autinc_to
827 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
828 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
829 data.reverse
830 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
832 else
834 to_addr_mode = VOIDmode;
835 to_addr = NULL_RTX;
836 data.to = NULL_RTX;
837 data.autinc_to = 1;
838 if (STACK_GROWS_DOWNWARD)
839 data.reverse = 1;
840 else
841 data.reverse = 0;
843 data.to_addr = to_addr;
844 data.from = from;
845 data.autinc_from
846 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
847 || GET_CODE (from_addr) == POST_INC
848 || GET_CODE (from_addr) == POST_DEC);
850 data.explicit_inc_from = 0;
851 data.explicit_inc_to = 0;
852 if (data.reverse) data.offset = len;
853 data.len = len;
855 /* If copying requires more than two move insns,
856 copy addresses to registers (to make displacements shorter)
857 and use post-increment if available. */
858 if (!(data.autinc_from && data.autinc_to)
859 && move_by_pieces_ninsns (len, align, max_size) > 2)
861 /* Find the mode of the largest move...
862 MODE might not be used depending on the definitions of the
863 USE_* macros below. */
864 machine_mode mode ATTRIBUTE_UNUSED
865 = widest_int_mode_for_size (max_size);
867 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
869 data.from_addr = copy_to_mode_reg (from_addr_mode,
870 plus_constant (from_addr_mode,
871 from_addr, len));
872 data.autinc_from = 1;
873 data.explicit_inc_from = -1;
875 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
877 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
878 data.autinc_from = 1;
879 data.explicit_inc_from = 1;
881 if (!data.autinc_from && CONSTANT_P (from_addr))
882 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
883 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
885 data.to_addr = copy_to_mode_reg (to_addr_mode,
886 plus_constant (to_addr_mode,
887 to_addr, len));
888 data.autinc_to = 1;
889 data.explicit_inc_to = -1;
891 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
893 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
894 data.autinc_to = 1;
895 data.explicit_inc_to = 1;
897 if (!data.autinc_to && CONSTANT_P (to_addr))
898 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
901 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
903 /* First move what we can in the largest integer mode, then go to
904 successively smaller modes. */
906 while (max_size > 1 && data.len > 0)
908 machine_mode mode = widest_int_mode_for_size (max_size);
910 if (mode == VOIDmode)
911 break;
913 icode = optab_handler (mov_optab, mode);
914 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
915 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
917 max_size = GET_MODE_SIZE (mode);
920 /* The code above should have handled everything. */
921 gcc_assert (!data.len);
923 if (endp)
925 rtx to1;
927 gcc_assert (!data.reverse);
928 if (data.autinc_to)
930 if (endp == 2)
932 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
933 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
934 else
935 data.to_addr = copy_to_mode_reg (to_addr_mode,
936 plus_constant (to_addr_mode,
937 data.to_addr,
938 -1));
940 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
941 data.offset);
943 else
945 if (endp == 2)
946 --data.offset;
947 to1 = adjust_address (data.to, QImode, data.offset);
949 return to1;
951 else
952 return data.to;
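/* Illustrative sketch, not part of the original file: a typical caller
   checks can_move_by_pieces before committing to the piecewise
   expansion.  The helper name and parameters are placeholders for
   illustration only.  */

static void
copy_small_block_example (rtx dst_mem, rtx src_mem,
			  unsigned HOST_WIDE_INT len)
{
  unsigned int align = MIN (MEM_ALIGN (dst_mem), MEM_ALIGN (src_mem));

  if (can_move_by_pieces (len, align))
    /* ENDP == 0: the address past the copied block is not needed.  */
    move_by_pieces (dst_mem, src_mem, len, align, 0);
}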
955 /* Return number of insns required to move L bytes by pieces.
956 ALIGN (in bits) is maximum alignment we can assume. */
958 unsigned HOST_WIDE_INT
959 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
960 unsigned int max_size)
962 unsigned HOST_WIDE_INT n_insns = 0;
964 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
966 while (max_size > 1 && l > 0)
968 machine_mode mode;
969 enum insn_code icode;
971 mode = widest_int_mode_for_size (max_size);
973 if (mode == VOIDmode)
974 break;
976 icode = optab_handler (mov_optab, mode);
977 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
978 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
980 max_size = GET_MODE_SIZE (mode);
983 gcc_assert (!l);
984 return n_insns;
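/* Worked example (illustrative, assuming a 32-bit target where
   MOVE_MAX_PIECES is 4 and ALIGN permits SImode accesses): for L == 10
   the loop above counts 10/4 = 2 SImode moves, then 2/2 = 1 HImode
   move, then 0 QImode moves, so move_by_pieces_ninsns returns 3.  */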
987 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
988 with move instructions for mode MODE. GENFUN is the gen_... function
989 to make a move insn for that mode. DATA has all the other info. */
991 static void
992 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
993 struct move_by_pieces_d *data)
995 unsigned int size = GET_MODE_SIZE (mode);
996 rtx to1 = NULL_RTX, from1;
998 while (data->len >= size)
1000 if (data->reverse)
1001 data->offset -= size;
1003 if (data->to)
1005 if (data->autinc_to)
1006 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1007 data->offset);
1008 else
1009 to1 = adjust_address (data->to, mode, data->offset);
1012 if (data->autinc_from)
1013 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1014 data->offset);
1015 else
1016 from1 = adjust_address (data->from, mode, data->offset);
1018 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1019 emit_insn (gen_add2_insn (data->to_addr,
1020 gen_int_mode (-(HOST_WIDE_INT) size,
1021 GET_MODE (data->to_addr))));
1022 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1023 emit_insn (gen_add2_insn (data->from_addr,
1024 gen_int_mode (-(HOST_WIDE_INT) size,
1025 GET_MODE (data->from_addr))));
1027 if (data->to)
1028 emit_insn ((*genfun) (to1, from1));
1029 else
1031 #ifdef PUSH_ROUNDING
1032 emit_single_push_insn (mode, from1, NULL);
1033 #else
1034 gcc_unreachable ();
1035 #endif
1038 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1039 emit_insn (gen_add2_insn (data->to_addr,
1040 gen_int_mode (size,
1041 GET_MODE (data->to_addr))));
1042 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1043 emit_insn (gen_add2_insn (data->from_addr,
1044 gen_int_mode (size,
1045 GET_MODE (data->from_addr))));
1047 if (! data->reverse)
1048 data->offset += size;
1050 data->len -= size;
1054 /* Emit code to move a block Y to a block X. This may be done with
1055 string-move instructions, with multiple scalar move instructions,
1056 or with a library call.
1058 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1059 SIZE is an rtx that says how long they are.
1060 ALIGN is the maximum alignment we can assume they have.
1061 METHOD describes what kind of copy this is, and what mechanisms may be used.
1062 MIN_SIZE is the minimal size of block to move.
1063 MAX_SIZE is the maximal size of block to move; if it cannot be represented
1064 in unsigned HOST_WIDE_INT, then it is a mask of all ones.
1066 Return the address of the new block, if memcpy is called and returns it,
1067 0 otherwise. */
1070 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1071 unsigned int expected_align, HOST_WIDE_INT expected_size,
1072 unsigned HOST_WIDE_INT min_size,
1073 unsigned HOST_WIDE_INT max_size,
1074 unsigned HOST_WIDE_INT probable_max_size)
1076 bool may_use_call;
1077 rtx retval = 0;
1078 unsigned int align;
1080 gcc_assert (size);
1081 if (CONST_INT_P (size)
1082 && INTVAL (size) == 0)
1083 return 0;
1085 switch (method)
1087 case BLOCK_OP_NORMAL:
1088 case BLOCK_OP_TAILCALL:
1089 may_use_call = true;
1090 break;
1092 case BLOCK_OP_CALL_PARM:
1093 may_use_call = block_move_libcall_safe_for_call_parm ();
1095 /* Make inhibit_defer_pop nonzero around the library call
1096 to force it to pop the arguments right away. */
1097 NO_DEFER_POP;
1098 break;
1100 case BLOCK_OP_NO_LIBCALL:
1101 may_use_call = false;
1102 break;
1104 default:
1105 gcc_unreachable ();
1108 gcc_assert (MEM_P (x) && MEM_P (y));
1109 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1110 gcc_assert (align >= BITS_PER_UNIT);
1112 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1113 block copy is more efficient for other large modes, e.g. DCmode. */
1114 x = adjust_address (x, BLKmode, 0);
1115 y = adjust_address (y, BLKmode, 0);
1117 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1118 can be incorrect is coming from __builtin_memcpy. */
1119 if (CONST_INT_P (size))
1121 x = shallow_copy_rtx (x);
1122 y = shallow_copy_rtx (y);
1123 set_mem_size (x, INTVAL (size));
1124 set_mem_size (y, INTVAL (size));
1127 if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
1128 move_by_pieces (x, y, INTVAL (size), align, 0);
1129 else if (emit_block_move_via_movmem (x, y, size, align,
1130 expected_align, expected_size,
1131 min_size, max_size, probable_max_size))
1133 else if (may_use_call
1134 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1135 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1137 /* Since x and y are passed to a libcall, mark the corresponding
1138 tree EXPR as addressable. */
1139 tree y_expr = MEM_EXPR (y);
1140 tree x_expr = MEM_EXPR (x);
1141 if (y_expr)
1142 mark_addressable (y_expr);
1143 if (x_expr)
1144 mark_addressable (x_expr);
1145 retval = emit_block_move_via_libcall (x, y, size,
1146 method == BLOCK_OP_TAILCALL);
1149 else
1150 emit_block_move_via_loop (x, y, size, align);
1152 if (method == BLOCK_OP_CALL_PARM)
1153 OK_DEFER_POP;
1155 return retval;
1159 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1161 unsigned HOST_WIDE_INT max, min = 0;
1162 if (GET_CODE (size) == CONST_INT)
1163 min = max = UINTVAL (size);
1164 else
1165 max = GET_MODE_MASK (GET_MODE (size));
1166 return emit_block_move_hints (x, y, size, method, 0, -1,
1167 min, max, max);
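/* Illustrative sketch, not part of the original file: copying NBYTES
   bytes between two BLKmode MEMs with emit_block_move.  The helper
   name, the byte-aligned MEMs and the choice of BLOCK_OP_NORMAL are
   assumptions made only for illustration.  */

static void
emit_blk_copy_example (rtx dst_addr, rtx src_addr, HOST_WIDE_INT nbytes)
{
  /* DST_ADDR and SRC_ADDR are assumed to be Pmode address rtxes.  */
  rtx dst = gen_rtx_MEM (BLKmode, force_reg (Pmode, dst_addr));
  rtx src = gen_rtx_MEM (BLKmode, force_reg (Pmode, src_addr));

  set_mem_align (dst, BITS_PER_UNIT);
  set_mem_align (src, BITS_PER_UNIT);
  emit_block_move (dst, src, GEN_INT (nbytes), BLOCK_OP_NORMAL);
}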
1170 /* A subroutine of emit_block_move. Returns true if calling the
1171 block move libcall will not clobber any parameters which may have
1172 already been placed on the stack. */
1174 static bool
1175 block_move_libcall_safe_for_call_parm (void)
1177 #if defined (REG_PARM_STACK_SPACE)
1178 tree fn;
1179 #endif
1181 /* If arguments are pushed on the stack, then they're safe. */
1182 if (PUSH_ARGS)
1183 return true;
1185 /* If registers go on the stack anyway, any argument is sure to clobber
1186 an outgoing argument. */
1187 #if defined (REG_PARM_STACK_SPACE)
1188 fn = emit_block_move_libcall_fn (false);
1189 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1190 depend on its argument. */
1191 (void) fn;
1192 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1193 && REG_PARM_STACK_SPACE (fn) != 0)
1194 return false;
1195 #endif
1197 /* If any argument goes in memory, then it might clobber an outgoing
1198 argument. */
1200 CUMULATIVE_ARGS args_so_far_v;
1201 cumulative_args_t args_so_far;
1202 tree fn, arg;
1204 fn = emit_block_move_libcall_fn (false);
1205 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1206 args_so_far = pack_cumulative_args (&args_so_far_v);
1208 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1209 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1211 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1212 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1213 NULL_TREE, true);
1214 if (!tmp || !REG_P (tmp))
1215 return false;
1216 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1217 return false;
1218 targetm.calls.function_arg_advance (args_so_far, mode,
1219 NULL_TREE, true);
1222 return true;
1225 /* A subroutine of emit_block_move. Expand a movmem pattern;
1226 return true if successful. */
1228 static bool
1229 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1230 unsigned int expected_align, HOST_WIDE_INT expected_size,
1231 unsigned HOST_WIDE_INT min_size,
1232 unsigned HOST_WIDE_INT max_size,
1233 unsigned HOST_WIDE_INT probable_max_size)
1235 int save_volatile_ok = volatile_ok;
1236 machine_mode mode;
1238 if (expected_align < align)
1239 expected_align = align;
1240 if (expected_size != -1)
1242 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1243 expected_size = probable_max_size;
1244 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1245 expected_size = min_size;
1248 /* Since this is a move insn, we don't care about volatility. */
1249 volatile_ok = 1;
1251 /* Try the most limited insn first, because there's no point
1252 including more than one in the machine description unless
1253 the more limited one has some advantage. */
1255 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1256 mode = GET_MODE_WIDER_MODE (mode))
1258 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1260 if (code != CODE_FOR_nothing
1261 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1262 here because if SIZE is less than the mode mask, as it is
1263 returned by the macro, it will definitely be less than the
1264 actual mode mask. Since SIZE is within the Pmode address
1265 space, we limit MODE to Pmode. */
1266 && ((CONST_INT_P (size)
1267 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1268 <= (GET_MODE_MASK (mode) >> 1)))
1269 || max_size <= (GET_MODE_MASK (mode) >> 1)
1270 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1272 struct expand_operand ops[9];
1273 unsigned int nops;
1275 /* ??? When called via emit_block_move_for_call, it'd be
1276 nice if there were some way to inform the backend, so
1277 that it doesn't fail the expansion because it thinks
1278 emitting the libcall would be more efficient. */
1279 nops = insn_data[(int) code].n_generator_args;
1280 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1282 create_fixed_operand (&ops[0], x);
1283 create_fixed_operand (&ops[1], y);
1284 /* The check above guarantees that this size conversion is valid. */
1285 create_convert_operand_to (&ops[2], size, mode, true);
1286 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1287 if (nops >= 6)
1289 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1290 create_integer_operand (&ops[5], expected_size);
1292 if (nops >= 8)
1294 create_integer_operand (&ops[6], min_size);
1295 /* If we cannot represent the maximal size,
1296 make the parameter NULL. */
1297 if ((HOST_WIDE_INT) max_size != -1)
1298 create_integer_operand (&ops[7], max_size);
1299 else
1300 create_fixed_operand (&ops[7], NULL);
1302 if (nops == 9)
1304 /* If we cannot represent the maximal size,
1305 make the parameter NULL. */
1306 if ((HOST_WIDE_INT) probable_max_size != -1)
1307 create_integer_operand (&ops[8], probable_max_size);
1308 else
1309 create_fixed_operand (&ops[8], NULL);
1311 if (maybe_expand_insn (code, nops, ops))
1313 volatile_ok = save_volatile_ok;
1314 return true;
1319 volatile_ok = save_volatile_ok;
1320 return false;
1323 /* A subroutine of emit_block_move. Expand a call to memcpy.
1324 Return the return value from memcpy, 0 otherwise. */
1327 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1329 rtx dst_addr, src_addr;
1330 tree call_expr, fn, src_tree, dst_tree, size_tree;
1331 machine_mode size_mode;
1332 rtx retval;
1334 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1335 pseudos. We can then place those new pseudos into a VAR_DECL and
1336 use them later. */
1338 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1339 src_addr = copy_addr_to_reg (XEXP (src, 0));
1341 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1342 src_addr = convert_memory_address (ptr_mode, src_addr);
1344 dst_tree = make_tree (ptr_type_node, dst_addr);
1345 src_tree = make_tree (ptr_type_node, src_addr);
1347 size_mode = TYPE_MODE (sizetype);
1349 size = convert_to_mode (size_mode, size, 1);
1350 size = copy_to_mode_reg (size_mode, size);
1352 /* It is incorrect to use the libcall calling conventions to call
1353 memcpy in this context. This could be a user call to memcpy and
1354 the user may wish to examine the return value from memcpy. For
1355 targets where libcalls and normal calls have different conventions
1356 for returning pointers, we could end up generating incorrect code. */
1358 size_tree = make_tree (sizetype, size);
1360 fn = emit_block_move_libcall_fn (true);
1361 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1362 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1364 retval = expand_normal (call_expr);
1366 return retval;
1369 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1370 for the function we use for block copies. */
1372 static GTY(()) tree block_move_fn;
1374 void
1375 init_block_move_fn (const char *asmspec)
1377 if (!block_move_fn)
1379 tree args, fn, attrs, attr_args;
1381 fn = get_identifier ("memcpy");
1382 args = build_function_type_list (ptr_type_node, ptr_type_node,
1383 const_ptr_type_node, sizetype,
1384 NULL_TREE);
1386 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1387 DECL_EXTERNAL (fn) = 1;
1388 TREE_PUBLIC (fn) = 1;
1389 DECL_ARTIFICIAL (fn) = 1;
1390 TREE_NOTHROW (fn) = 1;
1391 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1392 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1394 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1395 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1397 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1399 block_move_fn = fn;
1402 if (asmspec)
1403 set_user_assembler_name (block_move_fn, asmspec);
1406 static tree
1407 emit_block_move_libcall_fn (int for_call)
1409 static bool emitted_extern;
1411 if (!block_move_fn)
1412 init_block_move_fn (NULL);
1414 if (for_call && !emitted_extern)
1416 emitted_extern = true;
1417 make_decl_rtl (block_move_fn);
1420 return block_move_fn;
1423 /* A subroutine of emit_block_move. Copy the data via an explicit
1424 loop. This is used only when libcalls are forbidden. */
1425 /* ??? It'd be nice to copy in hunks larger than QImode. */
1427 static void
1428 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1429 unsigned int align ATTRIBUTE_UNUSED)
1431 rtx_code_label *cmp_label, *top_label;
1432 rtx iter, x_addr, y_addr, tmp;
1433 machine_mode x_addr_mode = get_address_mode (x);
1434 machine_mode y_addr_mode = get_address_mode (y);
1435 machine_mode iter_mode;
1437 iter_mode = GET_MODE (size);
1438 if (iter_mode == VOIDmode)
1439 iter_mode = word_mode;
1441 top_label = gen_label_rtx ();
1442 cmp_label = gen_label_rtx ();
1443 iter = gen_reg_rtx (iter_mode);
1445 emit_move_insn (iter, const0_rtx);
1447 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1448 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1449 do_pending_stack_adjust ();
1451 emit_jump (cmp_label);
1452 emit_label (top_label);
1454 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1455 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1457 if (x_addr_mode != y_addr_mode)
1458 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1459 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1461 x = change_address (x, QImode, x_addr);
1462 y = change_address (y, QImode, y_addr);
1464 emit_move_insn (x, y);
1466 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1467 true, OPTAB_LIB_WIDEN);
1468 if (tmp != iter)
1469 emit_move_insn (iter, tmp);
1471 emit_label (cmp_label);
1473 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1474 true, top_label, REG_BR_PROB_BASE * 90 / 100);
1477 /* Copy all or part of a value X into registers starting at REGNO.
1478 The number of registers to be filled is NREGS. */
1480 void
1481 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
1483 if (nregs == 0)
1484 return;
1486 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1487 x = validize_mem (force_const_mem (mode, x));
1489 /* See if the machine can do this with a load multiple insn. */
1490 if (targetm.have_load_multiple ())
1492 rtx_insn *last = get_last_insn ();
1493 rtx first = gen_rtx_REG (word_mode, regno);
1494 if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
1495 GEN_INT (nregs)))
1497 emit_insn (pat);
1498 return;
1500 else
1501 delete_insns_since (last);
1504 for (int i = 0; i < nregs; i++)
1505 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1506 operand_subword_force (x, i, mode));
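/* Illustrative sketch, not part of the original file: loading a
   three-word BLKmode argument into consecutive hard registers starting
   at FIRST_REGNO.  The helper name and parameters are placeholders for
   illustration only.  */

static void
load_arg_regs_example (int first_regno, rtx arg_mem)
{
  /* ARG_MEM is assumed to be a BLKmode MEM at least three words long;
     word I is moved into hard register FIRST_REGNO + I.  */
  move_block_to_reg (first_regno, arg_mem, 3, BLKmode);
}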
1509 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1510 The number of registers to be filled is NREGS. */
1512 void
1513 move_block_from_reg (int regno, rtx x, int nregs)
1515 if (nregs == 0)
1516 return;
1518 /* See if the machine can do this with a store multiple insn. */
1519 if (targetm.have_store_multiple ())
1521 rtx_insn *last = get_last_insn ();
1522 rtx first = gen_rtx_REG (word_mode, regno);
1523 if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
1524 GEN_INT (nregs)))
1526 emit_insn (pat);
1527 return;
1529 else
1530 delete_insns_since (last);
1533 for (int i = 0; i < nregs; i++)
1535 rtx tem = operand_subword (x, i, 1, BLKmode);
1537 gcc_assert (tem);
1539 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1543 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1544 ORIG, where ORIG is a non-consecutive group of registers represented by
1545 a PARALLEL. The clone is identical to the original except in that the
1546 original set of registers is replaced by a new set of pseudo registers.
1547 The new set has the same modes as the original set. */
1550 gen_group_rtx (rtx orig)
1552 int i, length;
1553 rtx *tmps;
1555 gcc_assert (GET_CODE (orig) == PARALLEL);
1557 length = XVECLEN (orig, 0);
1558 tmps = XALLOCAVEC (rtx, length);
1560 /* Skip a NULL entry in first slot. */
1561 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1563 if (i)
1564 tmps[0] = 0;
1566 for (; i < length; i++)
1568 machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1569 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1571 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1574 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1577 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1578 except that values are placed in TMPS[i], and must later be moved
1579 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1581 static void
1582 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1584 rtx src;
1585 int start, i;
1586 machine_mode m = GET_MODE (orig_src);
1588 gcc_assert (GET_CODE (dst) == PARALLEL);
1590 if (m != VOIDmode
1591 && !SCALAR_INT_MODE_P (m)
1592 && !MEM_P (orig_src)
1593 && GET_CODE (orig_src) != CONCAT)
1595 machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1596 if (imode == BLKmode)
1597 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1598 else
1599 src = gen_reg_rtx (imode);
1600 if (imode != BLKmode)
1601 src = gen_lowpart (GET_MODE (orig_src), src);
1602 emit_move_insn (src, orig_src);
1603 /* ...and back again. */
1604 if (imode != BLKmode)
1605 src = gen_lowpart (imode, src);
1606 emit_group_load_1 (tmps, dst, src, type, ssize);
1607 return;
1610 /* Check for a NULL entry, used to indicate that the parameter goes
1611 both on the stack and in registers. */
1612 if (XEXP (XVECEXP (dst, 0, 0), 0))
1613 start = 0;
1614 else
1615 start = 1;
1617 /* Process the pieces. */
1618 for (i = start; i < XVECLEN (dst, 0); i++)
1620 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1621 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1622 unsigned int bytelen = GET_MODE_SIZE (mode);
1623 int shift = 0;
1625 /* Handle trailing fragments that run over the size of the struct. */
1626 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1628 /* Arrange to shift the fragment to where it belongs.
1629 extract_bit_field loads to the lsb of the reg. */
1630 if (
1631 #ifdef BLOCK_REG_PADDING
1632 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1633 == (BYTES_BIG_ENDIAN ? upward : downward)
1634 #else
1635 BYTES_BIG_ENDIAN
1636 #endif
1638 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1639 bytelen = ssize - bytepos;
1640 gcc_assert (bytelen > 0);
1643 /* If we won't be loading directly from memory, protect the real source
1644 from strange tricks we might play; but make sure that the source can
1645 be loaded directly into the destination. */
1646 src = orig_src;
1647 if (!MEM_P (orig_src)
1648 && (!CONSTANT_P (orig_src)
1649 || (GET_MODE (orig_src) != mode
1650 && GET_MODE (orig_src) != VOIDmode)))
1652 if (GET_MODE (orig_src) == VOIDmode)
1653 src = gen_reg_rtx (mode);
1654 else
1655 src = gen_reg_rtx (GET_MODE (orig_src));
1657 emit_move_insn (src, orig_src);
1660 /* Optimize the access just a bit. */
1661 if (MEM_P (src)
1662 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1663 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1664 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1665 && bytelen == GET_MODE_SIZE (mode))
1667 tmps[i] = gen_reg_rtx (mode);
1668 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1670 else if (COMPLEX_MODE_P (mode)
1671 && GET_MODE (src) == mode
1672 && bytelen == GET_MODE_SIZE (mode))
1673 /* Let emit_move_complex do the bulk of the work. */
1674 tmps[i] = src;
1675 else if (GET_CODE (src) == CONCAT)
1677 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1678 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1680 if ((bytepos == 0 && bytelen == slen0)
1681 || (bytepos != 0 && bytepos + bytelen <= slen))
1683 /* The following assumes that the concatenated objects all
1684 have the same size. In this case, a simple calculation
1685 can be used to determine the object and the bit field
1686 to be extracted. */
1687 tmps[i] = XEXP (src, bytepos / slen0);
1688 if (! CONSTANT_P (tmps[i])
1689 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1690 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1691 (bytepos % slen0) * BITS_PER_UNIT,
1692 1, NULL_RTX, mode, mode);
1694 else
1696 rtx mem;
1698 gcc_assert (!bytepos);
1699 mem = assign_stack_temp (GET_MODE (src), slen);
1700 emit_move_insn (mem, src);
1701 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1702 0, 1, NULL_RTX, mode, mode);
1705 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1706 SIMD register, which is currently broken. While we get GCC
1707 to emit proper RTL for these cases, let's dump to memory. */
1708 else if (VECTOR_MODE_P (GET_MODE (dst))
1709 && REG_P (src))
1711 int slen = GET_MODE_SIZE (GET_MODE (src));
1712 rtx mem;
1714 mem = assign_stack_temp (GET_MODE (src), slen);
1715 emit_move_insn (mem, src);
1716 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1718 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1719 && XVECLEN (dst, 0) > 1)
1720 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1721 else if (CONSTANT_P (src))
1723 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1725 if (len == ssize)
1726 tmps[i] = src;
1727 else
1729 rtx first, second;
1731 /* TODO: const_wide_int can have sizes other than this... */
1732 gcc_assert (2 * len == ssize);
1733 split_double (src, &first, &second);
1734 if (i)
1735 tmps[i] = second;
1736 else
1737 tmps[i] = first;
1740 else if (REG_P (src) && GET_MODE (src) == mode)
1741 tmps[i] = src;
1742 else
1743 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1744 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1745 mode, mode);
1747 if (shift)
1748 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1749 shift, tmps[i], 0);
1753 /* Emit code to move a block SRC of type TYPE to a block DST,
1754 where DST is non-consecutive registers represented by a PARALLEL.
1755 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1756 if not known. */
1758 void
1759 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1761 rtx *tmps;
1762 int i;
1764 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1765 emit_group_load_1 (tmps, dst, src, type, ssize);
1767 /* Copy the extracted pieces into the proper (probable) hard regs. */
1768 for (i = 0; i < XVECLEN (dst, 0); i++)
1770 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1771 if (d == NULL)
1772 continue;
1773 emit_move_insn (d, tmps[i]);
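/* Illustrative sketch, not part of the original file: a two-register
   return-value descriptor of the kind a backend builds, fed to
   emit_group_load to split a 16-byte BLKmode value.  The helper name
   and the DImode/register-number choices are placeholders for
   illustration only.  */

static void
group_load_example (rtx src, tree type, int regno0, int regno1)
{
  rtx par
    = gen_rtx_PARALLEL (BLKmode,
			gen_rtvec (2,
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (DImode,
								   regno0),
						      GEN_INT (0)),
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (DImode,
								   regno1),
						      GEN_INT (8))));

  /* Each EXPR_LIST pairs a destination register with its byte offset
     within SRC.  */
  emit_group_load (par, src, type, 16);
}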
1777 /* Similar, but load SRC into new pseudos in a format that looks like
1778 PARALLEL. This can later be fed to emit_group_move to get things
1779 in the right place. */
1782 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1784 rtvec vec;
1785 int i;
1787 vec = rtvec_alloc (XVECLEN (parallel, 0));
1788 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1790 /* Convert the vector to look just like the original PARALLEL, except
1791 with the computed values. */
1792 for (i = 0; i < XVECLEN (parallel, 0); i++)
1794 rtx e = XVECEXP (parallel, 0, i);
1795 rtx d = XEXP (e, 0);
1797 if (d)
1799 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1800 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1802 RTVEC_ELT (vec, i) = e;
1805 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1808 /* Emit code to move a block SRC to block DST, where SRC and DST are
1809 non-consecutive groups of registers, each represented by a PARALLEL. */
1811 void
1812 emit_group_move (rtx dst, rtx src)
1814 int i;
1816 gcc_assert (GET_CODE (src) == PARALLEL
1817 && GET_CODE (dst) == PARALLEL
1818 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1820 /* Skip first entry if NULL. */
1821 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1822 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1823 XEXP (XVECEXP (src, 0, i), 0));
1826 /* Move a group of registers represented by a PARALLEL into pseudos. */
1829 emit_group_move_into_temps (rtx src)
1831 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1832 int i;
1834 for (i = 0; i < XVECLEN (src, 0); i++)
1836 rtx e = XVECEXP (src, 0, i);
1837 rtx d = XEXP (e, 0);
1839 if (d)
1840 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1841 RTVEC_ELT (vec, i) = e;
1844 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1847 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1848 where SRC is non-consecutive registers represented by a PARALLEL.
1849 SSIZE represents the total size of block ORIG_DST, or -1 if not
1850 known. */
1852 void
1853 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1855 rtx *tmps, dst;
1856 int start, finish, i;
1857 machine_mode m = GET_MODE (orig_dst);
1859 gcc_assert (GET_CODE (src) == PARALLEL);
1861 if (!SCALAR_INT_MODE_P (m)
1862 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1864 machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1865 if (imode == BLKmode)
1866 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1867 else
1868 dst = gen_reg_rtx (imode);
1869 emit_group_store (dst, src, type, ssize);
1870 if (imode != BLKmode)
1871 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1872 emit_move_insn (orig_dst, dst);
1873 return;
1876 /* Check for a NULL entry, used to indicate that the parameter goes
1877 both on the stack and in registers. */
1878 if (XEXP (XVECEXP (src, 0, 0), 0))
1879 start = 0;
1880 else
1881 start = 1;
1882 finish = XVECLEN (src, 0);
1884 tmps = XALLOCAVEC (rtx, finish);
1886 /* Copy the (probable) hard regs into pseudos. */
1887 for (i = start; i < finish; i++)
1889 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1890 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1892 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1893 emit_move_insn (tmps[i], reg);
1895 else
1896 tmps[i] = reg;
1899 /* If we won't be storing directly into memory, protect the real destination
1900 from strange tricks we might play. */
1901 dst = orig_dst;
1902 if (GET_CODE (dst) == PARALLEL)
1904 rtx temp;
1906 /* We can get a PARALLEL dst if there is a conditional expression in
1907 a return statement. In that case, the dst and src are the same,
1908 so no action is necessary. */
1909 if (rtx_equal_p (dst, src))
1910 return;
1912 /* It is unclear if we can ever reach here, but we may as well handle
1913 it. Allocate a temporary, and split this into a store/load to/from
1914 the temporary. */
1915 temp = assign_stack_temp (GET_MODE (dst), ssize);
1916 emit_group_store (temp, src, type, ssize);
1917 emit_group_load (dst, temp, type, ssize);
1918 return;
1920 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1922 machine_mode outer = GET_MODE (dst);
1923 machine_mode inner;
1924 HOST_WIDE_INT bytepos;
1925 bool done = false;
1926 rtx temp;
1928 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1929 dst = gen_reg_rtx (outer);
1931 /* Make life a bit easier for combine. */
1932 /* If the first element of the vector is the low part
1933 of the destination mode, use a paradoxical subreg to
1934 initialize the destination. */
1935 if (start < finish)
1937 inner = GET_MODE (tmps[start]);
1938 bytepos = subreg_lowpart_offset (inner, outer);
1939 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1941 temp = simplify_gen_subreg (outer, tmps[start],
1942 inner, 0);
1943 if (temp)
1945 emit_move_insn (dst, temp);
1946 done = true;
1947 start++;
1952 /* If the first element wasn't the low part, try the last. */
1953 if (!done
1954 && start < finish - 1)
1956 inner = GET_MODE (tmps[finish - 1]);
1957 bytepos = subreg_lowpart_offset (inner, outer);
1958 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1960 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1961 inner, 0);
1962 if (temp)
1964 emit_move_insn (dst, temp);
1965 done = true;
1966 finish--;
1971 /* Otherwise, simply initialize the result to zero. */
1972 if (!done)
1973 emit_move_insn (dst, CONST0_RTX (outer));
1976 /* Process the pieces. */
1977 for (i = start; i < finish; i++)
1979 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1980 machine_mode mode = GET_MODE (tmps[i]);
1981 unsigned int bytelen = GET_MODE_SIZE (mode);
1982 unsigned int adj_bytelen;
1983 rtx dest = dst;
1985 /* Handle trailing fragments that run over the size of the struct. */
1986 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1987 adj_bytelen = ssize - bytepos;
1988 else
1989 adj_bytelen = bytelen;
1991 if (GET_CODE (dst) == CONCAT)
1993 if (bytepos + adj_bytelen
1994 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1995 dest = XEXP (dst, 0);
1996 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1998 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1999 dest = XEXP (dst, 1);
2001 else
2003 machine_mode dest_mode = GET_MODE (dest);
2004 machine_mode tmp_mode = GET_MODE (tmps[i]);
2006 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2008 if (GET_MODE_ALIGNMENT (dest_mode)
2009 >= GET_MODE_ALIGNMENT (tmp_mode))
2011 dest = assign_stack_temp (dest_mode,
2012 GET_MODE_SIZE (dest_mode));
2013 emit_move_insn (adjust_address (dest,
2014 tmp_mode,
2015 bytepos),
2016 tmps[i]);
2017 dst = dest;
2019 else
2021 dest = assign_stack_temp (tmp_mode,
2022 GET_MODE_SIZE (tmp_mode));
2023 emit_move_insn (dest, tmps[i]);
2024 dst = adjust_address (dest, dest_mode, bytepos);
2026 break;
2030 /* Handle trailing fragments that run over the size of the struct. */
2031 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2033 /* store_bit_field always takes its value from the lsb.
2034 Move the fragment to the lsb if it's not already there. */
2035 if (
2036 #ifdef BLOCK_REG_PADDING
2037 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2038 == (BYTES_BIG_ENDIAN ? upward : downward)
2039 #else
2040 BYTES_BIG_ENDIAN
2041 #endif
2044 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2045 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2046 shift, tmps[i], 0);
2049 /* Make sure not to write past the end of the struct. */
2050 store_bit_field (dest,
2051 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2052 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2053 VOIDmode, tmps[i]);
2056 /* Optimize the access just a bit. */
2057 else if (MEM_P (dest)
2058 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2059 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2060 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2061 && bytelen == GET_MODE_SIZE (mode))
2062 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2064 else
2065 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2066 0, 0, mode, tmps[i]);
2069 /* Copy from the pseudo into the (probable) hard reg. */
2070 if (orig_dst != dst)
2071 emit_move_insn (orig_dst, dst);
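/* Illustrative sketch only, not from the original source: spilling a value
   described by a PARALLEL -- e.g. a multi-register return value -- into a
   fresh BLKmode stack temporary via emit_group_store.  */

static rtx
sketch_spill_group (rtx retval_parallel, tree type)
{
  HOST_WIDE_INT size = int_size_in_bytes (type);
  rtx slot = assign_stack_temp (BLKmode, size);

  emit_group_store (slot, retval_parallel, type, size);
  return slot;
}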
2074 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2075 of the value stored in X. */
2078 maybe_emit_group_store (rtx x, tree type)
2080 machine_mode mode = TYPE_MODE (type);
2081 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2082 if (GET_CODE (x) == PARALLEL)
2084 rtx result = gen_reg_rtx (mode);
2085 emit_group_store (result, x, type, int_size_in_bytes (type));
2086 return result;
2088 return x;
2091 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2093 This is used on targets that return BLKmode values in registers. */
2095 void
2096 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2098 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2099 rtx src = NULL, dst = NULL;
2100 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2101 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2102 machine_mode mode = GET_MODE (srcreg);
2103 machine_mode tmode = GET_MODE (target);
2104 machine_mode copy_mode;
2106 /* BLKmode registers created in the back-end shouldn't have survived. */
2107 gcc_assert (mode != BLKmode);
2109 /* If the structure doesn't take up a whole number of words, see whether
2110 SRCREG is padded on the left or on the right. If it's on the left,
2111 set PADDING_CORRECTION to the number of bits to skip.
2113 In most ABIs, the structure will be returned at the least significant end of
2114 the register, which translates to right padding on little-endian
2115 targets and left padding on big-endian targets. The opposite
2116 holds if the structure is returned at the most significant
2117 end of the register. */
2118 if (bytes % UNITS_PER_WORD != 0
2119 && (targetm.calls.return_in_msb (type)
2120 ? !BYTES_BIG_ENDIAN
2121 : BYTES_BIG_ENDIAN))
2122 padding_correction
2123 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2125 /* We can use a single move if we have an exact mode for the size. */
2126 else if (MEM_P (target)
2127 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2128 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2129 && bytes == GET_MODE_SIZE (mode))
2131 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2132 return;
2135 /* And if we additionally have the same mode for a register. */
2136 else if (REG_P (target)
2137 && GET_MODE (target) == mode
2138 && bytes == GET_MODE_SIZE (mode))
2140 emit_move_insn (target, srcreg);
2141 return;
2144 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2145 into a new pseudo which is a full word. */
2146 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2148 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2149 mode = word_mode;
2152 /* Copy the structure BITSIZE bits at a time. If the target lives in
2153 memory, take care of not reading/writing past its end by selecting
2154 a copy mode suited to BITSIZE. This should always be possible given
2155 how it is computed.
2157 If the target lives in a register, make sure not to select a copy mode
2158 larger than the mode of the register.
2160 We could probably emit more efficient code for machines which do not use
2161 strict alignment, but it doesn't seem worth the effort at the current
2162 time. */
2164 copy_mode = word_mode;
2165 if (MEM_P (target))
2167 machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2168 if (mem_mode != BLKmode)
2169 copy_mode = mem_mode;
2171 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2172 copy_mode = tmode;
2174 for (bitpos = 0, xbitpos = padding_correction;
2175 bitpos < bytes * BITS_PER_UNIT;
2176 bitpos += bitsize, xbitpos += bitsize)
2178 /* We need a new source operand each time xbitpos is on a
2179 word boundary and when xbitpos == padding_correction
2180 (the first time through). */
2181 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2182 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2184 /* We need a new destination operand each time bitpos is on
2185 a word boundary. */
2186 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2187 dst = target;
2188 else if (bitpos % BITS_PER_WORD == 0)
2189 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2191 /* Use xbitpos for the source extraction (right justified) and
2192 bitpos for the destination store (left justified). */
2193 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2194 extract_bit_field (src, bitsize,
2195 xbitpos % BITS_PER_WORD, 1,
2196 NULL_RTX, copy_mode, copy_mode));
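/* Worked example, not from the original source, of the padding computation
   above, assuming UNITS_PER_WORD == 8, BITS_PER_WORD == 64, BYTES_BIG_ENDIAN,
   and a target whose return_in_msb hook returns false:

     bytes                   = 6        (a 6-byte structure)
     bytes % UNITS_PER_WORD  = 6        (nonzero, so a correction applies)
     padding_correction      = 64 - 6 * 8 = 16

   so the copy loop skips 16 bits of left padding in SRCREG; the structure's
   48 data bits sit at the least significant end of the word.  */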
2200 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2201 register if it contains any data, otherwise return null.
2203 This is used on targets that return BLKmode values in registers. */
2206 copy_blkmode_to_reg (machine_mode mode, tree src)
2208 int i, n_regs;
2209 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2210 unsigned int bitsize;
2211 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2212 machine_mode dst_mode;
2214 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2216 x = expand_normal (src);
2218 bytes = int_size_in_bytes (TREE_TYPE (src));
2219 if (bytes == 0)
2220 return NULL_RTX;
2222 /* If the structure doesn't take up a whole number of words, see
2223 whether the register value should be padded on the left or on
2224 the right. Set PADDING_CORRECTION to the number of padding
2225 bits needed on the left side.
2227 In most ABIs, the structure will be returned at the least significant end of
2228 the register, which translates to right padding on little-endian
2229 targets and left padding on big-endian targets. The opposite
2230 holds if the structure is returned at the most significant
2231 end of the register. */
2232 if (bytes % UNITS_PER_WORD != 0
2233 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2234 ? !BYTES_BIG_ENDIAN
2235 : BYTES_BIG_ENDIAN))
2236 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2237 * BITS_PER_UNIT));
2239 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2240 dst_words = XALLOCAVEC (rtx, n_regs);
2241 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2243 /* Copy the structure BITSIZE bits at a time. */
2244 for (bitpos = 0, xbitpos = padding_correction;
2245 bitpos < bytes * BITS_PER_UNIT;
2246 bitpos += bitsize, xbitpos += bitsize)
2248 /* We need a new destination pseudo each time xbitpos is
2249 on a word boundary and when xbitpos == padding_correction
2250 (the first time through). */
2251 if (xbitpos % BITS_PER_WORD == 0
2252 || xbitpos == padding_correction)
2254 /* Generate an appropriate register. */
2255 dst_word = gen_reg_rtx (word_mode);
2256 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2258 /* Clear the destination before we move anything into it. */
2259 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2262 /* We need a new source operand each time bitpos is on a word
2263 boundary. */
2264 if (bitpos % BITS_PER_WORD == 0)
2265 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2267 /* Use bitpos for the source extraction (left justified) and
2268 xbitpos for the destination store (right justified). */
2269 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2270 0, 0, word_mode,
2271 extract_bit_field (src_word, bitsize,
2272 bitpos % BITS_PER_WORD, 1,
2273 NULL_RTX, word_mode, word_mode));
2276 if (mode == BLKmode)
2278 /* Find the smallest integer mode large enough to hold the
2279 entire structure. */
2280 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2281 mode != VOIDmode;
2282 mode = GET_MODE_WIDER_MODE (mode))
2283 /* Have we found a large enough mode? */
2284 if (GET_MODE_SIZE (mode) >= bytes)
2285 break;
2287 /* A suitable mode should have been found. */
2288 gcc_assert (mode != VOIDmode);
2291 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2292 dst_mode = word_mode;
2293 else
2294 dst_mode = mode;
2295 dst = gen_reg_rtx (dst_mode);
2297 for (i = 0; i < n_regs; i++)
2298 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2300 if (mode != dst_mode)
2301 dst = gen_lowpart (mode, dst);
2303 return dst;
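/* Illustrative sketch only, not from the original source: roughly how a
   caller expanding "return expr;" for a BLKmode EXPR could use
   copy_blkmode_to_reg.  RESULT_RTX stands for the function's return register,
   obtained elsewhere; error handling and ABI details are elided.  */

static void
sketch_return_blkmode (tree expr, rtx result_rtx)
{
  rtx val = copy_blkmode_to_reg (GET_MODE (result_rtx), expr);
  if (val != NULL_RTX)
    emit_move_insn (result_rtx, val);
}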
2306 /* Add a USE expression for REG to the (possibly empty) list pointed
2307 to by CALL_FUSAGE. REG must denote a hard register. */
2309 void
2310 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2312 gcc_assert (REG_P (reg));
2314 if (!HARD_REGISTER_P (reg))
2315 return;
2317 *call_fusage
2318 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2321 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2322 to by CALL_FUSAGE. REG must denote a hard register. */
2324 void
2325 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2327 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2329 *call_fusage
2330 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2333 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2334 starting at REGNO. All of these registers must be hard registers. */
2336 void
2337 use_regs (rtx *call_fusage, int regno, int nregs)
2339 int i;
2341 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2343 for (i = 0; i < nregs; i++)
2344 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2347 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2348 PARALLEL REGS. This is for calls that pass values in multiple
2349 non-contiguous locations. The Irix 6 ABI has examples of this. */
2351 void
2352 use_group_regs (rtx *call_fusage, rtx regs)
2354 int i;
2356 for (i = 0; i < XVECLEN (regs, 0); i++)
2358 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2360 /* A NULL entry means the parameter goes both on the stack and in
2361 registers. This can also be a MEM for targets that pass values
2362 partially on the stack and partially in registers. */
2363 if (reg != 0 && REG_P (reg))
2364 use_reg (call_fusage, reg);
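/* Illustrative sketch only, not from the original source: building the
   CALL_INSN_FUNCTION_USAGE list for a call with the helpers above.  The
   register numbers and counts are placeholders for whatever the target's
   calling convention dictates.  */

static rtx
sketch_build_call_fusage (void)
{
  rtx call_fusage = NULL_RTX;

  /* The call reads two consecutive hard registers starting at 0...  */
  use_regs (&call_fusage, 0, 2);
  /* ...and one more register passed on its own.  */
  use_reg (&call_fusage, gen_rtx_REG (Pmode, 4));

  return call_fusage;
}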
2368 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2369 assignment and the code of the expression on the RHS is CODE. Return
2370 NULL otherwise. */
2372 static gimple
2373 get_def_for_expr (tree name, enum tree_code code)
2375 gimple def_stmt;
2377 if (TREE_CODE (name) != SSA_NAME)
2378 return NULL;
2380 def_stmt = get_gimple_for_ssa_name (name);
2381 if (!def_stmt
2382 || gimple_assign_rhs_code (def_stmt) != code)
2383 return NULL;
2385 return def_stmt;
2388 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2389 assignment and the class of the expression on the RHS is CLASS. Return
2390 NULL otherwise. */
2392 static gimple
2393 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2395 gimple def_stmt;
2397 if (TREE_CODE (name) != SSA_NAME)
2398 return NULL;
2400 def_stmt = get_gimple_for_ssa_name (name);
2401 if (!def_stmt
2402 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2403 return NULL;
2405 return def_stmt;
2409 /* Determine whether the LEN bytes generated by CONSTFUN can be
2410 stored to memory using several move instructions. CONSTFUNDATA is
2411 a pointer which will be passed as argument in every CONSTFUN call.
2412 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2413 a memset operation and false if it's a copy of a constant string.
2414 Return nonzero if a call to store_by_pieces should succeed. */
2417 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2418 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2419 void *constfundata, unsigned int align, bool memsetp)
2421 unsigned HOST_WIDE_INT l;
2422 unsigned int max_size;
2423 HOST_WIDE_INT offset = 0;
2424 machine_mode mode;
2425 enum insn_code icode;
2426 int reverse;
2427 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2428 rtx cst ATTRIBUTE_UNUSED;
2430 if (len == 0)
2431 return 1;
2433 if (!targetm.use_by_pieces_infrastructure_p (len, align,
2434 memsetp
2435 ? SET_BY_PIECES
2436 : STORE_BY_PIECES,
2437 optimize_insn_for_speed_p ()))
2438 return 0;
2440 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2442 /* We would first store what we can in the largest integer mode, then go to
2443 successively smaller modes. */
2445 for (reverse = 0;
2446 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2447 reverse++)
2449 l = len;
2450 max_size = STORE_MAX_PIECES + 1;
2451 while (max_size > 1 && l > 0)
2453 mode = widest_int_mode_for_size (max_size);
2455 if (mode == VOIDmode)
2456 break;
2458 icode = optab_handler (mov_optab, mode);
2459 if (icode != CODE_FOR_nothing
2460 && align >= GET_MODE_ALIGNMENT (mode))
2462 unsigned int size = GET_MODE_SIZE (mode);
2464 while (l >= size)
2466 if (reverse)
2467 offset -= size;
2469 cst = (*constfun) (constfundata, offset, mode);
2470 if (!targetm.legitimate_constant_p (mode, cst))
2471 return 0;
2473 if (!reverse)
2474 offset += size;
2476 l -= size;
2480 max_size = GET_MODE_SIZE (mode);
2483 /* The code above should have handled everything. */
2484 gcc_assert (!l);
2487 return 1;
2490 /* Generate several move instructions to store LEN bytes generated by
2491 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2492 pointer which will be passed as argument in every CONSTFUN call.
2493 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2494 a memset operation and false if it's a copy of a constant string.
2495 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2496 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2497 stpcpy. */
2500 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2501 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2502 void *constfundata, unsigned int align, bool memsetp, int endp)
2504 machine_mode to_addr_mode = get_address_mode (to);
2505 struct store_by_pieces_d data;
2507 if (len == 0)
2509 gcc_assert (endp != 2);
2510 return to;
2513 gcc_assert (targetm.use_by_pieces_infrastructure_p
2514 (len, align,
2515 memsetp
2516 ? SET_BY_PIECES
2517 : STORE_BY_PIECES,
2518 optimize_insn_for_speed_p ()));
2520 data.constfun = constfun;
2521 data.constfundata = constfundata;
2522 data.len = len;
2523 data.to = to;
2524 store_by_pieces_1 (&data, align);
2525 if (endp)
2527 rtx to1;
2529 gcc_assert (!data.reverse);
2530 if (data.autinc_to)
2532 if (endp == 2)
2534 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2535 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2536 else
2537 data.to_addr = copy_to_mode_reg (to_addr_mode,
2538 plus_constant (to_addr_mode,
2539 data.to_addr,
2540 -1));
2542 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2543 data.offset);
2545 else
2547 if (endp == 2)
2548 --data.offset;
2549 to1 = adjust_address (data.to, QImode, data.offset);
2551 return to1;
2553 else
2554 return data.to;
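/* Illustrative sketch only, not from the original source: the CONSTFUN
   contract used by can_store_by_pieces and store_by_pieces above.  The
   callback returns the constant to store at each OFFSET in MODE; here a
   hypothetical callback replicates a single fill byte, roughly in the spirit
   of a memset expansion.  For simplicity it assumes
   GET_MODE_SIZE (mode) <= sizeof (HOST_WIDE_INT).  */

static rtx
sketch_memset_constfun (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			machine_mode mode)
{
  unsigned char c = *(unsigned char *) data;
  unsigned HOST_WIDE_INT val = 0;

  for (unsigned int i = 0; i < GET_MODE_SIZE (mode); i++)
    val = (val << 8) | c;
  return gen_int_mode (val, mode);
}

/* Check first, then store: the pattern callers are expected to follow.  */

static bool
sketch_try_store_fill (rtx dest, unsigned HOST_WIDE_INT len,
		       unsigned int align, unsigned char c)
{
  if (!can_store_by_pieces (len, sketch_memset_constfun, &c, align, true))
    return false;
  store_by_pieces (dest, len, sketch_memset_constfun, &c, align, true, 0);
  return true;
}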
2557 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2558 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2560 static void
2561 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2563 struct store_by_pieces_d data;
2565 if (len == 0)
2566 return;
2568 data.constfun = clear_by_pieces_1;
2569 data.constfundata = NULL;
2570 data.len = len;
2571 data.to = to;
2572 store_by_pieces_1 (&data, align);
2575 /* Callback routine for clear_by_pieces.
2576 Return const0_rtx unconditionally. */
2578 static rtx
2579 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2580 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2581 machine_mode mode ATTRIBUTE_UNUSED)
2583 return const0_rtx;
2586 /* Subroutine of clear_by_pieces and store_by_pieces.
2587 Generate several move instructions to store LEN bytes of block TO. (A MEM
2588 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2590 static void
2591 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2592 unsigned int align ATTRIBUTE_UNUSED)
2594 machine_mode to_addr_mode = get_address_mode (data->to);
2595 rtx to_addr = XEXP (data->to, 0);
2596 unsigned int max_size = STORE_MAX_PIECES + 1;
2597 enum insn_code icode;
2599 data->offset = 0;
2600 data->to_addr = to_addr;
2601 data->autinc_to
2602 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2603 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2605 data->explicit_inc_to = 0;
2606 data->reverse
2607 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2608 if (data->reverse)
2609 data->offset = data->len;
2611 /* If storing requires more than two move insns,
2612 copy addresses to registers (to make displacements shorter)
2613 and use post-increment if available. */
2614 if (!data->autinc_to
2615 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2617 /* Determine the main mode we'll be using.
2618 MODE might not be used depending on the definitions of the
2619 USE_* macros below. */
2620 machine_mode mode ATTRIBUTE_UNUSED
2621 = widest_int_mode_for_size (max_size);
2623 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2625 data->to_addr = copy_to_mode_reg (to_addr_mode,
2626 plus_constant (to_addr_mode,
2627 to_addr,
2628 data->len));
2629 data->autinc_to = 1;
2630 data->explicit_inc_to = -1;
2633 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2634 && ! data->autinc_to)
2636 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2637 data->autinc_to = 1;
2638 data->explicit_inc_to = 1;
2641 if ( !data->autinc_to && CONSTANT_P (to_addr))
2642 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2645 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2647 /* First store what we can in the largest integer mode, then go to
2648 successively smaller modes. */
2650 while (max_size > 1 && data->len > 0)
2652 machine_mode mode = widest_int_mode_for_size (max_size);
2654 if (mode == VOIDmode)
2655 break;
2657 icode = optab_handler (mov_optab, mode);
2658 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2659 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2661 max_size = GET_MODE_SIZE (mode);
2664 /* The code above should have handled everything. */
2665 gcc_assert (!data->len);
2668 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2669 with move instructions for mode MODE. GENFUN is the gen_... function
2670 to make a move insn for that mode. DATA has all the other info. */
2672 static void
2673 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2674 struct store_by_pieces_d *data)
2676 unsigned int size = GET_MODE_SIZE (mode);
2677 rtx to1, cst;
2679 while (data->len >= size)
2681 if (data->reverse)
2682 data->offset -= size;
2684 if (data->autinc_to)
2685 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2686 data->offset);
2687 else
2688 to1 = adjust_address (data->to, mode, data->offset);
2690 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2691 emit_insn (gen_add2_insn (data->to_addr,
2692 gen_int_mode (-(HOST_WIDE_INT) size,
2693 GET_MODE (data->to_addr))));
2695 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2696 emit_insn ((*genfun) (to1, cst));
2698 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2699 emit_insn (gen_add2_insn (data->to_addr,
2700 gen_int_mode (size,
2701 GET_MODE (data->to_addr))));
2703 if (! data->reverse)
2704 data->offset += size;
2706 data->len -= size;
2710 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2711 its length in bytes. */
2714 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2715 unsigned int expected_align, HOST_WIDE_INT expected_size,
2716 unsigned HOST_WIDE_INT min_size,
2717 unsigned HOST_WIDE_INT max_size,
2718 unsigned HOST_WIDE_INT probable_max_size)
2720 machine_mode mode = GET_MODE (object);
2721 unsigned int align;
2723 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2725 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2726 just move a zero. Otherwise, do this a piece at a time. */
2727 if (mode != BLKmode
2728 && CONST_INT_P (size)
2729 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2731 rtx zero = CONST0_RTX (mode);
2732 if (zero != NULL)
2734 emit_move_insn (object, zero);
2735 return NULL;
2738 if (COMPLEX_MODE_P (mode))
2740 zero = CONST0_RTX (GET_MODE_INNER (mode));
2741 if (zero != NULL)
2743 write_complex_part (object, zero, 0);
2744 write_complex_part (object, zero, 1);
2745 return NULL;
2750 if (size == const0_rtx)
2751 return NULL;
2753 align = MEM_ALIGN (object);
2755 if (CONST_INT_P (size)
2756 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2757 CLEAR_BY_PIECES,
2758 optimize_insn_for_speed_p ()))
2759 clear_by_pieces (object, INTVAL (size), align);
2760 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2761 expected_align, expected_size,
2762 min_size, max_size, probable_max_size))
2764 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2765 return set_storage_via_libcall (object, size, const0_rtx,
2766 method == BLOCK_OP_TAILCALL);
2767 else
2768 gcc_unreachable ();
2770 return NULL;
2774 clear_storage (rtx object, rtx size, enum block_op_methods method)
2776 unsigned HOST_WIDE_INT max, min = 0;
2777 if (GET_CODE (size) == CONST_INT)
2778 min = max = UINTVAL (size);
2779 else
2780 max = GET_MODE_MASK (GET_MODE (size));
2781 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
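/* Illustrative sketch only, not from the original source: zeroing a 64-byte
   BLKmode stack temporary through the clear_storage entry point, which picks
   clear_by_pieces, a setmem pattern, or a memset libcall as appropriate.  */

static void
sketch_clear_temp (void)
{
  rtx slot = assign_stack_temp (BLKmode, 64);
  clear_storage (slot, GEN_INT (64), BLOCK_OP_NORMAL);
}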
2785 /* A subroutine of clear_storage. Expand a call to memset.
2786 Return the return value of memset, 0 otherwise. */
2789 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2791 tree call_expr, fn, object_tree, size_tree, val_tree;
2792 machine_mode size_mode;
2793 rtx retval;
2795 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2796 place those new pseudos into a VAR_DECL and use them later. */
2798 object = copy_addr_to_reg (XEXP (object, 0));
2800 size_mode = TYPE_MODE (sizetype);
2801 size = convert_to_mode (size_mode, size, 1);
2802 size = copy_to_mode_reg (size_mode, size);
2804 /* It is incorrect to use the libcall calling conventions to call
2805 memset in this context. This could be a user call to memset and
2806 the user may wish to examine the return value from memset. For
2807 targets where libcalls and normal calls have different conventions
2808 for returning pointers, we could end up generating incorrect code. */
2810 object_tree = make_tree (ptr_type_node, object);
2811 if (!CONST_INT_P (val))
2812 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2813 size_tree = make_tree (sizetype, size);
2814 val_tree = make_tree (integer_type_node, val);
2816 fn = clear_storage_libcall_fn (true);
2817 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2818 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2820 retval = expand_normal (call_expr);
2822 return retval;
2825 /* A subroutine of set_storage_via_libcall. Create the tree node
2826 for the function we use for block clears. */
2828 tree block_clear_fn;
2830 void
2831 init_block_clear_fn (const char *asmspec)
2833 if (!block_clear_fn)
2835 tree fn, args;
2837 fn = get_identifier ("memset");
2838 args = build_function_type_list (ptr_type_node, ptr_type_node,
2839 integer_type_node, sizetype,
2840 NULL_TREE);
2842 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2843 DECL_EXTERNAL (fn) = 1;
2844 TREE_PUBLIC (fn) = 1;
2845 DECL_ARTIFICIAL (fn) = 1;
2846 TREE_NOTHROW (fn) = 1;
2847 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2848 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2850 block_clear_fn = fn;
2853 if (asmspec)
2854 set_user_assembler_name (block_clear_fn, asmspec);
2857 static tree
2858 clear_storage_libcall_fn (int for_call)
2860 static bool emitted_extern;
2862 if (!block_clear_fn)
2863 init_block_clear_fn (NULL);
2865 if (for_call && !emitted_extern)
2867 emitted_extern = true;
2868 make_decl_rtl (block_clear_fn);
2871 return block_clear_fn;
2874 /* Expand a setmem pattern; return true if successful. */
2876 bool
2877 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2878 unsigned int expected_align, HOST_WIDE_INT expected_size,
2879 unsigned HOST_WIDE_INT min_size,
2880 unsigned HOST_WIDE_INT max_size,
2881 unsigned HOST_WIDE_INT probable_max_size)
2883 /* Try the most limited insn first, because there's no point
2884 including more than one in the machine description unless
2885 the more limited one has some advantage. */
2887 machine_mode mode;
2889 if (expected_align < align)
2890 expected_align = align;
2891 if (expected_size != -1)
2893 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2894 expected_size = max_size;
2895 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2896 expected_size = min_size;
2899 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2900 mode = GET_MODE_WIDER_MODE (mode))
2902 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2904 if (code != CODE_FOR_nothing
2905 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2906 here because if SIZE is less than the mode mask, as it is
2907 returned by the macro, it will definitely be less than the
2908 actual mode mask. Since SIZE is within the Pmode address
2909 space, we limit MODE to Pmode. */
2910 && ((CONST_INT_P (size)
2911 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2912 <= (GET_MODE_MASK (mode) >> 1)))
2913 || max_size <= (GET_MODE_MASK (mode) >> 1)
2914 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2916 struct expand_operand ops[9];
2917 unsigned int nops;
2919 nops = insn_data[(int) code].n_generator_args;
2920 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2922 create_fixed_operand (&ops[0], object);
2923 /* The check above guarantees that this size conversion is valid. */
2924 create_convert_operand_to (&ops[1], size, mode, true);
2925 create_convert_operand_from (&ops[2], val, byte_mode, true);
2926 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2927 if (nops >= 6)
2929 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2930 create_integer_operand (&ops[5], expected_size);
2932 if (nops >= 8)
2934 create_integer_operand (&ops[6], min_size);
2935 /* If we cannot represent the maximal size,
2936 make the parameter NULL. */
2937 if ((HOST_WIDE_INT) max_size != -1)
2938 create_integer_operand (&ops[7], max_size);
2939 else
2940 create_fixed_operand (&ops[7], NULL);
2942 if (nops == 9)
2944 /* If we cannot represent the maximal size,
2945 make the parameter NULL. */
2946 if ((HOST_WIDE_INT) probable_max_size != -1)
2947 create_integer_operand (&ops[8], probable_max_size);
2948 else
2949 create_fixed_operand (&ops[8], NULL);
2951 if (maybe_expand_insn (code, nops, ops))
2952 return true;
2956 return false;
2960 /* Write to one of the components of the complex value CPLX. Write VAL to
2961 the real part if IMAG_P is false, and the imaginary part if it's true. */
2963 void
2964 write_complex_part (rtx cplx, rtx val, bool imag_p)
2966 machine_mode cmode;
2967 machine_mode imode;
2968 unsigned ibitsize;
2970 if (GET_CODE (cplx) == CONCAT)
2972 emit_move_insn (XEXP (cplx, imag_p), val);
2973 return;
2976 cmode = GET_MODE (cplx);
2977 imode = GET_MODE_INNER (cmode);
2978 ibitsize = GET_MODE_BITSIZE (imode);
2980 /* For MEMs simplify_gen_subreg may generate an invalid new address
2981 because, e.g., the original address is considered mode-dependent
2982 by the target, which restricts simplify_subreg from invoking
2983 adjust_address_nv. Instead of preparing fallback support for an
2984 invalid address, we call adjust_address_nv directly. */
2985 if (MEM_P (cplx))
2987 emit_move_insn (adjust_address_nv (cplx, imode,
2988 imag_p ? GET_MODE_SIZE (imode) : 0),
2989 val);
2990 return;
2993 /* If the sub-object is at least word sized, then we know that subregging
2994 will work. This special case is important, since store_bit_field
2995 wants to operate on integer modes, and there's rarely an OImode to
2996 correspond to TCmode. */
2997 if (ibitsize >= BITS_PER_WORD
2998 /* For hard regs we have exact predicates. Assume we can split
2999 the original object if it spans an even number of hard regs.
3000 This special case is important for SCmode on 64-bit platforms
3001 where the natural size of floating-point regs is 32-bit. */
3002 || (REG_P (cplx)
3003 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3004 && REG_NREGS (cplx) % 2 == 0))
3006 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3007 imag_p ? GET_MODE_SIZE (imode) : 0);
3008 if (part)
3010 emit_move_insn (part, val);
3011 return;
3013 else
3014 /* simplify_gen_subreg may fail for sub-word MEMs. */
3015 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3018 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
3021 /* Extract one of the components of the complex value CPLX. Extract the
3022 real part if IMAG_P is false, and the imaginary part if it's true. */
3024 static rtx
3025 read_complex_part (rtx cplx, bool imag_p)
3027 machine_mode cmode, imode;
3028 unsigned ibitsize;
3030 if (GET_CODE (cplx) == CONCAT)
3031 return XEXP (cplx, imag_p);
3033 cmode = GET_MODE (cplx);
3034 imode = GET_MODE_INNER (cmode);
3035 ibitsize = GET_MODE_BITSIZE (imode);
3037 /* Special case reads from complex constants that got spilled to memory. */
3038 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3040 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3041 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3043 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3044 if (CONSTANT_CLASS_P (part))
3045 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3049 /* For MEMs simplify_gen_subreg may generate an invalid new address
3050 because, e.g., the original address is considered mode-dependent
3051 by the target, which restricts simplify_subreg from invoking
3052 adjust_address_nv. Instead of preparing fallback support for an
3053 invalid address, we call adjust_address_nv directly. */
3054 if (MEM_P (cplx))
3055 return adjust_address_nv (cplx, imode,
3056 imag_p ? GET_MODE_SIZE (imode) : 0);
3058 /* If the sub-object is at least word sized, then we know that subregging
3059 will work. This special case is important, since extract_bit_field
3060 wants to operate on integer modes, and there's rarely an OImode to
3061 correspond to TCmode. */
3062 if (ibitsize >= BITS_PER_WORD
3063 /* For hard regs we have exact predicates. Assume we can split
3064 the original object if it spans an even number of hard regs.
3065 This special case is important for SCmode on 64-bit platforms
3066 where the natural size of floating-point regs is 32-bit. */
3067 || (REG_P (cplx)
3068 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3069 && REG_NREGS (cplx) % 2 == 0))
3071 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3072 imag_p ? GET_MODE_SIZE (imode) : 0);
3073 if (ret)
3074 return ret;
3075 else
3076 /* simplify_gen_subreg may fail for sub-word MEMs. */
3077 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3080 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3081 true, NULL_RTX, imode, imode);
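/* Illustrative sketch only, not from the original source: the two helpers
   above combined to conjugate a complex value in place, i.e. negate its
   imaginary part.  Since read_complex_part is static, the sketch only makes
   sense within this file; CPLX is assumed to have a complex mode.  */

static void
sketch_conjugate (rtx cplx)
{
  machine_mode imode = GET_MODE_INNER (GET_MODE (cplx));
  rtx imag = read_complex_part (cplx, true);
  rtx neg = expand_unop (imode, neg_optab, imag, NULL_RTX, 0);

  write_complex_part (cplx, neg, true);
}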
3084 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3085 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3086 represented in NEW_MODE. If FORCE is true, this will never happen, as
3087 we'll force-create a SUBREG if needed. */
3089 static rtx
3090 emit_move_change_mode (machine_mode new_mode,
3091 machine_mode old_mode, rtx x, bool force)
3093 rtx ret;
3095 if (push_operand (x, GET_MODE (x)))
3097 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3098 MEM_COPY_ATTRIBUTES (ret, x);
3100 else if (MEM_P (x))
3102 /* We don't have to worry about changing the address since the
3103 size in bytes is supposed to be the same. */
3104 if (reload_in_progress)
3106 /* Copy the MEM to change the mode and move any
3107 substitutions from the old MEM to the new one. */
3108 ret = adjust_address_nv (x, new_mode, 0);
3109 copy_replacements (x, ret);
3111 else
3112 ret = adjust_address (x, new_mode, 0);
3114 else
3116 /* Note that we do want simplify_subreg's behavior of validating
3117 that the new mode is ok for a hard register. If we were to use
3118 simplify_gen_subreg, we would create the subreg, but would
3119 probably run into the target not being able to implement it. */
3120 /* Except, of course, when FORCE is true, when this is exactly what
3121 we want. Which is needed for CCmodes on some targets. */
3122 if (force)
3123 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3124 else
3125 ret = simplify_subreg (new_mode, x, old_mode, 0);
3128 return ret;
3131 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3132 an integer mode of the same size as MODE. Returns the instruction
3133 emitted, or NULL if such a move could not be generated. */
3135 static rtx_insn *
3136 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3138 machine_mode imode;
3139 enum insn_code code;
3141 /* There must exist a mode of the exact size we require. */
3142 imode = int_mode_for_mode (mode);
3143 if (imode == BLKmode)
3144 return NULL;
3146 /* The target must support moves in this mode. */
3147 code = optab_handler (mov_optab, imode);
3148 if (code == CODE_FOR_nothing)
3149 return NULL;
3151 x = emit_move_change_mode (imode, mode, x, force);
3152 if (x == NULL_RTX)
3153 return NULL;
3154 y = emit_move_change_mode (imode, mode, y, force);
3155 if (y == NULL_RTX)
3156 return NULL;
3157 return emit_insn (GEN_FCN (code) (x, y));
3160 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3161 Return an equivalent MEM that does not use an auto-increment. */
3164 emit_move_resolve_push (machine_mode mode, rtx x)
3166 enum rtx_code code = GET_CODE (XEXP (x, 0));
3167 HOST_WIDE_INT adjust;
3168 rtx temp;
3170 adjust = GET_MODE_SIZE (mode);
3171 #ifdef PUSH_ROUNDING
3172 adjust = PUSH_ROUNDING (adjust);
3173 #endif
3174 if (code == PRE_DEC || code == POST_DEC)
3175 adjust = -adjust;
3176 else if (code == PRE_MODIFY || code == POST_MODIFY)
3178 rtx expr = XEXP (XEXP (x, 0), 1);
3179 HOST_WIDE_INT val;
3181 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3182 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3183 val = INTVAL (XEXP (expr, 1));
3184 if (GET_CODE (expr) == MINUS)
3185 val = -val;
3186 gcc_assert (adjust == val || adjust == -val);
3187 adjust = val;
3190 /* Do not use anti_adjust_stack, since we don't want to update
3191 stack_pointer_delta. */
3192 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3193 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3194 0, OPTAB_LIB_WIDEN);
3195 if (temp != stack_pointer_rtx)
3196 emit_move_insn (stack_pointer_rtx, temp);
3198 switch (code)
3200 case PRE_INC:
3201 case PRE_DEC:
3202 case PRE_MODIFY:
3203 temp = stack_pointer_rtx;
3204 break;
3205 case POST_INC:
3206 case POST_DEC:
3207 case POST_MODIFY:
3208 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3209 break;
3210 default:
3211 gcc_unreachable ();
3214 return replace_equiv_address (x, temp);
3217 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3218 X is known to satisfy push_operand, and MODE is known to be complex.
3219 Returns the last instruction emitted. */
3221 rtx_insn *
3222 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3224 machine_mode submode = GET_MODE_INNER (mode);
3225 bool imag_first;
3227 #ifdef PUSH_ROUNDING
3228 unsigned int submodesize = GET_MODE_SIZE (submode);
3230 /* In case we output to the stack, but the size is not something the
3231 machine can push exactly, we need to use move instructions. */
3232 if (PUSH_ROUNDING (submodesize) != submodesize)
3234 x = emit_move_resolve_push (mode, x);
3235 return emit_move_insn (x, y);
3237 #endif
3239 /* Note that the real part always precedes the imag part in memory
3240 regardless of machine's endianness. */
3241 switch (GET_CODE (XEXP (x, 0)))
3243 case PRE_DEC:
3244 case POST_DEC:
3245 imag_first = true;
3246 break;
3247 case PRE_INC:
3248 case POST_INC:
3249 imag_first = false;
3250 break;
3251 default:
3252 gcc_unreachable ();
3255 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3256 read_complex_part (y, imag_first));
3257 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3258 read_complex_part (y, !imag_first));
3261 /* A subroutine of emit_move_complex. Perform the move from Y to X
3262 via two moves of the parts. Returns the last instruction emitted. */
3264 rtx_insn *
3265 emit_move_complex_parts (rtx x, rtx y)
3267 /* Show the output dies here. This is necessary for SUBREGs
3268 of pseudos since we cannot track their lifetimes correctly;
3269 hard regs shouldn't appear here except as return values. */
3270 if (!reload_completed && !reload_in_progress
3271 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3272 emit_clobber (x);
3274 write_complex_part (x, read_complex_part (y, false), false);
3275 write_complex_part (x, read_complex_part (y, true), true);
3277 return get_last_insn ();
3280 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3281 MODE is known to be complex. Returns the last instruction emitted. */
3283 static rtx_insn *
3284 emit_move_complex (machine_mode mode, rtx x, rtx y)
3286 bool try_int;
3288 /* Need to take special care for pushes, to maintain proper ordering
3289 of the data, and possibly extra padding. */
3290 if (push_operand (x, mode))
3291 return emit_move_complex_push (mode, x, y);
3293 /* See if we can coerce the target into moving both values at once, except
3294 for floating point where we favor moving as parts if this is easy. */
3295 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3296 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3297 && !(REG_P (x)
3298 && HARD_REGISTER_P (x)
3299 && REG_NREGS (x) == 1)
3300 && !(REG_P (y)
3301 && HARD_REGISTER_P (y)
3302 && REG_NREGS (y) == 1))
3303 try_int = false;
3304 /* Not possible if the values are inherently not adjacent. */
3305 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3306 try_int = false;
3307 /* Is possible if both are registers (or subregs of registers). */
3308 else if (register_operand (x, mode) && register_operand (y, mode))
3309 try_int = true;
3310 /* If one of the operands is a memory, and alignment constraints
3311 are friendly enough, we may be able to do combined memory operations.
3312 We do not attempt this if Y is a constant because that combination is
3313 usually better handled by the by-parts copy below. */
3314 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3315 && (!STRICT_ALIGNMENT
3316 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3317 try_int = true;
3318 else
3319 try_int = false;
3321 if (try_int)
3323 rtx_insn *ret;
3325 /* For memory to memory moves, optimal behavior can be had with the
3326 existing block move logic. */
3327 if (MEM_P (x) && MEM_P (y))
3329 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3330 BLOCK_OP_NO_LIBCALL);
3331 return get_last_insn ();
3334 ret = emit_move_via_integer (mode, x, y, true);
3335 if (ret)
3336 return ret;
3339 return emit_move_complex_parts (x, y);
3342 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3343 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3345 static rtx_insn *
3346 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3348 rtx_insn *ret;
3350 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3351 if (mode != CCmode)
3353 enum insn_code code = optab_handler (mov_optab, CCmode);
3354 if (code != CODE_FOR_nothing)
3356 x = emit_move_change_mode (CCmode, mode, x, true);
3357 y = emit_move_change_mode (CCmode, mode, y, true);
3358 return emit_insn (GEN_FCN (code) (x, y));
3362 /* Otherwise, find the MODE_INT mode of the same width. */
3363 ret = emit_move_via_integer (mode, x, y, false);
3364 gcc_assert (ret != NULL);
3365 return ret;
3368 /* Return true if word I of OP lies entirely in the
3369 undefined bits of a paradoxical subreg. */
3371 static bool
3372 undefined_operand_subword_p (const_rtx op, int i)
3374 machine_mode innermode, innermostmode;
3375 int offset;
3376 if (GET_CODE (op) != SUBREG)
3377 return false;
3378 innermode = GET_MODE (op);
3379 innermostmode = GET_MODE (SUBREG_REG (op));
3380 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3381 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3382 memory, except for a paradoxical subreg where we define
3383 SUBREG_BYTE to be 0; undo this exception as in
3384 simplify_subreg. */
3385 if (SUBREG_BYTE (op) == 0
3386 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3388 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3389 if (WORDS_BIG_ENDIAN)
3390 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3391 if (BYTES_BIG_ENDIAN)
3392 offset += difference % UNITS_PER_WORD;
3394 if (offset >= GET_MODE_SIZE (innermostmode)
3395 || offset <= -GET_MODE_SIZE (word_mode))
3396 return true;
3397 return false;
3400 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3401 MODE is any multi-word or full-word mode that lacks a move_insn
3402 pattern. Note that you will get better code if you define such
3403 patterns, even if they must turn into multiple assembler instructions. */
3405 static rtx_insn *
3406 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3408 rtx_insn *last_insn = 0;
3409 rtx_insn *seq;
3410 rtx inner;
3411 bool need_clobber;
3412 int i;
3414 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3416 /* If X is a push on the stack, do the push now and replace
3417 X with a reference to the stack pointer. */
3418 if (push_operand (x, mode))
3419 x = emit_move_resolve_push (mode, x);
3421 /* If we are in reload, see if either operand is a MEM whose address
3422 is scheduled for replacement. */
3423 if (reload_in_progress && MEM_P (x)
3424 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3425 x = replace_equiv_address_nv (x, inner);
3426 if (reload_in_progress && MEM_P (y)
3427 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3428 y = replace_equiv_address_nv (y, inner);
3430 start_sequence ();
3432 need_clobber = false;
3433 for (i = 0;
3434 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3435 i++)
3437 rtx xpart = operand_subword (x, i, 1, mode);
3438 rtx ypart;
3440 /* Do not generate code for a move if it would come entirely
3441 from the undefined bits of a paradoxical subreg. */
3442 if (undefined_operand_subword_p (y, i))
3443 continue;
3445 ypart = operand_subword (y, i, 1, mode);
3447 /* If we can't get a part of Y, put Y into memory if it is a
3448 constant. Otherwise, force it into a register. Then we must
3449 be able to get a part of Y. */
3450 if (ypart == 0 && CONSTANT_P (y))
3452 y = use_anchored_address (force_const_mem (mode, y));
3453 ypart = operand_subword (y, i, 1, mode);
3455 else if (ypart == 0)
3456 ypart = operand_subword_force (y, i, mode);
3458 gcc_assert (xpart && ypart);
3460 need_clobber |= (GET_CODE (xpart) == SUBREG);
3462 last_insn = emit_move_insn (xpart, ypart);
3465 seq = get_insns ();
3466 end_sequence ();
3468 /* Show the output dies here. This is necessary for SUBREGs
3469 of pseudos since we cannot track their lifetimes correctly;
3470 hard regs shouldn't appear here except as return values.
3471 We never want to emit such a clobber after reload. */
3472 if (x != y
3473 && ! (reload_in_progress || reload_completed)
3474 && need_clobber != 0)
3475 emit_clobber (x);
3477 emit_insn (seq);
3479 return last_insn;
3482 /* Low level part of emit_move_insn.
3483 Called just like emit_move_insn, but assumes X and Y
3484 are basically valid. */
3486 rtx_insn *
3487 emit_move_insn_1 (rtx x, rtx y)
3489 machine_mode mode = GET_MODE (x);
3490 enum insn_code code;
3492 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3494 code = optab_handler (mov_optab, mode);
3495 if (code != CODE_FOR_nothing)
3496 return emit_insn (GEN_FCN (code) (x, y));
3498 /* Expand complex moves by moving real part and imag part. */
3499 if (COMPLEX_MODE_P (mode))
3500 return emit_move_complex (mode, x, y);
3502 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3503 || ALL_FIXED_POINT_MODE_P (mode))
3505 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3507 /* If we can't find an integer mode, use multi words. */
3508 if (result)
3509 return result;
3510 else
3511 return emit_move_multi_word (mode, x, y);
3514 if (GET_MODE_CLASS (mode) == MODE_CC)
3515 return emit_move_ccmode (mode, x, y);
3517 /* Try using a move pattern for the corresponding integer mode. This is
3518 only safe when simplify_subreg can convert MODE constants into integer
3519 constants. At present, it can only do this reliably if the value
3520 fits within a HOST_WIDE_INT. */
3521 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3523 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3525 if (ret)
3527 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3528 return ret;
3532 return emit_move_multi_word (mode, x, y);
3535 /* Generate code to copy Y into X.
3536 Both Y and X must have the same mode, except that
3537 Y can be a constant with VOIDmode.
3538 This mode cannot be BLKmode; use emit_block_move for that.
3540 Return the last instruction emitted. */
3542 rtx_insn *
3543 emit_move_insn (rtx x, rtx y)
3545 machine_mode mode = GET_MODE (x);
3546 rtx y_cst = NULL_RTX;
3547 rtx_insn *last_insn;
3548 rtx set;
3550 gcc_assert (mode != BLKmode
3551 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3553 if (CONSTANT_P (y))
3555 if (optimize
3556 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3557 && (last_insn = compress_float_constant (x, y)))
3558 return last_insn;
3560 y_cst = y;
3562 if (!targetm.legitimate_constant_p (mode, y))
3564 y = force_const_mem (mode, y);
3566 /* If the target's cannot_force_const_mem prevented the spill,
3567 assume that the target's move expanders will also take care
3568 of the non-legitimate constant. */
3569 if (!y)
3570 y = y_cst;
3571 else
3572 y = use_anchored_address (y);
3576 /* If X or Y are memory references, verify that their addresses are valid
3577 for the machine. */
3578 if (MEM_P (x)
3579 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3580 MEM_ADDR_SPACE (x))
3581 && ! push_operand (x, GET_MODE (x))))
3582 x = validize_mem (x);
3584 if (MEM_P (y)
3585 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3586 MEM_ADDR_SPACE (y)))
3587 y = validize_mem (y);
3589 gcc_assert (mode != BLKmode);
3591 last_insn = emit_move_insn_1 (x, y);
3593 if (y_cst && REG_P (x)
3594 && (set = single_set (last_insn)) != NULL_RTX
3595 && SET_DEST (set) == x
3596 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3597 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3599 return last_insn;
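/* Illustrative sketch only, not from the original source: the most common use
   of emit_move_insn -- loading an immediate into a fresh pseudo.  The routine
   takes care of forcing constants the target cannot handle directly into the
   constant pool.  */

static rtx
sketch_load_constant (HOST_WIDE_INT value)
{
  rtx reg = gen_reg_rtx (word_mode);

  emit_move_insn (reg, gen_int_mode (value, word_mode));
  return reg;
}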
3602 /* Generate the body of an instruction to copy Y into X.
3603 It may be a list of insns, if one insn isn't enough. */
3605 rtx_insn *
3606 gen_move_insn (rtx x, rtx y)
3608 rtx_insn *seq;
3610 start_sequence ();
3611 emit_move_insn_1 (x, y);
3612 seq = get_insns ();
3613 end_sequence ();
3614 return seq;
3617 /* If Y is representable exactly in a narrower mode, and the target can
3618 perform the extension directly from constant or memory, then emit the
3619 move as an extension. */
3621 static rtx_insn *
3622 compress_float_constant (rtx x, rtx y)
3624 machine_mode dstmode = GET_MODE (x);
3625 machine_mode orig_srcmode = GET_MODE (y);
3626 machine_mode srcmode;
3627 REAL_VALUE_TYPE r;
3628 int oldcost, newcost;
3629 bool speed = optimize_insn_for_speed_p ();
3631 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3633 if (targetm.legitimate_constant_p (dstmode, y))
3634 oldcost = set_src_cost (y, orig_srcmode, speed);
3635 else
3636 oldcost = set_src_cost (force_const_mem (dstmode, y), dstmode, speed);
3638 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3639 srcmode != orig_srcmode;
3640 srcmode = GET_MODE_WIDER_MODE (srcmode))
3642 enum insn_code ic;
3643 rtx trunc_y;
3644 rtx_insn *last_insn;
3646 /* Skip if the target can't extend this way. */
3647 ic = can_extend_p (dstmode, srcmode, 0);
3648 if (ic == CODE_FOR_nothing)
3649 continue;
3651 /* Skip if the narrowed value isn't exact. */
3652 if (! exact_real_truncate (srcmode, &r))
3653 continue;
3655 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3657 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3659 /* Skip if the target needs extra instructions to perform
3660 the extension. */
3661 if (!insn_operand_matches (ic, 1, trunc_y))
3662 continue;
3663 /* This is valid, but may not be cheaper than the original. */
3664 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3665 dstmode, speed);
3666 if (oldcost < newcost)
3667 continue;
3669 else if (float_extend_from_mem[dstmode][srcmode])
3671 trunc_y = force_const_mem (srcmode, trunc_y);
3672 /* This is valid, but may not be cheaper than the original. */
3673 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3674 dstmode, speed);
3675 if (oldcost < newcost)
3676 continue;
3677 trunc_y = validize_mem (trunc_y);
3679 else
3680 continue;
3682 /* For CSE's benefit, force the compressed constant pool entry
3683 into a new pseudo. This constant may be used in different modes,
3684 and if not, combine will put things back together for us. */
3685 trunc_y = force_reg (srcmode, trunc_y);
3687 /* If x is a hard register, perform the extension into a pseudo,
3688 so that e.g. stack realignment code is aware of it. */
3689 rtx target = x;
3690 if (REG_P (x) && HARD_REGISTER_P (x))
3691 target = gen_reg_rtx (dstmode);
3693 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3694 last_insn = get_last_insn ();
3696 if (REG_P (target))
3697 set_unique_reg_note (last_insn, REG_EQUAL, y);
3699 if (target != x)
3700 return emit_move_insn (x, target);
3701 return last_insn;
3704 return NULL;
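/* Worked example, not from the original source: on a target whose
   DFmode<-SFmode extension accepts a memory operand, the DFmode constant 1.5
   -- which is exactly representable in SFmode -- can be emitted by the
   routine above as

     (set (reg:DF d) (float_extend:DF (mem:SF <pool entry for 1.5f>)))

   whereas a constant such as 0.1, whose SFmode truncation is not exact,
   fails the exact_real_truncate test and falls back to an ordinary DFmode
   constant-pool load.  */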
3707 /* Pushing data onto the stack. */
3709 /* Push a block of length SIZE (perhaps variable)
3710 and return an rtx to address the beginning of the block.
3711 The value may be virtual_outgoing_args_rtx.
3713 EXTRA is the number of bytes of padding to push in addition to SIZE.
3714 BELOW nonzero means this padding comes at low addresses;
3715 otherwise, the padding comes at high addresses. */
3718 push_block (rtx size, int extra, int below)
3720 rtx temp;
3722 size = convert_modes (Pmode, ptr_mode, size, 1);
3723 if (CONSTANT_P (size))
3724 anti_adjust_stack (plus_constant (Pmode, size, extra));
3725 else if (REG_P (size) && extra == 0)
3726 anti_adjust_stack (size);
3727 else
3729 temp = copy_to_mode_reg (Pmode, size);
3730 if (extra != 0)
3731 temp = expand_binop (Pmode, add_optab, temp,
3732 gen_int_mode (extra, Pmode),
3733 temp, 0, OPTAB_LIB_WIDEN);
3734 anti_adjust_stack (temp);
3737 if (STACK_GROWS_DOWNWARD)
3739 temp = virtual_outgoing_args_rtx;
3740 if (extra != 0 && below)
3741 temp = plus_constant (Pmode, temp, extra);
3743 else
3745 if (CONST_INT_P (size))
3746 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3747 -INTVAL (size) - (below ? 0 : extra));
3748 else if (extra != 0 && !below)
3749 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3750 negate_rtx (Pmode, plus_constant (Pmode, size,
3751 extra)));
3752 else
3753 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3754 negate_rtx (Pmode, size));
3757 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
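/* Illustrative sketch only, not from the original source: reserving 32 bytes
   of outgoing-argument space plus 8 bytes of padding at low addresses, then
   forming a BLKmode MEM over the pushed block.  */

static rtx
sketch_push_block (void)
{
  rtx addr = push_block (GEN_INT (32), 8, 1);
  return gen_rtx_MEM (BLKmode, addr);
}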
3760 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3762 static rtx
3763 mem_autoinc_base (rtx mem)
3765 if (MEM_P (mem))
3767 rtx addr = XEXP (mem, 0);
3768 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3769 return XEXP (addr, 0);
3771 return NULL;
3774 /* A utility routine used here, in reload, and in try_split. The insns
3775 after PREV up to and including LAST are known to adjust the stack,
3776 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3777 placing notes as appropriate. PREV may be NULL, indicating the
3778 entire insn sequence prior to LAST should be scanned.
3780 The set of allowed stack pointer modifications is small:
3781 (1) One or more auto-inc style memory references (aka pushes),
 3782 (2) One or more additions/subtractions with the SP as destination,
3783 (3) A single move insn with the SP as destination,
3784 (4) A call_pop insn,
3785 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3787 Insns in the sequence that do not modify the SP are ignored,
3788 except for noreturn calls.
3790 The return value is the amount of adjustment that can be trivially
 3791 verified, via immediate operand or auto-inc. If the adjustment cannot
 3792 be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
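/* For instance, an explicit adjustment such as
     (set (reg sp) (plus (reg sp) (const_int -16)))
   yields -16, while a push through an auto-inc address such as
     (set (mem:SI (pre_dec (reg sp))) (reg r0))
   yields -4, minus the size of the pushed mode.  These examples assume
   a 32-bit target; the register names are illustrative only.  */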
3794 HOST_WIDE_INT
3795 find_args_size_adjust (rtx_insn *insn)
3797 rtx dest, set, pat;
3798 int i;
3800 pat = PATTERN (insn);
3801 set = NULL;
3803 /* Look for a call_pop pattern. */
3804 if (CALL_P (insn))
3806 /* We have to allow non-call_pop patterns for the case
3807 of emit_single_push_insn of a TLS address. */
3808 if (GET_CODE (pat) != PARALLEL)
3809 return 0;
3811 /* All call_pop have a stack pointer adjust in the parallel.
3812 The call itself is always first, and the stack adjust is
3813 usually last, so search from the end. */
3814 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3816 set = XVECEXP (pat, 0, i);
3817 if (GET_CODE (set) != SET)
3818 continue;
3819 dest = SET_DEST (set);
3820 if (dest == stack_pointer_rtx)
3821 break;
3823 /* We'd better have found the stack pointer adjust. */
3824 if (i == 0)
3825 return 0;
3826 /* Fall through to process the extracted SET and DEST
3827 as if it was a standalone insn. */
3829 else if (GET_CODE (pat) == SET)
3830 set = pat;
3831 else if ((set = single_set (insn)) != NULL)
3833 else if (GET_CODE (pat) == PARALLEL)
3835 /* ??? Some older ports use a parallel with a stack adjust
3836 and a store for a PUSH_ROUNDING pattern, rather than a
3837 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3838 /* ??? See h8300 and m68k, pushqi1. */
3839 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3841 set = XVECEXP (pat, 0, i);
3842 if (GET_CODE (set) != SET)
3843 continue;
3844 dest = SET_DEST (set);
3845 if (dest == stack_pointer_rtx)
3846 break;
3848 /* We do not expect an auto-inc of the sp in the parallel. */
3849 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3850 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3851 != stack_pointer_rtx);
3853 if (i < 0)
3854 return 0;
3856 else
3857 return 0;
3859 dest = SET_DEST (set);
3861 /* Look for direct modifications of the stack pointer. */
3862 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3864 /* Look for a trivial adjustment, otherwise assume nothing. */
3865 /* Note that the SPU restore_stack_block pattern refers to
3866 the stack pointer in V4SImode. Consider that non-trivial. */
3867 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3868 && GET_CODE (SET_SRC (set)) == PLUS
3869 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3870 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3871 return INTVAL (XEXP (SET_SRC (set), 1));
3872 /* ??? Reload can generate no-op moves, which will be cleaned
3873 up later. Recognize it and continue searching. */
3874 else if (rtx_equal_p (dest, SET_SRC (set)))
3875 return 0;
3876 else
3877 return HOST_WIDE_INT_MIN;
3879 else
3881 rtx mem, addr;
3883 /* Otherwise only think about autoinc patterns. */
3884 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3886 mem = dest;
3887 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3888 != stack_pointer_rtx);
3890 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3891 mem = SET_SRC (set);
3892 else
3893 return 0;
3895 addr = XEXP (mem, 0);
3896 switch (GET_CODE (addr))
3898 case PRE_INC:
3899 case POST_INC:
3900 return GET_MODE_SIZE (GET_MODE (mem));
3901 case PRE_DEC:
3902 case POST_DEC:
3903 return -GET_MODE_SIZE (GET_MODE (mem));
3904 case PRE_MODIFY:
3905 case POST_MODIFY:
3906 addr = XEXP (addr, 1);
3907 gcc_assert (GET_CODE (addr) == PLUS);
3908 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3909 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3910 return INTVAL (XEXP (addr, 1));
3911 default:
3912 gcc_unreachable ();
 3917 int
 3918 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
3920 int args_size = end_args_size;
3921 bool saw_unknown = false;
3922 rtx_insn *insn;
3924 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3926 HOST_WIDE_INT this_delta;
3928 if (!NONDEBUG_INSN_P (insn))
3929 continue;
3931 this_delta = find_args_size_adjust (insn);
3932 if (this_delta == 0)
3934 if (!CALL_P (insn)
3935 || ACCUMULATE_OUTGOING_ARGS
3936 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3937 continue;
3940 gcc_assert (!saw_unknown);
3941 if (this_delta == HOST_WIDE_INT_MIN)
3942 saw_unknown = true;
3944 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3945 if (STACK_GROWS_DOWNWARD)
3946 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3948 args_size -= this_delta;
3951 return saw_unknown ? INT_MIN : args_size;
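/* As an example of the backward walk above: pushing two SImode arguments
   on a downward-growing stack with END_ARGS_SIZE == 8 annotates the later
   push with REG_ARGS_SIZE 8 and the earlier one with REG_ARGS_SIZE 4,
   i.e. each note records the args size in effect after its insn.
   (Assumes a 4-byte SImode; purely illustrative.)  */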
3954 #ifdef PUSH_ROUNDING
3955 /* Emit single push insn. */
3957 static void
3958 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
3960 rtx dest_addr;
3961 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3962 rtx dest;
3963 enum insn_code icode;
3965 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
 3966 /* If there is a push pattern, use it. Otherwise fall back to the old way
 3967 of handing a MEM that represents the push operation to the move expander. */
3968 icode = optab_handler (push_optab, mode);
3969 if (icode != CODE_FOR_nothing)
3971 struct expand_operand ops[1];
3973 create_input_operand (&ops[0], x, mode);
3974 if (maybe_expand_insn (icode, 1, ops))
3975 return;
3977 if (GET_MODE_SIZE (mode) == rounded_size)
3978 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3979 /* If we are to pad downward, adjust the stack pointer first and
3980 then store X into the stack location using an offset. This is
3981 because emit_move_insn does not know how to pad; it does not have
 3982 access to the type. */
3983 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3985 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3986 HOST_WIDE_INT offset;
3988 emit_move_insn (stack_pointer_rtx,
3989 expand_binop (Pmode,
3990 STACK_GROWS_DOWNWARD ? sub_optab
3991 : add_optab,
3992 stack_pointer_rtx,
3993 gen_int_mode (rounded_size, Pmode),
3994 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3996 offset = (HOST_WIDE_INT) padding_size;
3997 if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
3998 /* We have already decremented the stack pointer, so get the
3999 previous value. */
4000 offset += (HOST_WIDE_INT) rounded_size;
4002 if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
4003 /* We have already incremented the stack pointer, so get the
4004 previous value. */
4005 offset -= (HOST_WIDE_INT) rounded_size;
4007 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4008 gen_int_mode (offset, Pmode));
4010 else
4012 if (STACK_GROWS_DOWNWARD)
4013 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4014 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4015 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4016 Pmode));
4017 else
4018 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4019 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4020 gen_int_mode (rounded_size, Pmode));
4022 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4025 dest = gen_rtx_MEM (mode, dest_addr);
4027 if (type != 0)
4029 set_mem_attributes (dest, type, 1);
4031 if (cfun->tail_call_marked)
4032 /* Function incoming arguments may overlap with sibling call
4033 outgoing arguments and we cannot allow reordering of reads
4034 from function arguments with stores to outgoing arguments
4035 of sibling calls. */
4036 set_mem_alias_set (dest, 0);
4038 emit_move_insn (dest, x);
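/* A concrete case of the padding logic above: pushing a 1-byte value
   whose PUSH_ROUNDING is 4 on a downward-growing stack that pads
   downward first drops the stack pointer by 4 and then stores the byte
   at sp + 3, leaving the three bytes of padding at the lower addresses.
   (Assumes STACK_PUSH_CODE is PRE_DEC; other configurations shift the
   offset as handled above.)  */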
4041 /* Emit and annotate a single push insn. */
4043 static void
4044 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4046 int delta, old_delta = stack_pointer_delta;
4047 rtx_insn *prev = get_last_insn ();
4048 rtx_insn *last;
4050 emit_single_push_insn_1 (mode, x, type);
4052 last = get_last_insn ();
4054 /* Notice the common case where we emitted exactly one insn. */
4055 if (PREV_INSN (last) == prev)
4057 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4058 return;
4061 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4062 gcc_assert (delta == INT_MIN || delta == old_delta);
4064 #endif
4066 /* If reading SIZE bytes from X will end up reading from
 4067 Y, return the number of bytes that overlap. Return -1
 4068 if there is no overlap, or -2 if the overlap cannot be determined
 4069 (for example when X and Y have different base registers).
4071 static int
4072 memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
4074 rtx tmp = plus_constant (Pmode, x, size);
4075 rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);
4077 if (!CONST_INT_P (sub))
4078 return -2;
4080 HOST_WIDE_INT val = INTVAL (sub);
4082 return IN_RANGE (val, 1, size) ? val : -1;
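/* E.g. if X is the stack pointer, Y is sp + 8 and SIZE is 16, the
   subtraction above evaluates to 8, so 8 bytes of the load from X
   overlap Y.  If instead Y were sp + 16 or beyond, the result would
   fall outside [1, SIZE] and -1 (no overlap) would be returned.  */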
4085 /* Generate code to push X onto the stack, assuming it has mode MODE and
4086 type TYPE.
4087 MODE is redundant except when X is a CONST_INT (since they don't
4088 carry mode info).
4089 SIZE is an rtx for the size of data to be copied (in bytes),
4090 needed only if X is BLKmode.
4091 Return true if successful. May return false if asked to push a
4092 partial argument during a sibcall optimization (as specified by
4093 SIBCALL_P) and the incoming and outgoing pointers cannot be shown
4094 to not overlap.
4096 ALIGN (in bits) is maximum alignment we can assume.
4098 If PARTIAL and REG are both nonzero, then copy that many of the first
4099 bytes of X into registers starting with REG, and push the rest of X.
4100 The amount of space pushed is decreased by PARTIAL bytes.
4101 REG must be a hard register in this case.
 4102 If REG is zero but PARTIAL is not, take all other actions for an
4103 argument partially in registers, but do not actually load any
4104 registers.
4106 EXTRA is the amount in bytes of extra space to leave next to this arg.
4107 This is ignored if an argument block has already been allocated.
4109 On a machine that lacks real push insns, ARGS_ADDR is the address of
4110 the bottom of the argument block for this call. We use indexing off there
 4111 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4112 argument block has not been preallocated.
4114 ARGS_SO_FAR is the size of args previously pushed for this call.
4116 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4117 for arguments passed in registers. If nonzero, it will be the number
4118 of bytes required. */
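/* As a concrete illustration of PARTIAL and REG on a hypothetical
   32-bit target (UNITS_PER_WORD == 4): a 16-byte BLKmode argument with
   PARTIAL == 8 and REG == r4 has its first two words loaded into r4/r5
   by the code below, while only the remaining 8 bytes are pushed onto
   the stack.  The register names are illustrative only.  */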
4120 bool
4121 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4122 unsigned int align, int partial, rtx reg, int extra,
4123 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4124 rtx alignment_pad, bool sibcall_p)
4126 rtx xinner;
4127 enum direction stack_direction = STACK_GROWS_DOWNWARD ? downward : upward;
4129 /* Decide where to pad the argument: `downward' for below,
4130 `upward' for above, or `none' for don't pad it.
4131 Default is below for small data on big-endian machines; else above. */
4132 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4134 /* Invert direction if stack is post-decrement.
4135 FIXME: why? */
4136 if (STACK_PUSH_CODE == POST_DEC)
4137 if (where_pad != none)
4138 where_pad = (where_pad == downward ? upward : downward);
4140 xinner = x;
4142 int nregs = partial / UNITS_PER_WORD;
4143 rtx *tmp_regs = NULL;
4144 int overlapping = 0;
4146 if (mode == BLKmode
4147 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4149 /* Copy a block into the stack, entirely or partially. */
4151 rtx temp;
4152 int used;
4153 int offset;
4154 int skip;
4156 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4157 used = partial - offset;
4159 if (mode != BLKmode)
4161 /* A value is to be stored in an insufficiently aligned
4162 stack slot; copy via a suitably aligned slot if
4163 necessary. */
4164 size = GEN_INT (GET_MODE_SIZE (mode));
4165 if (!MEM_P (xinner))
4167 temp = assign_temp (type, 1, 1);
4168 emit_move_insn (temp, xinner);
4169 xinner = temp;
4173 gcc_assert (size);
4175 /* USED is now the # of bytes we need not copy to the stack
4176 because registers will take care of them. */
4178 if (partial != 0)
4179 xinner = adjust_address (xinner, BLKmode, used);
4181 /* If the partial register-part of the arg counts in its stack size,
4182 skip the part of stack space corresponding to the registers.
4183 Otherwise, start copying to the beginning of the stack space,
4184 by setting SKIP to 0. */
4185 skip = (reg_parm_stack_space == 0) ? 0 : used;
4187 #ifdef PUSH_ROUNDING
4188 /* Do it with several push insns if that doesn't take lots of insns
4189 and if there is no difficulty with push insns that skip bytes
4190 on the stack for alignment purposes. */
4191 if (args_addr == 0
4192 && PUSH_ARGS
4193 && CONST_INT_P (size)
4194 && skip == 0
4195 && MEM_ALIGN (xinner) >= align
4196 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4197 /* Here we avoid the case of a structure whose weak alignment
4198 forces many pushes of a small amount of data,
4199 and such small pushes do rounding that causes trouble. */
4200 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4201 || align >= BIGGEST_ALIGNMENT
4202 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4203 == (align / BITS_PER_UNIT)))
4204 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4206 /* Push padding now if padding above and stack grows down,
4207 or if padding below and stack grows up.
4208 But if space already allocated, this has already been done. */
4209 if (extra && args_addr == 0
4210 && where_pad != none && where_pad != stack_direction)
4211 anti_adjust_stack (GEN_INT (extra));
4213 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4215 else
4216 #endif /* PUSH_ROUNDING */
4218 rtx target;
4220 /* Otherwise make space on the stack and copy the data
4221 to the address of that space. */
4223 /* Deduct words put into registers from the size we must copy. */
4224 if (partial != 0)
4226 if (CONST_INT_P (size))
4227 size = GEN_INT (INTVAL (size) - used);
4228 else
4229 size = expand_binop (GET_MODE (size), sub_optab, size,
4230 gen_int_mode (used, GET_MODE (size)),
4231 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4234 /* Get the address of the stack space.
4235 In this case, we do not deal with EXTRA separately.
4236 A single stack adjust will do. */
4237 if (! args_addr)
4239 temp = push_block (size, extra, where_pad == downward);
4240 extra = 0;
4242 else if (CONST_INT_P (args_so_far))
4243 temp = memory_address (BLKmode,
4244 plus_constant (Pmode, args_addr,
4245 skip + INTVAL (args_so_far)));
4246 else
4247 temp = memory_address (BLKmode,
4248 plus_constant (Pmode,
4249 gen_rtx_PLUS (Pmode,
4250 args_addr,
4251 args_so_far),
4252 skip));
4254 if (!ACCUMULATE_OUTGOING_ARGS)
4256 /* If the source is referenced relative to the stack pointer,
4257 copy it to another register to stabilize it. We do not need
4258 to do this if we know that we won't be changing sp. */
4260 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4261 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4262 temp = copy_to_reg (temp);
4265 target = gen_rtx_MEM (BLKmode, temp);
4267 /* We do *not* set_mem_attributes here, because incoming arguments
4268 may overlap with sibling call outgoing arguments and we cannot
4269 allow reordering of reads from function arguments with stores
4270 to outgoing arguments of sibling calls. We do, however, want
4271 to record the alignment of the stack slot. */
4272 /* ALIGN may well be better aligned than TYPE, e.g. due to
4273 PARM_BOUNDARY. Assume the caller isn't lying. */
4274 set_mem_align (target, align);
4276 /* If part should go in registers and pushing to that part would
4277 overwrite some of the values that need to go into regs, load the
4278 overlapping values into temporary pseudos to be moved into the hard
4279 regs at the end after the stack pushing has completed.
4280 We cannot load them directly into the hard regs here because
4281 they can be clobbered by the block move expansions.
4282 See PR 65358. */
4284 if (partial > 0 && reg != 0 && mode == BLKmode
4285 && GET_CODE (reg) != PARALLEL)
4287 overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
4288 if (overlapping > 0)
4290 gcc_assert (overlapping % UNITS_PER_WORD == 0);
4291 overlapping /= UNITS_PER_WORD;
4293 tmp_regs = XALLOCAVEC (rtx, overlapping);
4295 for (int i = 0; i < overlapping; i++)
4296 tmp_regs[i] = gen_reg_rtx (word_mode);
4298 for (int i = 0; i < overlapping; i++)
4299 emit_move_insn (tmp_regs[i],
4300 operand_subword_force (target, i, mode));
4302 else if (overlapping == -1)
4303 overlapping = 0;
4304 /* Could not determine whether there is overlap.
4305 Fail the sibcall. */
4306 else
4308 overlapping = 0;
4309 if (sibcall_p)
4310 return false;
4313 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4316 else if (partial > 0)
4318 /* Scalar partly in registers. */
4320 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4321 int i;
4322 int not_stack;
4323 /* # bytes of start of argument
4324 that we must make space for but need not store. */
4325 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4326 int args_offset = INTVAL (args_so_far);
4327 int skip;
4329 /* Push padding now if padding above and stack grows down,
4330 or if padding below and stack grows up.
4331 But if space already allocated, this has already been done. */
4332 if (extra && args_addr == 0
4333 && where_pad != none && where_pad != stack_direction)
4334 anti_adjust_stack (GEN_INT (extra));
4336 /* If we make space by pushing it, we might as well push
4337 the real data. Otherwise, we can leave OFFSET nonzero
4338 and leave the space uninitialized. */
4339 if (args_addr == 0)
4340 offset = 0;
4342 /* Now NOT_STACK gets the number of words that we don't need to
4343 allocate on the stack. Convert OFFSET to words too. */
4344 not_stack = (partial - offset) / UNITS_PER_WORD;
4345 offset /= UNITS_PER_WORD;
4347 /* If the partial register-part of the arg counts in its stack size,
4348 skip the part of stack space corresponding to the registers.
4349 Otherwise, start copying to the beginning of the stack space,
4350 by setting SKIP to 0. */
4351 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4353 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4354 x = validize_mem (force_const_mem (mode, x));
4356 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4357 SUBREGs of such registers are not allowed. */
4358 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4359 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4360 x = copy_to_reg (x);
4362 /* Loop over all the words allocated on the stack for this arg. */
4363 /* We can do it by words, because any scalar bigger than a word
4364 has a size a multiple of a word. */
4365 for (i = size - 1; i >= not_stack; i--)
4366 if (i >= not_stack + offset)
4367 if (!emit_push_insn (operand_subword_force (x, i, mode),
4368 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4369 0, args_addr,
4370 GEN_INT (args_offset + ((i - not_stack + skip)
4371 * UNITS_PER_WORD)),
4372 reg_parm_stack_space, alignment_pad, sibcall_p))
4373 return false;
4375 else
4377 rtx addr;
4378 rtx dest;
4380 /* Push padding now if padding above and stack grows down,
4381 or if padding below and stack grows up.
4382 But if space already allocated, this has already been done. */
4383 if (extra && args_addr == 0
4384 && where_pad != none && where_pad != stack_direction)
4385 anti_adjust_stack (GEN_INT (extra));
4387 #ifdef PUSH_ROUNDING
4388 if (args_addr == 0 && PUSH_ARGS)
4389 emit_single_push_insn (mode, x, type);
4390 else
4391 #endif
4393 if (CONST_INT_P (args_so_far))
4394 addr
4395 = memory_address (mode,
4396 plus_constant (Pmode, args_addr,
4397 INTVAL (args_so_far)));
4398 else
4399 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4400 args_so_far));
4401 dest = gen_rtx_MEM (mode, addr);
4403 /* We do *not* set_mem_attributes here, because incoming arguments
4404 may overlap with sibling call outgoing arguments and we cannot
4405 allow reordering of reads from function arguments with stores
4406 to outgoing arguments of sibling calls. We do, however, want
4407 to record the alignment of the stack slot. */
4408 /* ALIGN may well be better aligned than TYPE, e.g. due to
4409 PARM_BOUNDARY. Assume the caller isn't lying. */
4410 set_mem_align (dest, align);
4412 emit_move_insn (dest, x);
4416 /* Move the partial arguments into the registers and any overlapping
4417 values that we moved into the pseudos in tmp_regs. */
4418 if (partial > 0 && reg != 0)
4420 /* Handle calls that pass values in multiple non-contiguous locations.
4421 The Irix 6 ABI has examples of this. */
4422 if (GET_CODE (reg) == PARALLEL)
4423 emit_group_load (reg, x, type, -1);
4424 else
4426 gcc_assert (partial % UNITS_PER_WORD == 0);
4427 move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);
4429 for (int i = 0; i < overlapping; i++)
4430 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
4431 + nregs - overlapping + i),
4432 tmp_regs[i]);
4437 if (extra && args_addr == 0 && where_pad == stack_direction)
4438 anti_adjust_stack (GEN_INT (extra));
4440 if (alignment_pad && args_addr == 0)
4441 anti_adjust_stack (alignment_pad);
4443 return true;
4446 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4447 operations. */
4449 static rtx
4450 get_subtarget (rtx x)
4452 return (optimize
4453 || x == 0
4454 /* Only registers can be subtargets. */
4455 || !REG_P (x)
4456 /* Don't use hard regs to avoid extending their life. */
4457 || REGNO (x) < FIRST_PSEUDO_REGISTER
4458 ? 0 : x);
4461 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4462 FIELD is a bitfield. Returns true if the optimization was successful,
4463 and there's nothing else to do. */
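/* For example, with
     struct S { unsigned lo : 5; unsigned top : 27; } s;
   an increment like "s.top += 1" needs no masking because carries out of
   the topmost field simply fall off the containing word, and a 1-bit
   field update such as "s.f ^= 1" reduces to xor-ing a shifted mask into
   the word.  Those are the shapes recognized below; the struct and field
   layout are illustrative only.  */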
4465 static bool
4466 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4467 unsigned HOST_WIDE_INT bitpos,
4468 unsigned HOST_WIDE_INT bitregion_start,
4469 unsigned HOST_WIDE_INT bitregion_end,
4470 machine_mode mode1, rtx str_rtx,
4471 tree to, tree src)
4473 machine_mode str_mode = GET_MODE (str_rtx);
4474 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4475 tree op0, op1;
4476 rtx value, result;
4477 optab binop;
4478 gimple srcstmt;
4479 enum tree_code code;
4481 if (mode1 != VOIDmode
4482 || bitsize >= BITS_PER_WORD
4483 || str_bitsize > BITS_PER_WORD
4484 || TREE_SIDE_EFFECTS (to)
4485 || TREE_THIS_VOLATILE (to))
4486 return false;
4488 STRIP_NOPS (src);
4489 if (TREE_CODE (src) != SSA_NAME)
4490 return false;
4491 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4492 return false;
4494 srcstmt = get_gimple_for_ssa_name (src);
4495 if (!srcstmt
4496 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4497 return false;
4499 code = gimple_assign_rhs_code (srcstmt);
4501 op0 = gimple_assign_rhs1 (srcstmt);
4503 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4504 to find its initialization. Hopefully the initialization will
4505 be from a bitfield load. */
4506 if (TREE_CODE (op0) == SSA_NAME)
4508 gimple op0stmt = get_gimple_for_ssa_name (op0);
4510 /* We want to eventually have OP0 be the same as TO, which
4511 should be a bitfield. */
4512 if (!op0stmt
4513 || !is_gimple_assign (op0stmt)
4514 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4515 return false;
4516 op0 = gimple_assign_rhs1 (op0stmt);
4519 op1 = gimple_assign_rhs2 (srcstmt);
4521 if (!operand_equal_p (to, op0, 0))
4522 return false;
4524 if (MEM_P (str_rtx))
4526 unsigned HOST_WIDE_INT offset1;
4528 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4529 str_mode = word_mode;
4530 str_mode = get_best_mode (bitsize, bitpos,
4531 bitregion_start, bitregion_end,
4532 MEM_ALIGN (str_rtx), str_mode, 0);
4533 if (str_mode == VOIDmode)
4534 return false;
4535 str_bitsize = GET_MODE_BITSIZE (str_mode);
4537 offset1 = bitpos;
4538 bitpos %= str_bitsize;
4539 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4540 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4542 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4543 return false;
4545 /* If the bit field covers the whole REG/MEM, store_field
4546 will likely generate better code. */
4547 if (bitsize >= str_bitsize)
4548 return false;
4550 /* We can't handle fields split across multiple entities. */
4551 if (bitpos + bitsize > str_bitsize)
4552 return false;
4554 if (BYTES_BIG_ENDIAN)
4555 bitpos = str_bitsize - bitpos - bitsize;
4557 switch (code)
4559 case PLUS_EXPR:
4560 case MINUS_EXPR:
4561 /* For now, just optimize the case of the topmost bitfield
4562 where we don't need to do any masking and also
4563 1 bit bitfields where xor can be used.
4564 We might win by one instruction for the other bitfields
4565 too if insv/extv instructions aren't used, so that
4566 can be added later. */
4567 if (bitpos + bitsize != str_bitsize
4568 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4569 break;
4571 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4572 value = convert_modes (str_mode,
4573 TYPE_MODE (TREE_TYPE (op1)), value,
4574 TYPE_UNSIGNED (TREE_TYPE (op1)));
4576 /* We may be accessing data outside the field, which means
4577 we can alias adjacent data. */
4578 if (MEM_P (str_rtx))
4580 str_rtx = shallow_copy_rtx (str_rtx);
4581 set_mem_alias_set (str_rtx, 0);
4582 set_mem_expr (str_rtx, 0);
4585 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4586 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4588 value = expand_and (str_mode, value, const1_rtx, NULL);
4589 binop = xor_optab;
4591 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4592 result = expand_binop (str_mode, binop, str_rtx,
4593 value, str_rtx, 1, OPTAB_WIDEN);
4594 if (result != str_rtx)
4595 emit_move_insn (str_rtx, result);
4596 return true;
4598 case BIT_IOR_EXPR:
4599 case BIT_XOR_EXPR:
4600 if (TREE_CODE (op1) != INTEGER_CST)
4601 break;
4602 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4603 value = convert_modes (str_mode,
4604 TYPE_MODE (TREE_TYPE (op1)), value,
4605 TYPE_UNSIGNED (TREE_TYPE (op1)));
4607 /* We may be accessing data outside the field, which means
4608 we can alias adjacent data. */
4609 if (MEM_P (str_rtx))
4611 str_rtx = shallow_copy_rtx (str_rtx);
4612 set_mem_alias_set (str_rtx, 0);
4613 set_mem_expr (str_rtx, 0);
4616 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4617 if (bitpos + bitsize != str_bitsize)
4619 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4620 str_mode);
4621 value = expand_and (str_mode, value, mask, NULL_RTX);
4623 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4624 result = expand_binop (str_mode, binop, str_rtx,
4625 value, str_rtx, 1, OPTAB_WIDEN);
4626 if (result != str_rtx)
4627 emit_move_insn (str_rtx, result);
4628 return true;
4630 default:
4631 break;
4634 return false;
4637 /* In the C++ memory model, consecutive bit fields in a structure are
4638 considered one memory location.
4640 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4641 returns the bit range of consecutive bits in which this COMPONENT_REF
4642 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4643 and *OFFSET may be adjusted in the process.
4645 If the access does not need to be restricted, 0 is returned in both
4646 *BITSTART and *BITEND. */
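/* For instance, given
     struct S { int a : 3; int b : 6; char c; };
   the fields a and b share one DECL_BIT_FIELD_REPRESENTATIVE, so a store
   to s.b is given a bit range covering only that representative: it may
   touch a but must never touch c, per the C++ memory model.  (Layout
   details are target-dependent; the struct is illustrative.)  */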
4648 static void
4649 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4650 unsigned HOST_WIDE_INT *bitend,
4651 tree exp,
4652 HOST_WIDE_INT *bitpos,
4653 tree *offset)
4655 HOST_WIDE_INT bitoffset;
4656 tree field, repr;
4658 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4660 field = TREE_OPERAND (exp, 1);
4661 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4662 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4663 need to limit the range we can access. */
4664 if (!repr)
4666 *bitstart = *bitend = 0;
4667 return;
4670 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4671 part of a larger bit field, then the representative does not serve any
4672 useful purpose. This can occur in Ada. */
4673 if (handled_component_p (TREE_OPERAND (exp, 0)))
4675 machine_mode rmode;
4676 HOST_WIDE_INT rbitsize, rbitpos;
4677 tree roffset;
4678 int unsignedp;
4679 int volatilep = 0;
4680 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4681 &roffset, &rmode, &unsignedp, &volatilep, false);
4682 if ((rbitpos % BITS_PER_UNIT) != 0)
4684 *bitstart = *bitend = 0;
4685 return;
4689 /* Compute the adjustment to bitpos from the offset of the field
4690 relative to the representative. DECL_FIELD_OFFSET of field and
4691 repr are the same by construction if they are not constants,
4692 see finish_bitfield_layout. */
4693 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4694 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4695 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4696 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4697 else
4698 bitoffset = 0;
4699 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4700 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4702 /* If the adjustment is larger than bitpos, we would have a negative bit
4703 position for the lower bound and this may wreak havoc later. Adjust
4704 offset and bitpos to make the lower bound non-negative in that case. */
4705 if (bitoffset > *bitpos)
4707 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4708 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4710 *bitpos += adjust;
4711 if (*offset == NULL_TREE)
4712 *offset = size_int (-adjust / BITS_PER_UNIT);
4713 else
4714 *offset
4715 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4716 *bitstart = 0;
4718 else
4719 *bitstart = *bitpos - bitoffset;
4721 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4724 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4725 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4726 DECL_RTL was not set yet, return NORTL. */
4728 static inline bool
4729 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4731 if (TREE_CODE (addr) != ADDR_EXPR)
4732 return false;
4734 tree base = TREE_OPERAND (addr, 0);
4736 if (!DECL_P (base)
4737 || TREE_ADDRESSABLE (base)
4738 || DECL_MODE (base) == BLKmode)
4739 return false;
4741 if (!DECL_RTL_SET_P (base))
4742 return nortl;
4744 return (!MEM_P (DECL_RTL (base)));
4747 /* Returns true if the MEM_REF REF refers to an object that does not
4748 reside in memory and has non-BLKmode. */
4750 static inline bool
4751 mem_ref_refers_to_non_mem_p (tree ref)
4753 tree base = TREE_OPERAND (ref, 0);
4754 return addr_expr_of_non_mem_decl_p_1 (base, false);
4757 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4758 is true, try generating a nontemporal store. */
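/* A simple case handled by the misaligned-store path below, assuming a
   target where SImode wants stricter than byte alignment:
     typedef int unaligned_int __attribute__ ((aligned (1)));
     unaligned_int *p;
     *p = v;
   The MEM_REF store is under-aligned for SImode, so it goes through the
   movmisalign optab when the target provides it, and otherwise through
   store_bit_field.  (The typedef is illustrative only.)  */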
4760 void
4761 expand_assignment (tree to, tree from, bool nontemporal)
4763 rtx to_rtx = 0;
4764 rtx result;
4765 machine_mode mode;
4766 unsigned int align;
4767 enum insn_code icode;
4769 /* Don't crash if the lhs of the assignment was erroneous. */
4770 if (TREE_CODE (to) == ERROR_MARK)
4772 expand_normal (from);
4773 return;
4776 /* Optimize away no-op moves without side-effects. */
4777 if (operand_equal_p (to, from, 0))
4778 return;
4780 /* Handle misaligned stores. */
4781 mode = TYPE_MODE (TREE_TYPE (to));
4782 if ((TREE_CODE (to) == MEM_REF
4783 || TREE_CODE (to) == TARGET_MEM_REF)
4784 && mode != BLKmode
4785 && !mem_ref_refers_to_non_mem_p (to)
4786 && ((align = get_object_alignment (to))
4787 < GET_MODE_ALIGNMENT (mode))
4788 && (((icode = optab_handler (movmisalign_optab, mode))
4789 != CODE_FOR_nothing)
4790 || SLOW_UNALIGNED_ACCESS (mode, align)))
4792 rtx reg, mem;
4794 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4795 reg = force_not_mem (reg);
4796 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4798 if (icode != CODE_FOR_nothing)
4800 struct expand_operand ops[2];
4802 create_fixed_operand (&ops[0], mem);
4803 create_input_operand (&ops[1], reg, mode);
4804 /* The movmisalign<mode> pattern cannot fail, else the assignment
4805 would silently be omitted. */
4806 expand_insn (icode, 2, ops);
4808 else
4809 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
4810 return;
4813 /* Assignment of a structure component needs special treatment
4814 if the structure component's rtx is not simply a MEM.
4815 Assignment of an array element at a constant index, and assignment of
4816 an array element in an unaligned packed structure field, has the same
4817 problem. Same for (partially) storing into a non-memory object. */
4818 if (handled_component_p (to)
4819 || (TREE_CODE (to) == MEM_REF
4820 && mem_ref_refers_to_non_mem_p (to))
4821 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4823 machine_mode mode1;
4824 HOST_WIDE_INT bitsize, bitpos;
4825 unsigned HOST_WIDE_INT bitregion_start = 0;
4826 unsigned HOST_WIDE_INT bitregion_end = 0;
4827 tree offset;
4828 int unsignedp;
4829 int volatilep = 0;
4830 tree tem;
4832 push_temp_slots ();
4833 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4834 &unsignedp, &volatilep, true);
4836 /* Make sure bitpos is not negative, it can wreak havoc later. */
4837 if (bitpos < 0)
4839 gcc_assert (offset == NULL_TREE);
4840 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4841 ? 3 : exact_log2 (BITS_PER_UNIT)));
4842 bitpos &= BITS_PER_UNIT - 1;
4845 if (TREE_CODE (to) == COMPONENT_REF
4846 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4847 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4848 /* The C++ memory model naturally applies to byte-aligned fields.
4849 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4850 BITSIZE are not byte-aligned, there is no need to limit the range
4851 we can access. This can occur with packed structures in Ada. */
4852 else if (bitsize > 0
4853 && bitsize % BITS_PER_UNIT == 0
4854 && bitpos % BITS_PER_UNIT == 0)
4856 bitregion_start = bitpos;
4857 bitregion_end = bitpos + bitsize - 1;
4860 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4862 /* If the field has a mode, we want to access it in the
4863 field's mode, not the computed mode.
4864 If a MEM has VOIDmode (external with incomplete type),
4865 use BLKmode for it instead. */
4866 if (MEM_P (to_rtx))
4868 if (mode1 != VOIDmode)
4869 to_rtx = adjust_address (to_rtx, mode1, 0);
4870 else if (GET_MODE (to_rtx) == VOIDmode)
4871 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4874 if (offset != 0)
4876 machine_mode address_mode;
4877 rtx offset_rtx;
4879 if (!MEM_P (to_rtx))
4881 /* We can get constant negative offsets into arrays with broken
4882 user code. Translate this to a trap instead of ICEing. */
4883 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4884 expand_builtin_trap ();
4885 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4888 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4889 address_mode = get_address_mode (to_rtx);
4890 if (GET_MODE (offset_rtx) != address_mode)
4892 /* We cannot be sure that the RTL in offset_rtx is valid outside
4893 of a memory address context, so force it into a register
4894 before attempting to convert it to the desired mode. */
4895 offset_rtx = force_operand (offset_rtx, NULL_RTX);
4896 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4899 /* If we have an expression in OFFSET_RTX and a non-zero
4900 byte offset in BITPOS, adding the byte offset before the
4901 OFFSET_RTX results in better intermediate code, which makes
4902 later rtl optimization passes perform better.
4904 We prefer intermediate code like this:
4906 r124:DI=r123:DI+0x18
4907 [r124:DI]=r121:DI
4909 ... instead of ...
4911 r124:DI=r123:DI+0x10
4912 [r124:DI+0x8]=r121:DI
4914 This is only done for aligned data values, as these can
4915 be expected to result in single move instructions. */
4916 if (mode1 != VOIDmode
4917 && bitpos != 0
4918 && bitsize > 0
4919 && (bitpos % bitsize) == 0
4920 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4921 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4923 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4924 bitregion_start = 0;
4925 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4926 bitregion_end -= bitpos;
4927 bitpos = 0;
4930 to_rtx = offset_address (to_rtx, offset_rtx,
4931 highest_pow2_factor_for_target (to,
4932 offset));
4935 /* No action is needed if the target is not a memory and the field
4936 lies completely outside that target. This can occur if the source
4937 code contains an out-of-bounds access to a small array. */
4938 if (!MEM_P (to_rtx)
4939 && GET_MODE (to_rtx) != BLKmode
4940 && (unsigned HOST_WIDE_INT) bitpos
4941 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4943 expand_normal (from);
4944 result = NULL;
4946 /* Handle expand_expr of a complex value returning a CONCAT. */
4947 else if (GET_CODE (to_rtx) == CONCAT)
4949 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4950 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4951 && bitpos == 0
4952 && bitsize == mode_bitsize)
4953 result = store_expr (from, to_rtx, false, nontemporal);
4954 else if (bitsize == mode_bitsize / 2
4955 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4956 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4957 nontemporal);
4958 else if (bitpos + bitsize <= mode_bitsize / 2)
4959 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4960 bitregion_start, bitregion_end,
4961 mode1, from,
4962 get_alias_set (to), nontemporal);
4963 else if (bitpos >= mode_bitsize / 2)
4964 result = store_field (XEXP (to_rtx, 1), bitsize,
4965 bitpos - mode_bitsize / 2,
4966 bitregion_start, bitregion_end,
4967 mode1, from,
4968 get_alias_set (to), nontemporal);
4969 else if (bitpos == 0 && bitsize == mode_bitsize)
4971 rtx from_rtx;
4972 result = expand_normal (from);
4973 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4974 TYPE_MODE (TREE_TYPE (from)), 0);
4975 emit_move_insn (XEXP (to_rtx, 0),
4976 read_complex_part (from_rtx, false));
4977 emit_move_insn (XEXP (to_rtx, 1),
4978 read_complex_part (from_rtx, true));
4980 else
4982 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4983 GET_MODE_SIZE (GET_MODE (to_rtx)));
4984 write_complex_part (temp, XEXP (to_rtx, 0), false);
4985 write_complex_part (temp, XEXP (to_rtx, 1), true);
4986 result = store_field (temp, bitsize, bitpos,
4987 bitregion_start, bitregion_end,
4988 mode1, from,
4989 get_alias_set (to), nontemporal);
4990 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4991 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4994 else
4996 if (MEM_P (to_rtx))
4998 /* If the field is at offset zero, we could have been given the
4999 DECL_RTX of the parent struct. Don't munge it. */
5000 to_rtx = shallow_copy_rtx (to_rtx);
5001 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
5002 if (volatilep)
5003 MEM_VOLATILE_P (to_rtx) = 1;
5006 if (optimize_bitfield_assignment_op (bitsize, bitpos,
5007 bitregion_start, bitregion_end,
5008 mode1,
5009 to_rtx, to, from))
5010 result = NULL;
5011 else
5012 result = store_field (to_rtx, bitsize, bitpos,
5013 bitregion_start, bitregion_end,
5014 mode1, from,
5015 get_alias_set (to), nontemporal);
5018 if (result)
5019 preserve_temp_slots (result);
5020 pop_temp_slots ();
5021 return;
5024 /* If the rhs is a function call and its value is not an aggregate,
5025 call the function before we start to compute the lhs.
5026 This is needed for correct code for cases such as
5027 val = setjmp (buf) on machines where reference to val
5028 requires loading up part of an address in a separate insn.
5030 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5031 since it might be a promoted variable where the zero- or sign- extension
5032 needs to be done. Handling this in the normal way is safe because no
5033 computation is done before the call. The same is true for SSA names. */
5034 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5035 && COMPLETE_TYPE_P (TREE_TYPE (from))
5036 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5037 && ! (((TREE_CODE (to) == VAR_DECL
5038 || TREE_CODE (to) == PARM_DECL
5039 || TREE_CODE (to) == RESULT_DECL)
5040 && REG_P (DECL_RTL (to)))
5041 || TREE_CODE (to) == SSA_NAME))
5043 rtx value;
5044 rtx bounds;
5046 push_temp_slots ();
5047 value = expand_normal (from);
5049 /* Split value and bounds to store them separately. */
5050 chkp_split_slot (value, &value, &bounds);
5052 if (to_rtx == 0)
5053 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5055 /* Handle calls that return values in multiple non-contiguous locations.
5056 The Irix 6 ABI has examples of this. */
5057 if (GET_CODE (to_rtx) == PARALLEL)
5059 if (GET_CODE (value) == PARALLEL)
5060 emit_group_move (to_rtx, value);
5061 else
5062 emit_group_load (to_rtx, value, TREE_TYPE (from),
5063 int_size_in_bytes (TREE_TYPE (from)));
5065 else if (GET_CODE (value) == PARALLEL)
5066 emit_group_store (to_rtx, value, TREE_TYPE (from),
5067 int_size_in_bytes (TREE_TYPE (from)));
5068 else if (GET_MODE (to_rtx) == BLKmode)
5070 /* Handle calls that return BLKmode values in registers. */
5071 if (REG_P (value))
5072 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5073 else
5074 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5076 else
5078 if (POINTER_TYPE_P (TREE_TYPE (to)))
5079 value = convert_memory_address_addr_space
5080 (GET_MODE (to_rtx), value,
5081 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5083 emit_move_insn (to_rtx, value);
5086 /* Store bounds if required. */
5087 if (bounds
5088 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5090 gcc_assert (MEM_P (to_rtx));
5091 chkp_emit_bounds_store (bounds, value, to_rtx);
5094 preserve_temp_slots (to_rtx);
5095 pop_temp_slots ();
5096 return;
5099 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5100 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5102 /* Don't move directly into a return register. */
5103 if (TREE_CODE (to) == RESULT_DECL
5104 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5106 rtx temp;
5108 push_temp_slots ();
5110 /* If the source is itself a return value, it still is in a pseudo at
5111 this point so we can move it back to the return register directly. */
5112 if (REG_P (to_rtx)
5113 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5114 && TREE_CODE (from) != CALL_EXPR)
5115 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5116 else
5117 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5119 /* Handle calls that return values in multiple non-contiguous locations.
5120 The Irix 6 ABI has examples of this. */
5121 if (GET_CODE (to_rtx) == PARALLEL)
5123 if (GET_CODE (temp) == PARALLEL)
5124 emit_group_move (to_rtx, temp);
5125 else
5126 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5127 int_size_in_bytes (TREE_TYPE (from)));
5129 else if (temp)
5130 emit_move_insn (to_rtx, temp);
5132 preserve_temp_slots (to_rtx);
5133 pop_temp_slots ();
5134 return;
5137 /* In case we are returning the contents of an object which overlaps
5138 the place the value is being stored, use a safe function when copying
5139 a value through a pointer into a structure value return block. */
5140 if (TREE_CODE (to) == RESULT_DECL
5141 && TREE_CODE (from) == INDIRECT_REF
5142 && ADDR_SPACE_GENERIC_P
5143 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5144 && refs_may_alias_p (to, from)
5145 && cfun->returns_struct
5146 && !cfun->returns_pcc_struct)
5148 rtx from_rtx, size;
5150 push_temp_slots ();
5151 size = expr_size (from);
5152 from_rtx = expand_normal (from);
5154 emit_library_call (memmove_libfunc, LCT_NORMAL,
5155 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5156 XEXP (from_rtx, 0), Pmode,
5157 convert_to_mode (TYPE_MODE (sizetype),
5158 size, TYPE_UNSIGNED (sizetype)),
5159 TYPE_MODE (sizetype));
5161 preserve_temp_slots (to_rtx);
5162 pop_temp_slots ();
5163 return;
5166 /* Compute FROM and store the value in the rtx we got. */
5168 push_temp_slots ();
5169 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, to);
5170 preserve_temp_slots (result);
5171 pop_temp_slots ();
5172 return;
5175 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5176 succeeded, false otherwise. */
5178 bool
5179 emit_storent_insn (rtx to, rtx from)
5181 struct expand_operand ops[2];
5182 machine_mode mode = GET_MODE (to);
5183 enum insn_code code = optab_handler (storent_optab, mode);
5185 if (code == CODE_FOR_nothing)
5186 return false;
5188 create_fixed_operand (&ops[0], to);
5189 create_input_operand (&ops[1], from, mode);
5190 return maybe_expand_insn (code, 2, ops);
5193 /* Generate code for computing expression EXP,
5194 and storing the value into TARGET.
5196 If the mode is BLKmode then we may return TARGET itself.
 5197 It turns out that in BLKmode it doesn't cause a problem,
 5198 because C has no operators that could combine two different
5199 assignments into the same BLKmode object with different values
5200 with no sequence point. Will other languages need this to
5201 be more thorough?
5203 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5204 stack, and block moves may need to be treated specially.
5206 If NONTEMPORAL is true, try using a nontemporal store instruction.
5208 If BTARGET is not NULL then computed bounds of EXP are
5209 associated with BTARGET. */
 5211 rtx
 5212 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5213 bool nontemporal, tree btarget)
5215 rtx temp;
5216 rtx alt_rtl = NULL_RTX;
5217 location_t loc = curr_insn_location ();
5219 if (VOID_TYPE_P (TREE_TYPE (exp)))
5221 /* C++ can generate ?: expressions with a throw expression in one
5222 branch and an rvalue in the other. Here, we resolve attempts to
5223 store the throw expression's nonexistent result. */
5224 gcc_assert (!call_param_p);
5225 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5226 return NULL_RTX;
5228 if (TREE_CODE (exp) == COMPOUND_EXPR)
5230 /* Perform first part of compound expression, then assign from second
5231 part. */
5232 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5233 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5234 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5235 call_param_p, nontemporal, btarget);
5237 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
 5239 /* For a conditional expression, get a safe form of the target. Then
5240 test the condition, doing the appropriate assignment on either
5241 side. This avoids the creation of unnecessary temporaries.
5242 For non-BLKmode, it is more efficient not to do this. */
5244 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5246 do_pending_stack_adjust ();
5247 NO_DEFER_POP;
5248 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5249 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5250 nontemporal, btarget);
5251 emit_jump_insn (targetm.gen_jump (lab2));
5252 emit_barrier ();
5253 emit_label (lab1);
5254 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5255 nontemporal, btarget);
5256 emit_label (lab2);
5257 OK_DEFER_POP;
5259 return NULL_RTX;
5261 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5262 /* If this is a scalar in a register that is stored in a wider mode
5263 than the declared mode, compute the result into its declared mode
5264 and then convert to the wider mode. Our value is the computed
5265 expression. */
5267 rtx inner_target = 0;
5269 /* We can do the conversion inside EXP, which will often result
5270 in some optimizations. Do the conversion in two steps: first
 5271 change the signedness, if needed, then the extension. But don't
5272 do this if the type of EXP is a subtype of something else
5273 since then the conversion might involve more than just
5274 converting modes. */
5275 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5276 && TREE_TYPE (TREE_TYPE (exp)) == 0
5277 && GET_MODE_PRECISION (GET_MODE (target))
5278 == TYPE_PRECISION (TREE_TYPE (exp)))
5280 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5281 TYPE_UNSIGNED (TREE_TYPE (exp))))
5283 /* Some types, e.g. Fortran's logical*4, won't have a signed
5284 version, so use the mode instead. */
5285 tree ntype
5286 = (signed_or_unsigned_type_for
5287 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5288 if (ntype == NULL)
5289 ntype = lang_hooks.types.type_for_mode
5290 (TYPE_MODE (TREE_TYPE (exp)),
5291 SUBREG_PROMOTED_SIGN (target));
5293 exp = fold_convert_loc (loc, ntype, exp);
5296 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5297 (GET_MODE (SUBREG_REG (target)),
5298 SUBREG_PROMOTED_SIGN (target)),
5299 exp);
5301 inner_target = SUBREG_REG (target);
5304 temp = expand_expr (exp, inner_target, VOIDmode,
5305 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5307 /* Handle bounds returned by call. */
5308 if (TREE_CODE (exp) == CALL_EXPR)
5310 rtx bounds;
5311 chkp_split_slot (temp, &temp, &bounds);
5312 if (bounds && btarget)
5314 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5315 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5316 chkp_set_rtl_bounds (btarget, tmp);
5320 /* If TEMP is a VOIDmode constant, use convert_modes to make
5321 sure that we properly convert it. */
5322 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5324 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5325 temp, SUBREG_PROMOTED_SIGN (target));
5326 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5327 GET_MODE (target), temp,
5328 SUBREG_PROMOTED_SIGN (target));
5331 convert_move (SUBREG_REG (target), temp,
5332 SUBREG_PROMOTED_SIGN (target));
5334 return NULL_RTX;
5336 else if ((TREE_CODE (exp) == STRING_CST
5337 || (TREE_CODE (exp) == MEM_REF
5338 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5339 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5340 == STRING_CST
5341 && integer_zerop (TREE_OPERAND (exp, 1))))
5342 && !nontemporal && !call_param_p
5343 && MEM_P (target))
5345 /* Optimize initialization of an array with a STRING_CST. */
5346 HOST_WIDE_INT exp_len, str_copy_len;
5347 rtx dest_mem;
5348 tree str = TREE_CODE (exp) == STRING_CST
5349 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5351 exp_len = int_expr_size (exp);
5352 if (exp_len <= 0)
5353 goto normal_expr;
5355 if (TREE_STRING_LENGTH (str) <= 0)
5356 goto normal_expr;
5358 str_copy_len = strlen (TREE_STRING_POINTER (str));
5359 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5360 goto normal_expr;
5362 str_copy_len = TREE_STRING_LENGTH (str);
5363 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5364 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5366 str_copy_len += STORE_MAX_PIECES - 1;
5367 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5369 str_copy_len = MIN (str_copy_len, exp_len);
5370 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5371 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5372 MEM_ALIGN (target), false))
5373 goto normal_expr;
5375 dest_mem = target;
5377 dest_mem = store_by_pieces (dest_mem,
5378 str_copy_len, builtin_strncpy_read_str,
5379 CONST_CAST (char *,
5380 TREE_STRING_POINTER (str)),
5381 MEM_ALIGN (target), false,
5382 exp_len > str_copy_len ? 1 : 0);
5383 if (exp_len > str_copy_len)
5384 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5385 GEN_INT (exp_len - str_copy_len),
5386 BLOCK_OP_NORMAL);
5387 return NULL_RTX;
5389 else
5391 rtx tmp_target;
5393 normal_expr:
5394 /* If we want to use a nontemporal store, force the value to
5395 register first. */
5396 tmp_target = nontemporal ? NULL_RTX : target;
5397 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5398 (call_param_p
5399 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5400 &alt_rtl, false);
5402 /* Handle bounds returned by call. */
5403 if (TREE_CODE (exp) == CALL_EXPR)
5405 rtx bounds;
5406 chkp_split_slot (temp, &temp, &bounds);
5407 if (bounds && btarget)
5409 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5410 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5411 chkp_set_rtl_bounds (btarget, tmp);
5416 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5417 the same as that of TARGET, adjust the constant. This is needed, for
5418 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5419 only a word-sized value. */
5420 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5421 && TREE_CODE (exp) != ERROR_MARK
5422 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5423 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5424 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5426 /* If value was not generated in the target, store it there.
5427 Convert the value to TARGET's type first if necessary and emit the
5428 pending incrementations that have been queued when expanding EXP.
5429 Note that we cannot emit the whole queue blindly because this will
5430 effectively disable the POST_INC optimization later.
5432 If TEMP and TARGET compare equal according to rtx_equal_p, but
5433 one or both of them are volatile memory refs, we have to distinguish
5434 two cases:
5435 - expand_expr has used TARGET. In this case, we must not generate
5436 another copy. This can be detected by TARGET being equal according
5437 to == .
5438 - expand_expr has not used TARGET - that means that the source just
5439 happens to have the same RTX form. Since temp will have been created
5440 by expand_expr, it will compare unequal according to == .
5441 We must generate a copy in this case, to reach the correct number
5442 of volatile memory references. */
5444 if ((! rtx_equal_p (temp, target)
5445 || (temp != target && (side_effects_p (temp)
5446 || side_effects_p (target))))
5447 && TREE_CODE (exp) != ERROR_MARK
5448 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5449 but TARGET is not valid memory reference, TEMP will differ
5450 from TARGET although it is really the same location. */
5451 && !(alt_rtl
5452 && rtx_equal_p (alt_rtl, target)
5453 && !side_effects_p (alt_rtl)
5454 && !side_effects_p (target))
5455 /* If there's nothing to copy, don't bother. Don't call
 5456 expr_size unless necessary, because some front-ends' (C++)
 5457 expr_size hook must not be given objects that are not
5458 supposed to be bit-copied or bit-initialized. */
5459 && expr_size (exp) != const0_rtx)
5461 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5463 if (GET_MODE (target) == BLKmode)
5465 /* Handle calls that return BLKmode values in registers. */
5466 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5467 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5468 else
5469 store_bit_field (target,
5470 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5471 0, 0, 0, GET_MODE (temp), temp);
5473 else
5474 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5477 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5479 /* Handle copying a string constant into an array. The string
5480 constant may be shorter than the array. So copy just the string's
5481 actual length, and clear the rest. First get the size of the data
5482 type of the string, which is actually the size of the target. */
5483 rtx size = expr_size (exp);
5485 if (CONST_INT_P (size)
5486 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5487 emit_block_move (target, temp, size,
5488 (call_param_p
5489 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5490 else
5492 machine_mode pointer_mode
5493 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5494 machine_mode address_mode = get_address_mode (target);
5496 /* Compute the size of the data to copy from the string. */
5497 tree copy_size
5498 = size_binop_loc (loc, MIN_EXPR,
5499 make_tree (sizetype, size),
5500 size_int (TREE_STRING_LENGTH (exp)));
5501 rtx copy_size_rtx
5502 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5503 (call_param_p
5504 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5505 rtx_code_label *label = 0;
5507 /* Copy that much. */
5508 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5509 TYPE_UNSIGNED (sizetype));
5510 emit_block_move (target, temp, copy_size_rtx,
5511 (call_param_p
5512 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5514 /* Figure out how much is left in TARGET that we have to clear.
5515 Do all calculations in pointer_mode. */
5516 if (CONST_INT_P (copy_size_rtx))
5518 size = plus_constant (address_mode, size,
5519 -INTVAL (copy_size_rtx));
5520 target = adjust_address (target, BLKmode,
5521 INTVAL (copy_size_rtx));
5523 else
5525 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5526 copy_size_rtx, NULL_RTX, 0,
5527 OPTAB_LIB_WIDEN);
5529 if (GET_MODE (copy_size_rtx) != address_mode)
5530 copy_size_rtx = convert_to_mode (address_mode,
5531 copy_size_rtx,
5532 TYPE_UNSIGNED (sizetype));
5534 target = offset_address (target, copy_size_rtx,
5535 highest_pow2_factor (copy_size));
5536 label = gen_label_rtx ();
5537 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5538 GET_MODE (size), 0, label);
5541 if (size != const0_rtx)
5542 clear_storage (target, size, BLOCK_OP_NORMAL);
5544 if (label)
5545 emit_label (label);
5548 /* Handle calls that return values in multiple non-contiguous locations.
5549 The Irix 6 ABI has examples of this. */
5550 else if (GET_CODE (target) == PARALLEL)
5552 if (GET_CODE (temp) == PARALLEL)
5553 emit_group_move (target, temp);
5554 else
5555 emit_group_load (target, temp, TREE_TYPE (exp),
5556 int_size_in_bytes (TREE_TYPE (exp)));
5558 else if (GET_CODE (temp) == PARALLEL)
5559 emit_group_store (target, temp, TREE_TYPE (exp),
5560 int_size_in_bytes (TREE_TYPE (exp)));
5561 else if (GET_MODE (temp) == BLKmode)
5562 emit_block_move (target, temp, expr_size (exp),
5563 (call_param_p
5564 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5565 /* If we emit a nontemporal store, there is nothing else to do. */
5566 else if (nontemporal && emit_storent_insn (target, temp))
5568 else
5570 temp = force_operand (temp, target);
5571 if (temp != target)
5572 emit_move_insn (target, temp);
5576 return NULL_RTX;
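/* Illustrative sketch, not part of the surrounding code: the STRING_CST
   branch above is the path taken for an initialization such as the one
   below.  The string constant occupies TREE_STRING_LENGTH bytes -- 4
   here, including the terminating NUL -- so emit_block_move copies those
   4 bytes and clear_storage zeroes the remaining 12, either directly
   when the copied size is a compile-time constant or under the
   comparison and label emitted for the variable-size case.  */
#if 0
char buf[16] = "abc";	/* 4 bytes copied, 12 bytes cleared.  */
#endif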
5579 /* Same as store_expr_with_bounds but ignoring bounds of EXP. */
5581 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5583 return store_expr_with_bounds (exp, target, call_param_p, nontemporal, NULL);
5586 /* Return true if field F of structure TYPE is a flexible array. */
5588 static bool
5589 flexible_array_member_p (const_tree f, const_tree type)
5591 const_tree tf;
5593 tf = TREE_TYPE (f);
5594 return (DECL_CHAIN (f) == NULL
5595 && TREE_CODE (tf) == ARRAY_TYPE
5596 && TYPE_DOMAIN (tf)
5597 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5598 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5599 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5600 && int_size_in_bytes (type) >= 0);
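/* Illustrative sketch, not part of the surrounding code: the kind of
   field the predicate above matches -- the last FIELD_DECL of the
   struct, of array type whose domain has a zero minimum and no maximum,
   inside a type whose own size is still a known constant.  */
#if 0
struct msg
{
  int len;
  char payload[];	/* Flexible array member; count_type_elements
			   skips it when FOR_CTOR_P, per the comment in
			   the RECORD_TYPE case below.  */
};
#endif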
5603 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5604 must have in order for it to completely initialize a value of type TYPE.
5605 Return -1 if the number isn't known.
5607 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5609 static HOST_WIDE_INT
5610 count_type_elements (const_tree type, bool for_ctor_p)
5612 switch (TREE_CODE (type))
5614 case ARRAY_TYPE:
5616 tree nelts;
5618 nelts = array_type_nelts (type);
5619 if (nelts && tree_fits_uhwi_p (nelts))
5621 unsigned HOST_WIDE_INT n;
5623 n = tree_to_uhwi (nelts) + 1;
5624 if (n == 0 || for_ctor_p)
5625 return n;
5626 else
5627 return n * count_type_elements (TREE_TYPE (type), false);
5629 return for_ctor_p ? -1 : 1;
5632 case RECORD_TYPE:
5634 unsigned HOST_WIDE_INT n;
5635 tree f;
5637 n = 0;
5638 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5639 if (TREE_CODE (f) == FIELD_DECL)
5641 if (!for_ctor_p)
5642 n += count_type_elements (TREE_TYPE (f), false);
5643 else if (!flexible_array_member_p (f, type))
5644 /* Don't count flexible arrays, which are not supposed
5645 to be initialized. */
5646 n += 1;
5649 return n;
5652 case UNION_TYPE:
5653 case QUAL_UNION_TYPE:
5655 tree f;
5656 HOST_WIDE_INT n, m;
5658 gcc_assert (!for_ctor_p);
5659 /* Estimate the number of scalars in each field and pick the
5660 maximum. Other estimates would do instead; the idea is simply
5661 to make sure that the estimate is not sensitive to the ordering
5662 of the fields. */
5663 n = 1;
5664 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5665 if (TREE_CODE (f) == FIELD_DECL)
5667 m = count_type_elements (TREE_TYPE (f), false);
5668 /* If the field doesn't span the whole union, add an extra
5669 scalar for the rest. */
5670 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5671 TYPE_SIZE (type)) != 1)
5672 m++;
5673 if (n < m)
5674 n = m;
5676 return n;
5679 case COMPLEX_TYPE:
5680 return 2;
5682 case VECTOR_TYPE:
5683 return TYPE_VECTOR_SUBPARTS (type);
5685 case INTEGER_TYPE:
5686 case REAL_TYPE:
5687 case FIXED_POINT_TYPE:
5688 case ENUMERAL_TYPE:
5689 case BOOLEAN_TYPE:
5690 case POINTER_TYPE:
5691 case OFFSET_TYPE:
5692 case REFERENCE_TYPE:
5693 case NULLPTR_TYPE:
5694 return 1;
5696 case ERROR_MARK:
5697 return 0;
5699 case VOID_TYPE:
5700 case METHOD_TYPE:
5701 case FUNCTION_TYPE:
5702 case LANG_TYPE:
5703 default:
5704 gcc_unreachable ();
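/* Illustrative sketch, not part of the surrounding code: for a type like
   the one below, the function above returns 3 when FOR_CTOR_P (one
   constructor element expected per field) and 1 + 4 + 2 == 7 otherwise,
   since the array contributes one scalar per element and the complex
   field counts as two scalars.  */
#if 0
struct s { int a; double d[4]; _Complex float c; };
#endif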
5708 /* Helper for categorize_ctor_elements. Identical interface. */
5710 static bool
5711 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5712 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5714 unsigned HOST_WIDE_INT idx;
5715 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5716 tree value, purpose, elt_type;
5718 /* Whether CTOR is a valid constant initializer, in accordance with what
5719 initializer_constant_valid_p does. If inferred from the constructor
5720 elements, true until proven otherwise. */
5721 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5722 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5724 nz_elts = 0;
5725 init_elts = 0;
5726 num_fields = 0;
5727 elt_type = NULL_TREE;
5729 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5731 HOST_WIDE_INT mult = 1;
5733 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5735 tree lo_index = TREE_OPERAND (purpose, 0);
5736 tree hi_index = TREE_OPERAND (purpose, 1);
5738 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5739 mult = (tree_to_uhwi (hi_index)
5740 - tree_to_uhwi (lo_index) + 1);
5742 num_fields += mult;
5743 elt_type = TREE_TYPE (value);
5745 switch (TREE_CODE (value))
5747 case CONSTRUCTOR:
5749 HOST_WIDE_INT nz = 0, ic = 0;
5751 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5752 p_complete);
5754 nz_elts += mult * nz;
5755 init_elts += mult * ic;
5757 if (const_from_elts_p && const_p)
5758 const_p = const_elt_p;
5760 break;
5762 case INTEGER_CST:
5763 case REAL_CST:
5764 case FIXED_CST:
5765 if (!initializer_zerop (value))
5766 nz_elts += mult;
5767 init_elts += mult;
5768 break;
5770 case STRING_CST:
5771 nz_elts += mult * TREE_STRING_LENGTH (value);
5772 init_elts += mult * TREE_STRING_LENGTH (value);
5773 break;
5775 case COMPLEX_CST:
5776 if (!initializer_zerop (TREE_REALPART (value)))
5777 nz_elts += mult;
5778 if (!initializer_zerop (TREE_IMAGPART (value)))
5779 nz_elts += mult;
5780 init_elts += mult;
5781 break;
5783 case VECTOR_CST:
5785 unsigned i;
5786 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5788 tree v = VECTOR_CST_ELT (value, i);
5789 if (!initializer_zerop (v))
5790 nz_elts += mult;
5791 init_elts += mult;
5794 break;
5796 default:
5798 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5799 nz_elts += mult * tc;
5800 init_elts += mult * tc;
5802 if (const_from_elts_p && const_p)
5803 const_p = initializer_constant_valid_p (value, elt_type)
5804 != NULL_TREE;
5806 break;
5810 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5811 num_fields, elt_type))
5812 *p_complete = false;
5814 *p_nz_elts += nz_elts;
5815 *p_init_elts += init_elts;
5817 return const_p;
5820 /* Examine CTOR to discover:
5821 * how many scalar fields are set to nonzero values,
5822 and place it in *P_NZ_ELTS;
5823 * how many scalar fields in total are in CTOR,
5824 and place it in *P_INIT_ELTS;
5825 * whether the constructor is complete -- in the sense that every
5826 meaningful byte is explicitly given a value --
5827 and place it in *P_COMPLETE.
5829 Return whether or not CTOR is a valid static constant initializer, the same
5830 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5832 bool
5833 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5834 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5836 *p_nz_elts = 0;
5837 *p_init_elts = 0;
5838 *p_complete = true;
5840 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
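/* Illustrative sketch, not part of the surrounding code: for the
   initializer below, the three INTEGER_CST elements give *P_NZ_ELTS == 1
   (only the 7 is nonzero) and *P_INIT_ELTS == 3; *P_COMPLETE stays true
   because every field of the struct is covered, and the function returns
   true because each element is a valid constant initializer.  */
#if 0
struct s3 { int a, b, c; };
static struct s3 v = { 0, 7, 0 };
#endif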
5843 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5844 of which had type LAST_TYPE. Each element was itself a complete
5845 initializer, in the sense that every meaningful byte was explicitly
5846 given a value. Return true if the same is true for the constructor
5847 as a whole. */
5849 bool
5850 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5851 const_tree last_type)
5853 if (TREE_CODE (type) == UNION_TYPE
5854 || TREE_CODE (type) == QUAL_UNION_TYPE)
5856 if (num_elts == 0)
5857 return false;
5859 gcc_assert (num_elts == 1 && last_type);
5861 /* ??? We could look at each element of the union and find the
5862 largest element, which would avoid comparing the size of the
5863 initialized element against any tail padding in the union.
5864 Doesn't seem worth the effort... */
5865 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5868 return count_type_elements (type, true) == num_elts;
5871 /* Return 1 if EXP contains mostly (3/4) zeros. */
5873 static int
5874 mostly_zeros_p (const_tree exp)
5876 if (TREE_CODE (exp) == CONSTRUCTOR)
5878 HOST_WIDE_INT nz_elts, init_elts;
5879 bool complete_p;
5881 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5882 return !complete_p || nz_elts < init_elts / 4;
5885 return initializer_zerop (exp);
5888 /* Return 1 if EXP contains all zeros. */
5890 static int
5891 all_zeros_p (const_tree exp)
5893 if (TREE_CODE (exp) == CONSTRUCTOR)
5895 HOST_WIDE_INT nz_elts, init_elts;
5896 bool complete_p;
5898 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5899 return nz_elts == 0;
5902 return initializer_zerop (exp);
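/* Illustrative sketch, not part of the surrounding code: with a
   constructor that explicitly provides eight scalars of which one is
   nonzero, categorize_ctor_elements reports nz_elts == 1 and
   init_elts == 8, so the 1 < 8 / 4 test above makes mostly_zeros_p
   return nonzero while all_zeros_p still returns zero.  An incomplete
   constructor (fewer elements than the type needs) counts as mostly
   zero regardless of the ratio.  */
#if 0
static int eight[8] = { 0, 0, 0, 0, 0, 0, 0, 9 };	/* mostly zeros */
static int one[8]   = { 0 };				/* incomplete   */
#endif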
5905 /* Helper function for store_constructor.
5906 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5907 CLEARED is as for store_constructor.
5908 ALIAS_SET is the alias set to use for any stores.
5910 This provides a recursive shortcut back to store_constructor when it isn't
5911 necessary to go through store_field. This is so that we can pass through
5912 the cleared field to let store_constructor know that we may not have to
5913 clear a substructure if the outer structure has already been cleared. */
5915 static void
5916 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5917 HOST_WIDE_INT bitpos, machine_mode mode,
5918 tree exp, int cleared, alias_set_type alias_set)
5920 if (TREE_CODE (exp) == CONSTRUCTOR
5921 /* We can only call store_constructor recursively if the size and
5922 bit position are on a byte boundary. */
5923 && bitpos % BITS_PER_UNIT == 0
5924 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5925 /* If we have a nonzero bitpos for a register target, then we just
5926 let store_field do the bitfield handling. This is unlikely to
5927 generate unnecessary clear instructions anyway. */
5928 && (bitpos == 0 || MEM_P (target)))
5930 if (MEM_P (target))
5931 target
5932 = adjust_address (target,
5933 GET_MODE (target) == BLKmode
5934 || 0 != (bitpos
5935 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5936 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5939 /* Update the alias set, if required. */
5940 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5941 && MEM_ALIAS_SET (target) != 0)
5943 target = copy_rtx (target);
5944 set_mem_alias_set (target, alias_set);
5947 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5949 else
5950 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5954 /* Returns the number of FIELD_DECLs in TYPE. */
5956 static int
5957 fields_length (const_tree type)
5959 tree t = TYPE_FIELDS (type);
5960 int count = 0;
5962 for (; t; t = DECL_CHAIN (t))
5963 if (TREE_CODE (t) == FIELD_DECL)
5964 ++count;
5966 return count;
5970 /* Store the value of constructor EXP into the rtx TARGET.
5971 TARGET is either a REG or a MEM; we know it cannot conflict, since
5972 safe_from_p has been called.
5973 CLEARED is true if TARGET is known to have been zero'd.
5974 SIZE is the number of bytes of TARGET we are allowed to modify: this
5975 may not be the same as the size of EXP if we are assigning to a field
5976 which has been packed to exclude padding bits. */
5978 static void
5979 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5981 tree type = TREE_TYPE (exp);
5982 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5984 switch (TREE_CODE (type))
5986 case RECORD_TYPE:
5987 case UNION_TYPE:
5988 case QUAL_UNION_TYPE:
5990 unsigned HOST_WIDE_INT idx;
5991 tree field, value;
5993 /* If size is zero or the target is already cleared, do nothing. */
5994 if (size == 0 || cleared)
5995 cleared = 1;
5996 /* We either clear the aggregate or indicate the value is dead. */
5997 else if ((TREE_CODE (type) == UNION_TYPE
5998 || TREE_CODE (type) == QUAL_UNION_TYPE)
5999 && ! CONSTRUCTOR_ELTS (exp))
6000 /* If the constructor is empty, clear the union. */
6002 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6003 cleared = 1;
6006 /* If we are building a static constructor into a register,
6007 set the initial value as zero so we can fold the value into
6008 a constant. But if more than one register is involved,
6009 this probably loses. */
6010 else if (REG_P (target) && TREE_STATIC (exp)
6011 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
6013 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6014 cleared = 1;
6017 /* If the constructor has fewer fields than the structure or
6018 if we are initializing the structure to mostly zeros, clear
6019 the whole structure first. Don't do this if TARGET is a
6020 register whose mode size isn't equal to SIZE since
6021 clear_storage can't handle this case. */
6022 else if (size > 0
6023 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
6024 != fields_length (type))
6025 || mostly_zeros_p (exp))
6026 && (!REG_P (target)
6027 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
6028 == size)))
6030 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6031 cleared = 1;
6034 if (REG_P (target) && !cleared)
6035 emit_clobber (target);
6037 /* Store each element of the constructor into the
6038 corresponding field of TARGET. */
6039 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6041 machine_mode mode;
6042 HOST_WIDE_INT bitsize;
6043 HOST_WIDE_INT bitpos = 0;
6044 tree offset;
6045 rtx to_rtx = target;
6047 /* Just ignore missing fields. We cleared the whole
6048 structure, above, if any fields are missing. */
6049 if (field == 0)
6050 continue;
6052 if (cleared && initializer_zerop (value))
6053 continue;
6055 if (tree_fits_uhwi_p (DECL_SIZE (field)))
6056 bitsize = tree_to_uhwi (DECL_SIZE (field));
6057 else
6058 bitsize = -1;
6060 mode = DECL_MODE (field);
6061 if (DECL_BIT_FIELD (field))
6062 mode = VOIDmode;
6064 offset = DECL_FIELD_OFFSET (field);
6065 if (tree_fits_shwi_p (offset)
6066 && tree_fits_shwi_p (bit_position (field)))
6068 bitpos = int_bit_position (field);
6069 offset = 0;
6071 else
6072 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
6074 if (offset)
6076 machine_mode address_mode;
6077 rtx offset_rtx;
6079 offset
6080 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
6081 make_tree (TREE_TYPE (exp),
6082 target));
6084 offset_rtx = expand_normal (offset);
6085 gcc_assert (MEM_P (to_rtx));
6087 address_mode = get_address_mode (to_rtx);
6088 if (GET_MODE (offset_rtx) != address_mode)
6089 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
6091 to_rtx = offset_address (to_rtx, offset_rtx,
6092 highest_pow2_factor (offset));
6095 /* If this initializes a field that is smaller than a
6096 word, at the start of a word, try to widen it to a full
6097 word. This special case allows us to output C++ member
6098 function initializations in a form that the optimizers
6099 can understand. */
6100 if (WORD_REGISTER_OPERATIONS
6101 && REG_P (target)
6102 && bitsize < BITS_PER_WORD
6103 && bitpos % BITS_PER_WORD == 0
6104 && GET_MODE_CLASS (mode) == MODE_INT
6105 && TREE_CODE (value) == INTEGER_CST
6106 && exp_size >= 0
6107 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6109 tree type = TREE_TYPE (value);
6111 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6113 type = lang_hooks.types.type_for_mode
6114 (word_mode, TYPE_UNSIGNED (type));
6115 value = fold_convert (type, value);
6118 if (BYTES_BIG_ENDIAN)
6119 value
6120 = fold_build2 (LSHIFT_EXPR, type, value,
6121 build_int_cst (type,
6122 BITS_PER_WORD - bitsize));
6123 bitsize = BITS_PER_WORD;
6124 mode = word_mode;
6127 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6128 && DECL_NONADDRESSABLE_P (field))
6130 to_rtx = copy_rtx (to_rtx);
6131 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6134 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6135 value, cleared,
6136 get_alias_set (TREE_TYPE (field)));
6138 break;
6140 case ARRAY_TYPE:
6142 tree value, index;
6143 unsigned HOST_WIDE_INT i;
6144 int need_to_clear;
6145 tree domain;
6146 tree elttype = TREE_TYPE (type);
6147 int const_bounds_p;
6148 HOST_WIDE_INT minelt = 0;
6149 HOST_WIDE_INT maxelt = 0;
6151 domain = TYPE_DOMAIN (type);
6152 const_bounds_p = (TYPE_MIN_VALUE (domain)
6153 && TYPE_MAX_VALUE (domain)
6154 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6155 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6157 /* If we have constant bounds for the range of the type, get them. */
6158 if (const_bounds_p)
6160 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6161 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6164 /* If the constructor has fewer elements than the array, clear
6165 the whole array first. Similarly if this is a static
6166 constructor of a non-BLKmode object. */
6167 if (cleared)
6168 need_to_clear = 0;
6169 else if (REG_P (target) && TREE_STATIC (exp))
6170 need_to_clear = 1;
6171 else
6173 unsigned HOST_WIDE_INT idx;
6174 tree index, value;
6175 HOST_WIDE_INT count = 0, zero_count = 0;
6176 need_to_clear = ! const_bounds_p;
6178 /* This loop is a more accurate version of the loop in
6179 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6180 is also needed to check for missing elements. */
6181 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6183 HOST_WIDE_INT this_node_count;
6185 if (need_to_clear)
6186 break;
6188 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6190 tree lo_index = TREE_OPERAND (index, 0);
6191 tree hi_index = TREE_OPERAND (index, 1);
6193 if (! tree_fits_uhwi_p (lo_index)
6194 || ! tree_fits_uhwi_p (hi_index))
6196 need_to_clear = 1;
6197 break;
6200 this_node_count = (tree_to_uhwi (hi_index)
6201 - tree_to_uhwi (lo_index) + 1);
6203 else
6204 this_node_count = 1;
6206 count += this_node_count;
6207 if (mostly_zeros_p (value))
6208 zero_count += this_node_count;
6211 /* Clear the entire array first if there are any missing
6212 elements, or if the incidence of zero elements is >=
6213 75%. */
6214 if (! need_to_clear
6215 && (count < maxelt - minelt + 1
6216 || 4 * zero_count >= 3 * count))
6217 need_to_clear = 1;
6220 if (need_to_clear && size > 0)
6222 if (REG_P (target))
6223 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6224 else
6225 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6226 cleared = 1;
6229 if (!cleared && REG_P (target))
6230 /* Inform later passes that the old value is dead. */
6231 emit_clobber (target);
6233 /* Store each element of the constructor into the
6234 corresponding element of TARGET, determined by counting the
6235 elements. */
6236 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6238 machine_mode mode;
6239 HOST_WIDE_INT bitsize;
6240 HOST_WIDE_INT bitpos;
6241 rtx xtarget = target;
6243 if (cleared && initializer_zerop (value))
6244 continue;
6246 mode = TYPE_MODE (elttype);
6247 if (mode == BLKmode)
6248 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6249 ? tree_to_uhwi (TYPE_SIZE (elttype))
6250 : -1);
6251 else
6252 bitsize = GET_MODE_BITSIZE (mode);
6254 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6256 tree lo_index = TREE_OPERAND (index, 0);
6257 tree hi_index = TREE_OPERAND (index, 1);
6258 rtx index_r, pos_rtx;
6259 HOST_WIDE_INT lo, hi, count;
6260 tree position;
6262 /* If the range is constant and "small", unroll the loop. */
6263 if (const_bounds_p
6264 && tree_fits_shwi_p (lo_index)
6265 && tree_fits_shwi_p (hi_index)
6266 && (lo = tree_to_shwi (lo_index),
6267 hi = tree_to_shwi (hi_index),
6268 count = hi - lo + 1,
6269 (!MEM_P (target)
6270 || count <= 2
6271 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6272 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6273 <= 40 * 8)))))
6275 lo -= minelt; hi -= minelt;
6276 for (; lo <= hi; lo++)
6278 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6280 if (MEM_P (target)
6281 && !MEM_KEEP_ALIAS_SET_P (target)
6282 && TREE_CODE (type) == ARRAY_TYPE
6283 && TYPE_NONALIASED_COMPONENT (type))
6285 target = copy_rtx (target);
6286 MEM_KEEP_ALIAS_SET_P (target) = 1;
6289 store_constructor_field
6290 (target, bitsize, bitpos, mode, value, cleared,
6291 get_alias_set (elttype));
6294 else
6296 rtx_code_label *loop_start = gen_label_rtx ();
6297 rtx_code_label *loop_end = gen_label_rtx ();
6298 tree exit_cond;
6300 expand_normal (hi_index);
6302 index = build_decl (EXPR_LOCATION (exp),
6303 VAR_DECL, NULL_TREE, domain);
6304 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6305 SET_DECL_RTL (index, index_r);
6306 store_expr (lo_index, index_r, 0, false);
6308 /* Build the head of the loop. */
6309 do_pending_stack_adjust ();
6310 emit_label (loop_start);
6312 /* Assign value to element index. */
6313 position =
6314 fold_convert (ssizetype,
6315 fold_build2 (MINUS_EXPR,
6316 TREE_TYPE (index),
6317 index,
6318 TYPE_MIN_VALUE (domain)));
6320 position =
6321 size_binop (MULT_EXPR, position,
6322 fold_convert (ssizetype,
6323 TYPE_SIZE_UNIT (elttype)));
6325 pos_rtx = expand_normal (position);
6326 xtarget = offset_address (target, pos_rtx,
6327 highest_pow2_factor (position));
6328 xtarget = adjust_address (xtarget, mode, 0);
6329 if (TREE_CODE (value) == CONSTRUCTOR)
6330 store_constructor (value, xtarget, cleared,
6331 bitsize / BITS_PER_UNIT);
6332 else
6333 store_expr (value, xtarget, 0, false);
6335 /* Generate a conditional jump to exit the loop. */
6336 exit_cond = build2 (LT_EXPR, integer_type_node,
6337 index, hi_index);
6338 jumpif (exit_cond, loop_end, -1);
6340 /* Update the loop counter, and jump to the head of
6341 the loop. */
6342 expand_assignment (index,
6343 build2 (PLUS_EXPR, TREE_TYPE (index),
6344 index, integer_one_node),
6345 false);
6347 emit_jump (loop_start);
6349 /* Build the end of the loop. */
6350 emit_label (loop_end);
6353 else if ((index != 0 && ! tree_fits_shwi_p (index))
6354 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6356 tree position;
6358 if (index == 0)
6359 index = ssize_int (1);
6361 if (minelt)
6362 index = fold_convert (ssizetype,
6363 fold_build2 (MINUS_EXPR,
6364 TREE_TYPE (index),
6365 index,
6366 TYPE_MIN_VALUE (domain)));
6368 position =
6369 size_binop (MULT_EXPR, index,
6370 fold_convert (ssizetype,
6371 TYPE_SIZE_UNIT (elttype)));
6372 xtarget = offset_address (target,
6373 expand_normal (position),
6374 highest_pow2_factor (position));
6375 xtarget = adjust_address (xtarget, mode, 0);
6376 store_expr (value, xtarget, 0, false);
6378 else
6380 if (index != 0)
6381 bitpos = ((tree_to_shwi (index) - minelt)
6382 * tree_to_uhwi (TYPE_SIZE (elttype)));
6383 else
6384 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6386 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6387 && TREE_CODE (type) == ARRAY_TYPE
6388 && TYPE_NONALIASED_COMPONENT (type))
6390 target = copy_rtx (target);
6391 MEM_KEEP_ALIAS_SET_P (target) = 1;
6393 store_constructor_field (target, bitsize, bitpos, mode, value,
6394 cleared, get_alias_set (elttype));
6397 break;
6400 case VECTOR_TYPE:
6402 unsigned HOST_WIDE_INT idx;
6403 constructor_elt *ce;
6404 int i;
6405 int need_to_clear;
6406 int icode = CODE_FOR_nothing;
6407 tree elttype = TREE_TYPE (type);
6408 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6409 machine_mode eltmode = TYPE_MODE (elttype);
6410 HOST_WIDE_INT bitsize;
6411 HOST_WIDE_INT bitpos;
6412 rtvec vector = NULL;
6413 unsigned n_elts;
6414 alias_set_type alias;
6416 gcc_assert (eltmode != BLKmode);
6418 n_elts = TYPE_VECTOR_SUBPARTS (type);
6419 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6421 machine_mode mode = GET_MODE (target);
6423 icode = (int) optab_handler (vec_init_optab, mode);
6424 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6425 if (icode != CODE_FOR_nothing)
6427 tree value;
6429 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6430 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6432 icode = CODE_FOR_nothing;
6433 break;
6436 if (icode != CODE_FOR_nothing)
6438 unsigned int i;
6440 vector = rtvec_alloc (n_elts);
6441 for (i = 0; i < n_elts; i++)
6442 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6446 /* If the constructor has fewer elements than the vector,
6447 clear the whole array first. Similarly if this is a static
6448 constructor of a non-BLKmode object. */
6449 if (cleared)
6450 need_to_clear = 0;
6451 else if (REG_P (target) && TREE_STATIC (exp))
6452 need_to_clear = 1;
6453 else
6455 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6456 tree value;
6458 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6460 int n_elts_here = tree_to_uhwi
6461 (int_const_binop (TRUNC_DIV_EXPR,
6462 TYPE_SIZE (TREE_TYPE (value)),
6463 TYPE_SIZE (elttype)));
6465 count += n_elts_here;
6466 if (mostly_zeros_p (value))
6467 zero_count += n_elts_here;
6470 /* Clear the entire vector first if there are any missing elements,
6471 or if the incidence of zero elements is >= 75%. */
6472 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6475 if (need_to_clear && size > 0 && !vector)
6477 if (REG_P (target))
6478 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6479 else
6480 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6481 cleared = 1;
6484 /* Inform later passes that the old value is dead. */
6485 if (!cleared && !vector && REG_P (target))
6486 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6488 if (MEM_P (target))
6489 alias = MEM_ALIAS_SET (target);
6490 else
6491 alias = get_alias_set (elttype);
6493 /* Store each element of the constructor into the corresponding
6494 element of TARGET, determined by counting the elements. */
6495 for (idx = 0, i = 0;
6496 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6497 idx++, i += bitsize / elt_size)
6499 HOST_WIDE_INT eltpos;
6500 tree value = ce->value;
6502 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6503 if (cleared && initializer_zerop (value))
6504 continue;
6506 if (ce->index)
6507 eltpos = tree_to_uhwi (ce->index);
6508 else
6509 eltpos = i;
6511 if (vector)
6513 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6514 elements. */
6515 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6516 RTVEC_ELT (vector, eltpos)
6517 = expand_normal (value);
6519 else
6521 machine_mode value_mode =
6522 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6523 ? TYPE_MODE (TREE_TYPE (value))
6524 : eltmode;
6525 bitpos = eltpos * elt_size;
6526 store_constructor_field (target, bitsize, bitpos, value_mode,
6527 value, cleared, alias);
6531 if (vector)
6532 emit_insn (GEN_FCN (icode)
6533 (target,
6534 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6535 break;
6538 default:
6539 gcc_unreachable ();
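/* Illustrative sketch, not part of the surrounding code: a GNU C range
   initializer that exercises the ARRAY_TYPE case above.  The RANGE_EXPR
   below covers 96 elements, so 4 of the 100 are missing and the whole
   array is cleared first.  96 ints also fail the "constant and small"
   unrolling test for a memory target, so the loop path is used: a
   counter register is loaded from the low index, the element store is
   emitted at the target plus counter times the element size, and
   jumpif/emit_jump build the loop that runs up to the high index.  A
   range of just a few small elements would be unrolled instead.  */
#if 0
int t[100] = { [2 ... 97] = 5 };
#endif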
6543 /* Store the value of EXP (an expression tree)
6544 into a subfield of TARGET which has mode MODE and occupies
6545 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6546 If MODE is VOIDmode, it means that we are storing into a bit-field.
6548 BITREGION_START is bitpos of the first bitfield in this region.
6549 BITREGION_END is the bitpos of the ending bitfield in this region.
6550 These two fields are 0, if the C++ memory model does not apply,
6551 or we are not interested in keeping track of bitfield regions.
6553 Always return const0_rtx unless we have something particular to
6554 return.
6556 ALIAS_SET is the alias set for the destination. This value will
6557 (in general) be different from that for TARGET, since TARGET is a
6558 reference to the containing structure.
6560 If NONTEMPORAL is true, try generating a nontemporal store. */
6562 static rtx
6563 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6564 unsigned HOST_WIDE_INT bitregion_start,
6565 unsigned HOST_WIDE_INT bitregion_end,
6566 machine_mode mode, tree exp,
6567 alias_set_type alias_set, bool nontemporal)
6569 if (TREE_CODE (exp) == ERROR_MARK)
6570 return const0_rtx;
6572 /* If we have nothing to store, do nothing unless the expression has
6573 side-effects. */
6574 if (bitsize == 0)
6575 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6577 if (GET_CODE (target) == CONCAT)
6579 /* We're storing into a struct containing a single __complex. */
6581 gcc_assert (!bitpos);
6582 return store_expr (exp, target, 0, nontemporal);
6585 /* If the structure is in a register or if the component
6586 is a bit field, we cannot use addressing to access it.
6587 Use bit-field techniques or SUBREG to store in it. */
6589 if (mode == VOIDmode
6590 || (mode != BLKmode && ! direct_store[(int) mode]
6591 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6592 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6593 || REG_P (target)
6594 || GET_CODE (target) == SUBREG
6595 /* If the field isn't aligned enough to store as an ordinary memref,
6596 store it as a bit field. */
6597 || (mode != BLKmode
6598 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6599 || bitpos % GET_MODE_ALIGNMENT (mode))
6600 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6601 || (bitpos % BITS_PER_UNIT != 0)))
6602 || (bitsize >= 0 && mode != BLKmode
6603 && GET_MODE_BITSIZE (mode) > bitsize)
6604 /* If the RHS and field are a constant size and the size of the
6605 RHS isn't the same size as the bitfield, we must use bitfield
6606 operations. */
6607 || (bitsize >= 0
6608 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6609 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6610 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6611 decl we must use bitfield operations. */
6612 || (bitsize >= 0
6613 && TREE_CODE (exp) == MEM_REF
6614 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6615 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6616 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6617 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6619 rtx temp;
6620 gimple nop_def;
6622 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6623 implies a mask operation. If the precision is the same size as
6624 the field we're storing into, that mask is redundant. This is
6625 particularly common with bit field assignments generated by the
6626 C front end. */
6627 nop_def = get_def_for_expr (exp, NOP_EXPR);
6628 if (nop_def)
6630 tree type = TREE_TYPE (exp);
6631 if (INTEGRAL_TYPE_P (type)
6632 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6633 && bitsize == TYPE_PRECISION (type))
6635 tree op = gimple_assign_rhs1 (nop_def);
6636 type = TREE_TYPE (op);
6637 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6638 exp = op;
6642 temp = expand_normal (exp);
6644 /* If BITSIZE is narrower than the size of the type of EXP
6645 we will be narrowing TEMP. Normally, what's wanted are the
6646 low-order bits. However, if EXP's type is a record and this is
6647 a big-endian machine, we want the upper BITSIZE bits. */
6648 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6649 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6650 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6651 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6652 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6653 NULL_RTX, 1);
6655 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6656 if (mode != VOIDmode && mode != BLKmode
6657 && mode != TYPE_MODE (TREE_TYPE (exp)))
6658 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6660 /* If TEMP is not a PARALLEL (see below) and its mode and that of TARGET
6661 are both BLKmode, both must be in memory and BITPOS must be aligned
6662 on a byte boundary. If so, we simply do a block copy. Likewise for
6663 a BLKmode-like TARGET. */
6664 if (GET_CODE (temp) != PARALLEL
6665 && GET_MODE (temp) == BLKmode
6666 && (GET_MODE (target) == BLKmode
6667 || (MEM_P (target)
6668 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6669 && (bitpos % BITS_PER_UNIT) == 0
6670 && (bitsize % BITS_PER_UNIT) == 0)))
6672 gcc_assert (MEM_P (target) && MEM_P (temp)
6673 && (bitpos % BITS_PER_UNIT) == 0);
6675 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6676 emit_block_move (target, temp,
6677 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6678 / BITS_PER_UNIT),
6679 BLOCK_OP_NORMAL);
6681 return const0_rtx;
6684 /* Handle calls that return values in multiple non-contiguous locations.
6685 The Irix 6 ABI has examples of this. */
6686 if (GET_CODE (temp) == PARALLEL)
6688 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6689 rtx temp_target;
6690 if (mode == BLKmode || mode == VOIDmode)
6691 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6692 temp_target = gen_reg_rtx (mode);
6693 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6694 temp = temp_target;
6696 else if (mode == BLKmode)
6698 /* Handle calls that return BLKmode values in registers. */
6699 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6701 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6702 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6703 temp = temp_target;
6705 else
6707 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6708 rtx temp_target;
6709 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6710 temp_target = gen_reg_rtx (mode);
6711 temp_target
6712 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6713 temp_target, mode, mode);
6714 temp = temp_target;
6718 /* Store the value in the bitfield. */
6719 store_bit_field (target, bitsize, bitpos,
6720 bitregion_start, bitregion_end,
6721 mode, temp);
6723 return const0_rtx;
6725 else
6727 /* Now build a reference to just the desired component. */
6728 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6730 if (to_rtx == target)
6731 to_rtx = copy_rtx (to_rtx);
6733 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6734 set_mem_alias_set (to_rtx, alias_set);
6736 return store_expr (exp, to_rtx, 0, nontemporal);
6740 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6741 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6742 codes and find the ultimate containing object, which we return.
6744 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6745 bit position, and *PUNSIGNEDP to the signedness of the field.
6746 If the position of the field is variable, we store a tree
6747 giving the variable offset (in units) in *POFFSET.
6748 This offset is in addition to the bit position.
6749 If the position is not variable, we store 0 in *POFFSET.
6751 If any of the extraction expressions is volatile,
6752 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6754 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6755 Otherwise, it is a mode that can be used to access the field.
6757 If the field describes a variable-sized object, *PMODE is set to
6758 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6759 this case, but the address of the object can be found.
6761 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6762 look through nodes that serve as markers of a greater alignment than
6763 the one that can be deduced from the expression. These nodes make it
6764 possible for front-ends to prevent temporaries from being created by
6765 the middle-end on alignment considerations. For that purpose, the
6766 normal operating mode at high-level is to always pass FALSE so that
6767 the ultimate containing object is really returned; moreover, the
6768 associated predicate handled_component_p will always return TRUE
6769 on these nodes, thus indicating that they are essentially handled
6770 by get_inner_reference. TRUE should only be passed when the caller
6771 is scanning the expression in order to build another representation
6772 and specifically knows how to handle these nodes; as such, this is
6773 the normal operating mode in the RTL expanders. */
6775 tree
6776 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6777 HOST_WIDE_INT *pbitpos, tree *poffset,
6778 machine_mode *pmode, int *punsignedp,
6779 int *pvolatilep, bool keep_aligning)
6781 tree size_tree = 0;
6782 machine_mode mode = VOIDmode;
6783 bool blkmode_bitfield = false;
6784 tree offset = size_zero_node;
6785 offset_int bit_offset = 0;
6787 /* First get the mode, signedness, and size. We do this from just the
6788 outermost expression. */
6789 *pbitsize = -1;
6790 if (TREE_CODE (exp) == COMPONENT_REF)
6792 tree field = TREE_OPERAND (exp, 1);
6793 size_tree = DECL_SIZE (field);
6794 if (flag_strict_volatile_bitfields > 0
6795 && TREE_THIS_VOLATILE (exp)
6796 && DECL_BIT_FIELD_TYPE (field)
6797 && DECL_MODE (field) != BLKmode)
6798 /* Volatile bitfields should be accessed in the mode of the
6799 field's type, not the mode computed based on the bit
6800 size. */
6801 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6802 else if (!DECL_BIT_FIELD (field))
6803 mode = DECL_MODE (field);
6804 else if (DECL_MODE (field) == BLKmode)
6805 blkmode_bitfield = true;
6807 *punsignedp = DECL_UNSIGNED (field);
6809 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6811 size_tree = TREE_OPERAND (exp, 1);
6812 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6813 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6815 /* For vector types, with the correct size of access, use the mode of
6816 the inner type. */
6817 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6818 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6819 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6820 mode = TYPE_MODE (TREE_TYPE (exp));
6822 else
6824 mode = TYPE_MODE (TREE_TYPE (exp));
6825 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6827 if (mode == BLKmode)
6828 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6829 else
6830 *pbitsize = GET_MODE_BITSIZE (mode);
6833 if (size_tree != 0)
6835 if (! tree_fits_uhwi_p (size_tree))
6836 mode = BLKmode, *pbitsize = -1;
6837 else
6838 *pbitsize = tree_to_uhwi (size_tree);
6841 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6842 and find the ultimate containing object. */
6843 while (1)
6845 switch (TREE_CODE (exp))
6847 case BIT_FIELD_REF:
6848 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6849 break;
6851 case COMPONENT_REF:
6853 tree field = TREE_OPERAND (exp, 1);
6854 tree this_offset = component_ref_field_offset (exp);
6856 /* If this field hasn't been filled in yet, don't go past it.
6857 This should only happen when folding expressions made during
6858 type construction. */
6859 if (this_offset == 0)
6860 break;
6862 offset = size_binop (PLUS_EXPR, offset, this_offset);
6863 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6865 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6867 break;
6869 case ARRAY_REF:
6870 case ARRAY_RANGE_REF:
6872 tree index = TREE_OPERAND (exp, 1);
6873 tree low_bound = array_ref_low_bound (exp);
6874 tree unit_size = array_ref_element_size (exp);
6876 /* We assume all arrays have sizes that are a multiple of a byte.
6877 First subtract the lower bound, if any, in the type of the
6878 index, then convert to sizetype and multiply by the size of
6879 the array element. */
6880 if (! integer_zerop (low_bound))
6881 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6882 index, low_bound);
6884 offset = size_binop (PLUS_EXPR, offset,
6885 size_binop (MULT_EXPR,
6886 fold_convert (sizetype, index),
6887 unit_size));
6889 break;
6891 case REALPART_EXPR:
6892 break;
6894 case IMAGPART_EXPR:
6895 bit_offset += *pbitsize;
6896 break;
6898 case VIEW_CONVERT_EXPR:
6899 if (keep_aligning && STRICT_ALIGNMENT
6900 && (TYPE_ALIGN (TREE_TYPE (exp))
6901 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6902 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6903 < BIGGEST_ALIGNMENT)
6904 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6905 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6906 goto done;
6907 break;
6909 case MEM_REF:
6910 /* Hand back the decl for MEM[&decl, off]. */
6911 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6913 tree off = TREE_OPERAND (exp, 1);
6914 if (!integer_zerop (off))
6916 offset_int boff, coff = mem_ref_offset (exp);
6917 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6918 bit_offset += boff;
6920 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6922 goto done;
6924 default:
6925 goto done;
6928 /* If any reference in the chain is volatile, the effect is volatile. */
6929 if (TREE_THIS_VOLATILE (exp))
6930 *pvolatilep = 1;
6932 exp = TREE_OPERAND (exp, 0);
6934 done:
6936 /* If OFFSET is constant, see if we can return the whole thing as a
6937 constant bit position. Make sure to handle overflow during
6938 this conversion. */
6939 if (TREE_CODE (offset) == INTEGER_CST)
6941 offset_int tem = wi::sext (wi::to_offset (offset),
6942 TYPE_PRECISION (sizetype));
6943 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
6944 tem += bit_offset;
6945 if (wi::fits_shwi_p (tem))
6947 *pbitpos = tem.to_shwi ();
6948 *poffset = offset = NULL_TREE;
6952 /* Otherwise, split it up. */
6953 if (offset)
6955 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6956 if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
6958 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
6959 offset_int tem = bit_offset.and_not (mask);
6960 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6961 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6962 bit_offset -= tem;
6963 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
6964 offset = size_binop (PLUS_EXPR, offset,
6965 wide_int_to_tree (sizetype, tem));
6968 *pbitpos = bit_offset.to_shwi ();
6969 *poffset = offset;
6972 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6973 if (mode == VOIDmode
6974 && blkmode_bitfield
6975 && (*pbitpos % BITS_PER_UNIT) == 0
6976 && (*pbitsize % BITS_PER_UNIT) == 0)
6977 *pmode = BLKmode;
6978 else
6979 *pmode = mode;
6981 return exp;
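/* Illustrative sketch, not part of the surrounding code; the exact
   numbers depend on the target's type layout:

     - for s.y below, the returned base is the VAR_DECL s with
       *PBITSIZE == 16, *PBITPOS == 32 and a null *POFFSET, because the
       whole position folds to a constant bit position;

     - for a[i], the position is variable, so *PBITSIZE == 64,
       *PBITPOS == 0 and *POFFSET is the byte-offset tree
       (sizetype) i * 8 built by the ARRAY_REF case.  */
#if 0
struct { int x; short y; } s;	/* assuming 32-bit int, 16-bit short */
double a[8];			/* assuming 64-bit double */
#endif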
6984 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6986 static unsigned HOST_WIDE_INT
6987 target_align (const_tree target)
6989 /* We might have a chain of nested references with intermediate misaligning
6990 bitfield components, so we need to recurse to find out. */
6992 unsigned HOST_WIDE_INT this_align, outer_align;
6994 switch (TREE_CODE (target))
6996 case BIT_FIELD_REF:
6997 return 1;
6999 case COMPONENT_REF:
7000 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7001 outer_align = target_align (TREE_OPERAND (target, 0));
7002 return MIN (this_align, outer_align);
7004 case ARRAY_REF:
7005 case ARRAY_RANGE_REF:
7006 this_align = TYPE_ALIGN (TREE_TYPE (target));
7007 outer_align = target_align (TREE_OPERAND (target, 0));
7008 return MIN (this_align, outer_align);
7010 CASE_CONVERT:
7011 case NON_LVALUE_EXPR:
7012 case VIEW_CONVERT_EXPR:
7013 this_align = TYPE_ALIGN (TREE_TYPE (target));
7014 outer_align = target_align (TREE_OPERAND (target, 0));
7015 return MAX (this_align, outer_align);
7017 default:
7018 return TYPE_ALIGN (TREE_TYPE (target));
7023 /* Given an rtx VALUE that may contain additions and multiplications, return
7024 an equivalent value that just refers to a register, memory, or constant.
7025 This is done by generating instructions to perform the arithmetic and
7026 returning a pseudo-register containing the value.
7028 The returned value may be a REG, SUBREG, MEM or constant. */
7031 force_operand (rtx value, rtx target)
7033 rtx op1, op2;
7034 /* Use subtarget as the target for operand 0 of a binary operation. */
7035 rtx subtarget = get_subtarget (target);
7036 enum rtx_code code = GET_CODE (value);
7038 /* Check for subreg applied to an expression produced by the loop optimizer. */
7039 if (code == SUBREG
7040 && !REG_P (SUBREG_REG (value))
7041 && !MEM_P (SUBREG_REG (value)))
7043 value
7044 = simplify_gen_subreg (GET_MODE (value),
7045 force_reg (GET_MODE (SUBREG_REG (value)),
7046 force_operand (SUBREG_REG (value),
7047 NULL_RTX)),
7048 GET_MODE (SUBREG_REG (value)),
7049 SUBREG_BYTE (value));
7050 code = GET_CODE (value);
7053 /* Check for a PIC address load. */
7054 if ((code == PLUS || code == MINUS)
7055 && XEXP (value, 0) == pic_offset_table_rtx
7056 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7057 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7058 || GET_CODE (XEXP (value, 1)) == CONST))
7060 if (!subtarget)
7061 subtarget = gen_reg_rtx (GET_MODE (value));
7062 emit_move_insn (subtarget, value);
7063 return subtarget;
7066 if (ARITHMETIC_P (value))
7068 op2 = XEXP (value, 1);
7069 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7070 subtarget = 0;
7071 if (code == MINUS && CONST_INT_P (op2))
7073 code = PLUS;
7074 op2 = negate_rtx (GET_MODE (value), op2);
7077 /* Check for an addition with OP2 a constant integer and our first
7078 operand a PLUS of a virtual register and something else. In that
7079 case, we want to emit the sum of the virtual register and the
7080 constant first and then add the other value. This allows virtual
7081 register instantiation to simply modify the constant rather than
7082 creating another one around this addition. */
7083 if (code == PLUS && CONST_INT_P (op2)
7084 && GET_CODE (XEXP (value, 0)) == PLUS
7085 && REG_P (XEXP (XEXP (value, 0), 0))
7086 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7087 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7089 rtx temp = expand_simple_binop (GET_MODE (value), code,
7090 XEXP (XEXP (value, 0), 0), op2,
7091 subtarget, 0, OPTAB_LIB_WIDEN);
7092 return expand_simple_binop (GET_MODE (value), code, temp,
7093 force_operand (XEXP (XEXP (value,
7094 0), 1), 0),
7095 target, 0, OPTAB_LIB_WIDEN);
7098 op1 = force_operand (XEXP (value, 0), subtarget);
7099 op2 = force_operand (op2, NULL_RTX);
7100 switch (code)
7102 case MULT:
7103 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7104 case DIV:
7105 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7106 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7107 target, 1, OPTAB_LIB_WIDEN);
7108 else
7109 return expand_divmod (0,
7110 FLOAT_MODE_P (GET_MODE (value))
7111 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7112 GET_MODE (value), op1, op2, target, 0);
7113 case MOD:
7114 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7115 target, 0);
7116 case UDIV:
7117 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7118 target, 1);
7119 case UMOD:
7120 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7121 target, 1);
7122 case ASHIFTRT:
7123 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7124 target, 0, OPTAB_LIB_WIDEN);
7125 default:
7126 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7127 target, 1, OPTAB_LIB_WIDEN);
7130 if (UNARY_P (value))
7132 if (!target)
7133 target = gen_reg_rtx (GET_MODE (value));
7134 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7135 switch (code)
7137 case ZERO_EXTEND:
7138 case SIGN_EXTEND:
7139 case TRUNCATE:
7140 case FLOAT_EXTEND:
7141 case FLOAT_TRUNCATE:
7142 convert_move (target, op1, code == ZERO_EXTEND);
7143 return target;
7145 case FIX:
7146 case UNSIGNED_FIX:
7147 expand_fix (target, op1, code == UNSIGNED_FIX);
7148 return target;
7150 case FLOAT:
7151 case UNSIGNED_FLOAT:
7152 expand_float (target, op1, code == UNSIGNED_FLOAT);
7153 return target;
7155 default:
7156 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7160 #ifdef INSN_SCHEDULING
7161 /* On machines that have insn scheduling, we want all memory references to be
7162 explicit, so we need to deal with such paradoxical SUBREGs. */
7163 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7164 value
7165 = simplify_gen_subreg (GET_MODE (value),
7166 force_reg (GET_MODE (SUBREG_REG (value)),
7167 force_operand (SUBREG_REG (value),
7168 NULL_RTX)),
7169 GET_MODE (SUBREG_REG (value)),
7170 SUBREG_BYTE (value));
7171 #endif
7173 return value;
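/* Usage sketch, not part of the surrounding code (the helper name is
   made up for illustration): a caller holding an address expression
   built from arithmetic rtx codes can legalize it with force_operand,
   which emits the multiply and add as real insns and returns an
   operand-shaped value, typically a pseudo register.  */
#if 0
static rtx
force_scaled_address (rtx base, rtx idx)
{
  rtx addr = gen_rtx_PLUS (Pmode,
			   gen_rtx_MULT (Pmode, idx, GEN_INT (8)),
			   base);
  return force_operand (addr, NULL_RTX);
}
#endif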
7176 /* Subroutine of expand_expr: return nonzero iff there is no way that
7177 EXP can reference X, which is being modified. TOP_P is nonzero if this
7178 call is going to be used to determine whether we need a temporary
7179 for EXP, as opposed to a recursive call to this function.
7181 It is always safe for this routine to return zero since it merely
7182 searches for optimization opportunities. */
7185 safe_from_p (const_rtx x, tree exp, int top_p)
7187 rtx exp_rtl = 0;
7188 int i, nops;
7190 if (x == 0
7191 /* If EXP has varying size, we MUST use a target since we currently
7192 have no way of allocating temporaries of variable size
7193 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7194 So we assume here that something at a higher level has prevented a
7195 clash. This is somewhat bogus, but the best we can do. Only
7196 do this when X is BLKmode and when we are at the top level. */
7197 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7198 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7199 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7200 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7201 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7202 != INTEGER_CST)
7203 && GET_MODE (x) == BLKmode)
7204 /* If X is in the outgoing argument area, it is always safe. */
7205 || (MEM_P (x)
7206 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7207 || (GET_CODE (XEXP (x, 0)) == PLUS
7208 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7209 return 1;
7211 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7212 find the underlying pseudo. */
7213 if (GET_CODE (x) == SUBREG)
7215 x = SUBREG_REG (x);
7216 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7217 return 0;
7220 /* Now look at our tree code and possibly recurse. */
7221 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7223 case tcc_declaration:
7224 exp_rtl = DECL_RTL_IF_SET (exp);
7225 break;
7227 case tcc_constant:
7228 return 1;
7230 case tcc_exceptional:
7231 if (TREE_CODE (exp) == TREE_LIST)
7233 while (1)
7235 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7236 return 0;
7237 exp = TREE_CHAIN (exp);
7238 if (!exp)
7239 return 1;
7240 if (TREE_CODE (exp) != TREE_LIST)
7241 return safe_from_p (x, exp, 0);
7244 else if (TREE_CODE (exp) == CONSTRUCTOR)
7246 constructor_elt *ce;
7247 unsigned HOST_WIDE_INT idx;
7249 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7250 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7251 || !safe_from_p (x, ce->value, 0))
7252 return 0;
7253 return 1;
7255 else if (TREE_CODE (exp) == ERROR_MARK)
7256 return 1; /* An already-visited SAVE_EXPR? */
7257 else
7258 return 0;
7260 case tcc_statement:
7261 /* The only case we look at here is the DECL_INITIAL inside a
7262 DECL_EXPR. */
7263 return (TREE_CODE (exp) != DECL_EXPR
7264 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7265 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7266 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7268 case tcc_binary:
7269 case tcc_comparison:
7270 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7271 return 0;
7272 /* Fall through. */
7274 case tcc_unary:
7275 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7277 case tcc_expression:
7278 case tcc_reference:
7279 case tcc_vl_exp:
7280 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7281 the expression. If it is set, we conflict iff we are that rtx or
7282 both are in memory. Otherwise, we check all operands of the
7283 expression recursively. */
7285 switch (TREE_CODE (exp))
7287 case ADDR_EXPR:
7288 /* If the operand is static or we are static, we can't conflict.
7289 Likewise if we don't conflict with the operand at all. */
7290 if (staticp (TREE_OPERAND (exp, 0))
7291 || TREE_STATIC (exp)
7292 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7293 return 1;
7295 /* Otherwise, the only way this can conflict is if we are taking
7296 the address of a DECL whose address is part of X, which is
7297 very rare. */
7298 exp = TREE_OPERAND (exp, 0);
7299 if (DECL_P (exp))
7301 if (!DECL_RTL_SET_P (exp)
7302 || !MEM_P (DECL_RTL (exp)))
7303 return 0;
7304 else
7305 exp_rtl = XEXP (DECL_RTL (exp), 0);
7307 break;
7309 case MEM_REF:
7310 if (MEM_P (x)
7311 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7312 get_alias_set (exp)))
7313 return 0;
7314 break;
7316 case CALL_EXPR:
7317 /* Assume that the call will clobber all hard registers and
7318 all of memory. */
7319 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7320 || MEM_P (x))
7321 return 0;
7322 break;
7324 case WITH_CLEANUP_EXPR:
7325 case CLEANUP_POINT_EXPR:
7326 /* Lowered by gimplify.c. */
7327 gcc_unreachable ();
7329 case SAVE_EXPR:
7330 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7332 default:
7333 break;
7336 /* If we have an rtx, we do not need to scan our operands. */
7337 if (exp_rtl)
7338 break;
7340 nops = TREE_OPERAND_LENGTH (exp);
7341 for (i = 0; i < nops; i++)
7342 if (TREE_OPERAND (exp, i) != 0
7343 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7344 return 0;
7346 break;
7348 case tcc_type:
7349 /* Should never get a type here. */
7350 gcc_unreachable ();
7353 /* If we have an rtl, find any enclosed object. Then see if we conflict
7354 with it. */
7355 if (exp_rtl)
7357 if (GET_CODE (exp_rtl) == SUBREG)
7359 exp_rtl = SUBREG_REG (exp_rtl);
7360 if (REG_P (exp_rtl)
7361 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7362 return 0;
7365 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7366 are memory and they conflict. */
7367 return ! (rtx_equal_p (x, exp_rtl)
7368 || (MEM_P (x) && MEM_P (exp_rtl)
7369 && true_dependence (exp_rtl, VOIDmode, x)));
7372 /* If we reach here, it is safe. */
7373 return 1;
7377 /* Return the highest power of two that EXP is known to be a multiple of.
7378 This is used in updating alignment of MEMs in array references. */
7380 unsigned HOST_WIDE_INT
7381 highest_pow2_factor (const_tree exp)
7383 unsigned HOST_WIDE_INT ret;
7384 int trailing_zeros = tree_ctz (exp);
7385 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7386 return BIGGEST_ALIGNMENT;
7387 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7388 if (ret > BIGGEST_ALIGNMENT)
7389 return BIGGEST_ALIGNMENT;
7390 return ret;
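/* Illustrative sketch, not part of the surrounding code: tree_ctz
   reports how many low-order bits of EXP are known to be zero, so when
   nothing is known about the variable i, an offset expression of the
   form i * 12 yields 1 << 2 == 4, i * 8 yields 8, and a bare i yields 1;
   results are capped at BIGGEST_ALIGNMENT, which is also what a literal
   zero (all bits known zero) returns.  */
#if 0
  /* E.g., inside a caller that holds such an offset tree (the variable
     name is hypothetical):  */
  unsigned HOST_WIDE_INT factor = highest_pow2_factor (byte_offset_tree);
#endif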
7393 /* Similar, except that the alignment requirements of TARGET are
7394 taken into account. Assume it is at least as aligned as its
7395 type, unless it is a COMPONENT_REF in which case the layout of
7396 the structure gives the alignment. */
7398 static unsigned HOST_WIDE_INT
7399 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7401 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7402 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7404 return MAX (factor, talign);
7407 /* Convert the tree comparison code TCODE to the rtl one where the
7408 signedness is UNSIGNEDP. */
7410 static enum rtx_code
7411 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7413 enum rtx_code code;
7414 switch (tcode)
7416 case EQ_EXPR:
7417 code = EQ;
7418 break;
7419 case NE_EXPR:
7420 code = NE;
7421 break;
7422 case LT_EXPR:
7423 code = unsignedp ? LTU : LT;
7424 break;
7425 case LE_EXPR:
7426 code = unsignedp ? LEU : LE;
7427 break;
7428 case GT_EXPR:
7429 code = unsignedp ? GTU : GT;
7430 break;
7431 case GE_EXPR:
7432 code = unsignedp ? GEU : GE;
7433 break;
7434 case UNORDERED_EXPR:
7435 code = UNORDERED;
7436 break;
7437 case ORDERED_EXPR:
7438 code = ORDERED;
7439 break;
7440 case UNLT_EXPR:
7441 code = UNLT;
7442 break;
7443 case UNLE_EXPR:
7444 code = UNLE;
7445 break;
7446 case UNGT_EXPR:
7447 code = UNGT;
7448 break;
7449 case UNGE_EXPR:
7450 code = UNGE;
7451 break;
7452 case UNEQ_EXPR:
7453 code = UNEQ;
7454 break;
7455 case LTGT_EXPR:
7456 code = LTGT;
7457 break;
7459 default:
7460 gcc_unreachable ();
7462 return code;
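/* Usage sketch, not part of the surrounding code: the UNSIGNEDP argument
   only matters for the ordering comparisons -- LT_EXPR maps to LT when
   UNSIGNEDP is zero and to LTU otherwise -- while EQ_EXPR, NE_EXPR and
   the unordered codes map identically for either signedness.  */
#if 0
  enum rtx_code rcode = convert_tree_comp_to_rtx (LT_EXPR, /*unsignedp=*/1);
  /* rcode == LTU  */
#endif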
7465 /* Subroutine of expand_expr. Expand the two operands of a binary
7466 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7467 The value may be stored in TARGET if TARGET is nonzero. The
7468 MODIFIER argument is as documented by expand_expr. */
7470 void
7471 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7472 enum expand_modifier modifier)
7474 if (! safe_from_p (target, exp1, 1))
7475 target = 0;
7476 if (operand_equal_p (exp0, exp1, 0))
7478 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7479 *op1 = copy_rtx (*op0);
7481 else
7483 /* If we need to preserve evaluation order, copy exp0 into its own
7484 temporary variable so that it can't be clobbered by exp1. */
7485 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7486 exp0 = save_expr (exp0);
7487 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7488 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
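/* Illustrative use (a sketch mirroring the generic "binop" path at the
   end of expand_expr_real_2; names such as this_optab and subtarget
   are assumed from that context):

     rtx op0, op1;
     expand_operands (treeop0, treeop1, subtarget, &op0, &op1,
                      EXPAND_NORMAL);
     temp = expand_binop (mode, this_optab, op0, op1, target,
                          unsignedp, OPTAB_LIB_WIDEN);

   The safe_from_p check above clears TARGET whenever expanding EXP1
   into it could clobber the value of EXP0.  */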
7493 /* Return a MEM that contains constant EXP. DEFER is as for
7494 output_constant_def and MODIFIER is as for expand_expr. */
7496 static rtx
7497 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7499 rtx mem;
7501 mem = output_constant_def (exp, defer);
7502 if (modifier != EXPAND_INITIALIZER)
7503 mem = use_anchored_address (mem);
7504 return mem;
7507 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7508 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7510 static rtx
7511 expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
7512 enum expand_modifier modifier, addr_space_t as)
7514 rtx result, subtarget;
7515 tree inner, offset;
7516 HOST_WIDE_INT bitsize, bitpos;
7517 int volatilep, unsignedp;
7518 machine_mode mode1;
7520 /* If we are taking the address of a constant and are at the top level,
7521 we have to use output_constant_def since we can't call force_const_mem
7522 at top level. */
7523 /* ??? This should be considered a front-end bug. We should not be
7524 generating ADDR_EXPR of something that isn't an LVALUE. The only
7525 exception here is STRING_CST. */
7526 if (CONSTANT_CLASS_P (exp))
7528 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7529 if (modifier < EXPAND_SUM)
7530 result = force_operand (result, target);
7531 return result;
7534 /* Everything must be something allowed by is_gimple_addressable. */
7535 switch (TREE_CODE (exp))
7537 case INDIRECT_REF:
7538 /* This case will happen via recursion for &a->b. */
7539 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7541 case MEM_REF:
7543 tree tem = TREE_OPERAND (exp, 0);
7544 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7545 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7546 return expand_expr (tem, target, tmode, modifier);
7549 case CONST_DECL:
7550 /* Expand the initializer like constants above. */
7551 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7552 0, modifier), 0);
7553 if (modifier < EXPAND_SUM)
7554 result = force_operand (result, target);
7555 return result;
7557 case REALPART_EXPR:
7558 /* The real part of the complex number is always first, therefore
7559 the address is the same as the address of the parent object. */
7560 offset = 0;
7561 bitpos = 0;
7562 inner = TREE_OPERAND (exp, 0);
7563 break;
7565 case IMAGPART_EXPR:
7566 /* The imaginary part of the complex number is always second.
7567 The expression is therefore always offset by the size of the
7568 scalar type. */
7569 offset = 0;
7570 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7571 inner = TREE_OPERAND (exp, 0);
7572 break;
7574 case COMPOUND_LITERAL_EXPR:
7575 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7576 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7577 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7578 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7579 the initializers aren't gimplified. */
7580 if (COMPOUND_LITERAL_EXPR_DECL (exp)
7581 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7582 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7583 target, tmode, modifier, as);
7584 /* FALLTHRU */
7585 default:
7586 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7587 expand_expr, as that can have various side effects; LABEL_DECLs for
7588 example, may not have their DECL_RTL set yet. Expand the rtl of
7589 CONSTRUCTORs too, which should yield a memory reference for the
7590 constructor's contents. Assume language specific tree nodes can
7591 be expanded in some interesting way. */
7592 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7593 if (DECL_P (exp)
7594 || TREE_CODE (exp) == CONSTRUCTOR
7595 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7597 result = expand_expr (exp, target, tmode,
7598 modifier == EXPAND_INITIALIZER
7599 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7601 /* If the DECL isn't in memory, then the DECL wasn't properly
7602 marked TREE_ADDRESSABLE, which will be either a front-end
7603 or a tree optimizer bug. */
7605 if (TREE_ADDRESSABLE (exp)
7606 && ! MEM_P (result)
7607 && ! targetm.calls.allocate_stack_slots_for_args ())
7609 error ("local frame unavailable (naked function?)");
7610 return result;
7612 else
7613 gcc_assert (MEM_P (result));
7614 result = XEXP (result, 0);
7616 /* ??? Is this needed anymore? */
7617 if (DECL_P (exp))
7618 TREE_USED (exp) = 1;
7620 if (modifier != EXPAND_INITIALIZER
7621 && modifier != EXPAND_CONST_ADDRESS
7622 && modifier != EXPAND_SUM)
7623 result = force_operand (result, target);
7624 return result;
7627 /* Pass FALSE as the last argument to get_inner_reference although
7628 we are expanding to RTL. The rationale is that we know how to
7629 handle "aligning nodes" here: we can just bypass them because
7630 they won't change the final object whose address will be returned
7631 (they actually exist only for that purpose). */
7632 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7633 &mode1, &unsignedp, &volatilep, false);
7634 break;
7637 /* We must have made progress. */
7638 gcc_assert (inner != exp);
7640 subtarget = offset || bitpos ? NULL_RTX : target;
7641 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7642 inner alignment, force the inner to be sufficiently aligned. */
7643 if (CONSTANT_CLASS_P (inner)
7644 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7646 inner = copy_node (inner);
7647 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7648 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7649 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7651 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7653 if (offset)
7655 rtx tmp;
7657 if (modifier != EXPAND_NORMAL)
7658 result = force_operand (result, NULL);
7659 tmp = expand_expr (offset, NULL_RTX, tmode,
7660 modifier == EXPAND_INITIALIZER
7661 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7663 /* expand_expr is allowed to return an object in a mode other
7664 than TMODE. If it did, we need to convert. */
7665 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7666 tmp = convert_modes (tmode, GET_MODE (tmp),
7667 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7668 result = convert_memory_address_addr_space (tmode, result, as);
7669 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7671 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7672 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7673 else
7675 subtarget = bitpos ? NULL_RTX : target;
7676 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7677 1, OPTAB_LIB_WIDEN);
7681 if (bitpos)
7683 /* Someone beforehand should have rejected taking the address
7684 of such an object. */
7685 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7687 result = convert_memory_address_addr_space (tmode, result, as);
7688 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7689 if (modifier < EXPAND_SUM)
7690 result = force_operand (result, target);
7693 return result;
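/* Illustration (assumed example, 8-bit units): for &s.f where field F
   sits at byte offset 8 with no variable offset, get_inner_reference
   returns bitpos == 64 and offset == 0, so the address of S is built
   by the recursive call and the code above finishes with
   plus_constant (tmode, result, 8).  */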
7696 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7697 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7699 static rtx
7700 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7701 enum expand_modifier modifier)
7703 addr_space_t as = ADDR_SPACE_GENERIC;
7704 machine_mode address_mode = Pmode;
7705 machine_mode pointer_mode = ptr_mode;
7706 machine_mode rmode;
7707 rtx result;
7709 /* Target mode of VOIDmode says "whatever's natural". */
7710 if (tmode == VOIDmode)
7711 tmode = TYPE_MODE (TREE_TYPE (exp));
7713 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7715 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7716 address_mode = targetm.addr_space.address_mode (as);
7717 pointer_mode = targetm.addr_space.pointer_mode (as);
7720 /* We can get called with some Weird Things if the user does silliness
7721 like "(short) &a". In that case, convert_memory_address won't do
7722 the right thing, so ignore the given target mode. */
7723 if (tmode != address_mode && tmode != pointer_mode)
7724 tmode = address_mode;
7726 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7727 tmode, modifier, as);
7729 /* Despite expand_expr claims concerning ignoring TMODE when not
7730 strictly convenient, stuff breaks if we don't honor it. Note
7731 that combined with the above, we only do this for pointer modes. */
7732 rmode = GET_MODE (result);
7733 if (rmode == VOIDmode)
7734 rmode = tmode;
7735 if (rmode != tmode)
7736 result = convert_memory_address_addr_space (tmode, result, as);
7738 return result;
7741 /* Generate code for computing CONSTRUCTOR EXP.
7742 An rtx for the computed value is returned. If AVOID_TEMP_MEM
 7743 is TRUE, instead of creating a temporary variable in memory,
7744 NULL is returned and the caller needs to handle it differently. */
7746 static rtx
7747 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7748 bool avoid_temp_mem)
7750 tree type = TREE_TYPE (exp);
7751 machine_mode mode = TYPE_MODE (type);
7753 /* Try to avoid creating a temporary at all. This is possible
7754 if all of the initializer is zero.
7755 FIXME: try to handle all [0..255] initializers we can handle
7756 with memset. */
7757 if (TREE_STATIC (exp)
7758 && !TREE_ADDRESSABLE (exp)
7759 && target != 0 && mode == BLKmode
7760 && all_zeros_p (exp))
7762 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7763 return target;
7766 /* All elts simple constants => refer to a constant in memory. But
7767 if this is a non-BLKmode mode, let it store a field at a time
7768 since that should make a CONST_INT, CONST_WIDE_INT or
7769 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7770 use, it is best to store directly into the target unless the type
7771 is large enough that memcpy will be used. If we are making an
7772 initializer and all operands are constant, put it in memory as
7773 well.
7775 FIXME: Avoid trying to fill vector constructors piece-meal.
7776 Output them with output_constant_def below unless we're sure
7777 they're zeros. This should go away when vector initializers
7778 are treated like VECTOR_CST instead of arrays. */
7779 if ((TREE_STATIC (exp)
7780 && ((mode == BLKmode
7781 && ! (target != 0 && safe_from_p (target, exp, 1)))
7782 || TREE_ADDRESSABLE (exp)
7783 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7784 && (! can_move_by_pieces
7785 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7786 TYPE_ALIGN (type)))
7787 && ! mostly_zeros_p (exp))))
7788 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7789 && TREE_CONSTANT (exp)))
7791 rtx constructor;
7793 if (avoid_temp_mem)
7794 return NULL_RTX;
7796 constructor = expand_expr_constant (exp, 1, modifier);
7798 if (modifier != EXPAND_CONST_ADDRESS
7799 && modifier != EXPAND_INITIALIZER
7800 && modifier != EXPAND_SUM)
7801 constructor = validize_mem (constructor);
7803 return constructor;
7806 /* Handle calls that pass values in multiple non-contiguous
7807 locations. The Irix 6 ABI has examples of this. */
7808 if (target == 0 || ! safe_from_p (target, exp, 1)
7809 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7811 if (avoid_temp_mem)
7812 return NULL_RTX;
7814 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7817 store_constructor (exp, target, 0, int_expr_size (exp));
7818 return target;
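/* Illustration (an assumed source-level case, provided such a
   constructor survives gimplification down to this point): for an
   automatic aggregate like

     struct S { int a[64]; } s = { 0 };

   the CONSTRUCTOR is TREE_STATIC and all_zeros_p, so with a BLKmode
   TARGET the object is cleared by the single clear_storage call above
   rather than element by element.  */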
7822 /* expand_expr: generate code for computing expression EXP.
7823 An rtx for the computed value is returned. The value is never null.
7824 In the case of a void EXP, const0_rtx is returned.
7826 The value may be stored in TARGET if TARGET is nonzero.
7827 TARGET is just a suggestion; callers must assume that
7828 the rtx returned may not be the same as TARGET.
7830 If TARGET is CONST0_RTX, it means that the value will be ignored.
7832 If TMODE is not VOIDmode, it suggests generating the
7833 result in mode TMODE. But this is done only when convenient.
 7834 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7835 TMODE is just a suggestion; callers must assume that
7836 the rtx returned may not have mode TMODE.
7838 Note that TARGET may have neither TMODE nor MODE. In that case, it
7839 probably will not be used.
7841 If MODIFIER is EXPAND_SUM then when EXP is an addition
7842 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7843 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7844 products as above, or REG or MEM, or constant.
7845 Ordinarily in such cases we would output mul or add instructions
7846 and then return a pseudo reg containing the sum.
7848 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7849 it also marks a label as absolutely required (it can't be dead).
7850 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7851 This is used for outputting expressions used in initializers.
7853 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7854 with a constant address even if that address is not normally legitimate.
7855 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7857 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7858 a call parameter. Such targets require special care as we haven't yet
7859 marked TARGET so that it's safe from being trashed by libcalls. We
7860 don't want to use TARGET for anything but the final result;
7861 Intermediate values must go elsewhere. Additionally, calls to
7862 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7864 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7865 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7866 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7867 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7868 recursively.
7870 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7871 In this case, we don't adjust a returned MEM rtx that wouldn't be
7872 sufficiently aligned for its mode; instead, it's up to the caller
7873 to deal with it afterwards. This is used to make sure that unaligned
7874 base objects for which out-of-bounds accesses are supported, for
7875 example record types with trailing arrays, aren't realigned behind
7876 the back of the caller.
7877 The normal operating mode is to pass FALSE for this parameter. */
 7879 rtx
 7880 expand_expr_real (tree exp, rtx target, machine_mode tmode,
7881 enum expand_modifier modifier, rtx *alt_rtl,
7882 bool inner_reference_p)
7884 rtx ret;
7886 /* Handle ERROR_MARK before anybody tries to access its type. */
7887 if (TREE_CODE (exp) == ERROR_MARK
7888 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7890 ret = CONST0_RTX (tmode);
7891 return ret ? ret : const0_rtx;
7894 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7895 inner_reference_p);
7896 return ret;
7899 /* Try to expand the conditional expression which is represented by
 7900 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7901 return the rtl reg which represents the result. Otherwise return
7902 NULL_RTX. */
7904 static rtx
7905 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7906 tree treeop1 ATTRIBUTE_UNUSED,
7907 tree treeop2 ATTRIBUTE_UNUSED)
7909 rtx insn;
7910 rtx op00, op01, op1, op2;
7911 enum rtx_code comparison_code;
7912 machine_mode comparison_mode;
7913 gimple srcstmt;
7914 rtx temp;
7915 tree type = TREE_TYPE (treeop1);
7916 int unsignedp = TYPE_UNSIGNED (type);
7917 machine_mode mode = TYPE_MODE (type);
7918 machine_mode orig_mode = mode;
7920 /* If we cannot do a conditional move on the mode, try doing it
7921 with the promoted mode. */
7922 if (!can_conditionally_move_p (mode))
7924 mode = promote_mode (type, mode, &unsignedp);
7925 if (!can_conditionally_move_p (mode))
7926 return NULL_RTX;
7927 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7929 else
7930 temp = assign_temp (type, 0, 1);
7932 start_sequence ();
7933 expand_operands (treeop1, treeop2,
7934 temp, &op1, &op2, EXPAND_NORMAL);
7936 if (TREE_CODE (treeop0) == SSA_NAME
7937 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7939 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7940 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7941 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7942 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7943 comparison_mode = TYPE_MODE (type);
7944 unsignedp = TYPE_UNSIGNED (type);
7945 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7947 else if (COMPARISON_CLASS_P (treeop0))
7949 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7950 enum tree_code cmpcode = TREE_CODE (treeop0);
7951 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7952 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7953 unsignedp = TYPE_UNSIGNED (type);
7954 comparison_mode = TYPE_MODE (type);
7955 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7957 else
7959 op00 = expand_normal (treeop0);
7960 op01 = const0_rtx;
7961 comparison_code = NE;
7962 comparison_mode = GET_MODE (op00);
7963 if (comparison_mode == VOIDmode)
7964 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7967 if (GET_MODE (op1) != mode)
7968 op1 = gen_lowpart (mode, op1);
7970 if (GET_MODE (op2) != mode)
7971 op2 = gen_lowpart (mode, op2);
7973 /* Try to emit the conditional move. */
7974 insn = emit_conditional_move (temp, comparison_code,
7975 op00, op01, comparison_mode,
7976 op1, op2, mode,
7977 unsignedp);
7979 /* If we could do the conditional move, emit the sequence,
7980 and return. */
7981 if (insn)
7983 rtx_insn *seq = get_insns ();
7984 end_sequence ();
7985 emit_insn (seq);
7986 return convert_modes (orig_mode, mode, temp, 0);
7989 /* Otherwise discard the sequence and fall back to code with
7990 branches. */
7991 end_sequence ();
7992 return NULL_RTX;
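/* Sketch of the intended effect (illustration only): for a GIMPLE
   statement such as

     x_3 = c_1 ? a_2 : b_4;

   where can_conditionally_move_p holds for the mode of x_3, the
   sequence built above expands a_2 and b_4, expands the controlling
   comparison (or compares c_1 against zero with NE), and asks
   emit_conditional_move for a conditional-move insn, avoiding the
   branchy COND_EXPR expansion further below.  */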
 7995 rtx
 7996 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
7997 enum expand_modifier modifier)
7999 rtx op0, op1, op2, temp;
8000 rtx_code_label *lab;
8001 tree type;
8002 int unsignedp;
8003 machine_mode mode;
8004 enum tree_code code = ops->code;
8005 optab this_optab;
8006 rtx subtarget, original_target;
8007 int ignore;
8008 bool reduce_bit_field;
8009 location_t loc = ops->location;
8010 tree treeop0, treeop1, treeop2;
8011 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8012 ? reduce_to_bit_field_precision ((expr), \
8013 target, \
8014 type) \
8015 : (expr))
8017 type = ops->type;
8018 mode = TYPE_MODE (type);
8019 unsignedp = TYPE_UNSIGNED (type);
8021 treeop0 = ops->op0;
8022 treeop1 = ops->op1;
8023 treeop2 = ops->op2;
8025 /* We should be called only on simple (binary or unary) expressions,
8026 exactly those that are valid in gimple expressions that aren't
8027 GIMPLE_SINGLE_RHS (or invalid). */
8028 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8029 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8030 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8032 ignore = (target == const0_rtx
8033 || ((CONVERT_EXPR_CODE_P (code)
8034 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8035 && TREE_CODE (type) == VOID_TYPE));
8037 /* We should be called only if we need the result. */
8038 gcc_assert (!ignore);
8040 /* An operation in what may be a bit-field type needs the
8041 result to be reduced to the precision of the bit-field type,
8042 which is narrower than that of the type's mode. */
8043 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8044 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8046 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8047 target = 0;
8049 /* Use subtarget as the target for operand 0 of a binary operation. */
8050 subtarget = get_subtarget (target);
8051 original_target = target;
8053 switch (code)
8055 case NON_LVALUE_EXPR:
8056 case PAREN_EXPR:
8057 CASE_CONVERT:
8058 if (treeop0 == error_mark_node)
8059 return const0_rtx;
8061 if (TREE_CODE (type) == UNION_TYPE)
8063 tree valtype = TREE_TYPE (treeop0);
8065 /* If both input and output are BLKmode, this conversion isn't doing
8066 anything except possibly changing memory attribute. */
8067 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8069 rtx result = expand_expr (treeop0, target, tmode,
8070 modifier);
8072 result = copy_rtx (result);
8073 set_mem_attributes (result, type, 0);
8074 return result;
8077 if (target == 0)
8079 if (TYPE_MODE (type) != BLKmode)
8080 target = gen_reg_rtx (TYPE_MODE (type));
8081 else
8082 target = assign_temp (type, 1, 1);
8085 if (MEM_P (target))
8086 /* Store data into beginning of memory target. */
8087 store_expr (treeop0,
8088 adjust_address (target, TYPE_MODE (valtype), 0),
8089 modifier == EXPAND_STACK_PARM,
8090 false);
8092 else
8094 gcc_assert (REG_P (target));
8096 /* Store this field into a union of the proper type. */
8097 store_field (target,
8098 MIN ((int_size_in_bytes (TREE_TYPE
8099 (treeop0))
8100 * BITS_PER_UNIT),
8101 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8102 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8105 /* Return the entire union. */
8106 return target;
8109 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8111 op0 = expand_expr (treeop0, target, VOIDmode,
8112 modifier);
8114 /* If the signedness of the conversion differs and OP0 is
8115 a promoted SUBREG, clear that indication since we now
8116 have to do the proper extension. */
8117 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8118 && GET_CODE (op0) == SUBREG)
8119 SUBREG_PROMOTED_VAR_P (op0) = 0;
8121 return REDUCE_BIT_FIELD (op0);
8124 op0 = expand_expr (treeop0, NULL_RTX, mode,
8125 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8126 if (GET_MODE (op0) == mode)
8129 /* If OP0 is a constant, just convert it into the proper mode. */
8130 else if (CONSTANT_P (op0))
8132 tree inner_type = TREE_TYPE (treeop0);
8133 machine_mode inner_mode = GET_MODE (op0);
8135 if (inner_mode == VOIDmode)
8136 inner_mode = TYPE_MODE (inner_type);
8138 if (modifier == EXPAND_INITIALIZER)
8139 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8140 subreg_lowpart_offset (mode,
8141 inner_mode));
8142 else
 8143 op0 = convert_modes (mode, inner_mode, op0,
8144 TYPE_UNSIGNED (inner_type));
8147 else if (modifier == EXPAND_INITIALIZER)
8148 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8150 else if (target == 0)
8151 op0 = convert_to_mode (mode, op0,
8152 TYPE_UNSIGNED (TREE_TYPE
8153 (treeop0)));
8154 else
8156 convert_move (target, op0,
8157 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8158 op0 = target;
8161 return REDUCE_BIT_FIELD (op0);
8163 case ADDR_SPACE_CONVERT_EXPR:
8165 tree treeop0_type = TREE_TYPE (treeop0);
8166 addr_space_t as_to;
8167 addr_space_t as_from;
8169 gcc_assert (POINTER_TYPE_P (type));
8170 gcc_assert (POINTER_TYPE_P (treeop0_type));
8172 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8173 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8175 /* Conversions between pointers to the same address space should
8176 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8177 gcc_assert (as_to != as_from);
8179 /* Ask target code to handle conversion between pointers
8180 to overlapping address spaces. */
8181 if (targetm.addr_space.subset_p (as_to, as_from)
8182 || targetm.addr_space.subset_p (as_from, as_to))
8184 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8185 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8186 gcc_assert (op0);
8187 return op0;
8190 /* For disjoint address spaces, converting anything but
8191 a null pointer invokes undefined behaviour. We simply
8192 always return a null pointer here. */
8193 return CONST0_RTX (mode);
8196 case POINTER_PLUS_EXPR:
8197 /* Even though the sizetype mode and the pointer's mode can be different
8198 expand is able to handle this correctly and get the correct result out
8199 of the PLUS_EXPR code. */
8200 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8201 if sizetype precision is smaller than pointer precision. */
8202 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8203 treeop1 = fold_convert_loc (loc, type,
8204 fold_convert_loc (loc, ssizetype,
8205 treeop1));
8206 /* If sizetype precision is larger than pointer precision, truncate the
8207 offset to have matching modes. */
8208 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8209 treeop1 = fold_convert_loc (loc, type, treeop1);
8211 case PLUS_EXPR:
8212 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8213 something else, make sure we add the register to the constant and
8214 then to the other thing. This case can occur during strength
8215 reduction and doing it this way will produce better code if the
8216 frame pointer or argument pointer is eliminated.
8218 fold-const.c will ensure that the constant is always in the inner
8219 PLUS_EXPR, so the only case we need to do anything about is if
8220 sp, ap, or fp is our second argument, in which case we must swap
8221 the innermost first argument and our second argument. */
8223 if (TREE_CODE (treeop0) == PLUS_EXPR
8224 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8225 && TREE_CODE (treeop1) == VAR_DECL
8226 && (DECL_RTL (treeop1) == frame_pointer_rtx
8227 || DECL_RTL (treeop1) == stack_pointer_rtx
8228 || DECL_RTL (treeop1) == arg_pointer_rtx))
8230 gcc_unreachable ();
8233 /* If the result is to be ptr_mode and we are adding an integer to
8234 something, we might be forming a constant. So try to use
8235 plus_constant. If it produces a sum and we can't accept it,
8236 use force_operand. This allows P = &ARR[const] to generate
8237 efficient code on machines where a SYMBOL_REF is not a valid
8238 address.
8240 If this is an EXPAND_SUM call, always return the sum. */
8241 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8242 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8244 if (modifier == EXPAND_STACK_PARM)
8245 target = 0;
8246 if (TREE_CODE (treeop0) == INTEGER_CST
8247 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8248 && TREE_CONSTANT (treeop1))
8250 rtx constant_part;
8251 HOST_WIDE_INT wc;
8252 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8254 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8255 EXPAND_SUM);
8256 /* Use wi::shwi to ensure that the constant is
8257 truncated according to the mode of OP1, then sign extended
8258 to a HOST_WIDE_INT. Using the constant directly can result
8259 in non-canonical RTL in a 64x32 cross compile. */
8260 wc = TREE_INT_CST_LOW (treeop0);
8261 constant_part =
8262 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8263 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8264 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8265 op1 = force_operand (op1, target);
8266 return REDUCE_BIT_FIELD (op1);
8269 else if (TREE_CODE (treeop1) == INTEGER_CST
8270 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8271 && TREE_CONSTANT (treeop0))
8273 rtx constant_part;
8274 HOST_WIDE_INT wc;
8275 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8277 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8278 (modifier == EXPAND_INITIALIZER
8279 ? EXPAND_INITIALIZER : EXPAND_SUM));
8280 if (! CONSTANT_P (op0))
8282 op1 = expand_expr (treeop1, NULL_RTX,
8283 VOIDmode, modifier);
8284 /* Return a PLUS if modifier says it's OK. */
8285 if (modifier == EXPAND_SUM
8286 || modifier == EXPAND_INITIALIZER)
8287 return simplify_gen_binary (PLUS, mode, op0, op1);
8288 goto binop2;
8290 /* Use wi::shwi to ensure that the constant is
 8291 truncated according to the mode of OP0, then sign extended
8292 to a HOST_WIDE_INT. Using the constant directly can result
8293 in non-canonical RTL in a 64x32 cross compile. */
8294 wc = TREE_INT_CST_LOW (treeop1);
8295 constant_part
8296 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8297 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8298 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8299 op0 = force_operand (op0, target);
8300 return REDUCE_BIT_FIELD (op0);
8304 /* Use TER to expand pointer addition of a negated value
8305 as pointer subtraction. */
8306 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8307 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8308 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8309 && TREE_CODE (treeop1) == SSA_NAME
8310 && TYPE_MODE (TREE_TYPE (treeop0))
8311 == TYPE_MODE (TREE_TYPE (treeop1)))
8313 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8314 if (def)
8316 treeop1 = gimple_assign_rhs1 (def);
8317 code = MINUS_EXPR;
8318 goto do_minus;
8322 /* No sense saving up arithmetic to be done
8323 if it's all in the wrong mode to form part of an address.
8324 And force_operand won't know whether to sign-extend or
8325 zero-extend. */
8326 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8327 || mode != ptr_mode)
8329 expand_operands (treeop0, treeop1,
8330 subtarget, &op0, &op1, EXPAND_NORMAL);
8331 if (op0 == const0_rtx)
8332 return op1;
8333 if (op1 == const0_rtx)
8334 return op0;
8335 goto binop2;
8338 expand_operands (treeop0, treeop1,
8339 subtarget, &op0, &op1, modifier);
8340 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8342 case MINUS_EXPR:
8343 do_minus:
8344 /* For initializers, we are allowed to return a MINUS of two
8345 symbolic constants. Here we handle all cases when both operands
8346 are constant. */
8347 /* Handle difference of two symbolic constants,
8348 for the sake of an initializer. */
8349 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8350 && really_constant_p (treeop0)
8351 && really_constant_p (treeop1))
8353 expand_operands (treeop0, treeop1,
8354 NULL_RTX, &op0, &op1, modifier);
8356 /* If the last operand is a CONST_INT, use plus_constant of
8357 the negated constant. Else make the MINUS. */
8358 if (CONST_INT_P (op1))
8359 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8360 -INTVAL (op1)));
8361 else
8362 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8365 /* No sense saving up arithmetic to be done
8366 if it's all in the wrong mode to form part of an address.
8367 And force_operand won't know whether to sign-extend or
8368 zero-extend. */
8369 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8370 || mode != ptr_mode)
8371 goto binop;
8373 expand_operands (treeop0, treeop1,
8374 subtarget, &op0, &op1, modifier);
8376 /* Convert A - const to A + (-const). */
8377 if (CONST_INT_P (op1))
8379 op1 = negate_rtx (mode, op1);
8380 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8383 goto binop2;
8385 case WIDEN_MULT_PLUS_EXPR:
8386 case WIDEN_MULT_MINUS_EXPR:
8387 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8388 op2 = expand_normal (treeop2);
8389 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8390 target, unsignedp);
8391 return target;
8393 case WIDEN_MULT_EXPR:
8394 /* If first operand is constant, swap them.
8395 Thus the following special case checks need only
8396 check the second operand. */
8397 if (TREE_CODE (treeop0) == INTEGER_CST)
8398 std::swap (treeop0, treeop1);
8400 /* First, check if we have a multiplication of one signed and one
8401 unsigned operand. */
8402 if (TREE_CODE (treeop1) != INTEGER_CST
8403 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8404 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8406 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8407 this_optab = usmul_widen_optab;
8408 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8409 != CODE_FOR_nothing)
8411 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8412 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8413 EXPAND_NORMAL);
8414 else
8415 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8416 EXPAND_NORMAL);
8417 /* op0 and op1 might still be constant, despite the above
8418 != INTEGER_CST check. Handle it. */
8419 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8421 op0 = convert_modes (innermode, mode, op0, true);
8422 op1 = convert_modes (innermode, mode, op1, false);
8423 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8424 target, unsignedp));
8426 goto binop3;
8429 /* Check for a multiplication with matching signedness. */
8430 else if ((TREE_CODE (treeop1) == INTEGER_CST
8431 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8432 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8433 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8435 tree op0type = TREE_TYPE (treeop0);
8436 machine_mode innermode = TYPE_MODE (op0type);
8437 bool zextend_p = TYPE_UNSIGNED (op0type);
8438 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8439 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8441 if (TREE_CODE (treeop0) != INTEGER_CST)
8443 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8444 != CODE_FOR_nothing)
8446 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8447 EXPAND_NORMAL);
8448 /* op0 and op1 might still be constant, despite the above
8449 != INTEGER_CST check. Handle it. */
8450 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8452 widen_mult_const:
8453 op0 = convert_modes (innermode, mode, op0, zextend_p);
 8454 op1
 8455 = convert_modes (innermode, mode, op1,
8456 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8457 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8458 target,
8459 unsignedp));
8461 temp = expand_widening_mult (mode, op0, op1, target,
8462 unsignedp, this_optab);
8463 return REDUCE_BIT_FIELD (temp);
8465 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8466 != CODE_FOR_nothing
8467 && innermode == word_mode)
8469 rtx htem, hipart;
8470 op0 = expand_normal (treeop0);
8471 if (TREE_CODE (treeop1) == INTEGER_CST)
8472 op1 = convert_modes (innermode, mode,
8473 expand_normal (treeop1),
8474 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8475 else
8476 op1 = expand_normal (treeop1);
8477 /* op0 and op1 might still be constant, despite the above
8478 != INTEGER_CST check. Handle it. */
8479 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8480 goto widen_mult_const;
8481 temp = expand_binop (mode, other_optab, op0, op1, target,
8482 unsignedp, OPTAB_LIB_WIDEN);
8483 hipart = gen_highpart (innermode, temp);
8484 htem = expand_mult_highpart_adjust (innermode, hipart,
8485 op0, op1, hipart,
8486 zextend_p);
8487 if (htem != hipart)
8488 emit_move_insn (hipart, htem);
8489 return REDUCE_BIT_FIELD (temp);
8493 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8494 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8495 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8496 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8498 case FMA_EXPR:
8500 optab opt = fma_optab;
8501 gimple def0, def2;
8503 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8504 call. */
8505 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8507 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8508 tree call_expr;
8510 gcc_assert (fn != NULL_TREE);
8511 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8512 return expand_builtin (call_expr, target, subtarget, mode, false);
8515 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8516 /* The multiplication is commutative - look at its 2nd operand
8517 if the first isn't fed by a negate. */
8518 if (!def0)
8520 def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8521 /* Swap operands if the 2nd operand is fed by a negate. */
8522 if (def0)
8523 std::swap (treeop0, treeop1);
8525 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8527 op0 = op2 = NULL;
8529 if (def0 && def2
8530 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8532 opt = fnms_optab;
8533 op0 = expand_normal (gimple_assign_rhs1 (def0));
8534 op2 = expand_normal (gimple_assign_rhs1 (def2));
8536 else if (def0
8537 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8539 opt = fnma_optab;
8540 op0 = expand_normal (gimple_assign_rhs1 (def0));
8542 else if (def2
8543 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8545 opt = fms_optab;
8546 op2 = expand_normal (gimple_assign_rhs1 (def2));
8549 if (op0 == NULL)
8550 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8551 if (op2 == NULL)
8552 op2 = expand_normal (treeop2);
8553 op1 = expand_normal (treeop1);
8555 return expand_ternary_op (TYPE_MODE (type), opt,
8556 op0, op1, op2, target, 0);
8559 case MULT_EXPR:
8560 /* If this is a fixed-point operation, then we cannot use the code
8561 below because "expand_mult" doesn't support sat/no-sat fixed-point
8562 multiplications. */
8563 if (ALL_FIXED_POINT_MODE_P (mode))
8564 goto binop;
8566 /* If first operand is constant, swap them.
8567 Thus the following special case checks need only
8568 check the second operand. */
8569 if (TREE_CODE (treeop0) == INTEGER_CST)
8570 std::swap (treeop0, treeop1);
8572 /* Attempt to return something suitable for generating an
8573 indexed address, for machines that support that. */
8575 if (modifier == EXPAND_SUM && mode == ptr_mode
8576 && tree_fits_shwi_p (treeop1))
8578 tree exp1 = treeop1;
8580 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8581 EXPAND_SUM);
8583 if (!REG_P (op0))
8584 op0 = force_operand (op0, NULL_RTX);
8585 if (!REG_P (op0))
8586 op0 = copy_to_mode_reg (mode, op0);
8588 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8589 gen_int_mode (tree_to_shwi (exp1),
8590 TYPE_MODE (TREE_TYPE (exp1)))));
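/* Illustration (assumed example, 32-bit pointers and 4-byte elements):
   when the index arithmetic of a[i] reaches this point with
   EXPAND_SUM, the code above can hand back

     (mult:SI (reg:SI <i>) (const_int 4))

   so the caller can fold it into an indexed address, matching the
   EXPAND_SUM contract documented before expand_expr_real.  */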
8593 if (modifier == EXPAND_STACK_PARM)
8594 target = 0;
8596 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8597 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8599 case TRUNC_DIV_EXPR:
8600 case FLOOR_DIV_EXPR:
8601 case CEIL_DIV_EXPR:
8602 case ROUND_DIV_EXPR:
8603 case EXACT_DIV_EXPR:
8604 /* If this is a fixed-point operation, then we cannot use the code
8605 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8606 divisions. */
8607 if (ALL_FIXED_POINT_MODE_P (mode))
8608 goto binop;
8610 if (modifier == EXPAND_STACK_PARM)
8611 target = 0;
8612 /* Possible optimization: compute the dividend with EXPAND_SUM
 8613 then, if the divisor is constant, optimize the case
 8614 where some terms of the dividend have coefficients divisible by it. */
8615 expand_operands (treeop0, treeop1,
8616 subtarget, &op0, &op1, EXPAND_NORMAL);
8617 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8619 case RDIV_EXPR:
8620 goto binop;
8622 case MULT_HIGHPART_EXPR:
8623 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8624 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8625 gcc_assert (temp);
8626 return temp;
8628 case TRUNC_MOD_EXPR:
8629 case FLOOR_MOD_EXPR:
8630 case CEIL_MOD_EXPR:
8631 case ROUND_MOD_EXPR:
8632 if (modifier == EXPAND_STACK_PARM)
8633 target = 0;
8634 expand_operands (treeop0, treeop1,
8635 subtarget, &op0, &op1, EXPAND_NORMAL);
8636 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8638 case FIXED_CONVERT_EXPR:
8639 op0 = expand_normal (treeop0);
8640 if (target == 0 || modifier == EXPAND_STACK_PARM)
8641 target = gen_reg_rtx (mode);
8643 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8644 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8645 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8646 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8647 else
8648 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8649 return target;
8651 case FIX_TRUNC_EXPR:
8652 op0 = expand_normal (treeop0);
8653 if (target == 0 || modifier == EXPAND_STACK_PARM)
8654 target = gen_reg_rtx (mode);
8655 expand_fix (target, op0, unsignedp);
8656 return target;
8658 case FLOAT_EXPR:
8659 op0 = expand_normal (treeop0);
8660 if (target == 0 || modifier == EXPAND_STACK_PARM)
8661 target = gen_reg_rtx (mode);
8662 /* expand_float can't figure out what to do if FROM has VOIDmode.
8663 So give it the correct mode. With -O, cse will optimize this. */
8664 if (GET_MODE (op0) == VOIDmode)
8665 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8666 op0);
8667 expand_float (target, op0,
8668 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8669 return target;
8671 case NEGATE_EXPR:
8672 op0 = expand_expr (treeop0, subtarget,
8673 VOIDmode, EXPAND_NORMAL);
8674 if (modifier == EXPAND_STACK_PARM)
8675 target = 0;
8676 temp = expand_unop (mode,
8677 optab_for_tree_code (NEGATE_EXPR, type,
8678 optab_default),
8679 op0, target, 0);
8680 gcc_assert (temp);
8681 return REDUCE_BIT_FIELD (temp);
8683 case ABS_EXPR:
8684 op0 = expand_expr (treeop0, subtarget,
8685 VOIDmode, EXPAND_NORMAL);
8686 if (modifier == EXPAND_STACK_PARM)
8687 target = 0;
8689 /* ABS_EXPR is not valid for complex arguments. */
8690 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8691 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8693 /* Unsigned abs is simply the operand. Testing here means we don't
8694 risk generating incorrect code below. */
8695 if (TYPE_UNSIGNED (type))
8696 return op0;
8698 return expand_abs (mode, op0, target, unsignedp,
8699 safe_from_p (target, treeop0, 1));
8701 case MAX_EXPR:
8702 case MIN_EXPR:
8703 target = original_target;
8704 if (target == 0
8705 || modifier == EXPAND_STACK_PARM
8706 || (MEM_P (target) && MEM_VOLATILE_P (target))
8707 || GET_MODE (target) != mode
8708 || (REG_P (target)
8709 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8710 target = gen_reg_rtx (mode);
8711 expand_operands (treeop0, treeop1,
8712 target, &op0, &op1, EXPAND_NORMAL);
8714 /* First try to do it with a special MIN or MAX instruction.
8715 If that does not win, use a conditional jump to select the proper
8716 value. */
8717 this_optab = optab_for_tree_code (code, type, optab_default);
8718 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8719 OPTAB_WIDEN);
8720 if (temp != 0)
8721 return temp;
8723 /* At this point, a MEM target is no longer useful; we will get better
8724 code without it. */
8726 if (! REG_P (target))
8727 target = gen_reg_rtx (mode);
8729 /* If op1 was placed in target, swap op0 and op1. */
8730 if (target != op0 && target == op1)
8731 std::swap (op0, op1);
8733 /* We generate better code and avoid problems with op1 mentioning
8734 target by forcing op1 into a pseudo if it isn't a constant. */
8735 if (! CONSTANT_P (op1))
8736 op1 = force_reg (mode, op1);
8739 enum rtx_code comparison_code;
8740 rtx cmpop1 = op1;
8742 if (code == MAX_EXPR)
8743 comparison_code = unsignedp ? GEU : GE;
8744 else
8745 comparison_code = unsignedp ? LEU : LE;
8747 /* Canonicalize to comparisons against 0. */
8748 if (op1 == const1_rtx)
8750 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8751 or (a != 0 ? a : 1) for unsigned.
8752 For MIN we are safe converting (a <= 1 ? a : 1)
8753 into (a <= 0 ? a : 1) */
8754 cmpop1 = const0_rtx;
8755 if (code == MAX_EXPR)
8756 comparison_code = unsignedp ? NE : GT;
8758 if (op1 == constm1_rtx && !unsignedp)
8760 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8761 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8762 cmpop1 = const0_rtx;
8763 if (code == MIN_EXPR)
8764 comparison_code = LT;
8767 /* Use a conditional move if possible. */
8768 if (can_conditionally_move_p (mode))
8770 rtx insn;
8772 start_sequence ();
8774 /* Try to emit the conditional move. */
8775 insn = emit_conditional_move (target, comparison_code,
8776 op0, cmpop1, mode,
8777 op0, op1, mode,
8778 unsignedp);
8780 /* If we could do the conditional move, emit the sequence,
8781 and return. */
8782 if (insn)
8784 rtx_insn *seq = get_insns ();
8785 end_sequence ();
8786 emit_insn (seq);
8787 return target;
8790 /* Otherwise discard the sequence and fall back to code with
8791 branches. */
8792 end_sequence ();
8795 if (target != op0)
8796 emit_move_insn (target, op0);
8798 lab = gen_label_rtx ();
8799 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8800 unsignedp, mode, NULL_RTX, NULL, lab,
8801 -1);
8803 emit_move_insn (target, op1);
8804 emit_label (lab);
8805 return target;
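/* The branchy fallback above amounts to the following sketch, taking
   MAX_EXPR on signed operands (comparison_code == GE) as the example:

       target = op0;
       if (target >= cmpop1)
         goto lab;
       target = op1;
     lab:

   the conditional-move attempt just before it is preferred whenever
   can_conditionally_move_p (mode) holds.  */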
8807 case BIT_NOT_EXPR:
8808 op0 = expand_expr (treeop0, subtarget,
8809 VOIDmode, EXPAND_NORMAL);
8810 if (modifier == EXPAND_STACK_PARM)
8811 target = 0;
8812 /* In case we have to reduce the result to bitfield precision
8813 for unsigned bitfield expand this as XOR with a proper constant
8814 instead. */
8815 if (reduce_bit_field && TYPE_UNSIGNED (type))
8817 wide_int mask = wi::mask (TYPE_PRECISION (type),
8818 false, GET_MODE_PRECISION (mode));
8820 temp = expand_binop (mode, xor_optab, op0,
8821 immed_wide_int_const (mask, mode),
8822 target, 1, OPTAB_LIB_WIDEN);
8824 else
8825 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8826 gcc_assert (temp);
8827 return temp;
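/* Worked illustration (example precision only): for an unsigned
   bit-field type of precision 3 in SImode, the mask computed above is
   0x7, so ~x is expanded as x ^ 0x7, which leaves the upper bits of
   the wider mode zero exactly as the bit-field reduction expects.  */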
8829 /* ??? Can optimize bitwise operations with one arg constant.
8830 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8831 and (a bitwise1 b) bitwise2 b (etc)
8832 but that is probably not worth while. */
8834 case BIT_AND_EXPR:
8835 case BIT_IOR_EXPR:
8836 case BIT_XOR_EXPR:
8837 goto binop;
8839 case LROTATE_EXPR:
8840 case RROTATE_EXPR:
8841 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8842 || (GET_MODE_PRECISION (TYPE_MODE (type))
8843 == TYPE_PRECISION (type)));
8844 /* fall through */
8846 case LSHIFT_EXPR:
8847 case RSHIFT_EXPR:
8848 /* If this is a fixed-point operation, then we cannot use the code
8849 below because "expand_shift" doesn't support sat/no-sat fixed-point
8850 shifts. */
8851 if (ALL_FIXED_POINT_MODE_P (mode))
8852 goto binop;
8854 if (! safe_from_p (subtarget, treeop1, 1))
8855 subtarget = 0;
8856 if (modifier == EXPAND_STACK_PARM)
8857 target = 0;
8858 op0 = expand_expr (treeop0, subtarget,
8859 VOIDmode, EXPAND_NORMAL);
8860 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8861 unsignedp);
8862 if (code == LSHIFT_EXPR)
8863 temp = REDUCE_BIT_FIELD (temp);
8864 return temp;
8866 /* Could determine the answer when only additive constants differ. Also,
8867 the addition of one can be handled by changing the condition. */
8868 case LT_EXPR:
8869 case LE_EXPR:
8870 case GT_EXPR:
8871 case GE_EXPR:
8872 case EQ_EXPR:
8873 case NE_EXPR:
8874 case UNORDERED_EXPR:
8875 case ORDERED_EXPR:
8876 case UNLT_EXPR:
8877 case UNLE_EXPR:
8878 case UNGT_EXPR:
8879 case UNGE_EXPR:
8880 case UNEQ_EXPR:
8881 case LTGT_EXPR:
8883 temp = do_store_flag (ops,
8884 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8885 tmode != VOIDmode ? tmode : mode);
8886 if (temp)
8887 return temp;
8889 /* Use a compare and a jump for BLKmode comparisons, or for function
 8890 type comparisons if have_canonicalize_funcptr_for_compare. */
8892 if ((target == 0
8893 || modifier == EXPAND_STACK_PARM
8894 || ! safe_from_p (target, treeop0, 1)
8895 || ! safe_from_p (target, treeop1, 1)
8896 /* Make sure we don't have a hard reg (such as function's return
8897 value) live across basic blocks, if not optimizing. */
8898 || (!optimize && REG_P (target)
8899 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8900 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8902 emit_move_insn (target, const0_rtx);
8904 rtx_code_label *lab1 = gen_label_rtx ();
8905 jumpifnot_1 (code, treeop0, treeop1, lab1, -1);
8907 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8908 emit_move_insn (target, constm1_rtx);
8909 else
8910 emit_move_insn (target, const1_rtx);
8912 emit_label (lab1);
8913 return target;
8915 case COMPLEX_EXPR:
8916 /* Get the rtx code of the operands. */
8917 op0 = expand_normal (treeop0);
8918 op1 = expand_normal (treeop1);
8920 if (!target)
8921 target = gen_reg_rtx (TYPE_MODE (type));
8922 else
8923 /* If target overlaps with op1, then either we need to force
8924 op1 into a pseudo (if target also overlaps with op0),
8925 or write the complex parts in reverse order. */
8926 switch (GET_CODE (target))
8928 case CONCAT:
8929 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8931 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8933 complex_expr_force_op1:
8934 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8935 emit_move_insn (temp, op1);
8936 op1 = temp;
8937 break;
8939 complex_expr_swap_order:
8940 /* Move the imaginary (op1) and real (op0) parts to their
8941 location. */
8942 write_complex_part (target, op1, true);
8943 write_complex_part (target, op0, false);
8945 return target;
8947 break;
8948 case MEM:
8949 temp = adjust_address_nv (target,
8950 GET_MODE_INNER (GET_MODE (target)), 0);
8951 if (reg_overlap_mentioned_p (temp, op1))
8953 machine_mode imode = GET_MODE_INNER (GET_MODE (target));
8954 temp = adjust_address_nv (target, imode,
8955 GET_MODE_SIZE (imode));
8956 if (reg_overlap_mentioned_p (temp, op0))
8957 goto complex_expr_force_op1;
8958 goto complex_expr_swap_order;
8960 break;
8961 default:
8962 if (reg_overlap_mentioned_p (target, op1))
8964 if (reg_overlap_mentioned_p (target, op0))
8965 goto complex_expr_force_op1;
8966 goto complex_expr_swap_order;
8968 break;
8971 /* Move the real (op0) and imaginary (op1) parts to their location. */
8972 write_complex_part (target, op0, false);
8973 write_complex_part (target, op1, true);
8975 return target;
8977 case WIDEN_SUM_EXPR:
8979 tree oprnd0 = treeop0;
8980 tree oprnd1 = treeop1;
8982 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8983 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8984 target, unsignedp);
8985 return target;
8988 case REDUC_MAX_EXPR:
8989 case REDUC_MIN_EXPR:
8990 case REDUC_PLUS_EXPR:
8992 op0 = expand_normal (treeop0);
8993 this_optab = optab_for_tree_code (code, type, optab_default);
8994 machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
8996 if (optab_handler (this_optab, vec_mode) != CODE_FOR_nothing)
8998 struct expand_operand ops[2];
8999 enum insn_code icode = optab_handler (this_optab, vec_mode);
9001 create_output_operand (&ops[0], target, mode);
9002 create_input_operand (&ops[1], op0, vec_mode);
9003 if (maybe_expand_insn (icode, 2, ops))
9005 target = ops[0].value;
9006 if (GET_MODE (target) != mode)
9007 return gen_lowpart (tmode, target);
9008 return target;
9011 /* Fall back to optab with vector result, and then extract scalar. */
9012 this_optab = scalar_reduc_to_vector (this_optab, type);
9013 temp = expand_unop (vec_mode, this_optab, op0, NULL_RTX, unsignedp);
9014 gcc_assert (temp);
9015 /* The tree code produces a scalar result, but (somewhat by convention)
9016 the optab produces a vector with the result in element 0 if
9017 little-endian, or element N-1 if big-endian. So pull the scalar
9018 result out of that element. */
9019 int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
9020 int bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (vec_mode));
9021 temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
9022 target, mode, mode);
9023 gcc_assert (temp);
9024 return temp;
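/* Example of the extraction arithmetic above (illustration only): for
   a V4SImode reduction on a big-endian target, GET_MODE_NUNITS is 4
   and the inner bitsize is 32, so the scalar result is pulled from bit
   position 32 * (4 - 1) == 96; on a little-endian target it comes from
   bit position 0.  */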
9027 case VEC_UNPACK_HI_EXPR:
9028 case VEC_UNPACK_LO_EXPR:
9030 op0 = expand_normal (treeop0);
9031 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9032 target, unsignedp);
9033 gcc_assert (temp);
9034 return temp;
9037 case VEC_UNPACK_FLOAT_HI_EXPR:
9038 case VEC_UNPACK_FLOAT_LO_EXPR:
9040 op0 = expand_normal (treeop0);
9041 /* The signedness is determined from input operand. */
9042 temp = expand_widen_pattern_expr
9043 (ops, op0, NULL_RTX, NULL_RTX,
9044 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9046 gcc_assert (temp);
9047 return temp;
9050 case VEC_WIDEN_MULT_HI_EXPR:
9051 case VEC_WIDEN_MULT_LO_EXPR:
9052 case VEC_WIDEN_MULT_EVEN_EXPR:
9053 case VEC_WIDEN_MULT_ODD_EXPR:
9054 case VEC_WIDEN_LSHIFT_HI_EXPR:
9055 case VEC_WIDEN_LSHIFT_LO_EXPR:
9056 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9057 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9058 target, unsignedp);
9059 gcc_assert (target);
9060 return target;
9062 case VEC_PACK_TRUNC_EXPR:
9063 case VEC_PACK_SAT_EXPR:
9064 case VEC_PACK_FIX_TRUNC_EXPR:
9065 mode = TYPE_MODE (TREE_TYPE (treeop0));
9066 goto binop;
9068 case VEC_PERM_EXPR:
9069 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9070 op2 = expand_normal (treeop2);
9072 /* Careful here: if the target doesn't support integral vector modes,
9073 a constant selection vector could wind up smooshed into a normal
9074 integral constant. */
9075 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9077 tree sel_type = TREE_TYPE (treeop2);
9078 machine_mode vmode
9079 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9080 TYPE_VECTOR_SUBPARTS (sel_type));
9081 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9082 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9083 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9085 else
9086 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9088 temp = expand_vec_perm (mode, op0, op1, op2, target);
9089 gcc_assert (temp);
9090 return temp;
9092 case DOT_PROD_EXPR:
9094 tree oprnd0 = treeop0;
9095 tree oprnd1 = treeop1;
9096 tree oprnd2 = treeop2;
9097 rtx op2;
9099 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9100 op2 = expand_normal (oprnd2);
9101 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9102 target, unsignedp);
9103 return target;
9106 case SAD_EXPR:
9108 tree oprnd0 = treeop0;
9109 tree oprnd1 = treeop1;
9110 tree oprnd2 = treeop2;
9111 rtx op2;
9113 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9114 op2 = expand_normal (oprnd2);
9115 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9116 target, unsignedp);
9117 return target;
9120 case REALIGN_LOAD_EXPR:
9122 tree oprnd0 = treeop0;
9123 tree oprnd1 = treeop1;
9124 tree oprnd2 = treeop2;
9125 rtx op2;
9127 this_optab = optab_for_tree_code (code, type, optab_default);
9128 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9129 op2 = expand_normal (oprnd2);
9130 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9131 target, unsignedp);
9132 gcc_assert (temp);
9133 return temp;
9136 case COND_EXPR:
9138 /* A COND_EXPR with its type being VOID_TYPE represents a
9139 conditional jump and is handled in
9140 expand_gimple_cond_expr. */
9141 gcc_assert (!VOID_TYPE_P (type));
9143 /* Note that COND_EXPRs whose type is a structure or union
9144 are required to be constructed to contain assignments of
9145 a temporary variable, so that we can evaluate them here
9146 for side effect only. If type is void, we must do likewise. */
9148 gcc_assert (!TREE_ADDRESSABLE (type)
9149 && !ignore
9150 && TREE_TYPE (treeop1) != void_type_node
9151 && TREE_TYPE (treeop2) != void_type_node);
9153 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9154 if (temp)
9155 return temp;
9157 /* If we are not to produce a result, we have no target. Otherwise,
9158 if a target was specified use it; it will not be used as an
9159 intermediate target unless it is safe. If no target, use a
9160 temporary. */
9162 if (modifier != EXPAND_STACK_PARM
9163 && original_target
9164 && safe_from_p (original_target, treeop0, 1)
9165 && GET_MODE (original_target) == mode
9166 && !MEM_P (original_target))
9167 temp = original_target;
9168 else
9169 temp = assign_temp (type, 0, 1);
9171 do_pending_stack_adjust ();
9172 NO_DEFER_POP;
9173 rtx_code_label *lab0 = gen_label_rtx ();
9174 rtx_code_label *lab1 = gen_label_rtx ();
9175 jumpifnot (treeop0, lab0, -1);
9176 store_expr (treeop1, temp,
9177 modifier == EXPAND_STACK_PARM,
9178 false);
9180 emit_jump_insn (targetm.gen_jump (lab1));
9181 emit_barrier ();
9182 emit_label (lab0);
9183 store_expr (treeop2, temp,
9184 modifier == EXPAND_STACK_PARM,
9185 false);
9187 emit_label (lab1);
9188 OK_DEFER_POP;
9189 return temp;
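/* Shape of the code emitted above (sketch): for temp = c ? a : b this
   is

       if (!c) goto lab0;
       temp = a;
       goto lab1;
     lab0:
       temp = b;
     lab1:

   used only after the conditional-move attempt at the top of the case
   has failed.  */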
9192 case VEC_COND_EXPR:
9193 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9194 return target;
9196 default:
9197 gcc_unreachable ();
9200 /* Here to do an ordinary binary operator. */
9201 binop:
9202 expand_operands (treeop0, treeop1,
9203 subtarget, &op0, &op1, EXPAND_NORMAL);
9204 binop2:
9205 this_optab = optab_for_tree_code (code, type, optab_default);
9206 binop3:
9207 if (modifier == EXPAND_STACK_PARM)
9208 target = 0;
9209 temp = expand_binop (mode, this_optab, op0, op1, target,
9210 unsignedp, OPTAB_LIB_WIDEN);
9211 gcc_assert (temp);
9212 /* Bitwise operations do not need bitfield reduction as we expect their
9213 operands to be properly truncated. */
9214 if (code == BIT_XOR_EXPR
9215 || code == BIT_AND_EXPR
9216 || code == BIT_IOR_EXPR)
9217 return temp;
9218 return REDUCE_BIT_FIELD (temp);
9220 #undef REDUCE_BIT_FIELD
9223 /* Return TRUE if the expression computed by STMT is suitable for replacement.
9224 Never consider memory loads as replaceable, because those don't ever lead
9225 into constant expressions. */
9227 static bool
9228 stmt_is_replaceable_p (gimple stmt)
9230 if (ssa_is_replaceable_p (stmt))
9232 /* Don't move around loads. */
9233 if (!gimple_assign_single_p (stmt)
9234 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9235 return true;
9237 return false;
9241 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9242 enum expand_modifier modifier, rtx *alt_rtl,
9243 bool inner_reference_p)
9245 rtx op0, op1, temp, decl_rtl;
9246 tree type;
9247 int unsignedp;
9248 machine_mode mode;
9249 enum tree_code code = TREE_CODE (exp);
9250 rtx subtarget, original_target;
9251 int ignore;
9252 tree context;
9253 bool reduce_bit_field;
9254 location_t loc = EXPR_LOCATION (exp);
9255 struct separate_ops ops;
9256 tree treeop0, treeop1, treeop2;
9257 tree ssa_name = NULL_TREE;
9258 gimple g;
9260 type = TREE_TYPE (exp);
9261 mode = TYPE_MODE (type);
9262 unsignedp = TYPE_UNSIGNED (type);
9264 treeop0 = treeop1 = treeop2 = NULL_TREE;
9265 if (!VL_EXP_CLASS_P (exp))
9266 switch (TREE_CODE_LENGTH (code))
9268 default:
9269 case 3: treeop2 = TREE_OPERAND (exp, 2);
9270 case 2: treeop1 = TREE_OPERAND (exp, 1);
9271 case 1: treeop0 = TREE_OPERAND (exp, 0);
9272 case 0: break;
9274 ops.code = code;
9275 ops.type = type;
9276 ops.op0 = treeop0;
9277 ops.op1 = treeop1;
9278 ops.op2 = treeop2;
9279 ops.location = loc;
9281 ignore = (target == const0_rtx
9282 || ((CONVERT_EXPR_CODE_P (code)
9283 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9284 && TREE_CODE (type) == VOID_TYPE));
9286 /* An operation in what may be a bit-field type needs the
9287 result to be reduced to the precision of the bit-field type,
9288 which is narrower than that of the type's mode. */
9289 reduce_bit_field = (!ignore
9290 && INTEGRAL_TYPE_P (type)
9291 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9293 /* If we are going to ignore this result, we need only do something
9294 if there is a side-effect somewhere in the expression. If there
9295 is, short-circuit the most common cases here. Note that we must
9296 not call expand_expr with anything but const0_rtx in case this
9297 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9299 if (ignore)
9301 if (! TREE_SIDE_EFFECTS (exp))
9302 return const0_rtx;
9304 /* Ensure we reference a volatile object even if value is ignored, but
9305 don't do this if all we are doing is taking its address. */
9306 if (TREE_THIS_VOLATILE (exp)
9307 && TREE_CODE (exp) != FUNCTION_DECL
9308 && mode != VOIDmode && mode != BLKmode
9309 && modifier != EXPAND_CONST_ADDRESS)
9311 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9312 if (MEM_P (temp))
9313 copy_to_reg (temp);
9314 return const0_rtx;
9317 if (TREE_CODE_CLASS (code) == tcc_unary
9318 || code == BIT_FIELD_REF
9319 || code == COMPONENT_REF
9320 || code == INDIRECT_REF)
9321 return expand_expr (treeop0, const0_rtx, VOIDmode,
9322 modifier);
9324 else if (TREE_CODE_CLASS (code) == tcc_binary
9325 || TREE_CODE_CLASS (code) == tcc_comparison
9326 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9328 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9329 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9330 return const0_rtx;
9333 target = 0;
9336 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9337 target = 0;
9339 /* Use subtarget as the target for operand 0 of a binary operation. */
9340 subtarget = get_subtarget (target);
9341 original_target = target;
9343 switch (code)
9345 case LABEL_DECL:
9347 tree function = decl_function_context (exp);
9349 temp = label_rtx (exp);
9350 temp = gen_rtx_LABEL_REF (Pmode, temp);
9352 if (function != current_function_decl
9353 && function != 0)
9354 LABEL_REF_NONLOCAL_P (temp) = 1;
9356 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9357 return temp;
9360 case SSA_NAME:
9361 /* ??? ivopts calls expander, without any preparation from
9362 out-of-ssa. So fake instructions as if this was an access to the
9363 base variable. This unnecessarily allocates a pseudo, see how we can
9364 reuse it, if partition base vars have it set already. */
9365 if (!currently_expanding_to_rtl)
9367 tree var = SSA_NAME_VAR (exp);
9368 if (var && DECL_RTL_SET_P (var))
9369 return DECL_RTL (var);
9370 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9371 LAST_VIRTUAL_REGISTER + 1);
9374 g = get_gimple_for_ssa_name (exp);
9375 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9376 if (g == NULL
9377 && modifier == EXPAND_INITIALIZER
9378 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9379 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9380 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9381 g = SSA_NAME_DEF_STMT (exp);
9382 if (g)
9384 rtx r;
9385 ops.code = gimple_assign_rhs_code (g);
9386 switch (get_gimple_rhs_class (ops.code))
9388 case GIMPLE_TERNARY_RHS:
9389 ops.op2 = gimple_assign_rhs3 (g);
9390 /* Fallthru */
9391 case GIMPLE_BINARY_RHS:
9392 ops.op1 = gimple_assign_rhs2 (g);
9394 /* Try to expand conditional compare. */
9395 if (targetm.gen_ccmp_first)
9397 gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9398 r = expand_ccmp_expr (g);
9399 if (r)
9400 break;
9402 /* Fallthru */
9403 case GIMPLE_UNARY_RHS:
9404 ops.op0 = gimple_assign_rhs1 (g);
9405 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9406 ops.location = gimple_location (g);
9407 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9408 break;
9409 case GIMPLE_SINGLE_RHS:
9411 location_t saved_loc = curr_insn_location ();
9412 set_curr_insn_location (gimple_location (g));
9413 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9414 tmode, modifier, NULL, inner_reference_p);
9415 set_curr_insn_location (saved_loc);
9416 break;
9418 default:
9419 gcc_unreachable ();
9421 if (REG_P (r) && !REG_EXPR (r))
9422 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9423 return r;
9426 ssa_name = exp;
9427 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9428 exp = SSA_NAME_VAR (ssa_name);
9429 goto expand_decl_rtl;
9431 case PARM_DECL:
9432 case VAR_DECL:
9433 /* If a static var's type was incomplete when the decl was written,
9434 but the type is complete now, lay out the decl now. */
9435 if (DECL_SIZE (exp) == 0
9436 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9437 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9438 layout_decl (exp, 0);
9440 /* ... fall through ... */
9442 case FUNCTION_DECL:
9443 case RESULT_DECL:
9444 decl_rtl = DECL_RTL (exp);
9445 expand_decl_rtl:
9446 gcc_assert (decl_rtl);
9447 decl_rtl = copy_rtx (decl_rtl);
9448 /* Record writes to register variables. */
9449 if (modifier == EXPAND_WRITE
9450 && REG_P (decl_rtl)
9451 && HARD_REGISTER_P (decl_rtl))
9452 add_to_hard_reg_set (&crtl->asm_clobbers,
9453 GET_MODE (decl_rtl), REGNO (decl_rtl));
9455 /* Ensure variable marked as used even if it doesn't go through
9456 a parser. If it hasn't been used yet, write out an external
9457 definition. */
9458 TREE_USED (exp) = 1;
9460 /* Show we haven't gotten RTL for this yet. */
9461 temp = 0;
9463 /* Variables inherited from containing functions should have
9464 been lowered by this point. */
9465 context = decl_function_context (exp);
9466 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9467 || context == current_function_decl
9468 || TREE_STATIC (exp)
9469 || DECL_EXTERNAL (exp)
9470 /* ??? C++ creates functions that are not TREE_STATIC. */
9471 || TREE_CODE (exp) == FUNCTION_DECL);
9473 /* This is the case of an array whose size is to be determined
9474 from its initializer, while the initializer is still being parsed.
9475 ??? We aren't parsing while expanding anymore. */
9477 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9478 temp = validize_mem (decl_rtl);
9480 /* If DECL_RTL is memory, we are in the normal case and the
9481 address is not valid, get the address into a register. */
9483 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9485 if (alt_rtl)
9486 *alt_rtl = decl_rtl;
9487 decl_rtl = use_anchored_address (decl_rtl);
9488 if (modifier != EXPAND_CONST_ADDRESS
9489 && modifier != EXPAND_SUM
9490 && !memory_address_addr_space_p (DECL_MODE (exp),
9491 XEXP (decl_rtl, 0),
9492 MEM_ADDR_SPACE (decl_rtl)))
9493 temp = replace_equiv_address (decl_rtl,
9494 copy_rtx (XEXP (decl_rtl, 0)));
9497 /* If we got something, return it. But first, set the alignment
9498 if the address is a register. */
9499 if (temp != 0)
9501 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9502 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9504 return temp;
9507 /* If the mode of DECL_RTL does not match that of the decl,
9508 there are two cases: we are dealing with a BLKmode value
9509 that is returned in a register, or we are dealing with
9510 a promoted value. In the latter case, return a SUBREG
9511 of the wanted mode, but mark it so that we know that it
9512 was already extended. */
9513 if (REG_P (decl_rtl)
9514 && DECL_MODE (exp) != BLKmode
9515 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9517 machine_mode pmode;
9519 /* Get the signedness to be used for this variable. Ensure we get
9520 the same mode we got when the variable was declared. */
9521 if (code == SSA_NAME
9522 && (g = SSA_NAME_DEF_STMT (ssa_name))
9523 && gimple_code (g) == GIMPLE_CALL
9524 && !gimple_call_internal_p (g))
9525 pmode = promote_function_mode (type, mode, &unsignedp,
9526 gimple_call_fntype (g),
9527 2);
9528 else
9529 pmode = promote_decl_mode (exp, &unsignedp);
9530 gcc_assert (GET_MODE (decl_rtl) == pmode);
9532 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9533 SUBREG_PROMOTED_VAR_P (temp) = 1;
9534 SUBREG_PROMOTED_SET (temp, unsignedp);
9535 return temp;
9538 return decl_rtl;
9540 case INTEGER_CST:
9541 /* Given that TYPE_PRECISION (type) is not always equal to
9542 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9543 the former to the latter according to the signedness of the
9544 type. */
9545 temp = immed_wide_int_const (wide_int::from
9546 (exp,
9547 GET_MODE_PRECISION (TYPE_MODE (type)),
9548 TYPE_SIGN (type)),
9549 TYPE_MODE (type));
9550 return temp;
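  /* Worked example (values assumed for illustration): for a constant of a
     1-bit signed bit-field type holding -1, TYPE_PRECISION is 1 while the
     mode might be QImode; the wide_int::from call above sign-extends per
     TYPE_SIGN, so the emitted CONST_INT is the all-ones QImode value
     (bit pattern 0xff).  */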
9552 case VECTOR_CST:
9554 tree tmp = NULL_TREE;
9555 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9556 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9557 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9558 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9559 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9560 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9561 return const_vector_from_tree (exp);
9562 if (GET_MODE_CLASS (mode) == MODE_INT)
9564 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9565 if (type_for_mode)
9566 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9568 if (!tmp)
9570 vec<constructor_elt, va_gc> *v;
9571 unsigned i;
9572 vec_alloc (v, VECTOR_CST_NELTS (exp));
9573 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9574 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9575 tmp = build_constructor (type, v);
9577 return expand_expr (tmp, ignore ? const0_rtx : target,
9578 tmode, modifier);
9581 case CONST_DECL:
9582 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9584 case REAL_CST:
9585 /* If optimized, generate immediate CONST_DOUBLE
9586 which will be turned into memory by reload if necessary.
9588 We used to force a register so that loop.c could see it. But
9589 this does not allow gen_* patterns to perform optimizations with
9590 the constants. It also produces two insns in cases like "x = 1.0;".
9591 On most machines, floating-point constants are not permitted in
9592 many insns, so we'd end up copying it to a register in any case.
9594 Now, we do the copying in expand_binop, if appropriate. */
9595 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9596 TYPE_MODE (TREE_TYPE (exp)));
9598 case FIXED_CST:
9599 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9600 TYPE_MODE (TREE_TYPE (exp)));
9602 case COMPLEX_CST:
9603 /* Handle evaluating a complex constant in a CONCAT target. */
9604 if (original_target && GET_CODE (original_target) == CONCAT)
9606 machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9607 rtx rtarg, itarg;
9609 rtarg = XEXP (original_target, 0);
9610 itarg = XEXP (original_target, 1);
9612 /* Move the real and imaginary parts separately. */
9613 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9614 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9616 if (op0 != rtarg)
9617 emit_move_insn (rtarg, op0);
9618 if (op1 != itarg)
9619 emit_move_insn (itarg, op1);
9621 return original_target;
9624 /* ... fall through ... */
9626 case STRING_CST:
9627 temp = expand_expr_constant (exp, 1, modifier);
9629 /* temp contains a constant address.
9630 On RISC machines where a constant address isn't valid,
9631 make some insns to get that address into a register. */
9632 if (modifier != EXPAND_CONST_ADDRESS
9633 && modifier != EXPAND_INITIALIZER
9634 && modifier != EXPAND_SUM
9635 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9636 MEM_ADDR_SPACE (temp)))
9637 return replace_equiv_address (temp,
9638 copy_rtx (XEXP (temp, 0)));
9639 return temp;
9641 case SAVE_EXPR:
9643 tree val = treeop0;
9644 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9645 inner_reference_p);
9647 if (!SAVE_EXPR_RESOLVED_P (exp))
9649 /* We can indeed still hit this case, typically via builtin
9650 expanders calling save_expr immediately before expanding
9651 something. Assume this means that we only have to deal
9652 with non-BLKmode values. */
9653 gcc_assert (GET_MODE (ret) != BLKmode);
9655 val = build_decl (curr_insn_location (),
9656 VAR_DECL, NULL, TREE_TYPE (exp));
9657 DECL_ARTIFICIAL (val) = 1;
9658 DECL_IGNORED_P (val) = 1;
9659 treeop0 = val;
9660 TREE_OPERAND (exp, 0) = treeop0;
9661 SAVE_EXPR_RESOLVED_P (exp) = 1;
9663 if (!CONSTANT_P (ret))
9664 ret = copy_to_reg (ret);
9665 SET_DECL_RTL (val, ret);
9668 return ret;
9672 case CONSTRUCTOR:
9673 /* If we don't need the result, just ensure we evaluate any
9674 subexpressions. */
9675 if (ignore)
9677 unsigned HOST_WIDE_INT idx;
9678 tree value;
9680 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9681 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9683 return const0_rtx;
9686 return expand_constructor (exp, target, modifier, false);
9688 case TARGET_MEM_REF:
9690 addr_space_t as
9691 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9692 enum insn_code icode;
9693 unsigned int align;
9695 op0 = addr_for_mem_ref (exp, as, true);
9696 op0 = memory_address_addr_space (mode, op0, as);
9697 temp = gen_rtx_MEM (mode, op0);
9698 set_mem_attributes (temp, exp, 0);
9699 set_mem_addr_space (temp, as);
9700 align = get_object_alignment (exp);
9701 if (modifier != EXPAND_WRITE
9702 && modifier != EXPAND_MEMORY
9703 && mode != BLKmode
9704 && align < GET_MODE_ALIGNMENT (mode)
9705 /* If the target does not have special handling for unaligned
9706 loads of mode then it can use regular moves for them. */
9707 && ((icode = optab_handler (movmisalign_optab, mode))
9708 != CODE_FOR_nothing))
9710 struct expand_operand ops[2];
9712 /* We've already validated the memory, and we're creating a
9713 new pseudo destination. The predicates really can't fail,
9714 nor can the generator. */
9715 create_output_operand (&ops[0], NULL_RTX, mode);
9716 create_fixed_operand (&ops[1], temp);
9717 expand_insn (icode, 2, ops);
9718 temp = ops[0].value;
9720 return temp;
9723 case MEM_REF:
9725 addr_space_t as
9726 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9727 machine_mode address_mode;
9728 tree base = TREE_OPERAND (exp, 0);
9729 gimple def_stmt;
9730 enum insn_code icode;
9731 unsigned align;
9732 /* Handle expansion of non-aliased memory with non-BLKmode. That
9733 might end up in a register. */
9734 if (mem_ref_refers_to_non_mem_p (exp))
9736 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9737 base = TREE_OPERAND (base, 0);
9738 if (offset == 0
9739 && tree_fits_uhwi_p (TYPE_SIZE (type))
9740 && (GET_MODE_BITSIZE (DECL_MODE (base))
9741 == tree_to_uhwi (TYPE_SIZE (type))))
9742 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9743 target, tmode, modifier);
9744 if (TYPE_MODE (type) == BLKmode)
9746 temp = assign_stack_temp (DECL_MODE (base),
9747 GET_MODE_SIZE (DECL_MODE (base)));
9748 store_expr (base, temp, 0, false);
9749 temp = adjust_address (temp, BLKmode, offset);
9750 set_mem_size (temp, int_size_in_bytes (type));
9751 return temp;
9753 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9754 bitsize_int (offset * BITS_PER_UNIT));
9755 return expand_expr (exp, target, tmode, modifier);
9757 address_mode = targetm.addr_space.address_mode (as);
9758 base = TREE_OPERAND (exp, 0);
9759 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9761 tree mask = gimple_assign_rhs2 (def_stmt);
9762 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9763 gimple_assign_rhs1 (def_stmt), mask);
9764 TREE_OPERAND (exp, 0) = base;
9766 align = get_object_alignment (exp);
9767 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9768 op0 = memory_address_addr_space (mode, op0, as);
9769 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9771 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9772 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9773 op0 = memory_address_addr_space (mode, op0, as);
9775 temp = gen_rtx_MEM (mode, op0);
9776 set_mem_attributes (temp, exp, 0);
9777 set_mem_addr_space (temp, as);
9778 if (TREE_THIS_VOLATILE (exp))
9779 MEM_VOLATILE_P (temp) = 1;
9780 if (modifier != EXPAND_WRITE
9781 && modifier != EXPAND_MEMORY
9782 && !inner_reference_p
9783 && mode != BLKmode
9784 && align < GET_MODE_ALIGNMENT (mode))
9786 if ((icode = optab_handler (movmisalign_optab, mode))
9787 != CODE_FOR_nothing)
9789 struct expand_operand ops[2];
9791 /* We've already validated the memory, and we're creating a
9792 new pseudo destination. The predicates really can't fail,
9793 nor can the generator. */
9794 create_output_operand (&ops[0], NULL_RTX, mode);
9795 create_fixed_operand (&ops[1], temp);
9796 expand_insn (icode, 2, ops);
9797 temp = ops[0].value;
9799 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9800 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9801 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9802 (modifier == EXPAND_STACK_PARM
9803 ? NULL_RTX : target),
9804 mode, mode);
9806 return temp;
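  /* Illustrative source that typically reaches the misalignment handling
     above (a sketch, names hypothetical).  The read of p->i below is a
     4-byte access known to be only 1-byte aligned:

	struct __attribute__ ((packed)) pk { char c; int i; };

	static int
	load_packed_int (struct pk *p)
	{
	  return p->i;
	}

     Depending on the target, the load goes through movmisalign<m>,
     through extract_bit_field, or is a plain move when unaligned access
     is cheap.  */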
9809 case ARRAY_REF:
9812 tree array = treeop0;
9813 tree index = treeop1;
9814 tree init;
9816 /* Fold an expression like: "foo"[2].
9817 This is not done in fold so it won't happen inside &.
9818 Don't fold if this is for wide characters since it's too
9819 difficult to do correctly and this is a very rare case. */
9821 if (modifier != EXPAND_CONST_ADDRESS
9822 && modifier != EXPAND_INITIALIZER
9823 && modifier != EXPAND_MEMORY)
9825 tree t = fold_read_from_constant_string (exp);
9827 if (t)
9828 return expand_expr (t, target, tmode, modifier);
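  /* For example (hypothetical snippet), on this path "foo"[2] folds
     straight to the character constant 'o':

	static char third_char (void) { return "foo"[2]; }  */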
9831 /* If this is a constant index into a constant array,
9832 just get the value from the array. Handle both the cases when
9833 we have an explicit constructor and when our operand is a variable
9834 that was declared const. */
9836 if (modifier != EXPAND_CONST_ADDRESS
9837 && modifier != EXPAND_INITIALIZER
9838 && modifier != EXPAND_MEMORY
9839 && TREE_CODE (array) == CONSTRUCTOR
9840 && ! TREE_SIDE_EFFECTS (array)
9841 && TREE_CODE (index) == INTEGER_CST)
9843 unsigned HOST_WIDE_INT ix;
9844 tree field, value;
9846 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9847 field, value)
9848 if (tree_int_cst_equal (field, index))
9850 if (!TREE_SIDE_EFFECTS (value))
9851 return expand_expr (fold (value), target, tmode, modifier);
9852 break;
9856 else if (optimize >= 1
9857 && modifier != EXPAND_CONST_ADDRESS
9858 && modifier != EXPAND_INITIALIZER
9859 && modifier != EXPAND_MEMORY
9860 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9861 && TREE_CODE (index) == INTEGER_CST
9862 && (TREE_CODE (array) == VAR_DECL
9863 || TREE_CODE (array) == CONST_DECL)
9864 && (init = ctor_for_folding (array)) != error_mark_node)
9866 if (init == NULL_TREE)
9868 tree value = build_zero_cst (type);
9869 if (TREE_CODE (value) == CONSTRUCTOR)
9871 /* If VALUE is a CONSTRUCTOR, this optimization is only
9872 useful if this doesn't store the CONSTRUCTOR into
9873 memory. If it does, it is more efficient to just
9874 load the data from the array directly. */
9875 rtx ret = expand_constructor (value, target,
9876 modifier, true);
9877 if (ret == NULL_RTX)
9878 value = NULL_TREE;
9881 if (value)
9882 return expand_expr (value, target, tmode, modifier);
9884 else if (TREE_CODE (init) == CONSTRUCTOR)
9886 unsigned HOST_WIDE_INT ix;
9887 tree field, value;
9889 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9890 field, value)
9891 if (tree_int_cst_equal (field, index))
9893 if (TREE_SIDE_EFFECTS (value))
9894 break;
9896 if (TREE_CODE (value) == CONSTRUCTOR)
9898 /* If VALUE is a CONSTRUCTOR, this
9899 optimization is only useful if
9900 this doesn't store the CONSTRUCTOR
9901 into memory. If it does, it is more
9902 efficient to just load the data from
9903 the array directly. */
9904 rtx ret = expand_constructor (value, target,
9905 modifier, true);
9906 if (ret == NULL_RTX)
9907 break;
9910 return
9911 expand_expr (fold (value), target, tmode, modifier);
9914 else if (TREE_CODE (init) == STRING_CST)
9916 tree low_bound = array_ref_low_bound (exp);
9917 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
9919 /* Optimize the special case of a zero lower bound.
9921 We convert the lower bound to sizetype to avoid problems
9922 with constant folding. E.g. suppose the lower bound is
9923 1 and its mode is QI. Without the conversion
9924 (ARRAY + (INDEX - (unsigned char)1))
9925 becomes
9926 (ARRAY + (-(unsigned char)1) + INDEX)
9927 which becomes
9928 (ARRAY + 255 + INDEX). Oops! */
9929 if (!integer_zerop (low_bound))
9930 index1 = size_diffop_loc (loc, index1,
9931 fold_convert_loc (loc, sizetype,
9932 low_bound));
9934 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
9936 tree type = TREE_TYPE (TREE_TYPE (init));
9937 machine_mode mode = TYPE_MODE (type);
9939 if (GET_MODE_CLASS (mode) == MODE_INT
9940 && GET_MODE_SIZE (mode) == 1)
9941 return gen_int_mode (TREE_STRING_POINTER (init)
9942 [TREE_INT_CST_LOW (index1)],
9943 mode);
9948 goto normal_inner_ref;
9950 case COMPONENT_REF:
9951 /* If the operand is a CONSTRUCTOR, we can just extract the
9952 appropriate field if it is present. */
9953 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9955 unsigned HOST_WIDE_INT idx;
9956 tree field, value;
9958 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9959 idx, field, value)
9960 if (field == treeop1
9961 /* We can normally use the value of the field in the
9962 CONSTRUCTOR. However, if this is a bitfield in
9963 an integral mode that we can fit in a HOST_WIDE_INT,
9964 we must mask only the number of bits in the bitfield,
9965 since this is done implicitly by the constructor. If
9966 the bitfield does not meet either of those conditions,
9967 we can't do this optimization. */
9968 && (! DECL_BIT_FIELD (field)
9969 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9970 && (GET_MODE_PRECISION (DECL_MODE (field))
9971 <= HOST_BITS_PER_WIDE_INT))))
9973 if (DECL_BIT_FIELD (field)
9974 && modifier == EXPAND_STACK_PARM)
9975 target = 0;
9976 op0 = expand_expr (value, target, tmode, modifier);
9977 if (DECL_BIT_FIELD (field))
9979 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9980 machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9982 if (TYPE_UNSIGNED (TREE_TYPE (field)))
9984 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
9985 imode);
9986 op0 = expand_and (imode, op0, op1, target);
9988 else
9990 int count = GET_MODE_PRECISION (imode) - bitsize;
9992 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9993 target, 0);
9994 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9995 target, 0);
9999 return op0;
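  /* Sketch of a case the CONSTRUCTOR shortcut above may handle (a
     compound-literal field access; struct and field names hypothetical):

	struct bf { int f : 3; };

	static int
	read_field (void)
	{
	  return ((struct bf) { .f = -2 }).f;
	}

     For the signed 3-bit field the stored value is sign-extended by the
     left/right shift pair rather than re-read from memory; an unsigned
     field would instead be masked with (1 << bitsize) - 1.  */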
10002 goto normal_inner_ref;
10004 case BIT_FIELD_REF:
10005 case ARRAY_RANGE_REF:
10006 normal_inner_ref:
10008 machine_mode mode1, mode2;
10009 HOST_WIDE_INT bitsize, bitpos;
10010 tree offset;
10011 int volatilep = 0, must_force_mem;
10012 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
10013 &mode1, &unsignedp, &volatilep, true);
10014 rtx orig_op0, memloc;
10015 bool clear_mem_expr = false;
10017 /* If we got back the original object, something is wrong. Perhaps
10018 we are evaluating an expression too early. In any event, don't
10019 infinitely recurse. */
10020 gcc_assert (tem != exp);
10022 /* If TEM's type is a union of variable size, pass TARGET to the inner
10023 computation, since it will need a temporary and TARGET is known
10024 to have to do. This occurs in unchecked conversion in Ada. */
10025 orig_op0 = op0
10026 = expand_expr_real (tem,
10027 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10028 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10029 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10030 != INTEGER_CST)
10031 && modifier != EXPAND_STACK_PARM
10032 ? target : NULL_RTX),
10033 VOIDmode,
10034 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10035 NULL, true);
10037 /* If the field has a mode, we want to access it in the
10038 field's mode, not the computed mode.
10039 If a MEM has VOIDmode (external with incomplete type),
10040 use BLKmode for it instead. */
10041 if (MEM_P (op0))
10043 if (mode1 != VOIDmode)
10044 op0 = adjust_address (op0, mode1, 0);
10045 else if (GET_MODE (op0) == VOIDmode)
10046 op0 = adjust_address (op0, BLKmode, 0);
10049 mode2
10050 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10052 /* If we have either an offset, a BLKmode result, or a reference
10053 outside the underlying object, we must force it to memory.
10054 Such a case can occur in Ada if we have unchecked conversion
10055 of an expression from a scalar type to an aggregate type or
10056 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10057 passed a partially uninitialized object or a view-conversion
10058 to a larger size. */
10059 must_force_mem = (offset
10060 || mode1 == BLKmode
10061 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10063 /* Handle CONCAT first. */
10064 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10066 if (bitpos == 0
10067 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10068 return op0;
10069 if (bitpos == 0
10070 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10071 && bitsize)
10073 op0 = XEXP (op0, 0);
10074 mode2 = GET_MODE (op0);
10076 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10077 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10078 && bitpos
10079 && bitsize)
10081 op0 = XEXP (op0, 1);
10082 bitpos = 0;
10083 mode2 = GET_MODE (op0);
10085 else
10086 /* Otherwise force into memory. */
10087 must_force_mem = 1;
10090 /* If this is a constant, put it in a register if it is a legitimate
10091 constant and we don't need a memory reference. */
10092 if (CONSTANT_P (op0)
10093 && mode2 != BLKmode
10094 && targetm.legitimate_constant_p (mode2, op0)
10095 && !must_force_mem)
10096 op0 = force_reg (mode2, op0);
10098 /* Otherwise, if this is a constant, try to force it to the constant
10099 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10100 is a legitimate constant. */
10101 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10102 op0 = validize_mem (memloc);
10104 /* Otherwise, if this is a constant or the object is not in memory
10105 and needs to be, put it there. */
10106 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10108 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10109 emit_move_insn (memloc, op0);
10110 op0 = memloc;
10111 clear_mem_expr = true;
10114 if (offset)
10116 machine_mode address_mode;
10117 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10118 EXPAND_SUM);
10120 gcc_assert (MEM_P (op0));
10122 address_mode = get_address_mode (op0);
10123 if (GET_MODE (offset_rtx) != address_mode)
10125 /* We cannot be sure that the RTL in offset_rtx is valid outside
10126 of a memory address context, so force it into a register
10127 before attempting to convert it to the desired mode. */
10128 offset_rtx = force_operand (offset_rtx, NULL_RTX);
10129 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10132 /* See the comment in expand_assignment for the rationale. */
10133 if (mode1 != VOIDmode
10134 && bitpos != 0
10135 && bitsize > 0
10136 && (bitpos % bitsize) == 0
10137 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10138 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10140 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10141 bitpos = 0;
10144 op0 = offset_address (op0, offset_rtx,
10145 highest_pow2_factor (offset));
10148 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10149 record its alignment as BIGGEST_ALIGNMENT. */
10150 if (MEM_P (op0) && bitpos == 0 && offset != 0
10151 && is_aligning_offset (offset, tem))
10152 set_mem_align (op0, BIGGEST_ALIGNMENT);
10154 /* Don't forget about volatility even if this is a bitfield. */
10155 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10157 if (op0 == orig_op0)
10158 op0 = copy_rtx (op0);
10160 MEM_VOLATILE_P (op0) = 1;
10163 /* In cases where an aligned union has an unaligned object
10164 as a field, we might be extracting a BLKmode value from
10165 an integer-mode (e.g., SImode) object. Handle this case
10166 by doing the extract into an object as wide as the field
10167 (which we know to be the width of a basic mode), then
10168 storing into memory, and changing the mode to BLKmode. */
10169 if (mode1 == VOIDmode
10170 || REG_P (op0) || GET_CODE (op0) == SUBREG
10171 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10172 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10173 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10174 && modifier != EXPAND_CONST_ADDRESS
10175 && modifier != EXPAND_INITIALIZER
10176 && modifier != EXPAND_MEMORY)
10177 /* If the bitfield is volatile and the bitsize
10178 is narrower than the access size of the bitfield,
10179 we need to extract bitfields from the access. */
10180 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10181 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10182 && mode1 != BLKmode
10183 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10184 /* If the field isn't aligned enough to fetch as a memref,
10185 fetch it as a bit field. */
10186 || (mode1 != BLKmode
10187 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10188 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10189 || (MEM_P (op0)
10190 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10191 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10192 && modifier != EXPAND_MEMORY
10193 && ((modifier == EXPAND_CONST_ADDRESS
10194 || modifier == EXPAND_INITIALIZER)
10195 ? STRICT_ALIGNMENT
10196 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10197 || (bitpos % BITS_PER_UNIT != 0)))
10198 /* If the type and the field are a constant size and the
10199 size of the type isn't the same size as the bitfield,
10200 we must use bitfield operations. */
10201 || (bitsize >= 0
10202 && TYPE_SIZE (TREE_TYPE (exp))
10203 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10204 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10205 bitsize)))
10207 machine_mode ext_mode = mode;
10209 if (ext_mode == BLKmode
10210 && ! (target != 0 && MEM_P (op0)
10211 && MEM_P (target)
10212 && bitpos % BITS_PER_UNIT == 0))
10213 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10215 if (ext_mode == BLKmode)
10217 if (target == 0)
10218 target = assign_temp (type, 1, 1);
10220 /* ??? Unlike the similar test a few lines below, this one is
10221 very likely obsolete. */
10222 if (bitsize == 0)
10223 return target;
10225 /* In this case, BITPOS must start at a byte boundary and
10226 TARGET, if specified, must be a MEM. */
10227 gcc_assert (MEM_P (op0)
10228 && (!target || MEM_P (target))
10229 && !(bitpos % BITS_PER_UNIT));
10231 emit_block_move (target,
10232 adjust_address (op0, VOIDmode,
10233 bitpos / BITS_PER_UNIT),
10234 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10235 / BITS_PER_UNIT),
10236 (modifier == EXPAND_STACK_PARM
10237 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10239 return target;
10242 /* If we have nothing to extract, the result will be 0 for targets
10243 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10244 return 0 for the sake of consistency, as reading a zero-sized
10245 bitfield is valid in Ada and the value is fully specified. */
10246 if (bitsize == 0)
10247 return const0_rtx;
10249 op0 = validize_mem (op0);
10251 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10252 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10254 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10255 (modifier == EXPAND_STACK_PARM
10256 ? NULL_RTX : target),
10257 ext_mode, ext_mode);
10259 /* If the result is a record type and BITSIZE is narrower than
10260 the mode of OP0, an integral mode, and this is a big endian
10261 machine, we must put the field into the high-order bits. */
10262 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10263 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10264 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10265 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10266 GET_MODE_BITSIZE (GET_MODE (op0))
10267 - bitsize, op0, 1);
10269 /* If the result type is BLKmode, store the data into a temporary
10270 of the appropriate type, but with the mode corresponding to the
10271 mode for the data we have (op0's mode). */
10272 if (mode == BLKmode)
10274 rtx new_rtx
10275 = assign_stack_temp_for_type (ext_mode,
10276 GET_MODE_BITSIZE (ext_mode),
10277 type);
10278 emit_move_insn (new_rtx, op0);
10279 op0 = copy_rtx (new_rtx);
10280 PUT_MODE (op0, BLKmode);
10283 return op0;
10286 /* If the result is BLKmode, use that to access the object
10287 now as well. */
10288 if (mode == BLKmode)
10289 mode1 = BLKmode;
10291 /* Get a reference to just this component. */
10292 if (modifier == EXPAND_CONST_ADDRESS
10293 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10294 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10295 else
10296 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10298 if (op0 == orig_op0)
10299 op0 = copy_rtx (op0);
10301 set_mem_attributes (op0, exp, 0);
10303 if (REG_P (XEXP (op0, 0)))
10304 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10306 /* If op0 is a temporary because the original expression was forced
10307 to memory, clear MEM_EXPR so that the original expression cannot
10308 be marked as addressable through MEM_EXPR of the temporary. */
10309 if (clear_mem_expr)
10310 set_mem_expr (op0, NULL_TREE);
10312 MEM_VOLATILE_P (op0) |= volatilep;
10313 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10314 || modifier == EXPAND_CONST_ADDRESS
10315 || modifier == EXPAND_INITIALIZER)
10316 return op0;
10318 if (target == 0)
10319 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10321 convert_move (target, op0, unsignedp);
10322 return target;
10325 case OBJ_TYPE_REF:
10326 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10328 case CALL_EXPR:
10329 /* All valid uses of __builtin_va_arg_pack () are removed during
10330 inlining. */
10331 if (CALL_EXPR_VA_ARG_PACK (exp))
10332 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10334 tree fndecl = get_callee_fndecl (exp), attr;
10336 if (fndecl
10337 && (attr = lookup_attribute ("error",
10338 DECL_ATTRIBUTES (fndecl))) != NULL)
10339 error ("%Kcall to %qs declared with attribute error: %s",
10340 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10341 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10342 if (fndecl
10343 && (attr = lookup_attribute ("warning",
10344 DECL_ATTRIBUTES (fndecl))) != NULL)
10345 warning_at (tree_nonartificial_location (exp),
10346 0, "%Kcall to %qs declared with attribute warning: %s",
10347 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10348 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10350 /* Check for a built-in function. */
10351 if (fndecl && DECL_BUILT_IN (fndecl))
10353 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10354 if (CALL_WITH_BOUNDS_P (exp))
10355 return expand_builtin_with_bounds (exp, target, subtarget,
10356 tmode, ignore);
10357 else
10358 return expand_builtin (exp, target, subtarget, tmode, ignore);
10361 return expand_call (exp, target, ignore);
10363 case VIEW_CONVERT_EXPR:
10364 op0 = NULL_RTX;
10366 /* If we are converting to BLKmode, try to avoid an intermediate
10367 temporary by fetching an inner memory reference. */
10368 if (mode == BLKmode
10369 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10370 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10371 && handled_component_p (treeop0))
10373 machine_mode mode1;
10374 HOST_WIDE_INT bitsize, bitpos;
10375 tree offset;
10376 int unsignedp;
10377 int volatilep = 0;
10378 tree tem
10379 = get_inner_reference (treeop0, &bitsize, &bitpos,
10380 &offset, &mode1, &unsignedp, &volatilep,
10381 true);
10382 rtx orig_op0;
10384 /* ??? We should work harder and deal with non-zero offsets. */
10385 if (!offset
10386 && (bitpos % BITS_PER_UNIT) == 0
10387 && bitsize >= 0
10388 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10390 /* See the normal_inner_ref case for the rationale. */
10391 orig_op0
10392 = expand_expr_real (tem,
10393 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10394 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10395 != INTEGER_CST)
10396 && modifier != EXPAND_STACK_PARM
10397 ? target : NULL_RTX),
10398 VOIDmode,
10399 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10400 NULL, true);
10402 if (MEM_P (orig_op0))
10404 op0 = orig_op0;
10406 /* Get a reference to just this component. */
10407 if (modifier == EXPAND_CONST_ADDRESS
10408 || modifier == EXPAND_SUM
10409 || modifier == EXPAND_INITIALIZER)
10410 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10411 else
10412 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10414 if (op0 == orig_op0)
10415 op0 = copy_rtx (op0);
10417 set_mem_attributes (op0, treeop0, 0);
10418 if (REG_P (XEXP (op0, 0)))
10419 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10421 MEM_VOLATILE_P (op0) |= volatilep;
10426 if (!op0)
10427 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10428 NULL, inner_reference_p);
10430 /* If the input and output modes are both the same, we are done. */
10431 if (mode == GET_MODE (op0))
10433 /* If neither mode is BLKmode, and both modes are the same size
10434 then we can use gen_lowpart. */
10435 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10436 && (GET_MODE_PRECISION (mode)
10437 == GET_MODE_PRECISION (GET_MODE (op0)))
10438 && !COMPLEX_MODE_P (GET_MODE (op0)))
10440 if (GET_CODE (op0) == SUBREG)
10441 op0 = force_reg (GET_MODE (op0), op0);
10442 temp = gen_lowpart_common (mode, op0);
10443 if (temp)
10444 op0 = temp;
10445 else
10447 if (!REG_P (op0) && !MEM_P (op0))
10448 op0 = force_reg (GET_MODE (op0), op0);
10449 op0 = gen_lowpart (mode, op0);
10452 /* If both types are integral, convert from one mode to the other. */
10453 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10454 op0 = convert_modes (mode, GET_MODE (op0), op0,
10455 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10456 /* If the output type is a bit-field type, do an extraction. */
10457 else if (reduce_bit_field)
10458 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10459 TYPE_UNSIGNED (type), NULL_RTX,
10460 mode, mode);
10461 /* As a last resort, spill op0 to memory, and reload it in a
10462 different mode. */
10463 else if (!MEM_P (op0))
10465 /* If the operand is not a MEM, force it into memory. Since we
10466 are going to be changing the mode of the MEM, don't call
10467 force_const_mem for constants because we don't allow pool
10468 constants to change mode. */
10469 tree inner_type = TREE_TYPE (treeop0);
10471 gcc_assert (!TREE_ADDRESSABLE (exp));
10473 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10474 target
10475 = assign_stack_temp_for_type
10476 (TYPE_MODE (inner_type),
10477 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10479 emit_move_insn (target, op0);
10480 op0 = target;
10483 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10484 output type is such that the operand is known to be aligned, indicate
10485 that it is. Otherwise, we need only be concerned about alignment for
10486 non-BLKmode results. */
10487 if (MEM_P (op0))
10489 enum insn_code icode;
10491 if (TYPE_ALIGN_OK (type))
10493 /* ??? Copying the MEM without substantially changing it might
10494 run afoul of the code handling volatile memory references in
10495 store_expr, which assumes that TARGET is returned unmodified
10496 if it has been used. */
10497 op0 = copy_rtx (op0);
10498 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10500 else if (modifier != EXPAND_WRITE
10501 && modifier != EXPAND_MEMORY
10502 && !inner_reference_p
10503 && mode != BLKmode
10504 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10506 /* If the target does have special handling for unaligned
10507 loads of mode then use them. */
10508 if ((icode = optab_handler (movmisalign_optab, mode))
10509 != CODE_FOR_nothing)
10511 rtx reg;
10513 op0 = adjust_address (op0, mode, 0);
10514 /* We've already validated the memory, and we're creating a
10515 new pseudo destination. The predicates really can't
10516 fail. */
10517 reg = gen_reg_rtx (mode);
10519 /* Nor can the insn generator. */
10520 rtx_insn *insn = GEN_FCN (icode) (reg, op0);
10521 emit_insn (insn);
10522 return reg;
10524 else if (STRICT_ALIGNMENT)
10526 tree inner_type = TREE_TYPE (treeop0);
10527 HOST_WIDE_INT temp_size
10528 = MAX (int_size_in_bytes (inner_type),
10529 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10530 rtx new_rtx
10531 = assign_stack_temp_for_type (mode, temp_size, type);
10532 rtx new_with_op0_mode
10533 = adjust_address (new_rtx, GET_MODE (op0), 0);
10535 gcc_assert (!TREE_ADDRESSABLE (exp));
10537 if (GET_MODE (op0) == BLKmode)
10538 emit_block_move (new_with_op0_mode, op0,
10539 GEN_INT (GET_MODE_SIZE (mode)),
10540 (modifier == EXPAND_STACK_PARM
10541 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10542 else
10543 emit_move_insn (new_with_op0_mode, op0);
10545 op0 = new_rtx;
10549 op0 = adjust_address (op0, mode, 0);
10552 return op0;
10554 case MODIFY_EXPR:
10556 tree lhs = treeop0;
10557 tree rhs = treeop1;
10558 gcc_assert (ignore);
10560 /* Check for |= or &= of a bitfield of size one into another bitfield
10561 of size 1. In this case, (unless we need the result of the
10562 assignment) we can do this more efficiently with a
10563 test followed by an assignment, if necessary.
10565 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10566 things change so we do, this code should be enhanced to
10567 support it. */
10568 if (TREE_CODE (lhs) == COMPONENT_REF
10569 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10570 || TREE_CODE (rhs) == BIT_AND_EXPR)
10571 && TREE_OPERAND (rhs, 0) == lhs
10572 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10573 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10574 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10576 rtx_code_label *label = gen_label_rtx ();
10577 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10578 do_jump (TREE_OPERAND (rhs, 1),
10579 value ? label : 0,
10580 value ? 0 : label, -1);
10581 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10582 false);
10583 do_pending_stack_adjust ();
10584 emit_label (label);
10585 return const0_rtx;
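  /* Sketch of the special case above (1-bit bit-fields, hypothetical
     names):

	struct flags { unsigned a : 1, b : 1; };

	static void
	or_bit (struct flags *p, struct flags *q)
	{
	  p->a |= q->b;
	}

     is emitted as the equivalent of "if (q->b) p->a = 1;", i.e. a test
     and a conditional store instead of a read-modify-write; the BIT_AND
     form is handled symmetrically as "if (!q->b) p->a = 0;".  */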
10588 expand_assignment (lhs, rhs, false);
10589 return const0_rtx;
10592 case ADDR_EXPR:
10593 return expand_expr_addr_expr (exp, target, tmode, modifier);
10595 case REALPART_EXPR:
10596 op0 = expand_normal (treeop0);
10597 return read_complex_part (op0, false);
10599 case IMAGPART_EXPR:
10600 op0 = expand_normal (treeop0);
10601 return read_complex_part (op0, true);
10603 case RETURN_EXPR:
10604 case LABEL_EXPR:
10605 case GOTO_EXPR:
10606 case SWITCH_EXPR:
10607 case ASM_EXPR:
10608 /* Expanded in cfgexpand.c. */
10609 gcc_unreachable ();
10611 case TRY_CATCH_EXPR:
10612 case CATCH_EXPR:
10613 case EH_FILTER_EXPR:
10614 case TRY_FINALLY_EXPR:
10615 /* Lowered by tree-eh.c. */
10616 gcc_unreachable ();
10618 case WITH_CLEANUP_EXPR:
10619 case CLEANUP_POINT_EXPR:
10620 case TARGET_EXPR:
10621 case CASE_LABEL_EXPR:
10622 case VA_ARG_EXPR:
10623 case BIND_EXPR:
10624 case INIT_EXPR:
10625 case CONJ_EXPR:
10626 case COMPOUND_EXPR:
10627 case PREINCREMENT_EXPR:
10628 case PREDECREMENT_EXPR:
10629 case POSTINCREMENT_EXPR:
10630 case POSTDECREMENT_EXPR:
10631 case LOOP_EXPR:
10632 case EXIT_EXPR:
10633 case COMPOUND_LITERAL_EXPR:
10634 /* Lowered by gimplify.c. */
10635 gcc_unreachable ();
10637 case FDESC_EXPR:
10638 /* Function descriptors are not valid except for as
10639 initialization constants, and should not be expanded. */
10640 gcc_unreachable ();
10642 case WITH_SIZE_EXPR:
10643 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10644 have pulled out the size to use in whatever context it needed. */
10645 return expand_expr_real (treeop0, original_target, tmode,
10646 modifier, alt_rtl, inner_reference_p);
10648 default:
10649 return expand_expr_real_2 (&ops, target, tmode, modifier);
10653 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10654 signedness of TYPE), possibly returning the result in TARGET. */
10655 static rtx
10656 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10658 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10659 if (target && GET_MODE (target) != GET_MODE (exp))
10660 target = 0;
10661 /* For constant values, reduce using build_int_cst_type. */
10662 if (CONST_INT_P (exp))
10664 HOST_WIDE_INT value = INTVAL (exp);
10665 tree t = build_int_cst_type (type, value);
10666 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10668 else if (TYPE_UNSIGNED (type))
10670 machine_mode mode = GET_MODE (exp);
10671 rtx mask = immed_wide_int_const
10672 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10673 return expand_and (mode, exp, mask, target);
10675 else
10677 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10678 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10679 exp, count, target, 0);
10680 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10681 exp, count, target, 0);
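/* A source-level sketch of the two reductions above for a 3-bit field
   held in a 32-bit integer mode (hypothetical helpers; the signed
   variant relies on an arithmetic right shift, as the RTL expansion
   does):

      static unsigned int
      reduce_unsigned_3 (unsigned int x)
      {
	return x & 7u;
      }

      static int
      reduce_signed_3 (int x)
      {
	return (int) ((unsigned int) x << 29) >> 29;
      }
*/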
10685 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10686 when applied to the address of EXP produces an address known to be
10687 aligned more than BIGGEST_ALIGNMENT. */
10689 static int
10690 is_aligning_offset (const_tree offset, const_tree exp)
10692 /* Strip off any conversions. */
10693 while (CONVERT_EXPR_P (offset))
10694 offset = TREE_OPERAND (offset, 0);
10696 /* We must now have a BIT_AND_EXPR with a constant that is one less than a
10697 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10698 if (TREE_CODE (offset) != BIT_AND_EXPR
10699 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10700 || compare_tree_int (TREE_OPERAND (offset, 1),
10701 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10702 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10703 return 0;
10705 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10706 It must be NEGATE_EXPR. Then strip any more conversions. */
10707 offset = TREE_OPERAND (offset, 0);
10708 while (CONVERT_EXPR_P (offset))
10709 offset = TREE_OPERAND (offset, 0);
10711 if (TREE_CODE (offset) != NEGATE_EXPR)
10712 return 0;
10714 offset = TREE_OPERAND (offset, 0);
10715 while (CONVERT_EXPR_P (offset))
10716 offset = TREE_OPERAND (offset, 0);
10718 /* This must now be the address of EXP. */
10719 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
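/* Roughly the kind of source whose offset has the shape recognized
   above (ALIGN and the names are hypothetical; ALIGN must be a power of
   two such that ALIGN - 1 exceeds BIGGEST_ALIGNMENT in bytes):

      #include <stdint.h>
      #define ALIGN 512
      static char buf[2 * ALIGN];

      static char *
      aligned_start (void)
      {
	return buf + ((-(uintptr_t) buf) & (ALIGN - 1));
      }
*/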
10722 /* Return the tree node if ARG corresponds to a string constant or zero
10723 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10724 in bytes within the string that ARG is accessing. The type of the
10725 offset will be `sizetype'. */
10727 tree
10728 string_constant (tree arg, tree *ptr_offset)
10730 tree array, offset, lower_bound;
10731 STRIP_NOPS (arg);
10733 if (TREE_CODE (arg) == ADDR_EXPR)
10735 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10737 *ptr_offset = size_zero_node;
10738 return TREE_OPERAND (arg, 0);
10740 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10742 array = TREE_OPERAND (arg, 0);
10743 offset = size_zero_node;
10745 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10747 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10748 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10749 if (TREE_CODE (array) != STRING_CST
10750 && TREE_CODE (array) != VAR_DECL)
10751 return 0;
10753 /* Check if the array has a nonzero lower bound. */
10754 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10755 if (!integer_zerop (lower_bound))
10757 /* If the offset and base aren't both constants, return 0. */
10758 if (TREE_CODE (lower_bound) != INTEGER_CST)
10759 return 0;
10760 if (TREE_CODE (offset) != INTEGER_CST)
10761 return 0;
10762 /* Adjust offset by the lower bound. */
10763 offset = size_diffop (fold_convert (sizetype, offset),
10764 fold_convert (sizetype, lower_bound));
10767 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10769 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10770 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10771 if (TREE_CODE (array) != ADDR_EXPR)
10772 return 0;
10773 array = TREE_OPERAND (array, 0);
10774 if (TREE_CODE (array) != STRING_CST
10775 && TREE_CODE (array) != VAR_DECL)
10776 return 0;
10778 else
10779 return 0;
10781 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10783 tree arg0 = TREE_OPERAND (arg, 0);
10784 tree arg1 = TREE_OPERAND (arg, 1);
10786 STRIP_NOPS (arg0);
10787 STRIP_NOPS (arg1);
10789 if (TREE_CODE (arg0) == ADDR_EXPR
10790 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10791 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10793 array = TREE_OPERAND (arg0, 0);
10794 offset = arg1;
10796 else if (TREE_CODE (arg1) == ADDR_EXPR
10797 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10798 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10800 array = TREE_OPERAND (arg1, 0);
10801 offset = arg0;
10803 else
10804 return 0;
10806 else
10807 return 0;
10809 if (TREE_CODE (array) == STRING_CST)
10811 *ptr_offset = fold_convert (sizetype, offset);
10812 return array;
10814 else if (TREE_CODE (array) == VAR_DECL
10815 || TREE_CODE (array) == CONST_DECL)
10817 int length;
10818 tree init = ctor_for_folding (array);
10820 /* Variables initialized to string literals can be handled too. */
10821 if (init == error_mark_node
10822 || !init
10823 || TREE_CODE (init) != STRING_CST)
10824 return 0;
10826 /* Avoid const char foo[4] = "abcde"; */
10827 if (DECL_SIZE_UNIT (array) == NULL_TREE
10828 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10829 || (length = TREE_STRING_LENGTH (init)) <= 0
10830 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10831 return 0;
10833 /* If variable is bigger than the string literal, OFFSET must be constant
10834 and inside of the bounds of the string literal. */
10835 offset = fold_convert (sizetype, offset);
10836 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10837 && (! tree_fits_uhwi_p (offset)
10838 || compare_tree_int (offset, length) >= 0))
10839 return 0;
10841 *ptr_offset = offset;
10842 return init;
10845 return 0;
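/* A few shapes of ARG that the walk above accepts (hypothetical
   examples; MSG must be const so ctor_for_folding can see its
   initializer):

      static const char msg[] = "hello";

      &"hello"[2]   returns the STRING_CST "hello" with *PTR_OFFSET 2
      "hello" + i   returns the STRING_CST "hello" with *PTR_OFFSET i
      msg + 1       returns the "hello" initializer with *PTR_OFFSET 1
*/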
10848 /* Generate code to calculate OPS, an exploded expression
10849 using a store-flag instruction and return an rtx for the result.
10850 OPS reflects a comparison.
10852 If TARGET is nonzero, store the result there if convenient.
10854 Return zero if there is no suitable set-flag instruction
10855 available on this machine.
10857 Once expand_expr has been called on the arguments of the comparison,
10858 we are committed to doing the store flag, since it is not safe to
10859 re-evaluate the expression. We emit the store-flag insn by calling
10860 emit_store_flag, but only expand the arguments if we have a reason
10861 to believe that emit_store_flag will be successful. If we think that
10862 it will, but it isn't, we have to simulate the store-flag with a
10863 set/jump/set sequence. */
10865 static rtx
10866 do_store_flag (sepops ops, rtx target, machine_mode mode)
10868 enum rtx_code code;
10869 tree arg0, arg1, type;
10870 machine_mode operand_mode;
10871 int unsignedp;
10872 rtx op0, op1;
10873 rtx subtarget = target;
10874 location_t loc = ops->location;
10876 arg0 = ops->op0;
10877 arg1 = ops->op1;
10879 /* Don't crash if the comparison was erroneous. */
10880 if (arg0 == error_mark_node || arg1 == error_mark_node)
10881 return const0_rtx;
10883 type = TREE_TYPE (arg0);
10884 operand_mode = TYPE_MODE (type);
10885 unsignedp = TYPE_UNSIGNED (type);
10887 /* We won't bother with BLKmode store-flag operations because it would mean
10888 passing a lot of information to emit_store_flag. */
10889 if (operand_mode == BLKmode)
10890 return 0;
10892 /* We won't bother with store-flag operations involving function pointers
10893 when function pointers must be canonicalized before comparisons. */
10894 if (targetm.have_canonicalize_funcptr_for_compare ()
10895 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10896 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10897 == FUNCTION_TYPE))
10898 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10899 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10900 == FUNCTION_TYPE))))
10901 return 0;
10903 STRIP_NOPS (arg0);
10904 STRIP_NOPS (arg1);
10906 /* For vector typed comparisons emit code to generate the desired
10907 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10908 expander for this. */
10909 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10911 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10912 tree if_true = constant_boolean_node (true, ops->type);
10913 tree if_false = constant_boolean_node (false, ops->type);
10914 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
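  /* E.g. with the GNU vector extension (a hypothetical function):

	typedef int v4si __attribute__ ((vector_size (16)));

	static v4si
	vec_lt (v4si a, v4si b)
	{
	  return a < b;
	}

     the comparison is expanded here as
     VEC_COND_EXPR <a < b, {-1,-1,-1,-1}, {0,0,0,0}>.  */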
10917 /* Get the rtx comparison code to use. We know that EXP is a comparison
10918 operation of some type. Some comparisons against 1 and -1 can be
10919 converted to comparisons with zero. Do so here so that the tests
10920 below will be aware that we have a comparison with zero. These
10921 tests will not catch constants in the first operand, but constants
10922 are rarely passed as the first operand. */
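  /* For example, "x < 1" is rewritten below as "x <= 0" and a signed
     "x > -1" as "x >= 0", so the single-bit and store-flag logic
     further down only has to recognize comparisons against zero.  */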
10924 switch (ops->code)
10926 case EQ_EXPR:
10927 code = EQ;
10928 break;
10929 case NE_EXPR:
10930 code = NE;
10931 break;
10932 case LT_EXPR:
10933 if (integer_onep (arg1))
10934 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10935 else
10936 code = unsignedp ? LTU : LT;
10937 break;
10938 case LE_EXPR:
10939 if (! unsignedp && integer_all_onesp (arg1))
10940 arg1 = integer_zero_node, code = LT;
10941 else
10942 code = unsignedp ? LEU : LE;
10943 break;
10944 case GT_EXPR:
10945 if (! unsignedp && integer_all_onesp (arg1))
10946 arg1 = integer_zero_node, code = GE;
10947 else
10948 code = unsignedp ? GTU : GT;
10949 break;
10950 case GE_EXPR:
10951 if (integer_onep (arg1))
10952 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10953 else
10954 code = unsignedp ? GEU : GE;
10955 break;
10957 case UNORDERED_EXPR:
10958 code = UNORDERED;
10959 break;
10960 case ORDERED_EXPR:
10961 code = ORDERED;
10962 break;
10963 case UNLT_EXPR:
10964 code = UNLT;
10965 break;
10966 case UNLE_EXPR:
10967 code = UNLE;
10968 break;
10969 case UNGT_EXPR:
10970 code = UNGT;
10971 break;
10972 case UNGE_EXPR:
10973 code = UNGE;
10974 break;
10975 case UNEQ_EXPR:
10976 code = UNEQ;
10977 break;
10978 case LTGT_EXPR:
10979 code = LTGT;
10980 break;
10982 default:
10983 gcc_unreachable ();

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      std::swap (arg0, arg1);
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && integer_zerop (arg1)
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
    {
      gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
      if (srcstmt
          && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
        {
          enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
          tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
          tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
                                       gimple_assign_rhs1 (srcstmt),
                                       gimple_assign_rhs2 (srcstmt));
          temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
          if (temp)
            return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
        }
    }
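
  /* Illustrative sketch (editorial note, not part of the original source):
     for a power-of-two mask the folder rewrites

       (x & 8) != 0   as   (x >> 3) & 1
       (x & 8) == 0   as   ((x >> 3) & 1) ^ 1

     which needs no store-flag (scc) instruction at all.  */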

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
                                operand_mode, unsignedp,
                                (TYPE_PRECISION (ops->type) == 1
                                 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
}
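
/* Illustrative note (editorial, not part of the original source): the last
   argument of emit_store_flag_force is the normalization value.  A signed
   1-bit result type can only represent 0 and -1, so the flag is normalized
   to -1 for it; every other result type gets the usual 0/1.  */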

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   DEFAULT_PROBABILITY is the probability of jumping to the default
   label.  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label, rtx default_label, rtx fallback_label,
            int default_probability)
{
  struct expand_operand ops[5];
  machine_mode index_mode = SImode;
  rtx op1, op2, index;

  if (! targetm.have_casesi ())
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
                           index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
        emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                                 omode, 1, default_label,
                                 default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_type = lang_hooks.types.type_for_mode (index_mode, 0);
          index_expr = fold_convert (index_type, index_expr);
        }

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
                                  ? default_label
                                  : fallback_label));
  expand_jump_insn (targetm.code_for_casesi, 5, ops);
  return 1;
}
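
/* Illustrative sketch (editorial note, not part of the original source):
   for a dense switch such as

     switch (x) { case 3: ...; case 4: ...; case 5: ...; default: ...; }

   the operands built above are roughly index = x, minval = 3 and range = 2
   (the span 5 - 3), plus the jump-table label and the out-of-range label;
   the target's casesi pattern then performs both the bounds check and the
   indexed jump itself.  */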

/* Attempt to generate a tablejump instruction; same concept.  */
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */

static void
do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
              rtx default_label, int default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                             default_label, default_probability);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = simplify_gen_binary (MULT, Pmode, index,
                               gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
                                             Pmode));
  index = simplify_gen_binary (PLUS, Pmode, index,
                               gen_rtx_LABEL_REF (Pmode, table_label));

#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (targetm.gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
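
/* Illustrative sketch (editorial note, not part of the original source):
   leaving the PIC special cases aside, the address arithmetic above boils
   down to

     entry = *(table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE));
     goto *entry;

   i.e. fetch the INDEX-th slot of the dispatch table and jump through it.  */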

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label, int default_probability)
{
  rtx index;

  if (! targetm.have_tablejump ())
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
                            fold_convert (index_type, index_expr),
                            fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_normal (range),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label, default_probability);
  return 1;
}
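
/* Illustrative note (editorial, not part of the original source): here the
   lower bound is subtracted in the tree IL before expansion, so for case
   labels 10..20 do_tablejump receives index_expr - 10 together with a range
   of 10, and anything above that range branches to the default label.  */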

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  unsigned i;
  int units;
  tree elt;
  machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else if (TREE_CODE (elt) == FIXED_CST)
        RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}
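
/* Illustrative sketch (editorial note, not part of the original source):
   the VECTOR_CST initializing

     typedef float v4sf __attribute__ ((vector_size (16)));
     static v4sf k = { 1.0f, 2.0f, 3.0f, 4.0f };

   becomes a V4SFmode CONST_VECTOR whose elements are the matching
   CONST_DOUBLEs, while an all-zero initializer is returned directly as
   CONST0_RTX (V4SFmode).  */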

/* Build a decl for a personality function given a language prefix.  */

tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
                                   long_long_unsigned_type_node,
                                   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
                     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
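
/* Illustrative note (editorial, not part of the original source): with the
   prefix "gcc" and DWARF2 unwinding the assembled name is
   "__gcc_personality_v0"; with setjmp/longjmp exceptions it would be
   "__gcc_personality_sj0", and a C++ prefix of "gxx" yields the familiar
   "__gxx_personality_v0".  */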

/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}

/* Returns a tree for the size of EXP in bytes.  */

static tree
tree_expr_size (const_tree exp)
{
  if (DECL_P (exp)
      && DECL_SIZE_UNIT (exp) != 0)
    return DECL_SIZE_UNIT (exp);
  else
    return size_in_bytes (TREE_TYPE (exp));
}

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

static HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
    }

  if (size == 0 || !tree_fits_shwi_p (size))
    return -1;

  return tree_to_shwi (size);
}
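
/* Illustrative note (editorial, not part of the original source): for an
   object of type int[16] both routines report 64 bytes, expr_size as a
   constant rtx and int_expr_size as 64.  For a C99 VLA such as int a[n]
   (with a 4-byte int) the size is only known at run time, so expr_size
   emits code computing 4 * n while int_expr_size returns -1.  */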

#include "gt-expr.h"