/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "ssa.h"
#include "expmed.h"
#include "optabs.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "attribs.h"
#include "varasm.h"
#include "except.h"
#include "insn-attr.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "stmt.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs-tree.h"
#include "libfuncs.h"
#include "reload.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "tree-dfa.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "tree-ssa-address.h"
#include "builtins.h"
#include "ccmp.h"
#include "gimple-fold.h"
#include "rtx-vector-builder.h"

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_pattern (rtx, rtx, rtx, unsigned, unsigned,
					 HOST_WIDE_INT, unsigned HOST_WIDE_INT,
					 unsigned HOST_WIDE_INT,
					 unsigned HOST_WIDE_INT, bool);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static rtx_insn *compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor (tree, rtx, int, poly_int64, bool);
static rtx store_field (rtx, poly_int64, poly_int64, poly_uint64, poly_uint64,
			machine_mode, tree, alias_set_type, bool, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, machine_mode);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx,
			  profile_probability);
static rtx const_vector_from_tree (tree);
static rtx const_scalar_mask_from_tree (scalar_int_mode, tree);
static tree tree_expr_size (const_tree);
static HOST_WIDE_INT int_expr_size (tree);
static void convert_mode_scalar (rtx, rtx, int);

/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx pat;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);

  rtx_insn *insn = as_a<rtx_insn *> (rtx_alloc (INSN));
  pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (machine_mode mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (!targetm.hard_regno_mode_ok (regno, mode))
	      continue;

	    set_mode_and_regno (reg, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));

  opt_scalar_float_mode mode_iter;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_FLOAT)
    {
      scalar_float_mode mode = mode_iter.require ();
      scalar_float_mode srcmode;
      FOR_EACH_MODE_UNTIL (srcmode, mode)
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if (insn_operand_matches (ic, 1, mem))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
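
/* For illustration (not part of the original file): the direct_load
   and direct_store tables computed above are consulted later, e.g. by
   convert_modes, in tests of the form

     MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode]

   so a mode only qualifies if some hard register can be loaded from
   or stored to memory directly in that mode.  */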

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  machine_mode to_mode = GET_MODE (to);
  machine_mode from_mode = GET_MODE (from);

  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  scalar_int_mode to_int_mode;
  if (GET_CODE (from) == SUBREG
      && SUBREG_PROMOTED_VAR_P (from)
      && is_a <scalar_int_mode> (to_mode, &to_int_mode)
      && (GET_MODE_PRECISION (subreg_promoted_mode (from))
	  >= GET_MODE_PRECISION (to_int_mode))
      && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
    {
      from = gen_lowpart (to_int_mode, SUBREG_REG (from));
      from_mode = to_int_mode;
    }

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_UNIT_PRECISION (to_mode)
	  > GET_MODE_UNIT_PRECISION (from_mode))
	{
	  optab op = unsignedp ? zext_optab : sext_optab;
	  insn_code icode = convert_optab_handler (op, to_mode, from_mode);
	  if (icode != CODE_FOR_nothing)
	    {
	      emit_unop_insn (icode, to, from,
			      unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
	      return;
	    }
	}

      if (GET_MODE_UNIT_PRECISION (to_mode)
	  < GET_MODE_UNIT_PRECISION (from_mode))
	{
	  insn_code icode = convert_optab_handler (trunc_optab,
						   to_mode, from_mode);
	  if (icode != CODE_FOR_nothing)
	    {
	      emit_unop_insn (icode, to, from, TRUNCATE);
	      return;
	    }
	}

      gcc_assert (known_eq (GET_MODE_BITSIZE (from_mode),
			    GET_MODE_BITSIZE (to_mode)));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  convert_mode_scalar (to, from, unsignedp);
}
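
/* Usage sketch (illustrative only; the pseudos are made up): widening
   a QImode value into an SImode register with zero-extension

     rtx src = gen_reg_rtx (QImode);
     rtx dst = gen_reg_rtx (SImode);
     convert_move (dst, src, /.unsignedp=./1);

   emits a single extension insn when the target provides one;
   otherwise convert_mode_scalar falls back to an intermediate mode,
   a shift pair, or a libcall as appropriate.  */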

/* Like convert_move, but deals only with scalar modes.  */

static void
convert_mode_scalar (rtx to, rtx from, int unsignedp)
{
  /* Both modes should be scalar types.  */
  scalar_mode from_mode = as_a <scalar_mode> (GET_MODE (from));
  scalar_mode to_mode = as_a <scalar_mode> (GET_MODE (to));
  bool to_real = SCALAR_FLOAT_MODE_P (to_mode);
  bool from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  gcc_assert (to_real == from_real);

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  if (to_real)
    {
      rtx value;
      rtx_insn *insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* If the target has a converter from FROM_MODE to TO_MODE, use it.  */
  {
    convert_optab ctab;

    if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
      ctab = trunc_optab;
    else if (unsignedp)
      ctab = zext_optab;
    else
      ctab = sext_optab;

    if (convert_optab_handler (ctab, to_mode, from_mode)
	!= CODE_FOR_nothing)
      {
	emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
			to, from, UNKNOWN);
	return;
      }
  }

  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      scalar_int_mode full_mode
	= smallest_int_mode_for_size (GET_MODE_BITSIZE (to_mode));

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      scalar_int_mode full_mode
	= smallest_int_mode_for_size (GET_MODE_BITSIZE (from_mode));
      convert_optab ctab = unsignedp ? zext_optab : sext_optab;
      enum insn_code icode;

      icode = convert_optab_handler (ctab, full_mode, from_mode);
      gcc_assert (icode != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (icode, to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (icode, new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
	      ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
	 we won't saturate the result.
	 Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
	expand_fixed_convert (to, from, 0, 0);
      else
	expand_fixed_convert (to, from, 0, 1);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
      && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
    {
      rtx_insn *insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      scalar_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  rtx word_to = gen_reg_rtx (word_mode);
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_clobber (to);
	    }
	  convert_move (word_to, from, unsignedp);
	  emit_unop_insn (code, to, word_to, equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 the source does not overlap the target so force it into an isolated
	 register when maybe so.  Likewise for any MEM input, since the
	 conversion sequence might require several references to it and we
	 must ensure we're getting the same value every time.  */

      if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
					    LT, lowfrom, const0_rtx,
					    lowpart_mode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
      && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0),
					    MEM_ADDR_SPACE (from)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0),
					    MEM_ADDR_SPACE (from)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && !targetm.hard_regno_mode_ok (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  rtx tmp;
	  int shift_amount;

	  /* Search for a mode to convert via.  */
	  opt_scalar_mode intermediate_iter;
	  FOR_EACH_MODE_FROM (intermediate_iter, from_mode)
	    {
	      scalar_mode intermediate = intermediate_iter.require ();
	      if (((can_extend_p (to_mode, intermediate, unsignedp)
		    != CODE_FOR_nothing)
		   || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		       && TRULY_NOOP_TRUNCATION_MODES_P (to_mode,
							 intermediate)))
		  && (can_extend_p (intermediate, from_mode, unsignedp)
		      != CODE_FOR_nothing))
		{
		  convert_move (to, convert_to_mode (intermediate, from,
						     unsignedp), unsignedp);
		  return;
		}
	    }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = (GET_MODE_PRECISION (to_mode)
			  - GET_MODE_PRECISION (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
			     from_mode) != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
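
/* A worked example of the shift fallback above (illustrative): to
   sign-extend an 8-bit value in a 32-bit mode with no usable extend
   insn, shift_amount is 32 - 8 = 24 and the sequence computes
   (x << 24) >> 24; the right shift is arithmetic for signed inputs,
   reproducing the sign bit in the upper 24 bits, and logical for
   unsigned inputs, zero-filling them.  */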

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;
  scalar_int_mode int_mode;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG
      && SUBREG_PROMOTED_VAR_P (x)
      && is_a <scalar_int_mode> (mode, &int_mode)
      && (GET_MODE_PRECISION (subreg_promoted_mode (x))
	  >= GET_MODE_PRECISION (int_mode))
      && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
    x = gen_lowpart (int_mode, SUBREG_REG (x));

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  if (CONST_SCALAR_INT_P (x)
      && is_a <scalar_int_mode> (mode, &int_mode))
    {
      /* If the caller did not tell us the old mode, then there is not
	 much to do with respect to canonicalization.  We have to
	 assume that all the bits are significant.  */
      if (!is_a <scalar_int_mode> (oldmode))
	oldmode = MAX_MODE_INT;
      wide_int w = wide_int::from (rtx_mode_t (x, oldmode),
				   GET_MODE_PRECISION (int_mode),
				   unsignedp ? UNSIGNED : SIGNED);
      return immed_wide_int_const (w, int_mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  */
  scalar_int_mode int_oldmode;
  if (is_int_mode (mode, &int_mode)
      && is_int_mode (oldmode, &int_oldmode)
      && GET_MODE_PRECISION (int_mode) <= GET_MODE_PRECISION (int_oldmode)
      && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) int_mode])
	  || CONST_POLY_INT_P (x)
	  || (REG_P (x)
	      && (!HARD_REGISTER_P (x)
		  || targetm.hard_regno_mode_ok (REGNO (x), int_mode))
	      && TRULY_NOOP_TRUNCATION_MODES_P (int_mode, GET_MODE (x)))))
    return gen_lowpart (int_mode, x);

  /* Converting from an integer constant into MODE is always equivalent
     to a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (known_eq (GET_MODE_BITSIZE (mode),
			    GET_MODE_BITSIZE (oldmode)));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
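
/* Example of the constant canonicalization above (illustrative):
   converting (const_int 255) with OLDMODE QImode to HImode takes the
   CONST_SCALAR_INT_P path; with unsignedp == 0 wide_int::from reads
   the 8-bit pattern 0xff as -1 and the result is (const_int -1),
   while with unsignedp != 0 it zero-extends and the result stays
   255.  */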

/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */

static unsigned int
alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
  scalar_int_mode tmode
    = int_mode_for_size (max_pieces * BITS_PER_UNIT, 1).require ();

  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      scalar_int_mode xmode = NARROWEST_INT_MODE;
      opt_scalar_int_mode mode_iter;
      FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
	{
	  tmode = mode_iter.require ();
	  if (GET_MODE_SIZE (tmode) > max_pieces
	      || targetm.slow_unaligned_access (tmode, align))
	    break;
	  xmode = tmode;
	}

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  return align;
}

/* Return the widest integer mode that is narrower than SIZE bytes.  */

static scalar_int_mode
widest_int_mode_for_size (unsigned int size)
{
  scalar_int_mode result = NARROWEST_INT_MODE;

  gcc_checking_assert (size > 1);

  opt_scalar_int_mode tmode;
  FOR_EACH_MODE_IN_CLASS (tmode, MODE_INT)
    if (GET_MODE_SIZE (tmode.require ()) < size)
      result = tmode.require ();

  return result;
}
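
/* Note (illustrative): the bound is strict, so callers pass one more
   than the number of bytes they can still move; e.g. with
   size == MOVE_MAX_PIECES + 1 this yields the widest mode of at most
   MOVE_MAX_PIECES bytes, and as the caller shrinks SIZE each
   iteration the result steps down to the next narrower mode.  */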

/* Determine whether an operation OP on LEN bytes with alignment ALIGN can
   and should be performed piecewise.  */

static bool
can_do_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align,
		  enum by_pieces_operation op)
{
  return targetm.use_by_pieces_infrastructure_p (len, align, op,
						 optimize_insn_for_speed_p ());
}

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

bool
can_move_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align)
{
  return can_do_by_pieces (len, align, MOVE_BY_PIECES);
}

/* Return number of insns required to perform operation OP by pieces
   for L bytes.  ALIGN (in bits) is maximum alignment we can assume.  */

unsigned HOST_WIDE_INT
by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		  unsigned int max_size, by_pieces_operation op)
{
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1 && l > 0)
    {
      scalar_int_mode mode = widest_int_mode_for_size (max_size);
      enum insn_code icode;

      unsigned int modesize = GET_MODE_SIZE (mode);

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	{
	  unsigned HOST_WIDE_INT n_pieces = l / modesize;
	  l %= modesize;
	  switch (op)
	    {
	    default:
	      n_insns += n_pieces;
	      break;

	    case COMPARE_BY_PIECES:
	      int batch = targetm.compare_by_pieces_branch_ratio (mode);
	      int batch_ops = 4 * batch - 1;
	      unsigned HOST_WIDE_INT full = n_pieces / batch;
	      n_insns += full * batch_ops;
	      if (n_pieces % batch != 0)
		n_insns++;
	      break;
	    }
	}
      max_size = modesize;
    }

  gcc_assert (!l);
  return n_insns;
}
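
/* Worked example (illustrative): a well-aligned 15-byte move with
   integer modes of 8, 4, 2 and 1 bytes available is counted as
   15 = 8 + 4 + 2 + 1, i.e. four move insns.  For COMPARE_BY_PIECES
   with a branch ratio of 2, each full batch of two pieces is costed
   at 4 * 2 - 1 = 7 insns under the heuristic above, plus one more
   insn for a trailing partial batch.  */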

/* Used when performing piecewise block operations, holds information
   about one of the memory objects involved.  The member functions
   can be used to generate code for loading from the object and
   updating the address when iterating.  */

class pieces_addr
{
  /* The object being referenced, a MEM.  Can be NULL_RTX to indicate
     stack pushes.  */
  rtx m_obj;
  /* The address of the object.  Can differ from that seen in the
     MEM rtx if we copied the address to a register.  */
  rtx m_addr;
  /* Nonzero if the address on the object has an autoincrement already,
     signifies whether that was an increment or decrement.  */
  signed char m_addr_inc;
  /* Nonzero if we intend to use autoinc without the address already
     having autoinc form.  We will insert add insns around each memory
     reference, expecting later passes to form autoinc addressing modes.
     The only supported options are predecrement and postincrement.  */
  signed char m_explicit_inc;
  /* True if we have either of the two possible cases of using
     autoincrement.  */
  bool m_auto;
  /* True if this is an address to be used for load operations rather
     than stores.  */
  bool m_is_load;

  /* Optionally, a function to obtain constants for any given offset into
     the objects, and data associated with it.  */
  by_pieces_constfn m_constfn;
  void *m_cfndata;
public:
  pieces_addr (rtx, bool, by_pieces_constfn, void *);
  rtx adjust (scalar_int_mode, HOST_WIDE_INT);
  void increment_address (HOST_WIDE_INT);
  void maybe_predec (HOST_WIDE_INT);
  void maybe_postinc (HOST_WIDE_INT);
  void decide_autoinc (machine_mode, bool, HOST_WIDE_INT);
  int get_addr_inc ()
  {
    return m_addr_inc;
  }
};

/* Initialize a pieces_addr structure from an object OBJ.  IS_LOAD is
   true if the operation to be performed on this object is a load
   rather than a store.  For stores, OBJ can be NULL, in which case we
   assume the operation is a stack push.  For loads, the optional
   CONSTFN and its associated CFNDATA can be used in place of the
   memory load.  */

pieces_addr::pieces_addr (rtx obj, bool is_load, by_pieces_constfn constfn,
			  void *cfndata)
  : m_obj (obj), m_is_load (is_load), m_constfn (constfn), m_cfndata (cfndata)
{
  m_addr_inc = 0;
  m_auto = false;
  if (obj)
    {
      rtx addr = XEXP (obj, 0);
      rtx_code code = GET_CODE (addr);
      m_addr = addr;
      bool dec = code == PRE_DEC || code == POST_DEC;
      bool inc = code == PRE_INC || code == POST_INC;
      m_auto = inc || dec;
      if (m_auto)
	m_addr_inc = dec ? -1 : 1;

      /* While we have always looked for these codes here, the code
	 implementing the memory operation has never handled them.
	 Support could be added later if necessary or beneficial.  */
      gcc_assert (code != PRE_INC && code != POST_DEC);
    }
  else
    {
      m_addr = NULL_RTX;
      if (!is_load)
	{
	  m_auto = true;
	  if (STACK_GROWS_DOWNWARD)
	    m_addr_inc = -1;
	  else
	    m_addr_inc = 1;
	}
      else
	gcc_assert (constfn != NULL);
    }
  m_explicit_inc = 0;
  if (constfn)
    gcc_assert (is_load);
}

/* Decide whether to use autoinc for an address involved in a memory op.
   MODE is the mode of the accesses, REVERSE is true if we've decided to
   perform the operation starting from the end, and LEN is the length of
   the operation.  Don't override an earlier decision to set m_auto.  */

void
pieces_addr::decide_autoinc (machine_mode ARG_UNUSED (mode), bool reverse,
			     HOST_WIDE_INT len)
{
  if (m_auto || m_obj == NULL_RTX)
    return;

  bool use_predec = (m_is_load
		     ? USE_LOAD_PRE_DECREMENT (mode)
		     : USE_STORE_PRE_DECREMENT (mode));
  bool use_postinc = (m_is_load
		      ? USE_LOAD_POST_INCREMENT (mode)
		      : USE_STORE_POST_INCREMENT (mode));
  machine_mode addr_mode = get_address_mode (m_obj);

  if (use_predec && reverse)
    {
      m_addr = copy_to_mode_reg (addr_mode,
				 plus_constant (addr_mode,
						m_addr, len));
      m_auto = true;
      m_explicit_inc = -1;
    }
  else if (use_postinc && !reverse)
    {
      m_addr = copy_to_mode_reg (addr_mode, m_addr);
      m_auto = true;
      m_explicit_inc = 1;
    }
  else if (CONSTANT_P (m_addr))
    m_addr = copy_to_mode_reg (addr_mode, m_addr);
}

/* Adjust the address to refer to the data at OFFSET in MODE.  If we
   are using autoincrement for this address, we don't add the offset,
   but we still modify the MEM's properties.  */

rtx
pieces_addr::adjust (scalar_int_mode mode, HOST_WIDE_INT offset)
{
  if (m_constfn)
    return m_constfn (m_cfndata, offset, mode);
  if (m_obj == NULL_RTX)
    return NULL_RTX;
  if (m_auto)
    return adjust_automodify_address (m_obj, mode, m_addr, offset);
  else
    return adjust_address (m_obj, mode, offset);
}

/* Emit an add instruction to increment the address by SIZE.  */

void
pieces_addr::increment_address (HOST_WIDE_INT size)
{
  rtx amount = gen_int_mode (size, GET_MODE (m_addr));
  emit_insn (gen_add2_insn (m_addr, amount));
}

/* If we are supposed to decrement the address before each access, emit
   code to do so now.  Increment by SIZE (which should have the correct
   sign already).  */

void
pieces_addr::maybe_predec (HOST_WIDE_INT size)
{
  if (m_explicit_inc >= 0)
    return;
  gcc_assert (HAVE_PRE_DECREMENT);
  increment_address (size);
}

/* If we are supposed to increment the address after each access, emit
   code to do so now.  Increment by SIZE.  */

void
pieces_addr::maybe_postinc (HOST_WIDE_INT size)
{
  if (m_explicit_inc <= 0)
    return;
  gcc_assert (HAVE_POST_INCREMENT);
  increment_address (size);
}
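
/* Note (illustrative): maybe_predec and maybe_postinc bracket each
   access emitted by op_by_pieces_d::run below; the pre-decrement
   variant is called with a negative SIZE before the access and the
   post-increment variant with a positive SIZE after it, and later
   passes are expected to fold these explicit adds into auto-modify
   addresses.  */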

/* This structure is used by do_op_by_pieces to describe the operation
   to be performed.  */

class op_by_pieces_d
{
 protected:
  pieces_addr m_to, m_from;
  unsigned HOST_WIDE_INT m_len;
  HOST_WIDE_INT m_offset;
  unsigned int m_align;
  unsigned int m_max_size;
  bool m_reverse;

  /* Virtual functions, overridden by derived classes for the specific
     operation.  */
  virtual void generate (rtx, rtx, machine_mode) = 0;
  virtual bool prepare_mode (machine_mode, unsigned int) = 0;
  virtual void finish_mode (machine_mode)
  {
  }

 public:
  op_by_pieces_d (rtx, bool, rtx, bool, by_pieces_constfn, void *,
		  unsigned HOST_WIDE_INT, unsigned int);
  void run ();
};

/* The constructor for an op_by_pieces_d structure.  We require two
   objects named TO and FROM, which are identified as loads or stores
   by TO_LOAD and FROM_LOAD.  If FROM is a load, the optional FROM_CFN
   and its associated FROM_CFN_DATA can be used to replace loads with
   constant values.  LEN describes the length of the operation.  */

op_by_pieces_d::op_by_pieces_d (rtx to, bool to_load,
				rtx from, bool from_load,
				by_pieces_constfn from_cfn,
				void *from_cfn_data,
				unsigned HOST_WIDE_INT len,
				unsigned int align)
  : m_to (to, to_load, NULL, NULL),
    m_from (from, from_load, from_cfn, from_cfn_data),
    m_len (len), m_max_size (MOVE_MAX_PIECES + 1)
{
  int toi = m_to.get_addr_inc ();
  int fromi = m_from.get_addr_inc ();
  if (toi >= 0 && fromi >= 0)
    m_reverse = false;
  else if (toi <= 0 && fromi <= 0)
    m_reverse = true;
  else
    gcc_unreachable ();

  m_offset = m_reverse ? len : 0;
  align = MIN (to ? MEM_ALIGN (to) : align,
	       from ? MEM_ALIGN (from) : align);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (by_pieces_ninsns (len, align, m_max_size, MOVE_BY_PIECES) > 2)
    {
      /* Find the mode of the largest comparison.  */
      scalar_int_mode mode = widest_int_mode_for_size (m_max_size);

      m_from.decide_autoinc (mode, m_reverse, len);
      m_to.decide_autoinc (mode, m_reverse, len);
    }

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
  m_align = align;
}

/* This function contains the main loop used for expanding a block
   operation.  First move what we can in the largest integer mode,
   then go to successively smaller modes.  For every access, call
   GENFUN with the two operands and the EXTRA_DATA.  */

void
op_by_pieces_d::run ()
{
  while (m_max_size > 1 && m_len > 0)
    {
      scalar_int_mode mode = widest_int_mode_for_size (m_max_size);

      if (prepare_mode (mode, m_align))
	{
	  unsigned int size = GET_MODE_SIZE (mode);
	  rtx to1 = NULL_RTX, from1;

	  while (m_len >= size)
	    {
	      if (m_reverse)
		m_offset -= size;

	      to1 = m_to.adjust (mode, m_offset);
	      from1 = m_from.adjust (mode, m_offset);

	      m_to.maybe_predec (-(HOST_WIDE_INT)size);
	      m_from.maybe_predec (-(HOST_WIDE_INT)size);

	      generate (to1, from1, mode);

	      m_to.maybe_postinc (size);
	      m_from.maybe_postinc (size);

	      if (!m_reverse)
		m_offset += size;

	      m_len -= size;
	    }

	  finish_mode (mode);
	}

      m_max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!m_len);
}
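
/* Worked example (illustrative): copying 7 bytes with
   MOVE_MAX_PIECES == 8 first tries the 8-byte mode (no piece fits),
   then emits one 4-byte, one 2-byte and one 1-byte access, calling
   generate for each piece at the appropriate offset.  */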

/* Derived class from op_by_pieces_d, providing support for block move
   operations.  */

class move_by_pieces_d : public op_by_pieces_d
{
  insn_gen_fn m_gen_fun;
  void generate (rtx, rtx, machine_mode);
  bool prepare_mode (machine_mode, unsigned int);

 public:
  move_by_pieces_d (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		    unsigned int align)
    : op_by_pieces_d (to, false, from, true, NULL, NULL, len, align)
  {
  }
  rtx finish_retmode (memop_ret);
};

/* Return true if MODE can be used for a set of copies, given an
   alignment ALIGN.  Prepare whatever data is necessary for later
   calls to generate.  */

bool
move_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
{
  insn_code icode = optab_handler (mov_optab, mode);
  m_gen_fun = GEN_FCN (icode);
  return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
}

/* A callback used when iterating for a move_by_pieces operation.
   OP0 is the destination and OP1 the source piece in MODE.  If OP0 is
   NULL, this means we should generate a push; otherwise emit a move
   using the insn gen function set up by prepare_mode.  */

void
move_by_pieces_d::generate (rtx op0, rtx op1,
			    machine_mode mode ATTRIBUTE_UNUSED)
{
#ifdef PUSH_ROUNDING
  if (op0 == NULL_RTX)
    {
      emit_single_push_insn (mode, op1, NULL);
      return;
    }
#endif
  emit_insn (m_gen_fun (op0, op1));
}

/* Perform the final adjustment at the end of a string to obtain the
   correct return value for the block operation.
   Return value is based on RETMODE argument.  */

rtx
move_by_pieces_d::finish_retmode (memop_ret retmode)
{
  gcc_assert (!m_reverse);
  if (retmode == RETURN_END_MINUS_ONE)
    {
      m_to.maybe_postinc (-1);
      --m_offset;
    }
  return m_to.adjust (QImode, m_offset);
}

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   Return value is based on RETMODE argument.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, memop_ret retmode)
{
#ifndef PUSH_ROUNDING
  if (to == NULL)
    gcc_unreachable ();
#endif

  move_by_pieces_d data (to, from, len, align);

  data.run ();

  if (retmode != RETURN_BEGIN)
    return data.finish_retmode (retmode);
  else
    return to;
}
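
/* Usage sketch (illustrative): this is the path taken by
   emit_block_move_hints when SIZE is a small CONST_INT and
   can_move_by_pieces agrees, e.g. for a short __builtin_memcpy.
   RETURN_BEGIN callers ignore the result; the RETURN_END variants
   receive the address just past, or with RETURN_END_MINUS_ONE exactly
   at, the last byte written.  */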

/* Derived class from op_by_pieces_d, providing support for block store
   operations.  */

class store_by_pieces_d : public op_by_pieces_d
{
  insn_gen_fn m_gen_fun;
  void generate (rtx, rtx, machine_mode);
  bool prepare_mode (machine_mode, unsigned int);

 public:
  store_by_pieces_d (rtx to, by_pieces_constfn cfn, void *cfn_data,
		     unsigned HOST_WIDE_INT len, unsigned int align)
    : op_by_pieces_d (to, false, NULL_RTX, true, cfn, cfn_data, len, align)
  {
  }
  rtx finish_retmode (memop_ret);
};

/* Return true if MODE can be used for a set of stores, given an
   alignment ALIGN.  Prepare whatever data is necessary for later
   calls to generate.  */

bool
store_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
{
  insn_code icode = optab_handler (mov_optab, mode);
  m_gen_fun = GEN_FCN (icode);
  return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
}

/* A callback used when iterating for a store_by_pieces operation.
   OP0 is the destination and OP1 the constant value to be stored in
   MODE; emit the store using the insn gen function set up by
   prepare_mode.  */

void
store_by_pieces_d::generate (rtx op0, rtx op1, machine_mode)
{
  emit_insn (m_gen_fun (op0, op1));
}

/* Perform the final adjustment at the end of a string to obtain the
   correct return value for the block operation.
   Return value is based on RETMODE argument.  */

rtx
store_by_pieces_d::finish_retmode (memop_ret retmode)
{
  gcc_assert (!m_reverse);
  if (retmode == RETURN_END_MINUS_ONE)
    {
      m_to.maybe_postinc (-1);
      --m_offset;
    }
  return m_to.adjust (QImode, m_offset);
}

/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   Return nonzero if a call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
		     rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
		     void *constfundata, unsigned int align, bool memsetp)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum insn_code icode;
  int reverse;
  /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
  rtx cst ATTRIBUTE_UNUSED;

  if (len == 0)
    return 1;

  if (!targetm.use_by_pieces_infrastructure_p (len, align,
					       memsetp
					       ? SET_BY_PIECES
					       : STORE_BY_PIECES,
					       optimize_insn_for_speed_p ()))
    return 0;

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1 && l > 0)
	{
	  scalar_int_mode mode = widest_int_mode_for_size (max_size);

	  icode = optab_handler (mov_optab, mode);
	  if (icode != CODE_FOR_nothing
	      && align >= GET_MODE_ALIGNMENT (mode))
	    {
	      unsigned int size = GET_MODE_SIZE (mode);

	      while (l >= size)
		{
		  if (reverse)
		    offset -= size;

		  cst = (*constfun) (constfundata, offset, mode);
		  if (!targetm.legitimate_constant_p (mode, cst))
		    return 0;

		  if (!reverse)
		    offset += size;

		  l -= size;
		}
	    }

	  max_size = GET_MODE_SIZE (mode);
	}

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}
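
/* Note (illustrative): the outer loop checks the constants in the
   forward order and, when the target has pre- or post-decrement
   addressing, in the reverse order as well, since store_by_pieces may
   iterate from either end and each piece's constant must be
   legitimate at the offset where it would actually be stored.  */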

/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   Return value is based on RETMODE argument.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
		 rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
		 void *constfundata, unsigned int align, bool memsetp,
		 memop_ret retmode)
{
  if (len == 0)
    {
      gcc_assert (retmode != RETURN_END_MINUS_ONE);
      return to;
    }

  gcc_assert (targetm.use_by_pieces_infrastructure_p
	      (len, align,
	       memsetp ? SET_BY_PIECES : STORE_BY_PIECES,
	       optimize_insn_for_speed_p ()));

  store_by_pieces_d data (to, constfun, constfundata, len, align);
  data.run ();

  if (retmode != RETURN_BEGIN)
    return data.finish_retmode (retmode);
  else
    return to;
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *, HOST_WIDE_INT, scalar_int_mode)
{
  return const0_rtx;
}

/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  if (len == 0)
    return;

  store_by_pieces_d data (to, clear_by_pieces_1, NULL, len, align);
  data.run ();
}
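
/* Usage sketch (illustrative): clear_by_pieces implements the
   memset-to-zero special case; every piece's "load" is satisfied by
   clear_by_pieces_1 returning const0_rtx, so the generated code is
   simply a sequence of stores of zero in successively smaller
   modes.  */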

/* Context used by compare_by_pieces_genfn.  It stores the fail label
   to jump to in case of miscomparison, and for branch ratios greater than 1,
   it stores an accumulator and the current and maximum counts before
   emitting another branch.  */

class compare_by_pieces_d : public op_by_pieces_d
{
  rtx_code_label *m_fail_label;
  rtx m_accumulator;
  int m_count, m_batch;

  void generate (rtx, rtx, machine_mode);
  bool prepare_mode (machine_mode, unsigned int);
  void finish_mode (machine_mode);
 public:
  compare_by_pieces_d (rtx op0, rtx op1, by_pieces_constfn op1_cfn,
		       void *op1_cfn_data, HOST_WIDE_INT len, int align,
		       rtx_code_label *fail_label)
    : op_by_pieces_d (op0, true, op1, true, op1_cfn, op1_cfn_data, len, align)
  {
    m_fail_label = fail_label;
  }
};

/* A callback used when iterating for a compare_by_pieces_operation.
   OP0 and OP1 are the values that have been loaded and should be
   compared in MODE.  DATA holds a pointer to the compare_by_pieces_data
   context structure.  */

void
compare_by_pieces_d::generate (rtx op0, rtx op1, machine_mode mode)
{
  if (m_batch > 1)
    {
      rtx temp = expand_binop (mode, sub_optab, op0, op1, NULL_RTX,
			       true, OPTAB_LIB_WIDEN);
      if (m_count != 0)
	temp = expand_binop (mode, ior_optab, m_accumulator, temp, temp,
			     true, OPTAB_LIB_WIDEN);
      m_accumulator = temp;

      if (++m_count < m_batch)
	return;

      m_count = 0;
      op0 = m_accumulator;
      op1 = const0_rtx;
      m_accumulator = NULL_RTX;
    }
  do_compare_rtx_and_jump (op0, op1, NE, true, mode, NULL_RTX, NULL,
			   m_fail_label, profile_probability::uninitialized ());
}

/* Return true if MODE can be used for a set of moves and comparisons,
   given an alignment ALIGN.  Prepare whatever data is necessary for
   later calls to generate.  */

bool
compare_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
{
  insn_code icode = optab_handler (mov_optab, mode);
  if (icode == CODE_FOR_nothing
      || align < GET_MODE_ALIGNMENT (mode)
      || !can_compare_p (EQ, mode, ccp_jump))
    return false;
  m_batch = targetm.compare_by_pieces_branch_ratio (mode);
  if (m_batch < 0)
    return false;
  m_accumulator = NULL_RTX;
  m_count = 0;
  return true;
}

/* Called after expanding a series of comparisons in MODE.  If we have
   accumulated results for which we haven't emitted a branch yet, do
   so now.  */

void
compare_by_pieces_d::finish_mode (machine_mode mode)
{
  if (m_accumulator != NULL_RTX)
    do_compare_rtx_and_jump (m_accumulator, const0_rtx, NE, true, mode,
			     NULL_RTX, NULL, m_fail_label,
			     profile_probability::uninitialized ());
}

/* Generate several move instructions to compare LEN bytes from blocks
   ARG0 and ARG1.  (These are MEM rtx's with BLKmode).

   ALIGN is maximum alignment we can assume.

   Optionally, the caller can pass a constfn and associated data in A1_CFN
   and A1_CFN_DATA, describing that the second operand being compared is a
   known constant and how to obtain its data.  */

static rtx
compare_by_pieces (rtx arg0, rtx arg1, unsigned HOST_WIDE_INT len,
		   rtx target, unsigned int align,
		   by_pieces_constfn a1_cfn, void *a1_cfn_data)
{
  rtx_code_label *fail_label = gen_label_rtx ();
  rtx_code_label *end_label = gen_label_rtx ();

  if (target == NULL_RTX
      || !REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (TYPE_MODE (integer_type_node));

  compare_by_pieces_d data (arg0, arg1, a1_cfn, a1_cfn_data, len, align,
			    fail_label);

  data.run ();

  emit_move_insn (target, const0_rtx);
  emit_jump (end_label);
  emit_barrier ();
  emit_label (fail_label);
  emit_move_insn (target, const1_rtx);
  emit_label (end_label);

  return target;
}
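
/* Note (illustrative): with a branch ratio greater than one, generate
   accumulates the differences of several pieces with IOR and emits a
   single compare-and-jump per batch, and finish_mode flushes any
   partial accumulator before the access mode changes.  The labels
   above leave 0 in TARGET when all bytes compare equal and 1 on any
   miscomparison, i.e. memcmp's zero/nonzero contract but not its
   sign.  */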

/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.
   MIN_SIZE is the minimal size of block to move.
   MAX_SIZE is the maximal size of block to move; if it cannot be represented
   in unsigned HOST_WIDE_INT, then it is the mask of all ones.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size,
		       unsigned HOST_WIDE_INT min_size,
		       unsigned HOST_WIDE_INT max_size,
		       unsigned HOST_WIDE_INT probable_max_size,
		       bool bail_out_libcall, bool *is_move_done,
		       bool might_overlap)
{
  int may_use_call;
  rtx retval = 0;
  unsigned int align;

  if (is_move_done)
    *is_move_done = true;

  gcc_assert (size);
  if (CONST_INT_P (size) && INTVAL (size) == 0)
    return 0;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = 1;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = 0;
      break;

    case BLOCK_OP_NO_LIBCALL_RET:
      may_use_call = -1;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* If source and destination are the same, no need to copy anything.  */
  if (rtx_equal_p (x, y)
      && !MEM_VOLATILE_P (x)
      && !MEM_VOLATILE_P (y))
    return 0;

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  poly_int64 const_size;
  if (poly_int_rtx_p (size, &const_size))
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, const_size);
      set_mem_size (y, const_size);
    }

  bool pieces_ok = CONST_INT_P (size)
    && can_move_by_pieces (INTVAL (size), align);
  bool pattern_ok = false;

  if (!pieces_ok || might_overlap)
    {
      pattern_ok
	= emit_block_move_via_pattern (x, y, size, align,
				       expected_align, expected_size,
				       min_size, max_size, probable_max_size,
				       might_overlap);
      if (!pattern_ok && might_overlap)
	{
	  /* Do not try any of the other methods below as they are not safe
	     for overlapping moves.  */
	  *is_move_done = false;
	  return retval;
	}
    }

  if (pattern_ok)
    ;
  else if (pieces_ok)
    move_by_pieces (x, y, INTVAL (size), align, RETURN_BEGIN);
  else if (may_use_call && !might_overlap
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    {
      if (bail_out_libcall)
	{
	  if (is_move_done)
	    *is_move_done = false;
	  return retval;
	}

      if (may_use_call < 0)
	return pc_rtx;

      retval = emit_block_copy_via_libcall (x, y, size,
					    method == BLOCK_OP_TAILCALL);
    }
  else if (might_overlap)
    *is_move_done = false;
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return emit_block_move_hints (x, y, size, method, 0, -1,
				min, max, max);
}
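
/* Usage sketch (illustrative): most callers only need this wrapper,
   e.g.

     emit_block_move (dst_mem, src_mem, GEN_INT (32), BLOCK_OP_NORMAL);

   which derives the min/max size hints from SIZE itself and forwards
   them to emit_block_move_hints.  */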

/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  tree fn;

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far_v;
    cumulative_args_t args_so_far;
    tree arg;

    fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
    INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
    args_so_far = pack_cumulative_args (&args_so_far_v);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	function_arg_info arg_info (mode, /*named=*/true);
	rtx tmp = targetm.calls.function_arg (args_so_far, arg_info);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (args_so_far, arg_info))
	  return false;
	targetm.calls.function_arg_advance (args_so_far, arg_info);
      }
  }
  return true;
}

/* A subroutine of emit_block_move.  Expand a cpymem or movmem pattern;
   return true if successful.

   X is the destination of the copy or move.
   Y is the source of the copy or move.
   SIZE is the size of the block to be moved.

   MIGHT_OVERLAP indicates this originated with expansion of a
   builtin_memmove() and the source and destination blocks may
   overlap.  */

static bool
emit_block_move_via_pattern (rtx x, rtx y, rtx size, unsigned int align,
			     unsigned int expected_align,
			     HOST_WIDE_INT expected_size,
			     unsigned HOST_WIDE_INT min_size,
			     unsigned HOST_WIDE_INT max_size,
			     unsigned HOST_WIDE_INT probable_max_size,
			     bool might_overlap)
{
  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
    {
      if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
	expected_size = probable_max_size;
      if ((unsigned HOST_WIDE_INT)expected_size < min_size)
	expected_size = min_size;
    }

  /* Since this is a move insn, we don't care about volatility.  */
  temporary_volatile_ok v (true);

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  opt_scalar_int_mode mode_iter;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    {
      scalar_int_mode mode = mode_iter.require ();
      enum insn_code code;
      if (might_overlap)
	code = direct_optab_handler (movmem_optab, mode);
      else
	code = direct_optab_handler (cpymem_optab, mode);

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  Since SIZE is within the Pmode address
	     space, we limit MODE to Pmode.  */
	  && ((CONST_INT_P (size)
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || max_size <= (GET_MODE_MASK (mode) >> 1)
	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
	{
	  class expand_operand ops[9];
	  unsigned int nops;

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */
	  nops = insn_data[(int) code].n_generator_args;
	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

	  create_fixed_operand (&ops[0], x);
	  create_fixed_operand (&ops[1], y);
	  /* The check above guarantees that this size conversion is valid.  */
	  create_convert_operand_to (&ops[2], size, mode, true);
	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
	  if (nops >= 6)
	    {
	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
	      create_integer_operand (&ops[5], expected_size);
	    }
	  if (nops >= 8)
	    {
	      create_integer_operand (&ops[6], min_size);
	      /* If we cannot represent the maximal size,
		 make parameter NULL.  */
	      if ((HOST_WIDE_INT) max_size != -1)
		create_integer_operand (&ops[7], max_size);
	      else
		create_fixed_operand (&ops[7], NULL);
	    }
	  if (nops == 9)
	    {
	      /* If we cannot represent the maximal size,
		 make parameter NULL.  */
	      if ((HOST_WIDE_INT) probable_max_size != -1)
		create_integer_operand (&ops[8], probable_max_size);
	      else
		create_fixed_operand (&ops[8], NULL);
	    }
	  if (maybe_expand_insn (code, nops, ops))
	    return true;
	}
    }

  return false;
}
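
/* Note (illustrative): the operand counts accepted above correspond
   to progressively richer cpymem/movmem patterns: the basic
   dst/src/size/align quadruple (4), plus the expected alignment and
   size hints (6), plus the min/max size pair (8), plus the probable
   maximal size (9).  */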
1880 /* A subroutine of emit_block_move. Copy the data via an explicit
1881 loop. This is used only when libcalls are forbidden. */
1882 /* ??? It'd be nice to copy in hunks larger than QImode. */
1884 static void
1885 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1886 unsigned int align ATTRIBUTE_UNUSED)
1888 rtx_code_label *cmp_label, *top_label;
1889 rtx iter, x_addr, y_addr, tmp;
1890 machine_mode x_addr_mode = get_address_mode (x);
1891 machine_mode y_addr_mode = get_address_mode (y);
1892 machine_mode iter_mode;
1894 iter_mode = GET_MODE (size);
1895 if (iter_mode == VOIDmode)
1896 iter_mode = word_mode;
1898 top_label = gen_label_rtx ();
1899 cmp_label = gen_label_rtx ();
1900 iter = gen_reg_rtx (iter_mode);
1902 emit_move_insn (iter, const0_rtx);
1904 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1905 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1906 do_pending_stack_adjust ();
1908 emit_jump (cmp_label);
1909 emit_label (top_label);
1911 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1912 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1914 if (x_addr_mode != y_addr_mode)
1915 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1916 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1918 x = change_address (x, QImode, x_addr);
1919 y = change_address (y, QImode, y_addr);
1921 emit_move_insn (x, y);
1923 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1924 true, OPTAB_LIB_WIDEN);
1925 if (tmp != iter)
1926 emit_move_insn (iter, tmp);
1928 emit_label (cmp_label);
1930 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1931 true, top_label,
1932 profile_probability::guessed_always ()
1933 .apply_scale (9, 10));
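/* Editor's note: the loop emitted above corresponds roughly to this C
   sketch (editorial; the address-mode conversions and the 90% branch
   probability hint are elided):

     unsigned long iter = 0;
     goto cmp;
   top:
     ((unsigned char *) x)[iter] = ((unsigned char *) y)[iter];
     iter++;
   cmp:
     if (iter < size)
       goto top;
*/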
1936 /* Expand a call to memcpy, memmove, or memcmp, and return the result.
1937 TAILCALL is true if this is a tail call. */
1940 emit_block_op_via_libcall (enum built_in_function fncode, rtx dst, rtx src,
1941 rtx size, bool tailcall)
1943 rtx dst_addr, src_addr;
1944 tree call_expr, dst_tree, src_tree, size_tree;
1945 machine_mode size_mode;
1947 /* Since dst and src are passed to a libcall, mark the corresponding
1948 tree EXPR as addressable. */
1949 tree dst_expr = MEM_EXPR (dst);
1950 tree src_expr = MEM_EXPR (src);
1951 if (dst_expr)
1952 mark_addressable (dst_expr);
1953 if (src_expr)
1954 mark_addressable (src_expr);
1956 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1957 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1958 dst_tree = make_tree (ptr_type_node, dst_addr);
1960 src_addr = copy_addr_to_reg (XEXP (src, 0));
1961 src_addr = convert_memory_address (ptr_mode, src_addr);
1962 src_tree = make_tree (ptr_type_node, src_addr);
1964 size_mode = TYPE_MODE (sizetype);
1965 size = convert_to_mode (size_mode, size, 1);
1966 size = copy_to_mode_reg (size_mode, size);
1967 size_tree = make_tree (sizetype, size);
1969 /* It is incorrect to use the libcall calling conventions for calls to
1970 memcpy/memmove/memcmp because they can be provided by the user. */
1971 tree fn = builtin_decl_implicit (fncode);
1972 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1973 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1975 return expand_call (call_expr, NULL_RTX, false);
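/* Editor's note (editorial paraphrase, not GCC text): a call such as

     emit_block_op_via_libcall (BUILT_IN_MEMCPY, dst, src, size, false);

   builds and expands the same call tree the C front end would produce
   for memcpy (dst_ptr, src_ptr, n), with both addresses copied to
   registers and N widened to sizetype first.  */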
1978 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
1979 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
1980 otherwise return null. */
1983 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
1984 rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
1985 HOST_WIDE_INT align)
1987 machine_mode insn_mode = insn_data[icode].operand[0].mode;
1989 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
1990 target = NULL_RTX;
1992 class expand_operand ops[5];
1993 create_output_operand (&ops[0], target, insn_mode);
1994 create_fixed_operand (&ops[1], arg1_rtx);
1995 create_fixed_operand (&ops[2], arg2_rtx);
1996 create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
1997 TYPE_UNSIGNED (arg3_type));
1998 create_integer_operand (&ops[4], align);
1999 if (maybe_expand_insn (icode, 5, ops))
2000 return ops[0].value;
2001 return NULL_RTX;
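/* Editor's note: an illustrative use, not from the GCC sources.  With a
   target cmpmem pattern and two MEMs A and B of 16 bytes at common
   alignment ALIGN, one might write

     rtx res = expand_cmpstrn_or_cmpmem (icode, NULL_RTX, a, b,
                                         size_type_node, GEN_INT (16),
                                         align);

   where RES carries a memcmp-style negative/zero/positive value, or is
   NULL_RTX if the pattern could not be expanded.  */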
2004 /* Expand a block compare between X and Y with length LEN using the
2005 cmpmem optab, placing the result in TARGET. LEN_TYPE is the type
2006 of the expression that was used to calculate the length. ALIGN
2007 gives the known minimum common alignment. */
2009 static rtx
2010 emit_block_cmp_via_cmpmem (rtx x, rtx y, rtx len, tree len_type, rtx target,
2011 unsigned align)
2013 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
2014 implementing memcmp because it will stop if it encounters two
2015 zero bytes. */
2016 insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
2018 if (icode == CODE_FOR_nothing)
2019 return NULL_RTX;
2021 return expand_cmpstrn_or_cmpmem (icode, target, x, y, len_type, len, align);
2024 /* Emit code to compare a block Y to a block X. This may be done with
2025 string-compare instructions, with multiple scalar instructions,
2026 or with a library call.
2028 Both X and Y must be MEM rtx's. LEN is an rtx that says how long
2029 they are. LEN_TYPE is the type of the expression that was used to
2030 calculate it.
2032 If EQUALITY_ONLY is true, it means we don't have to return the tri-state
2033 value of a normal memcmp call, instead we can just compare for equality.
2034 If FORCE_LIBCALL is true, we should emit a call to memcmp rather than
2035 returning NULL_RTX.
2037 Optionally, the caller can pass a constfn and associated data in Y_CFN
2038 and Y_CFN_DATA, describing that the second operand being compared is a
2039 known constant and how to obtain its data.
2040 Return the result of the comparison, or NULL_RTX if we failed to
2041 perform the operation. */
2044 emit_block_cmp_hints (rtx x, rtx y, rtx len, tree len_type, rtx target,
2045 bool equality_only, by_pieces_constfn y_cfn,
2046 void *y_cfndata)
2048 rtx result = 0;
2050 if (CONST_INT_P (len) && INTVAL (len) == 0)
2051 return const0_rtx;
2053 gcc_assert (MEM_P (x) && MEM_P (y));
2054 unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
2055 gcc_assert (align >= BITS_PER_UNIT);
2057 x = adjust_address (x, BLKmode, 0);
2058 y = adjust_address (y, BLKmode, 0);
2060 if (equality_only
2061 && CONST_INT_P (len)
2062 && can_do_by_pieces (INTVAL (len), align, COMPARE_BY_PIECES))
2063 result = compare_by_pieces (x, y, INTVAL (len), target, align,
2064 y_cfn, y_cfndata);
2065 else
2066 result = emit_block_cmp_via_cmpmem (x, y, len, len_type, target, align);
2068 return result;
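/* Editor's note: an illustrative caller, not GCC text.  For a
   compile-time length this behaves like memcmp; with EQUALITY_ONLY the
   caller promises it only tests the result against zero, which enables
   the cheaper compare_by_pieces path:

     rtx res = emit_block_cmp_hints (x, y, GEN_INT (8), size_type_node,
                                     NULL_RTX, true, NULL, NULL);

   RES then only needs to be tested for zero/nonzero, subject to the
   NULL_RTX-on-failure convention described above.  */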
2071 /* Copy all or part of a value X into registers starting at REGNO.
2072 The number of registers to be filled is NREGS. */
2074 void
2075 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
2077 if (nregs == 0)
2078 return;
2080 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
2081 x = validize_mem (force_const_mem (mode, x));
2083 /* See if the machine can do this with a load multiple insn. */
2084 if (targetm.have_load_multiple ())
2086 rtx_insn *last = get_last_insn ();
2087 rtx first = gen_rtx_REG (word_mode, regno);
2088 if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
2089 GEN_INT (nregs)))
2091 emit_insn (pat);
2092 return;
2094 else
2095 delete_insns_since (last);
2098 for (int i = 0; i < nregs; i++)
2099 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2100 operand_subword_force (x, i, mode));
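/* Editor's note: an editorial paraphrase of the fallback loop above.
   Without a load-multiple pattern, e.g. on a 64-bit target,

     move_block_to_reg (3, x, 2, DImode);

   simply emits two word moves: word 0 of X into (reg:DI 3) and word 1
   of X into (reg:DI 4).  */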
2103 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2104 The number of registers to be filled is NREGS. */
2106 void
2107 move_block_from_reg (int regno, rtx x, int nregs)
2109 if (nregs == 0)
2110 return;
2112 /* See if the machine can do this with a store multiple insn. */
2113 if (targetm.have_store_multiple ())
2115 rtx_insn *last = get_last_insn ();
2116 rtx first = gen_rtx_REG (word_mode, regno);
2117 if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
2118 GEN_INT (nregs)))
2120 emit_insn (pat);
2121 return;
2123 else
2124 delete_insns_since (last);
2127 for (int i = 0; i < nregs; i++)
2129 rtx tem = operand_subword (x, i, 1, BLKmode);
2131 gcc_assert (tem);
2133 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2137 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2138 ORIG, where ORIG is a non-consecutive group of registers represented by
2139 a PARALLEL. The clone is identical to the original except in that the
2140 original set of registers is replaced by a new set of pseudo registers.
2141 The new set has the same modes as the original set. */
2144 gen_group_rtx (rtx orig)
2146 int i, length;
2147 rtx *tmps;
2149 gcc_assert (GET_CODE (orig) == PARALLEL);
2151 length = XVECLEN (orig, 0);
2152 tmps = XALLOCAVEC (rtx, length);
2154 /* Skip a NULL entry in the first slot. */
2155 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2157 if (i)
2158 tmps[0] = 0;
2160 for (; i < length; i++)
2162 machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2163 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2165 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2168 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
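/* Editor's note: the register groups handled here and below have this
   PARALLEL shape (illustrative, with made-up pseudo numbers):

     (parallel [(expr_list (reg:DI 101) (const_int 0))
                (expr_list (reg:DI 102) (const_int 8))])

   Each element pairs a register with its byte offset into the value;
   gen_group_rtx returns the same shape with fresh pseudos of the same
   modes.  */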
2171 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
2172 except that values are placed in TMPS[i], and must later be moved
2173 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
2175 static void
2176 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type,
2177 poly_int64 ssize)
2179 rtx src;
2180 int start, i;
2181 machine_mode m = GET_MODE (orig_src);
2183 gcc_assert (GET_CODE (dst) == PARALLEL);
2185 if (m != VOIDmode
2186 && !SCALAR_INT_MODE_P (m)
2187 && !MEM_P (orig_src)
2188 && GET_CODE (orig_src) != CONCAT)
2190 scalar_int_mode imode;
2191 if (int_mode_for_mode (GET_MODE (orig_src)).exists (&imode))
2193 src = gen_reg_rtx (imode);
2194 emit_move_insn (gen_lowpart (GET_MODE (orig_src), src), orig_src);
2196 else
2198 src = assign_stack_temp (GET_MODE (orig_src), ssize);
2199 emit_move_insn (src, orig_src);
2201 emit_group_load_1 (tmps, dst, src, type, ssize);
2202 return;
2205 /* Check for a NULL entry, used to indicate that the parameter goes
2206 both on the stack and in registers. */
2207 if (XEXP (XVECEXP (dst, 0, 0), 0))
2208 start = 0;
2209 else
2210 start = 1;
2212 /* Process the pieces. */
2213 for (i = start; i < XVECLEN (dst, 0); i++)
2215 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2216 poly_int64 bytepos = rtx_to_poly_int64 (XEXP (XVECEXP (dst, 0, i), 1));
2217 poly_int64 bytelen = GET_MODE_SIZE (mode);
2218 poly_int64 shift = 0;
2220 /* Handle trailing fragments that run over the size of the struct.
2221 It's the target's responsibility to make sure that the fragment
2222 cannot be strictly smaller in some cases and strictly larger
2223 in others. */
2224 gcc_checking_assert (ordered_p (bytepos + bytelen, ssize));
2225 if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
2227 /* Arrange to shift the fragment to where it belongs.
2228 extract_bit_field loads to the lsb of the reg. */
2229 if (
2230 #ifdef BLOCK_REG_PADDING
2231 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
2232 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)
2233 #else
2234 BYTES_BIG_ENDIAN
2235 #endif
2237 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2238 bytelen = ssize - bytepos;
2239 gcc_assert (maybe_gt (bytelen, 0));
2242 /* If we won't be loading directly from memory, protect the real source
2243 from strange tricks we might play; but make sure that the source can
2244 be loaded directly into the destination. */
2245 src = orig_src;
2246 if (!MEM_P (orig_src)
2247 && (!CONSTANT_P (orig_src)
2248 || (GET_MODE (orig_src) != mode
2249 && GET_MODE (orig_src) != VOIDmode)))
2251 if (GET_MODE (orig_src) == VOIDmode)
2252 src = gen_reg_rtx (mode);
2253 else
2254 src = gen_reg_rtx (GET_MODE (orig_src));
2256 emit_move_insn (src, orig_src);
2259 /* Optimize the access just a bit. */
2260 if (MEM_P (src)
2261 && (! targetm.slow_unaligned_access (mode, MEM_ALIGN (src))
2262 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
2263 && multiple_p (bytepos * BITS_PER_UNIT, GET_MODE_ALIGNMENT (mode))
2264 && known_eq (bytelen, GET_MODE_SIZE (mode)))
2266 tmps[i] = gen_reg_rtx (mode);
2267 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2269 else if (COMPLEX_MODE_P (mode)
2270 && GET_MODE (src) == mode
2271 && known_eq (bytelen, GET_MODE_SIZE (mode)))
2272 /* Let emit_move_complex do the bulk of the work. */
2273 tmps[i] = src;
2274 else if (GET_CODE (src) == CONCAT)
2276 poly_int64 slen = GET_MODE_SIZE (GET_MODE (src));
2277 poly_int64 slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2278 unsigned int elt;
2279 poly_int64 subpos;
2281 if (can_div_trunc_p (bytepos, slen0, &elt, &subpos)
2282 && known_le (subpos + bytelen, slen0))
2284 /* The following assumes that the concatenated objects all
2285 have the same size. In this case, a simple calculation
2286 can be used to determine the object and the bit field
2287 to be extracted. */
2288 tmps[i] = XEXP (src, elt);
2289 if (maybe_ne (subpos, 0)
2290 || maybe_ne (subpos + bytelen, slen0)
2291 || (!CONSTANT_P (tmps[i])
2292 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode)))
2293 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2294 subpos * BITS_PER_UNIT,
2295 1, NULL_RTX, mode, mode, false,
2296 NULL);
2298 else
2300 rtx mem;
2302 gcc_assert (known_eq (bytepos, 0));
2303 mem = assign_stack_temp (GET_MODE (src), slen);
2304 emit_move_insn (mem, src);
2305 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
2306 0, 1, NULL_RTX, mode, mode, false,
2307 NULL);
2310 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2311 SIMD register, which is currently broken. While we get GCC
2312 to emit proper RTL for these cases, let's dump to memory. */
2313 else if (VECTOR_MODE_P (GET_MODE (dst))
2314 && REG_P (src))
2316 poly_uint64 slen = GET_MODE_SIZE (GET_MODE (src));
2317 rtx mem;
2319 mem = assign_stack_temp (GET_MODE (src), slen);
2320 emit_move_insn (mem, src);
2321 tmps[i] = adjust_address (mem, mode, bytepos);
2323 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
2324 && XVECLEN (dst, 0) > 1)
2325 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
2326 else if (CONSTANT_P (src))
2328 if (known_eq (bytelen, ssize))
2329 tmps[i] = src;
2330 else
2332 rtx first, second;
2334 /* TODO: const_wide_int can have sizes other than this... */
2335 gcc_assert (known_eq (2 * bytelen, ssize));
2336 split_double (src, &first, &second);
2337 if (i)
2338 tmps[i] = second;
2339 else
2340 tmps[i] = first;
2343 else if (REG_P (src) && GET_MODE (src) == mode)
2344 tmps[i] = src;
2345 else
2346 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2347 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2348 mode, mode, false, NULL);
2350 if (maybe_ne (shift, 0))
2351 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
2352 shift, tmps[i], 0);
2356 /* Emit code to move a block SRC of type TYPE to a block DST,
2357 where DST is non-consecutive registers represented by a PARALLEL.
2358 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
2359 if not known. */
2361 void
2362 emit_group_load (rtx dst, rtx src, tree type, poly_int64 ssize)
2364 rtx *tmps;
2365 int i;
2367 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
2368 emit_group_load_1 (tmps, dst, src, type, ssize);
2370 /* Copy the extracted pieces into the proper (probable) hard regs. */
2371 for (i = 0; i < XVECLEN (dst, 0); i++)
2373 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
2374 if (d == NULL)
2375 continue;
2376 emit_move_insn (d, tmps[i]);
2380 /* Similar, but load SRC into new pseudos in a format that looks like
2381 PARALLEL. This can later be fed to emit_group_move to get things
2382 in the right place. */
2385 emit_group_load_into_temps (rtx parallel, rtx src, tree type, poly_int64 ssize)
2387 rtvec vec;
2388 int i;
2390 vec = rtvec_alloc (XVECLEN (parallel, 0));
2391 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
2393 /* Convert the vector to look just like the original PARALLEL, except
2394 with the computed values. */
2395 for (i = 0; i < XVECLEN (parallel, 0); i++)
2397 rtx e = XVECEXP (parallel, 0, i);
2398 rtx d = XEXP (e, 0);
2400 if (d)
2402 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
2403 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
2405 RTVEC_ELT (vec, i) = e;
2408 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
2411 /* Emit code to move a block SRC to block DST, where SRC and DST are
2412 non-consecutive groups of registers, each represented by a PARALLEL. */
2414 void
2415 emit_group_move (rtx dst, rtx src)
2417 int i;
2419 gcc_assert (GET_CODE (src) == PARALLEL
2420 && GET_CODE (dst) == PARALLEL
2421 && XVECLEN (src, 0) == XVECLEN (dst, 0));
2423 /* Skip first entry if NULL. */
2424 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2425 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2426 XEXP (XVECEXP (src, 0, i), 0));
2429 /* Move a group of registers represented by a PARALLEL into pseudos. */
2432 emit_group_move_into_temps (rtx src)
2434 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
2435 int i;
2437 for (i = 0; i < XVECLEN (src, 0); i++)
2439 rtx e = XVECEXP (src, 0, i);
2440 rtx d = XEXP (e, 0);
2442 if (d)
2443 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
2444 RTVEC_ELT (vec, i) = e;
2447 return gen_rtx_PARALLEL (GET_MODE (src), vec);
2450 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
2451 where SRC is non-consecutive registers represented by a PARALLEL.
2452 SSIZE represents the total size of block ORIG_DST, or -1 if not
2453 known. */
2455 void
2456 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED,
2457 poly_int64 ssize)
2459 rtx *tmps, dst;
2460 int start, finish, i;
2461 machine_mode m = GET_MODE (orig_dst);
2463 gcc_assert (GET_CODE (src) == PARALLEL);
2465 if (!SCALAR_INT_MODE_P (m)
2466 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
2468 scalar_int_mode imode;
2469 if (int_mode_for_mode (GET_MODE (orig_dst)).exists (&imode))
2471 dst = gen_reg_rtx (imode);
2472 emit_group_store (dst, src, type, ssize);
2473 dst = gen_lowpart (GET_MODE (orig_dst), dst);
2475 else
2477 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
2478 emit_group_store (dst, src, type, ssize);
2480 emit_move_insn (orig_dst, dst);
2481 return;
2484 /* Check for a NULL entry, used to indicate that the parameter goes
2485 both on the stack and in registers. */
2486 if (XEXP (XVECEXP (src, 0, 0), 0))
2487 start = 0;
2488 else
2489 start = 1;
2490 finish = XVECLEN (src, 0);
2492 tmps = XALLOCAVEC (rtx, finish);
2494 /* Copy the (probable) hard regs into pseudos. */
2495 for (i = start; i < finish; i++)
2497 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2498 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
2500 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2501 emit_move_insn (tmps[i], reg);
2503 else
2504 tmps[i] = reg;
2507 /* If we won't be storing directly into memory, protect the real destination
2508 from strange tricks we might play. */
2509 dst = orig_dst;
2510 if (GET_CODE (dst) == PARALLEL)
2512 rtx temp;
2514 /* We can get a PARALLEL dst if there is a conditional expression in
2515 a return statement. In that case, the dst and src are the same,
2516 so no action is necessary. */
2517 if (rtx_equal_p (dst, src))
2518 return;
2520 /* It is unclear if we can ever reach here, but we may as well handle
2521 it. Allocate a temporary, and split this into a store/load to/from
2522 the temporary. */
2523 temp = assign_stack_temp (GET_MODE (dst), ssize);
2524 emit_group_store (temp, src, type, ssize);
2525 emit_group_load (dst, temp, type, ssize);
2526 return;
2528 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
2530 machine_mode outer = GET_MODE (dst);
2531 machine_mode inner;
2532 poly_int64 bytepos;
2533 bool done = false;
2534 rtx temp;
2536 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
2537 dst = gen_reg_rtx (outer);
2539 /* Make life a bit easier for combine. */
2540 /* If the first element of the vector is the low part
2541 of the destination mode, use a paradoxical subreg to
2542 initialize the destination. */
2543 if (start < finish)
2545 inner = GET_MODE (tmps[start]);
2546 bytepos = subreg_lowpart_offset (inner, outer);
2547 if (known_eq (rtx_to_poly_int64 (XEXP (XVECEXP (src, 0, start), 1)),
2548 bytepos))
2550 temp = simplify_gen_subreg (outer, tmps[start],
2551 inner, 0);
2552 if (temp)
2554 emit_move_insn (dst, temp);
2555 done = true;
2556 start++;
2561 /* If the first element wasn't the low part, try the last. */
2562 if (!done
2563 && start < finish - 1)
2565 inner = GET_MODE (tmps[finish - 1]);
2566 bytepos = subreg_lowpart_offset (inner, outer);
2567 if (known_eq (rtx_to_poly_int64 (XEXP (XVECEXP (src, 0,
2568 finish - 1), 1)),
2569 bytepos))
2571 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2572 inner, 0);
2573 if (temp)
2575 emit_move_insn (dst, temp);
2576 done = true;
2577 finish--;
2582 /* Otherwise, simply initialize the result to zero. */
2583 if (!done)
2584 emit_move_insn (dst, CONST0_RTX (outer));
2587 /* Process the pieces. */
2588 for (i = start; i < finish; i++)
2590 poly_int64 bytepos = rtx_to_poly_int64 (XEXP (XVECEXP (src, 0, i), 1));
2591 machine_mode mode = GET_MODE (tmps[i]);
2592 poly_int64 bytelen = GET_MODE_SIZE (mode);
2593 poly_uint64 adj_bytelen;
2594 rtx dest = dst;
2596 /* Handle trailing fragments that run over the size of the struct.
2597 It's the target's responsibility to make sure that the fragment
2598 cannot be strictly smaller in some cases and strictly larger
2599 in others. */
2600 gcc_checking_assert (ordered_p (bytepos + bytelen, ssize));
2601 if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
2602 adj_bytelen = ssize - bytepos;
2603 else
2604 adj_bytelen = bytelen;
2606 if (GET_CODE (dst) == CONCAT)
2608 if (known_le (bytepos + adj_bytelen,
2609 GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))))
2610 dest = XEXP (dst, 0);
2611 else if (known_ge (bytepos, GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))))
2613 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2614 dest = XEXP (dst, 1);
2616 else
2618 machine_mode dest_mode = GET_MODE (dest);
2619 machine_mode tmp_mode = GET_MODE (tmps[i]);
2621 gcc_assert (known_eq (bytepos, 0) && XVECLEN (src, 0));
2623 if (GET_MODE_ALIGNMENT (dest_mode)
2624 >= GET_MODE_ALIGNMENT (tmp_mode))
2626 dest = assign_stack_temp (dest_mode,
2627 GET_MODE_SIZE (dest_mode));
2628 emit_move_insn (adjust_address (dest,
2629 tmp_mode,
2630 bytepos),
2631 tmps[i]);
2632 dst = dest;
2634 else
2636 dest = assign_stack_temp (tmp_mode,
2637 GET_MODE_SIZE (tmp_mode));
2638 emit_move_insn (dest, tmps[i]);
2639 dst = adjust_address (dest, dest_mode, bytepos);
2641 break;
2645 /* Handle trailing fragments that run over the size of the struct. */
2646 if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
2648 /* store_bit_field always takes its value from the lsb.
2649 Move the fragment to the lsb if it's not already there. */
2650 if (
2651 #ifdef BLOCK_REG_PADDING
2652 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2653 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)
2654 #else
2655 BYTES_BIG_ENDIAN
2656 #endif
2659 poly_int64 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2660 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2661 shift, tmps[i], 0);
2664 /* Make sure not to write past the end of the struct. */
2665 store_bit_field (dest,
2666 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2667 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2668 VOIDmode, tmps[i], false);
2671 /* Optimize the access just a bit. */
2672 else if (MEM_P (dest)
2673 && (!targetm.slow_unaligned_access (mode, MEM_ALIGN (dest))
2674 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2675 && multiple_p (bytepos * BITS_PER_UNIT,
2676 GET_MODE_ALIGNMENT (mode))
2677 && known_eq (bytelen, GET_MODE_SIZE (mode)))
2678 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2680 else
2681 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2682 0, 0, mode, tmps[i], false);
2685 /* Copy from the pseudo into the (probable) hard reg. */
2686 if (orig_dst != dst)
2687 emit_move_insn (orig_dst, dst);
2690 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2691 of the value stored in X. */
2694 maybe_emit_group_store (rtx x, tree type)
2696 machine_mode mode = TYPE_MODE (type);
2697 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2698 if (GET_CODE (x) == PARALLEL)
2700 rtx result = gen_reg_rtx (mode);
2701 emit_group_store (result, x, type, int_size_in_bytes (type));
2702 return result;
2704 return x;
2707 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2709 This is used on targets that return BLKmode values in registers. */
2711 static void
2712 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2714 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2715 rtx src = NULL, dst = NULL;
2716 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2717 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2718 /* No current ABI uses variable-sized modes to pass a BLKmode type. */
2719 fixed_size_mode mode = as_a <fixed_size_mode> (GET_MODE (srcreg));
2720 fixed_size_mode tmode = as_a <fixed_size_mode> (GET_MODE (target));
2721 fixed_size_mode copy_mode;
2723 /* BLKmode registers created in the back-end shouldn't have survived. */
2724 gcc_assert (mode != BLKmode);
2726 /* If the structure doesn't take up a whole number of words, see whether
2727 SRCREG is padded on the left or on the right. If it's on the left,
2728 set PADDING_CORRECTION to the number of bits to skip.
2730 In most ABIs, the structure will be returned at the least significant
2731 the register, which translates to right padding on little-endian
2732 targets and left padding on big-endian targets. The opposite
2733 holds if the structure is returned at the most significant
2734 end of the register. */
2735 if (bytes % UNITS_PER_WORD != 0
2736 && (targetm.calls.return_in_msb (type)
2737 ? !BYTES_BIG_ENDIAN
2738 : BYTES_BIG_ENDIAN))
2739 padding_correction
2740 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2742 /* We can use a single move if we have an exact mode for the size. */
2743 else if (MEM_P (target)
2744 && (!targetm.slow_unaligned_access (mode, MEM_ALIGN (target))
2745 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2746 && bytes == GET_MODE_SIZE (mode))
2748 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2749 return;
2752 /* And if we additionally have the same mode for a register. */
2753 else if (REG_P (target)
2754 && GET_MODE (target) == mode
2755 && bytes == GET_MODE_SIZE (mode))
2757 emit_move_insn (target, srcreg);
2758 return;
2761 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2762 into a new pseudo which is a full word. */
2763 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2765 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2766 mode = word_mode;
2769 /* Copy the structure BITSIZE bits at a time. If the target lives in
2770 memory, take care of not reading/writing past its end by selecting
2771 a copy mode suited to BITSIZE. This should always be possible given
2772 how it is computed.
2774 If the target lives in register, make sure not to select a copy mode
2775 larger than the mode of the register.
2777 We could probably emit more efficient code for machines which do not use
2778 strict alignment, but it doesn't seem worth the effort at the current
2779 time. */
2781 copy_mode = word_mode;
2782 if (MEM_P (target))
2784 opt_scalar_int_mode mem_mode = int_mode_for_size (bitsize, 1);
2785 if (mem_mode.exists ())
2786 copy_mode = mem_mode.require ();
2788 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2789 copy_mode = tmode;
2791 for (bitpos = 0, xbitpos = padding_correction;
2792 bitpos < bytes * BITS_PER_UNIT;
2793 bitpos += bitsize, xbitpos += bitsize)
2795 /* We need a new source operand each time xbitpos is on a
2796 word boundary and when xbitpos == padding_correction
2797 (the first time through). */
2798 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2799 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2801 /* We need a new destination operand each time bitpos is on
2802 a word boundary. */
2803 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2804 dst = target;
2805 else if (bitpos % BITS_PER_WORD == 0)
2806 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2808 /* Use xbitpos for the source extraction (right justified) and
2809 bitpos for the destination store (left justified). */
2810 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2811 extract_bit_field (src, bitsize,
2812 xbitpos % BITS_PER_WORD, 1,
2813 NULL_RTX, copy_mode, copy_mode,
2814 false, NULL),
2815 false);
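/* Editor's note: a worked example of PADDING_CORRECTION (editorial,
   assuming 32-bit words).  For a 6-byte structure, bytes %
   UNITS_PER_WORD == 2, so on a big-endian target that returns the
   value at the least significant end of the register

     padding_correction = 32 - 2 * 8 = 16

   and the copy above starts 16 bits into the first source word.  */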
2819 /* Copy BLKmode value SRC into a register of mode MODE_IN. Return the
2820 register if it contains any data, otherwise return null.
2822 This is used on targets that return BLKmode values in registers. */
2825 copy_blkmode_to_reg (machine_mode mode_in, tree src)
2827 int i, n_regs;
2828 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2829 unsigned int bitsize;
2830 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2831 /* No current ABI uses variable-sized modes to pass a BLKmode type. */
2832 fixed_size_mode mode = as_a <fixed_size_mode> (mode_in);
2833 fixed_size_mode dst_mode;
2834 scalar_int_mode min_mode;
2836 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2838 x = expand_normal (src);
2840 bytes = arg_int_size_in_bytes (TREE_TYPE (src));
2841 if (bytes == 0)
2842 return NULL_RTX;
2844 /* If the structure doesn't take up a whole number of words, see
2845 whether the register value should be padded on the left or on
2846 the right. Set PADDING_CORRECTION to the number of padding
2847 bits needed on the left side.
2849 In most ABIs, the structure will be returned at the least significant
2850 the register, which translates to right padding on little-endian
2851 targets and left padding on big-endian targets. The opposite
2852 holds if the structure is returned at the most significant
2853 end of the register. */
2854 if (bytes % UNITS_PER_WORD != 0
2855 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2856 ? !BYTES_BIG_ENDIAN
2857 : BYTES_BIG_ENDIAN))
2858 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2859 * BITS_PER_UNIT));
2861 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2862 dst_words = XALLOCAVEC (rtx, n_regs);
2863 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2864 min_mode = smallest_int_mode_for_size (bitsize);
2866 /* Copy the structure BITSIZE bits at a time. */
2867 for (bitpos = 0, xbitpos = padding_correction;
2868 bitpos < bytes * BITS_PER_UNIT;
2869 bitpos += bitsize, xbitpos += bitsize)
2871 /* We need a new destination pseudo each time xbitpos is
2872 on a word boundary and when xbitpos == padding_correction
2873 (the first time through). */
2874 if (xbitpos % BITS_PER_WORD == 0
2875 || xbitpos == padding_correction)
2877 /* Generate an appropriate register. */
2878 dst_word = gen_reg_rtx (word_mode);
2879 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2881 /* Clear the destination before we move anything into it. */
2882 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2885 /* Find the largest integer mode that can be used to copy all or as
2886 many bits as possible of the structure if the target supports larger
2887 copies. There are too many corner cases here w.r.t. alignment on
2888 the reads/writes. So if there is any padding, just use single-byte
2889 operations. */
2890 opt_scalar_int_mode mode_iter;
2891 if (padding_correction == 0 && !STRICT_ALIGNMENT)
2893 FOR_EACH_MODE_FROM (mode_iter, min_mode)
2895 unsigned int msize = GET_MODE_BITSIZE (mode_iter.require ());
2896 if (msize <= ((bytes * BITS_PER_UNIT) - bitpos)
2897 && msize <= BITS_PER_WORD)
2898 bitsize = msize;
2899 else
2900 break;
2904 /* We need a new source operand each time bitpos is on a word
2905 boundary. */
2906 if (bitpos % BITS_PER_WORD == 0)
2907 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2909 /* Use bitpos for the source extraction (left justified) and
2910 xbitpos for the destination store (right justified). */
2911 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2912 0, 0, word_mode,
2913 extract_bit_field (src_word, bitsize,
2914 bitpos % BITS_PER_WORD, 1,
2915 NULL_RTX, word_mode, word_mode,
2916 false, NULL),
2917 false);
2920 if (mode == BLKmode)
2922 /* Find the smallest integer mode large enough to hold the
2923 entire structure. */
2924 opt_scalar_int_mode mode_iter;
2925 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
2926 if (GET_MODE_SIZE (mode_iter.require ()) >= bytes)
2927 break;
2929 /* A suitable mode should have been found. */
2930 mode = mode_iter.require ();
2933 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2934 dst_mode = word_mode;
2935 else
2936 dst_mode = mode;
2937 dst = gen_reg_rtx (dst_mode);
2939 for (i = 0; i < n_regs; i++)
2940 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2942 if (mode != dst_mode)
2943 dst = gen_lowpart (mode, dst);
2945 return dst;
2948 /* Add a USE expression for REG to the (possibly empty) list pointed
2949 to by CALL_FUSAGE. REG must denote a hard register. */
2951 void
2952 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2954 gcc_assert (REG_P (reg));
2956 if (!HARD_REGISTER_P (reg))
2957 return;
2959 *call_fusage
2960 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2963 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2964 to by CALL_FUSAGE. REG must denote a hard register. */
2966 void
2967 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2969 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2971 *call_fusage
2972 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2975 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2976 starting at REGNO. All of these registers must be hard registers. */
2978 void
2979 use_regs (rtx *call_fusage, int regno, int nregs)
2981 int i;
2983 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2985 for (i = 0; i < nregs; i++)
2986 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2989 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2990 PARALLEL REGS. This is for calls that pass values in multiple
2991 non-contiguous locations. The Irix 6 ABI has examples of this. */
2993 void
2994 use_group_regs (rtx *call_fusage, rtx regs)
2996 int i;
2998 for (i = 0; i < XVECLEN (regs, 0); i++)
3000 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
3002 /* A NULL entry means the parameter goes both on the stack and in
3003 registers. This can also be a MEM for targets that pass values
3004 partially on the stack and partially in registers. */
3005 if (reg != 0 && REG_P (reg))
3006 use_reg (call_fusage, reg);
3010 /* Return the defining gimple statement for SSA_NAME NAME if it is an
3011 assignment and the code of the expression on the RHS is CODE. Return
3012 NULL otherwise. */
3014 static gimple *
3015 get_def_for_expr (tree name, enum tree_code code)
3017 gimple *def_stmt;
3019 if (TREE_CODE (name) != SSA_NAME)
3020 return NULL;
3022 def_stmt = get_gimple_for_ssa_name (name);
3023 if (!def_stmt
3024 || gimple_assign_rhs_code (def_stmt) != code)
3025 return NULL;
3027 return def_stmt;
3030 /* Return the defining gimple statement for SSA_NAME NAME if it is an
3031 assignment and the class of the expression on the RHS is TCLASS. Return
3032 NULL otherwise. */
3034 static gimple *
3035 get_def_for_expr_class (tree name, enum tree_code_class tclass)
3037 gimple *def_stmt;
3039 if (TREE_CODE (name) != SSA_NAME)
3040 return NULL;
3042 def_stmt = get_gimple_for_ssa_name (name);
3043 if (!def_stmt
3044 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
3045 return NULL;
3047 return def_stmt;
3050 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
3051 its length in bytes. */
3054 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
3055 unsigned int expected_align, HOST_WIDE_INT expected_size,
3056 unsigned HOST_WIDE_INT min_size,
3057 unsigned HOST_WIDE_INT max_size,
3058 unsigned HOST_WIDE_INT probable_max_size)
3060 machine_mode mode = GET_MODE (object);
3061 unsigned int align;
3063 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
3065 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
3066 just move a zero. Otherwise, do this a piece at a time. */
3067 poly_int64 size_val;
3068 if (mode != BLKmode
3069 && poly_int_rtx_p (size, &size_val)
3070 && known_eq (size_val, GET_MODE_SIZE (mode)))
3072 rtx zero = CONST0_RTX (mode);
3073 if (zero != NULL)
3075 emit_move_insn (object, zero);
3076 return NULL;
3079 if (COMPLEX_MODE_P (mode))
3081 zero = CONST0_RTX (GET_MODE_INNER (mode));
3082 if (zero != NULL)
3084 write_complex_part (object, zero, 0);
3085 write_complex_part (object, zero, 1);
3086 return NULL;
3091 if (size == const0_rtx)
3092 return NULL;
3094 align = MEM_ALIGN (object);
3096 if (CONST_INT_P (size)
3097 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
3098 CLEAR_BY_PIECES,
3099 optimize_insn_for_speed_p ()))
3100 clear_by_pieces (object, INTVAL (size), align);
3101 else if (set_storage_via_setmem (object, size, const0_rtx, align,
3102 expected_align, expected_size,
3103 min_size, max_size, probable_max_size))
3105 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
3106 return set_storage_via_libcall (object, size, const0_rtx,
3107 method == BLOCK_OP_TAILCALL);
3108 else
3109 gcc_unreachable ();
3111 return NULL;
3115 clear_storage (rtx object, rtx size, enum block_op_methods method)
3117 unsigned HOST_WIDE_INT max, min = 0;
3118 if (GET_CODE (size) == CONST_INT)
3119 min = max = UINTVAL (size);
3120 else
3121 max = GET_MODE_MASK (GET_MODE (size));
3122 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
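/* Editor's note: an illustrative call, not GCC text.  Clearing a
   40-byte BLKmode MEM OBJ with

     clear_storage (obj, GEN_INT (40), BLOCK_OP_NORMAL);

   has the effect of memset (p, 0, 40), expanded by pieces, via a
   target setmem pattern, or as a library call, whichever the hints
   and target capabilities select.  */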
3126 /* A subroutine of clear_storage. Expand a call to memset.
3127 Return the return value of memset, 0 otherwise. */
3130 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
3132 tree call_expr, fn, object_tree, size_tree, val_tree;
3133 machine_mode size_mode;
3135 object = copy_addr_to_reg (XEXP (object, 0));
3136 object_tree = make_tree (ptr_type_node, object);
3138 if (!CONST_INT_P (val))
3139 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
3140 val_tree = make_tree (integer_type_node, val);
3142 size_mode = TYPE_MODE (sizetype);
3143 size = convert_to_mode (size_mode, size, 1);
3144 size = copy_to_mode_reg (size_mode, size);
3145 size_tree = make_tree (sizetype, size);
3147 /* It is incorrect to use the libcall calling conventions for calls to
3148 memset because it can be provided by the user. */
3149 fn = builtin_decl_implicit (BUILT_IN_MEMSET);
3150 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
3151 CALL_EXPR_TAILCALL (call_expr) = tailcall;
3153 return expand_call (call_expr, NULL_RTX, false);
3156 /* Expand a setmem pattern; return true if successful. */
3158 bool
3159 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
3160 unsigned int expected_align, HOST_WIDE_INT expected_size,
3161 unsigned HOST_WIDE_INT min_size,
3162 unsigned HOST_WIDE_INT max_size,
3163 unsigned HOST_WIDE_INT probable_max_size)
3165 /* Try the most limited insn first, because there's no point
3166 including more than one in the machine description unless
3167 the more limited one has some advantage. */
3169 if (expected_align < align)
3170 expected_align = align;
3171 if (expected_size != -1)
3173 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
3174 expected_size = max_size;
3175 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
3176 expected_size = min_size;
3179 opt_scalar_int_mode mode_iter;
3180 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
3182 scalar_int_mode mode = mode_iter.require ();
3183 enum insn_code code = direct_optab_handler (setmem_optab, mode);
3185 if (code != CODE_FOR_nothing
3186 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
3187 here because if SIZE is less than the mode mask, as it is
3188 returned by the macro, it will definitely be less than the
3189 actual mode mask. Since SIZE is within the Pmode address
3190 space, we limit MODE to Pmode. */
3191 && ((CONST_INT_P (size)
3192 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3193 <= (GET_MODE_MASK (mode) >> 1)))
3194 || max_size <= (GET_MODE_MASK (mode) >> 1)
3195 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
3197 class expand_operand ops[9];
3198 unsigned int nops;
3200 nops = insn_data[(int) code].n_generator_args;
3201 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
3203 create_fixed_operand (&ops[0], object);
3204 /* The check above guarantees that this size conversion is valid. */
3205 create_convert_operand_to (&ops[1], size, mode, true);
3206 create_convert_operand_from (&ops[2], val, byte_mode, true);
3207 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
3208 if (nops >= 6)
3210 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
3211 create_integer_operand (&ops[5], expected_size);
3213 if (nops >= 8)
3215 create_integer_operand (&ops[6], min_size);
3216 /* If we cannot represent the maximal size,
3217 make parameter NULL. */
3218 if ((HOST_WIDE_INT) max_size != -1)
3219 create_integer_operand (&ops[7], max_size);
3220 else
3221 create_fixed_operand (&ops[7], NULL);
3223 if (nops == 9)
3225 /* If we cannot represent the maximal size,
3226 make parameter NULL. */
3227 if ((HOST_WIDE_INT) probable_max_size != -1)
3228 create_integer_operand (&ops[8], probable_max_size);
3229 else
3230 create_fixed_operand (&ops[8], NULL);
3232 if (maybe_expand_insn (code, nops, ops))
3233 return true;
3237 return false;
3241 /* Write to one of the components of the complex value CPLX. Write VAL to
3242 the real part if IMAG_P is false, and the imaginary part if it's true. */
3244 void
3245 write_complex_part (rtx cplx, rtx val, bool imag_p)
3247 machine_mode cmode;
3248 scalar_mode imode;
3249 unsigned ibitsize;
3251 if (GET_CODE (cplx) == CONCAT)
3253 emit_move_insn (XEXP (cplx, imag_p), val);
3254 return;
3257 cmode = GET_MODE (cplx);
3258 imode = GET_MODE_INNER (cmode);
3259 ibitsize = GET_MODE_BITSIZE (imode);
3261 /* For MEMs simplify_gen_subreg may generate an invalid new address
3262 because, e.g., the original address is considered mode-dependent
3263 by the target, which restricts simplify_subreg from invoking
3264 adjust_address_nv. Instead of preparing fallback support for an
3265 invalid address, we call adjust_address_nv directly. */
3266 if (MEM_P (cplx))
3268 emit_move_insn (adjust_address_nv (cplx, imode,
3269 imag_p ? GET_MODE_SIZE (imode) : 0),
3270 val);
3271 return;
3274 /* If the sub-object is at least word sized, then we know that subregging
3275 will work. This special case is important, since store_bit_field
3276 wants to operate on integer modes, and there's rarely an OImode to
3277 correspond to TCmode. */
3278 if (ibitsize >= BITS_PER_WORD
3279 /* For hard regs we have exact predicates. Assume we can split
3280 the original object if it spans an even number of hard regs.
3281 This special case is important for SCmode on 64-bit platforms
3282 where the natural size of floating-point regs is 32-bit. */
3283 || (REG_P (cplx)
3284 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3285 && REG_NREGS (cplx) % 2 == 0))
3287 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3288 imag_p ? GET_MODE_SIZE (imode) : 0);
3289 if (part)
3291 emit_move_insn (part, val);
3292 return;
3294 else
3295 /* simplify_gen_subreg may fail for sub-word MEMs. */
3296 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3299 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val,
3300 false);
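/* Editor's note: illustrative cases, not GCC text.  If CPLX is a
   CONCAT of two SFmode pseudos,

     write_complex_part (cplx, val, true);

   is exactly emit_move_insn (XEXP (cplx, 1), val); if CPLX is an
   SCmode MEM instead, the imaginary part is written at byte offset
   GET_MODE_SIZE (SFmode), i.e. 4.  */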
3303 /* Extract one of the components of the complex value CPLX. Extract the
3304 real part if IMAG_P is false, and the imaginary part if it's true. */
3307 read_complex_part (rtx cplx, bool imag_p)
3309 machine_mode cmode;
3310 scalar_mode imode;
3311 unsigned ibitsize;
3313 if (GET_CODE (cplx) == CONCAT)
3314 return XEXP (cplx, imag_p);
3316 cmode = GET_MODE (cplx);
3317 imode = GET_MODE_INNER (cmode);
3318 ibitsize = GET_MODE_BITSIZE (imode);
3320 /* Special case reads from complex constants that got spilled to memory. */
3321 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3323 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3324 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3326 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3327 if (CONSTANT_CLASS_P (part))
3328 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3332 /* For MEMs simplify_gen_subreg may generate an invalid new address
3333 because, e.g., the original address is considered mode-dependent
3334 by the target, which restricts simplify_subreg from invoking
3335 adjust_address_nv. Instead of preparing fallback support for an
3336 invalid address, we call adjust_address_nv directly. */
3337 if (MEM_P (cplx))
3338 return adjust_address_nv (cplx, imode,
3339 imag_p ? GET_MODE_SIZE (imode) : 0);
3341 /* If the sub-object is at least word sized, then we know that subregging
3342 will work. This special case is important, since extract_bit_field
3343 wants to operate on integer modes, and there's rarely an OImode to
3344 correspond to TCmode. */
3345 if (ibitsize >= BITS_PER_WORD
3346 /* For hard regs we have exact predicates. Assume we can split
3347 the original object if it spans an even number of hard regs.
3348 This special case is important for SCmode on 64-bit platforms
3349 where the natural size of floating-point regs is 32-bit. */
3350 || (REG_P (cplx)
3351 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3352 && REG_NREGS (cplx) % 2 == 0))
3354 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3355 imag_p ? GET_MODE_SIZE (imode) : 0);
3356 if (ret)
3357 return ret;
3358 else
3359 /* simplify_gen_subreg may fail for sub-word MEMs. */
3360 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3363 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3364 true, NULL_RTX, imode, imode, false, NULL);
3367 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3368 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3369 represented in NEW_MODE. If FORCE is true, this will never happen, as
3370 we'll force-create a SUBREG if needed. */
3372 static rtx
3373 emit_move_change_mode (machine_mode new_mode,
3374 machine_mode old_mode, rtx x, bool force)
3376 rtx ret;
3378 if (push_operand (x, GET_MODE (x)))
3380 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3381 MEM_COPY_ATTRIBUTES (ret, x);
3383 else if (MEM_P (x))
3385 /* We don't have to worry about changing the address since the
3386 size in bytes is supposed to be the same. */
3387 if (reload_in_progress)
3389 /* Copy the MEM to change the mode and move any
3390 substitutions from the old MEM to the new one. */
3391 ret = adjust_address_nv (x, new_mode, 0);
3392 copy_replacements (x, ret);
3394 else
3395 ret = adjust_address (x, new_mode, 0);
3397 else
3399 /* Note that we do want simplify_subreg's behavior of validating
3400 that the new mode is ok for a hard register. If we were to use
3401 simplify_gen_subreg, we would create the subreg, but would
3402 probably run into the target not being able to implement it. */
3403 /* Except, of course, when FORCE is true, when this is exactly what
3404 we want. Which is needed for CCmodes on some targets. */
3405 if (force)
3406 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3407 else
3408 ret = simplify_subreg (new_mode, x, old_mode, 0);
3411 return ret;
3414 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3415 an integer mode of the same size as MODE. Returns the instruction
3416 emitted, or NULL if such a move could not be generated. */
3418 static rtx_insn *
3419 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3421 scalar_int_mode imode;
3422 enum insn_code code;
3424 /* There must exist a mode of the exact size we require. */
3425 if (!int_mode_for_mode (mode).exists (&imode))
3426 return NULL;
3428 /* The target must support moves in this mode. */
3429 code = optab_handler (mov_optab, imode);
3430 if (code == CODE_FOR_nothing)
3431 return NULL;
3433 x = emit_move_change_mode (imode, mode, x, force);
3434 if (x == NULL_RTX)
3435 return NULL;
3436 y = emit_move_change_mode (imode, mode, y, force);
3437 if (y == NULL_RTX)
3438 return NULL;
3439 return emit_insn (GEN_FCN (code) (x, y));
3442 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3443 Return an equivalent MEM that does not use an auto-increment. */
3446 emit_move_resolve_push (machine_mode mode, rtx x)
3448 enum rtx_code code = GET_CODE (XEXP (x, 0));
3449 rtx temp;
3451 poly_int64 adjust = GET_MODE_SIZE (mode);
3452 #ifdef PUSH_ROUNDING
3453 adjust = PUSH_ROUNDING (adjust);
3454 #endif
3455 if (code == PRE_DEC || code == POST_DEC)
3456 adjust = -adjust;
3457 else if (code == PRE_MODIFY || code == POST_MODIFY)
3459 rtx expr = XEXP (XEXP (x, 0), 1);
3461 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3462 poly_int64 val = rtx_to_poly_int64 (XEXP (expr, 1));
3463 if (GET_CODE (expr) == MINUS)
3464 val = -val;
3465 gcc_assert (known_eq (adjust, val) || known_eq (adjust, -val));
3466 adjust = val;
3469 /* Do not use anti_adjust_stack, since we don't want to update
3470 stack_pointer_delta. */
3471 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3472 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3473 0, OPTAB_LIB_WIDEN);
3474 if (temp != stack_pointer_rtx)
3475 emit_move_insn (stack_pointer_rtx, temp);
3477 switch (code)
3479 case PRE_INC:
3480 case PRE_DEC:
3481 case PRE_MODIFY:
3482 temp = stack_pointer_rtx;
3483 break;
3484 case POST_INC:
3485 case POST_DEC:
3486 case POST_MODIFY:
3487 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3488 break;
3489 default:
3490 gcc_unreachable ();
3493 return replace_equiv_address (x, temp);
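/* Editor's note: a worked example (editorial).  For a push expressed as
   (mem:SI (pre_dec (reg sp))) with 4-byte pushes, ADJUST becomes -4;
   the code above emits an explicit sp = sp - 4 and returns
   (mem:SI (reg sp)).  For the POST_ variants the returned address is
   offset by -ADJUST to compensate for the already-performed update.  */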
3496 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3497 X is known to satisfy push_operand, and MODE is known to be complex.
3498 Returns the last instruction emitted. */
3500 rtx_insn *
3501 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3503 scalar_mode submode = GET_MODE_INNER (mode);
3504 bool imag_first;
3506 #ifdef PUSH_ROUNDING
3507 poly_int64 submodesize = GET_MODE_SIZE (submode);
3509 /* If we are pushing to the stack but the size is not exactly what
3510 the machine can push, we need to use move instructions. */
3511 if (maybe_ne (PUSH_ROUNDING (submodesize), submodesize))
3513 x = emit_move_resolve_push (mode, x);
3514 return emit_move_insn (x, y);
3516 #endif
3518 /* Note that the real part always precedes the imag part in memory
3519 regardless of machine's endianness. */
3520 switch (GET_CODE (XEXP (x, 0)))
3522 case PRE_DEC:
3523 case POST_DEC:
3524 imag_first = true;
3525 break;
3526 case PRE_INC:
3527 case POST_INC:
3528 imag_first = false;
3529 break;
3530 default:
3531 gcc_unreachable ();
3534 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3535 read_complex_part (y, imag_first));
3536 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3537 read_complex_part (y, !imag_first));
3540 /* A subroutine of emit_move_complex. Perform the move from Y to X
3541 via two moves of the parts. Returns the last instruction emitted. */
3543 rtx_insn *
3544 emit_move_complex_parts (rtx x, rtx y)
3546 /* Show the output dies here. This is necessary for SUBREGs
3547 of pseudos since we cannot track their lifetimes correctly;
3548 hard regs shouldn't appear here except as return values. */
3549 if (!reload_completed && !reload_in_progress
3550 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3551 emit_clobber (x);
3553 write_complex_part (x, read_complex_part (y, false), false);
3554 write_complex_part (x, read_complex_part (y, true), true);
3556 return get_last_insn ();
3559 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3560 MODE is known to be complex. Returns the last instruction emitted. */
3562 static rtx_insn *
3563 emit_move_complex (machine_mode mode, rtx x, rtx y)
3565 bool try_int;
3567 /* Need to take special care for pushes, to maintain proper ordering
3568 of the data, and possibly extra padding. */
3569 if (push_operand (x, mode))
3570 return emit_move_complex_push (mode, x, y);
3572 /* See if we can coerce the target into moving both values at once, except
3573 for floating point where we favor moving as parts if this is easy. */
3574 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3575 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3576 && !(REG_P (x)
3577 && HARD_REGISTER_P (x)
3578 && REG_NREGS (x) == 1)
3579 && !(REG_P (y)
3580 && HARD_REGISTER_P (y)
3581 && REG_NREGS (y) == 1))
3582 try_int = false;
3583 /* Not possible if the values are inherently not adjacent. */
3584 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3585 try_int = false;
3586 /* Is possible if both are registers (or subregs of registers). */
3587 else if (register_operand (x, mode) && register_operand (y, mode))
3588 try_int = true;
3589 /* If one of the operands is a memory, and alignment constraints
3590 are friendly enough, we may be able to do combined memory operations.
3591 We do not attempt this if Y is a constant because that combination is
3592 usually handled better by the move-by-parts code below. */
3593 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3594 && (!STRICT_ALIGNMENT
3595 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3596 try_int = true;
3597 else
3598 try_int = false;
3600 if (try_int)
3602 rtx_insn *ret;
3604 /* For memory to memory moves, optimal behavior can be had with the
3605 existing block move logic. But use normal expansion if optimizing
3606 for size. */
3607 if (MEM_P (x) && MEM_P (y))
3609 emit_block_move (x, y, gen_int_mode (GET_MODE_SIZE (mode), Pmode),
3610 (optimize_insn_for_speed_p()
3611 ? BLOCK_OP_NO_LIBCALL : BLOCK_OP_NORMAL));
3612 return get_last_insn ();
3615 ret = emit_move_via_integer (mode, x, y, true);
3616 if (ret)
3617 return ret;
3620 return emit_move_complex_parts (x, y);
3623 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3624 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3626 static rtx_insn *
3627 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3629 rtx_insn *ret;
3631 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3632 if (mode != CCmode)
3634 enum insn_code code = optab_handler (mov_optab, CCmode);
3635 if (code != CODE_FOR_nothing)
3637 x = emit_move_change_mode (CCmode, mode, x, true);
3638 y = emit_move_change_mode (CCmode, mode, y, true);
3639 return emit_insn (GEN_FCN (code) (x, y));
3643 /* Otherwise, find the MODE_INT mode of the same width. */
3644 ret = emit_move_via_integer (mode, x, y, false);
3645 gcc_assert (ret != NULL);
3646 return ret;
3649 /* Return true if word I of OP lies entirely in the
3650 undefined bits of a paradoxical subreg. */
3652 static bool
3653 undefined_operand_subword_p (const_rtx op, int i)
3655 if (GET_CODE (op) != SUBREG)
3656 return false;
3657 machine_mode innermostmode = GET_MODE (SUBREG_REG (op));
3658 poly_int64 offset = i * UNITS_PER_WORD + subreg_memory_offset (op);
3659 return (known_ge (offset, GET_MODE_SIZE (innermostmode))
3660 || known_le (offset, -UNITS_PER_WORD));
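/* Editor's note: an illustrative case, not GCC text.  On a
   little-endian 64-bit target, word 1 of the paradoxical subreg
   (subreg:TI (reg:DI d) 0) lies wholly beyond the DImode value, so
   undefined_operand_subword_p (op, 1) is true and emit_move_multi_word
   below skips emitting a move for that word.  */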
3663 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3664 MODE is any multi-word or full-word mode that lacks a move_insn
3665 pattern. Note that you will get better code if you define such
3666 patterns, even if they must turn into multiple assembler instructions. */
3668 static rtx_insn *
3669 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3671 rtx_insn *last_insn = 0;
3672 rtx_insn *seq;
3673 rtx inner;
3674 bool need_clobber;
3675 int i, mode_size;
3677 /* This function can only handle cases where the number of words is
3678 known at compile time. */
3679 mode_size = GET_MODE_SIZE (mode).to_constant ();
3680 gcc_assert (mode_size >= UNITS_PER_WORD);
3682 /* If X is a push on the stack, do the push now and replace
3683 X with a reference to the stack pointer. */
3684 if (push_operand (x, mode))
3685 x = emit_move_resolve_push (mode, x);
3687 /* If we are in reload, see if either operand is a MEM whose address
3688 is scheduled for replacement. */
3689 if (reload_in_progress && MEM_P (x)
3690 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3691 x = replace_equiv_address_nv (x, inner);
3692 if (reload_in_progress && MEM_P (y)
3693 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3694 y = replace_equiv_address_nv (y, inner);
3696 start_sequence ();
3698 need_clobber = false;
3699 for (i = 0; i < CEIL (mode_size, UNITS_PER_WORD); i++)
3701 /* Do not generate code for a move if it would go entirely
3702 to the non-existing bits of a paradoxical subreg. */
3703 if (undefined_operand_subword_p (x, i))
3704 continue;
3706 rtx xpart = operand_subword (x, i, 1, mode);
3707 rtx ypart;
3709 /* Do not generate code for a move if it would come entirely
3710 from the undefined bits of a paradoxical subreg. */
3711 if (undefined_operand_subword_p (y, i))
3712 continue;
3714 ypart = operand_subword (y, i, 1, mode);
3716 /* If we can't get a part of Y, put Y into memory if it is a
3717 constant. Otherwise, force it into a register. Then we must
3718 be able to get a part of Y. */
3719 if (ypart == 0 && CONSTANT_P (y))
3721 y = use_anchored_address (force_const_mem (mode, y));
3722 ypart = operand_subword (y, i, 1, mode);
3724 else if (ypart == 0)
3725 ypart = operand_subword_force (y, i, mode);
3727 gcc_assert (xpart && ypart);
3729 need_clobber |= (GET_CODE (xpart) == SUBREG);
3731 last_insn = emit_move_insn (xpart, ypart);
3734 seq = get_insns ();
3735 end_sequence ();
3737 /* Show the output dies here. This is necessary for SUBREGs
3738 of pseudos since we cannot track their lifetimes correctly;
3739 hard regs shouldn't appear here except as return values.
3740 We never want to emit such a clobber after reload. */
3741 if (x != y
3742 && ! (reload_in_progress || reload_completed)
3743 && need_clobber != 0)
3744 emit_clobber (x);
3746 emit_insn (seq);
3748 return last_insn;
3751 /* Low level part of emit_move_insn.
3752 Called just like emit_move_insn, but assumes X and Y
3753 are basically valid. */
3755 rtx_insn *
3756 emit_move_insn_1 (rtx x, rtx y)
3758 machine_mode mode = GET_MODE (x);
3759 enum insn_code code;
3761 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3763 code = optab_handler (mov_optab, mode);
3764 if (code != CODE_FOR_nothing)
3765 return emit_insn (GEN_FCN (code) (x, y));
3767 /* Expand complex moves by moving real part and imag part. */
3768 if (COMPLEX_MODE_P (mode))
3769 return emit_move_complex (mode, x, y);
3771 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3772 || ALL_FIXED_POINT_MODE_P (mode))
3774 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3776 /* If we can't find an integer mode, use multi words. */
3777 if (result)
3778 return result;
3779 else
3780 return emit_move_multi_word (mode, x, y);
3783 if (GET_MODE_CLASS (mode) == MODE_CC)
3784 return emit_move_ccmode (mode, x, y);
3786 /* Try using a move pattern for the corresponding integer mode. This is
3787 only safe when simplify_subreg can convert MODE constants into integer
3788 constants. At present, it can only do this reliably if the value
3789 fits within a HOST_WIDE_INT. */
3790 if (!CONSTANT_P (y)
3791 || known_le (GET_MODE_BITSIZE (mode), HOST_BITS_PER_WIDE_INT))
3793 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3795 if (ret)
3797 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3798 return ret;
3802 return emit_move_multi_word (mode, x, y);
3805 /* Generate code to copy Y into X.
3806 Both Y and X must have the same mode, except that
3807 Y can be a constant with VOIDmode.
3808 This mode cannot be BLKmode; use emit_block_move for that.
3810 Return the last instruction emitted. */
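/* Minimal usage sketch (illustrative): emit_move_insn (reg, const0_rtx)
   stores zero into REG; the VOIDmode constant takes its meaning from
   REG's mode, as permitted above.  */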
3812 rtx_insn *
3813 emit_move_insn (rtx x, rtx y)
3815 machine_mode mode = GET_MODE (x);
3816 rtx y_cst = NULL_RTX;
3817 rtx_insn *last_insn;
3818 rtx set;
3820 gcc_assert (mode != BLKmode
3821 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3823 /* If we have a copy that looks like one of the following patterns:
3824 (set (subreg:M1 (reg:M2 ...)) (subreg:M1 (reg:M2 ...)))
3825 (set (subreg:M1 (reg:M2 ...)) (mem:M1 ADDR))
3826 (set (mem:M1 ADDR) (subreg:M1 (reg:M2 ...)))
3827 (set (subreg:M1 (reg:M2 ...)) (constant C))
3828 where mode M1 is equal in size to M2, try to detect whether the
3829 mode change involves an implicit round trip through memory.
3830 If so, see if we can avoid that by removing the subregs and
3831 doing the move in mode M2 instead. */
3833 rtx x_inner = NULL_RTX;
3834 rtx y_inner = NULL_RTX;
3836 auto candidate_subreg_p = [&](rtx subreg) {
3837 return (REG_P (SUBREG_REG (subreg))
3838 && known_eq (GET_MODE_SIZE (GET_MODE (SUBREG_REG (subreg))),
3839 GET_MODE_SIZE (GET_MODE (subreg)))
3840 && optab_handler (mov_optab, GET_MODE (SUBREG_REG (subreg)))
3841 != CODE_FOR_nothing);
3844 auto candidate_mem_p = [&](machine_mode innermode, rtx mem) {
3845 return (!targetm.can_change_mode_class (innermode, GET_MODE (mem), ALL_REGS)
3846 && !push_operand (mem, GET_MODE (mem))
3847 /* Not a candidate if innermode requires too much alignment. */
3848 && (MEM_ALIGN (mem) >= GET_MODE_ALIGNMENT (innermode)
3849 || targetm.slow_unaligned_access (GET_MODE (mem),
3850 MEM_ALIGN (mem))
3851 || !targetm.slow_unaligned_access (innermode,
3852 MEM_ALIGN (mem))));
3855 if (SUBREG_P (x) && candidate_subreg_p (x))
3856 x_inner = SUBREG_REG (x);
3858 if (SUBREG_P (y) && candidate_subreg_p (y))
3859 y_inner = SUBREG_REG (y);
3861 if (x_inner != NULL_RTX
3862 && y_inner != NULL_RTX
3863 && GET_MODE (x_inner) == GET_MODE (y_inner)
3864 && !targetm.can_change_mode_class (GET_MODE (x_inner), mode, ALL_REGS))
3866 x = x_inner;
3867 y = y_inner;
3868 mode = GET_MODE (x_inner);
3870 else if (x_inner != NULL_RTX
3871 && MEM_P (y)
3872 && candidate_mem_p (GET_MODE (x_inner), y))
3874 x = x_inner;
3875 y = adjust_address (y, GET_MODE (x_inner), 0);
3876 mode = GET_MODE (x_inner);
3878 else if (y_inner != NULL_RTX
3879 && MEM_P (x)
3880 && candidate_mem_p (GET_MODE (y_inner), x))
3882 x = adjust_address (x, GET_MODE (y_inner), 0);
3883 y = y_inner;
3884 mode = GET_MODE (y_inner);
3886 else if (x_inner != NULL_RTX
3887 && CONSTANT_P (y)
3888 && !targetm.can_change_mode_class (GET_MODE (x_inner),
3889 mode, ALL_REGS)
3890 && (y_inner = simplify_subreg (GET_MODE (x_inner), y, mode, 0)))
3892 x = x_inner;
3893 y = y_inner;
3894 mode = GET_MODE (x_inner);
3897 if (CONSTANT_P (y))
3899 if (optimize
3900 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3901 && (last_insn = compress_float_constant (x, y)))
3902 return last_insn;
3904 y_cst = y;
3906 if (!targetm.legitimate_constant_p (mode, y))
3908 y = force_const_mem (mode, y);
3910 /* If the target's cannot_force_const_mem prevented the spill,
3911 assume that the target's move expanders will also take care
3912 of the non-legitimate constant. */
3913 if (!y)
3914 y = y_cst;
3915 else
3916 y = use_anchored_address (y);
3920 /* If X or Y are memory references, verify that their addresses are valid
3921 for the machine. */
3922 if (MEM_P (x)
3923 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3924 MEM_ADDR_SPACE (x))
3925 && ! push_operand (x, GET_MODE (x))))
3926 x = validize_mem (x);
3928 if (MEM_P (y)
3929 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3930 MEM_ADDR_SPACE (y)))
3931 y = validize_mem (y);
3933 gcc_assert (mode != BLKmode);
3935 last_insn = emit_move_insn_1 (x, y);
3937 if (y_cst && REG_P (x)
3938 && (set = single_set (last_insn)) != NULL_RTX
3939 && SET_DEST (set) == x
3940 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3941 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3943 return last_insn;
3946 /* Generate the body of an instruction to copy Y into X.
3947 It may be a list of insns, if one insn isn't enough. */
3949 rtx_insn *
3950 gen_move_insn (rtx x, rtx y)
3952 rtx_insn *seq;
3954 start_sequence ();
3955 emit_move_insn_1 (x, y);
3956 seq = get_insns ();
3957 end_sequence ();
3958 return seq;
3961 /* If Y is representable exactly in a narrower mode, and the target can
3962 perform the extension directly from constant or memory, then emit the
3963 move as an extension. */
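/* Example (illustrative): the DFmode constant 1.0 truncates exactly to
   SFmode, so a target with a suitable extension pattern can load the
   cheaper SFmode constant and FLOAT_EXTEND it to DFmode, provided the
   cost comparison below is in its favor.  */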
3965 static rtx_insn *
3966 compress_float_constant (rtx x, rtx y)
3968 machine_mode dstmode = GET_MODE (x);
3969 machine_mode orig_srcmode = GET_MODE (y);
3970 machine_mode srcmode;
3971 const REAL_VALUE_TYPE *r;
3972 int oldcost, newcost;
3973 bool speed = optimize_insn_for_speed_p ();
3975 r = CONST_DOUBLE_REAL_VALUE (y);
3977 if (targetm.legitimate_constant_p (dstmode, y))
3978 oldcost = set_src_cost (y, orig_srcmode, speed);
3979 else
3980 oldcost = set_src_cost (force_const_mem (dstmode, y), dstmode, speed);
3982 FOR_EACH_MODE_UNTIL (srcmode, orig_srcmode)
3984 enum insn_code ic;
3985 rtx trunc_y;
3986 rtx_insn *last_insn;
3988 /* Skip if the target can't extend this way. */
3989 ic = can_extend_p (dstmode, srcmode, 0);
3990 if (ic == CODE_FOR_nothing)
3991 continue;
3993 /* Skip if the narrowed value isn't exact. */
3994 if (! exact_real_truncate (srcmode, r))
3995 continue;
3997 trunc_y = const_double_from_real_value (*r, srcmode);
3999 if (targetm.legitimate_constant_p (srcmode, trunc_y))
4001 /* Skip if the target needs extra instructions to perform
4002 the extension. */
4003 if (!insn_operand_matches (ic, 1, trunc_y))
4004 continue;
4005 /* This is valid, but may not be cheaper than the original. */
4006 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
4007 dstmode, speed);
4008 if (oldcost < newcost)
4009 continue;
4011 else if (float_extend_from_mem[dstmode][srcmode])
4013 trunc_y = force_const_mem (srcmode, trunc_y);
4014 /* This is valid, but may not be cheaper than the original. */
4015 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
4016 dstmode, speed);
4017 if (oldcost < newcost)
4018 continue;
4019 trunc_y = validize_mem (trunc_y);
4021 else
4022 continue;
4024 /* For CSE's benefit, force the compressed constant pool entry
4025 into a new pseudo. This constant may be used in different modes,
4026 and if not, combine will put things back together for us. */
4027 trunc_y = force_reg (srcmode, trunc_y);
4029 /* If x is a hard register, perform the extension into a pseudo,
4030 so that e.g. stack realignment code is aware of it. */
4031 rtx target = x;
4032 if (REG_P (x) && HARD_REGISTER_P (x))
4033 target = gen_reg_rtx (dstmode);
4035 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
4036 last_insn = get_last_insn ();
4038 if (REG_P (target))
4039 set_unique_reg_note (last_insn, REG_EQUAL, y);
4041 if (target != x)
4042 return emit_move_insn (x, target);
4043 return last_insn;
4046 return NULL;
4049 /* Pushing data onto the stack. */
4051 /* Push a block of length SIZE (perhaps variable)
4052 and return an rtx to address the beginning of the block.
4053 The value may be virtual_outgoing_args_rtx.
4055 EXTRA is the number of bytes of padding to push in addition to SIZE.
4056 BELOW nonzero means this padding comes at low addresses;
4057 otherwise, the padding comes at high addresses. */
4059 rtx
4060 push_block (rtx size, poly_int64 extra, int below)
4062 rtx temp;
4064 size = convert_modes (Pmode, ptr_mode, size, 1);
4065 if (CONSTANT_P (size))
4066 anti_adjust_stack (plus_constant (Pmode, size, extra));
4067 else if (REG_P (size) && known_eq (extra, 0))
4068 anti_adjust_stack (size);
4069 else
4071 temp = copy_to_mode_reg (Pmode, size);
4072 if (maybe_ne (extra, 0))
4073 temp = expand_binop (Pmode, add_optab, temp,
4074 gen_int_mode (extra, Pmode),
4075 temp, 0, OPTAB_LIB_WIDEN);
4076 anti_adjust_stack (temp);
4079 if (STACK_GROWS_DOWNWARD)
4081 temp = virtual_outgoing_args_rtx;
4082 if (maybe_ne (extra, 0) && below)
4083 temp = plus_constant (Pmode, temp, extra);
4085 else
4087 poly_int64 csize;
4088 if (poly_int_rtx_p (size, &csize))
4089 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
4090 -csize - (below ? 0 : extra));
4091 else if (maybe_ne (extra, 0) && !below)
4092 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
4093 negate_rtx (Pmode, plus_constant (Pmode, size,
4094 extra)));
4095 else
4096 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
4097 negate_rtx (Pmode, size));
4100 return memory_address (NARROWEST_INT_MODE, temp);
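/* Worked example for push_block above (illustrative): with a
   downward-growing stack, push_block (GEN_INT (16), 0, 0) emits
   anti_adjust_stack (GEN_INT (16)) and returns an address based on
   virtual_outgoing_args_rtx, which then names the fresh 16 bytes.  */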
4103 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
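/* E.g. (illustrative): for (mem:SI (pre_dec (reg sp))) the base is the
   stack pointer register, which is returned; a plain (mem:SI (reg sp))
   has no auto-inc address and yields NULL.  */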
4105 static rtx
4106 mem_autoinc_base (rtx mem)
4108 if (MEM_P (mem))
4110 rtx addr = XEXP (mem, 0);
4111 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
4112 return XEXP (addr, 0);
4114 return NULL;
4117 /* A utility routine used here, in reload, and in try_split. The insns
4118 after PREV up to and including LAST are known to adjust the stack,
4119 with a final value of END_ARGS_SIZE. Iterate backward from LAST
4120 placing notes as appropriate. PREV may be NULL, indicating the
4121 entire insn sequence prior to LAST should be scanned.
4123 The set of allowed stack pointer modifications is small:
4124 (1) One or more auto-inc style memory references (aka pushes),
4125 (2) One or more addition/subtraction with the SP as destination,
4126 (3) A single move insn with the SP as destination,
4127 (4) A call_pop insn,
4128 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
4130 Insns in the sequence that do not modify the SP are ignored,
4131 except for noreturn calls.
4133 The return value is the amount of adjustment that can be trivially
4134 verified, via immediate operand or auto-inc. If the adjustment
4135 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
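/* Two illustrative cases: the push
     (set (mem:SI (pre_dec (reg sp))) (reg R))
   falls under (1) and returns -4, minus the SImode size, while
     (set (reg sp) (plus (reg sp) (const_int -12)))
   falls under (2) and returns -12.  */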
4137 poly_int64
4138 find_args_size_adjust (rtx_insn *insn)
4140 rtx dest, set, pat;
4141 int i;
4143 pat = PATTERN (insn);
4144 set = NULL;
4146 /* Look for a call_pop pattern. */
4147 if (CALL_P (insn))
4149 /* We have to allow non-call_pop patterns for the case
4150 of emit_single_push_insn of a TLS address. */
4151 if (GET_CODE (pat) != PARALLEL)
4152 return 0;
4154 /* All call_pop have a stack pointer adjust in the parallel.
4155 The call itself is always first, and the stack adjust is
4156 usually last, so search from the end. */
4157 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
4159 set = XVECEXP (pat, 0, i);
4160 if (GET_CODE (set) != SET)
4161 continue;
4162 dest = SET_DEST (set);
4163 if (dest == stack_pointer_rtx)
4164 break;
4166 /* We'd better have found the stack pointer adjust. */
4167 if (i == 0)
4168 return 0;
4169 /* Fall through to process the extracted SET and DEST
4170 as if it was a standalone insn. */
4172 else if (GET_CODE (pat) == SET)
4173 set = pat;
4174 else if ((set = single_set (insn)) != NULL)
4175 ;
4176 else if (GET_CODE (pat) == PARALLEL)
4178 /* ??? Some older ports use a parallel with a stack adjust
4179 and a store for a PUSH_ROUNDING pattern, rather than a
4180 PRE/POST_MODIFY rtx. Don't force them to update yet... */
4181 /* ??? See h8300 and m68k, pushqi1. */
4182 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
4184 set = XVECEXP (pat, 0, i);
4185 if (GET_CODE (set) != SET)
4186 continue;
4187 dest = SET_DEST (set);
4188 if (dest == stack_pointer_rtx)
4189 break;
4191 /* We do not expect an auto-inc of the sp in the parallel. */
4192 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
4193 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
4194 != stack_pointer_rtx);
4196 if (i < 0)
4197 return 0;
4199 else
4200 return 0;
4202 dest = SET_DEST (set);
4204 /* Look for direct modifications of the stack pointer. */
4205 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
4207 /* Look for a trivial adjustment, otherwise assume nothing. */
4208 /* Note that the SPU restore_stack_block pattern refers to
4209 the stack pointer in V4SImode. Consider that non-trivial. */
4210 poly_int64 offset;
4211 if (SCALAR_INT_MODE_P (GET_MODE (dest))
4212 && strip_offset (SET_SRC (set), &offset) == stack_pointer_rtx)
4213 return offset;
4214 /* ??? Reload can generate no-op moves, which will be cleaned
4215 up later. Recognize it and continue searching. */
4216 else if (rtx_equal_p (dest, SET_SRC (set)))
4217 return 0;
4218 else
4219 return HOST_WIDE_INT_MIN;
4221 else
4223 rtx mem, addr;
4225 /* Otherwise only think about autoinc patterns. */
4226 if (mem_autoinc_base (dest) == stack_pointer_rtx)
4228 mem = dest;
4229 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
4230 != stack_pointer_rtx);
4232 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
4233 mem = SET_SRC (set);
4234 else
4235 return 0;
4237 addr = XEXP (mem, 0);
4238 switch (GET_CODE (addr))
4240 case PRE_INC:
4241 case POST_INC:
4242 return GET_MODE_SIZE (GET_MODE (mem));
4243 case PRE_DEC:
4244 case POST_DEC:
4245 return -GET_MODE_SIZE (GET_MODE (mem));
4246 case PRE_MODIFY:
4247 case POST_MODIFY:
4248 addr = XEXP (addr, 1);
4249 gcc_assert (GET_CODE (addr) == PLUS);
4250 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
4251 return rtx_to_poly_int64 (XEXP (addr, 1));
4252 default:
4253 gcc_unreachable ();
4258 poly_int64
4259 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last,
4260 poly_int64 end_args_size)
4262 poly_int64 args_size = end_args_size;
4263 bool saw_unknown = false;
4264 rtx_insn *insn;
4266 for (insn = last; insn != prev; insn = PREV_INSN (insn))
4268 if (!NONDEBUG_INSN_P (insn))
4269 continue;
4271 /* We might have existing REG_ARGS_SIZE notes, e.g. when pushing
4272 a call argument containing a TLS address that itself requires
4273 a call to __tls_get_addr. The handling of stack_pointer_delta
4274 in emit_single_push_insn is supposed to ensure that any such
4275 notes are already correct. */
4276 rtx note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
4277 gcc_assert (!note || known_eq (args_size, get_args_size (note)));
4279 poly_int64 this_delta = find_args_size_adjust (insn);
4280 if (known_eq (this_delta, 0))
4282 if (!CALL_P (insn)
4283 || ACCUMULATE_OUTGOING_ARGS
4284 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
4285 continue;
4288 gcc_assert (!saw_unknown);
4289 if (known_eq (this_delta, HOST_WIDE_INT_MIN))
4290 saw_unknown = true;
4292 if (!note)
4293 add_args_size_note (insn, args_size);
4294 if (STACK_GROWS_DOWNWARD)
4295 this_delta = -poly_uint64 (this_delta);
4297 if (saw_unknown)
4298 args_size = HOST_WIDE_INT_MIN;
4299 else
4300 args_size -= this_delta;
4303 return args_size;
4306 #ifdef PUSH_ROUNDING
4307 /* Emit single push insn. */
4309 static void
4310 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
4312 rtx dest_addr;
4313 poly_int64 rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
4314 rtx dest;
4315 enum insn_code icode;
4317 /* If there is push pattern, use it. Otherwise try old way of throwing
4318 MEM representing push operation to move expander. */
4319 icode = optab_handler (push_optab, mode);
4320 if (icode != CODE_FOR_nothing)
4322 class expand_operand ops[1];
4324 create_input_operand (&ops[0], x, mode);
4325 if (maybe_expand_insn (icode, 1, ops))
4326 return;
4328 if (known_eq (GET_MODE_SIZE (mode), rounded_size))
4329 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4330 /* If we are to pad downward, adjust the stack pointer first and
4331 then store X into the stack location using an offset. This is
4332 because emit_move_insn does not know how to pad; it does not have
4333 access to type. */
4334 else if (targetm.calls.function_arg_padding (mode, type) == PAD_DOWNWARD)
4336 emit_move_insn (stack_pointer_rtx,
4337 expand_binop (Pmode,
4338 STACK_GROWS_DOWNWARD ? sub_optab
4339 : add_optab,
4340 stack_pointer_rtx,
4341 gen_int_mode (rounded_size, Pmode),
4342 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4344 poly_int64 offset = rounded_size - GET_MODE_SIZE (mode);
4345 if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
4346 /* We have already decremented the stack pointer, so get the
4347 previous value. */
4348 offset += rounded_size;
4350 if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
4351 /* We have already incremented the stack pointer, so get the
4352 previous value. */
4353 offset -= rounded_size;
4355 dest_addr = plus_constant (Pmode, stack_pointer_rtx, offset);
4357 else
4359 if (STACK_GROWS_DOWNWARD)
4360 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4361 dest_addr = plus_constant (Pmode, stack_pointer_rtx, -rounded_size);
4362 else
4363 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4364 dest_addr = plus_constant (Pmode, stack_pointer_rtx, rounded_size);
4366 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4369 dest = gen_rtx_MEM (mode, dest_addr);
4371 if (type != 0)
4373 set_mem_attributes (dest, type, 1);
4375 if (cfun->tail_call_marked)
4376 /* Function incoming arguments may overlap with sibling call
4377 outgoing arguments and we cannot allow reordering of reads
4378 from function arguments with stores to outgoing arguments
4379 of sibling calls. */
4380 set_mem_alias_set (dest, 0);
4382 emit_move_insn (dest, x);
4385 /* Emit and annotate a single push insn. */
4387 static void
4388 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4390 poly_int64 delta, old_delta = stack_pointer_delta;
4391 rtx_insn *prev = get_last_insn ();
4392 rtx_insn *last;
4394 emit_single_push_insn_1 (mode, x, type);
4396 /* Adjust stack_pointer_delta to describe the situation after the push
4397 we just performed. Note that we must do this after the push rather
4398 than before the push in case calculating X needs pushes and pops of
4399 its own (e.g. if calling __tls_get_addr). The REG_ARGS_SIZE notes
4400 for such pushes and pops must not include the effect of the future
4401 push of X. */
4402 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4404 last = get_last_insn ();
4406 /* Notice the common case where we emitted exactly one insn. */
4407 if (PREV_INSN (last) == prev)
4409 add_args_size_note (last, stack_pointer_delta);
4410 return;
4413 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4414 gcc_assert (known_eq (delta, HOST_WIDE_INT_MIN)
4415 || known_eq (delta, old_delta));
4417 #endif
4419 /* If reading SIZE bytes from X will end up reading from
4420 Y return the number of bytes that overlap. Return -1
4421 if there is no overlap or -2 if we can't determine
4422 (for example when X and Y have different base registers). */
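/* Worked example (illustrative): with X at sp, Y at sp + 4 and SIZE
   of 8, (X + SIZE) - Y folds to the constant 4, which lies in [1, 8],
   so 4 overlapping bytes are reported.  With Y at sp + 8 the
   difference is 0, outside [1, 8], so -1 (no overlap).  If X and Y use
   different base registers the difference does not fold to a CONST_INT
   and -2 is returned.  */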
4424 static int
4425 memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
4427 rtx tmp = plus_constant (Pmode, x, size);
4428 rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);
4430 if (!CONST_INT_P (sub))
4431 return -2;
4433 HOST_WIDE_INT val = INTVAL (sub);
4435 return IN_RANGE (val, 1, size) ? val : -1;
4438 /* Generate code to push X onto the stack, assuming it has mode MODE and
4439 type TYPE.
4440 MODE is redundant except when X is a CONST_INT (since they don't
4441 carry mode info).
4442 SIZE is an rtx for the size of data to be copied (in bytes),
4443 needed only if X is BLKmode.
4444 Return true if successful. May return false if asked to push a
4445 partial argument during a sibcall optimization (as specified by
4446 SIBCALL_P) and the incoming and outgoing pointers cannot be shown
4447 to not overlap.
4449 ALIGN (in bits) is maximum alignment we can assume.
4451 If PARTIAL and REG are both nonzero, then copy that many of the first
4452 bytes of X into registers starting with REG, and push the rest of X.
4453 The amount of space pushed is decreased by PARTIAL bytes.
4454 REG must be a hard register in this case.
4455 If REG is zero but PARTIAL is not, take all other actions for an
4456 argument partially in registers, but do not actually load any
4457 registers.
4459 EXTRA is the amount in bytes of extra space to leave next to this arg.
4460 This is ignored if an argument block has already been allocated.
4462 On a machine that lacks real push insns, ARGS_ADDR is the address of
4463 the bottom of the argument block for this call. We use indexing off there
4464 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4465 argument block has not been preallocated.
4467 ARGS_SO_FAR is the size of args previously pushed for this call.
4469 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4470 for arguments passed in registers. If nonzero, it will be the number
4471 of bytes required. */
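/* A minimal call (illustrative sketch, not from the original source),
   assuming ALIGN satisfies word_mode:
     emit_push_insn (x, word_mode, type, NULL_RTX, align, 0, NULL_RTX,
		     0, args_addr, args_so_far, 0, NULL_RTX, false);
   pushes a plain word-sized argument with no partial-register part;
   with ARGS_ADDR == 0 on a PUSH_ARGS target this reaches
   emit_single_push_insn below.  */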
4473 bool
4474 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4475 unsigned int align, int partial, rtx reg, poly_int64 extra,
4476 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4477 rtx alignment_pad, bool sibcall_p)
4479 rtx xinner;
4480 pad_direction stack_direction
4481 = STACK_GROWS_DOWNWARD ? PAD_DOWNWARD : PAD_UPWARD;
4483 /* Decide where to pad the argument: PAD_DOWNWARD for below,
4484 PAD_UPWARD for above, or PAD_NONE for don't pad it.
4485 Default is below for small data on big-endian machines; else above. */
4486 pad_direction where_pad = targetm.calls.function_arg_padding (mode, type);
4488 /* Invert direction if stack is post-decrement.
4489 FIXME: why? */
4490 if (STACK_PUSH_CODE == POST_DEC)
4491 if (where_pad != PAD_NONE)
4492 where_pad = (where_pad == PAD_DOWNWARD ? PAD_UPWARD : PAD_DOWNWARD);
4494 xinner = x;
4496 int nregs = partial / UNITS_PER_WORD;
4497 rtx *tmp_regs = NULL;
4498 int overlapping = 0;
4500 if (mode == BLKmode
4501 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4503 /* Copy a block into the stack, entirely or partially. */
4505 rtx temp;
4506 int used;
4507 int offset;
4508 int skip;
4510 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4511 used = partial - offset;
4513 if (mode != BLKmode)
4515 /* A value is to be stored in an insufficiently aligned
4516 stack slot; copy via a suitably aligned slot if
4517 necessary. */
4518 size = gen_int_mode (GET_MODE_SIZE (mode), Pmode);
4519 if (!MEM_P (xinner))
4521 temp = assign_temp (type, 1, 1);
4522 emit_move_insn (temp, xinner);
4523 xinner = temp;
4527 gcc_assert (size);
4529 /* USED is now the # of bytes we need not copy to the stack
4530 because registers will take care of them. */
4532 if (partial != 0)
4533 xinner = adjust_address (xinner, BLKmode, used);
4535 /* If the partial register-part of the arg counts in its stack size,
4536 skip the part of stack space corresponding to the registers.
4537 Otherwise, start copying to the beginning of the stack space,
4538 by setting SKIP to 0. */
4539 skip = (reg_parm_stack_space == 0) ? 0 : used;
4541 #ifdef PUSH_ROUNDING
4542 /* Do it with several push insns if that doesn't take lots of insns
4543 and if there is no difficulty with push insns that skip bytes
4544 on the stack for alignment purposes. */
4545 if (args_addr == 0
4546 && PUSH_ARGS
4547 && CONST_INT_P (size)
4548 && skip == 0
4549 && MEM_ALIGN (xinner) >= align
4550 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4551 /* Here we avoid the case of a structure whose weak alignment
4552 forces many pushes of a small amount of data,
4553 and such small pushes do rounding that causes trouble. */
4554 && ((!targetm.slow_unaligned_access (word_mode, align))
4555 || align >= BIGGEST_ALIGNMENT
4556 || known_eq (PUSH_ROUNDING (align / BITS_PER_UNIT),
4557 align / BITS_PER_UNIT))
4558 && known_eq (PUSH_ROUNDING (INTVAL (size)), INTVAL (size)))
4560 /* Push padding now if padding above and stack grows down,
4561 or if padding below and stack grows up.
4562 But if space already allocated, this has already been done. */
4563 if (maybe_ne (extra, 0)
4564 && args_addr == 0
4565 && where_pad != PAD_NONE
4566 && where_pad != stack_direction)
4567 anti_adjust_stack (gen_int_mode (extra, Pmode));
4569 move_by_pieces (NULL, xinner, INTVAL (size) - used, align,
4570 RETURN_BEGIN);
4572 else
4573 #endif /* PUSH_ROUNDING */
4575 rtx target;
4577 /* Otherwise make space on the stack and copy the data
4578 to the address of that space. */
4580 /* Deduct words put into registers from the size we must copy. */
4581 if (partial != 0)
4583 if (CONST_INT_P (size))
4584 size = GEN_INT (INTVAL (size) - used);
4585 else
4586 size = expand_binop (GET_MODE (size), sub_optab, size,
4587 gen_int_mode (used, GET_MODE (size)),
4588 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4591 /* Get the address of the stack space.
4592 In this case, we do not deal with EXTRA separately.
4593 A single stack adjust will do. */
4594 poly_int64 const_args_so_far;
4595 if (! args_addr)
4597 temp = push_block (size, extra, where_pad == PAD_DOWNWARD);
4598 extra = 0;
4600 else if (poly_int_rtx_p (args_so_far, &const_args_so_far))
4601 temp = memory_address (BLKmode,
4602 plus_constant (Pmode, args_addr,
4603 skip + const_args_so_far));
4604 else
4605 temp = memory_address (BLKmode,
4606 plus_constant (Pmode,
4607 gen_rtx_PLUS (Pmode,
4608 args_addr,
4609 args_so_far),
4610 skip));
4612 if (!ACCUMULATE_OUTGOING_ARGS)
4614 /* If the source is referenced relative to the stack pointer,
4615 copy it to another register to stabilize it. We do not need
4616 to do this if we know that we won't be changing sp. */
4618 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4619 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4620 temp = copy_to_reg (temp);
4623 target = gen_rtx_MEM (BLKmode, temp);
4625 /* We do *not* set_mem_attributes here, because incoming arguments
4626 may overlap with sibling call outgoing arguments and we cannot
4627 allow reordering of reads from function arguments with stores
4628 to outgoing arguments of sibling calls. We do, however, want
4629 to record the alignment of the stack slot. */
4630 /* ALIGN may well be better aligned than TYPE, e.g. due to
4631 PARM_BOUNDARY. Assume the caller isn't lying. */
4632 set_mem_align (target, align);
4634 /* If part should go in registers and pushing to that part would
4635 overwrite some of the values that need to go into regs, load the
4636 overlapping values into temporary pseudos to be moved into the hard
4637 regs at the end after the stack pushing has completed.
4638 We cannot load them directly into the hard regs here because
4639 they can be clobbered by the block move expansions.
4640 See PR 65358. */
4642 if (partial > 0 && reg != 0 && mode == BLKmode
4643 && GET_CODE (reg) != PARALLEL)
4645 overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
4646 if (overlapping > 0)
4648 gcc_assert (overlapping % UNITS_PER_WORD == 0);
4649 overlapping /= UNITS_PER_WORD;
4651 tmp_regs = XALLOCAVEC (rtx, overlapping);
4653 for (int i = 0; i < overlapping; i++)
4654 tmp_regs[i] = gen_reg_rtx (word_mode);
4656 for (int i = 0; i < overlapping; i++)
4657 emit_move_insn (tmp_regs[i],
4658 operand_subword_force (target, i, mode));
4660 else if (overlapping == -1)
4661 overlapping = 0;
4662 /* Could not determine whether there is overlap.
4663 Fail the sibcall. */
4664 else
4666 overlapping = 0;
4667 if (sibcall_p)
4668 return false;
4671 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4674 else if (partial > 0)
4676 /* Scalar partly in registers. This case is only supported
4677 for fixed-width modes. */
4678 int num_words = GET_MODE_SIZE (mode).to_constant ();
4679 num_words /= UNITS_PER_WORD;
4680 int i;
4681 int not_stack;
4682 /* # bytes of start of argument
4683 that we must make space for but need not store. */
4684 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4685 int args_offset = INTVAL (args_so_far);
4686 int skip;
4688 /* Push padding now if padding above and stack grows down,
4689 or if padding below and stack grows up.
4690 But if space already allocated, this has already been done. */
4691 if (maybe_ne (extra, 0)
4692 && args_addr == 0
4693 && where_pad != PAD_NONE
4694 && where_pad != stack_direction)
4695 anti_adjust_stack (gen_int_mode (extra, Pmode));
4697 /* If we make space by pushing it, we might as well push
4698 the real data. Otherwise, we can leave OFFSET nonzero
4699 and leave the space uninitialized. */
4700 if (args_addr == 0)
4701 offset = 0;
4703 /* Now NOT_STACK gets the number of words that we don't need to
4704 allocate on the stack. Convert OFFSET to words too. */
4705 not_stack = (partial - offset) / UNITS_PER_WORD;
4706 offset /= UNITS_PER_WORD;
4708 /* If the partial register-part of the arg counts in its stack size,
4709 skip the part of stack space corresponding to the registers.
4710 Otherwise, start copying to the beginning of the stack space,
4711 by setting SKIP to 0. */
4712 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4714 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4715 x = validize_mem (force_const_mem (mode, x));
4717 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4718 SUBREGs of such registers are not allowed. */
4719 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4720 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4721 x = copy_to_reg (x);
4723 /* Loop over all the words allocated on the stack for this arg. */
4724 /* We can do it by words, because any scalar bigger than a word
4725 has a size that is a multiple of a word. */
4726 for (i = num_words - 1; i >= not_stack; i--)
4727 if (i >= not_stack + offset)
4728 if (!emit_push_insn (operand_subword_force (x, i, mode),
4729 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4730 0, args_addr,
4731 GEN_INT (args_offset + ((i - not_stack + skip)
4732 * UNITS_PER_WORD)),
4733 reg_parm_stack_space, alignment_pad, sibcall_p))
4734 return false;
4736 else
4738 rtx addr;
4739 rtx dest;
4741 /* Push padding now if padding above and stack grows down,
4742 or if padding below and stack grows up.
4743 But if space already allocated, this has already been done. */
4744 if (maybe_ne (extra, 0)
4745 && args_addr == 0
4746 && where_pad != PAD_NONE
4747 && where_pad != stack_direction)
4748 anti_adjust_stack (gen_int_mode (extra, Pmode));
4750 #ifdef PUSH_ROUNDING
4751 if (args_addr == 0 && PUSH_ARGS)
4752 emit_single_push_insn (mode, x, type);
4753 else
4754 #endif
4756 addr = simplify_gen_binary (PLUS, Pmode, args_addr, args_so_far);
4757 dest = gen_rtx_MEM (mode, memory_address (mode, addr));
4759 /* We do *not* set_mem_attributes here, because incoming arguments
4760 may overlap with sibling call outgoing arguments and we cannot
4761 allow reordering of reads from function arguments with stores
4762 to outgoing arguments of sibling calls. We do, however, want
4763 to record the alignment of the stack slot. */
4764 /* ALIGN may well be better aligned than TYPE, e.g. due to
4765 PARM_BOUNDARY. Assume the caller isn't lying. */
4766 set_mem_align (dest, align);
4768 emit_move_insn (dest, x);
4772 /* Move the partial arguments into the registers and any overlapping
4773 values that we moved into the pseudos in tmp_regs. */
4774 if (partial > 0 && reg != 0)
4776 /* Handle calls that pass values in multiple non-contiguous locations.
4777 The Irix 6 ABI has examples of this. */
4778 if (GET_CODE (reg) == PARALLEL)
4779 emit_group_load (reg, x, type, -1);
4780 else
4782 gcc_assert (partial % UNITS_PER_WORD == 0);
4783 move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);
4785 for (int i = 0; i < overlapping; i++)
4786 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
4787 + nregs - overlapping + i),
4788 tmp_regs[i]);
4793 if (maybe_ne (extra, 0) && args_addr == 0 && where_pad == stack_direction)
4794 anti_adjust_stack (gen_int_mode (extra, Pmode));
4796 if (alignment_pad && args_addr == 0)
4797 anti_adjust_stack (alignment_pad);
4799 return true;
4802 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4803 operations. */
4805 static rtx
4806 get_subtarget (rtx x)
4808 return (optimize
4809 || x == 0
4810 /* Only registers can be subtargets. */
4811 || !REG_P (x)
4812 /* Don't use hard regs to avoid extending their life. */
4813 || REGNO (x) < FIRST_PSEUDO_REGISTER
4814 ? 0 : x);
4817 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4818 FIELD is a bitfield. Returns true if the optimization was successful,
4819 and there's nothing else to do. */
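/* Example of the optimization (illustrative): for a 1-bit bitfield,
   FIELD += 1 simply flips the bit, so the PLUS_EXPR case below uses an
   XOR on the containing word instead of an extract/modify/insert
   sequence.  */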
4821 static bool
4822 optimize_bitfield_assignment_op (poly_uint64 pbitsize,
4823 poly_uint64 pbitpos,
4824 poly_uint64 pbitregion_start,
4825 poly_uint64 pbitregion_end,
4826 machine_mode mode1, rtx str_rtx,
4827 tree to, tree src, bool reverse)
4829 /* str_mode is not guaranteed to be a scalar mode. */
4830 machine_mode str_mode = GET_MODE (str_rtx);
4831 unsigned int str_bitsize;
4832 tree op0, op1;
4833 rtx value, result;
4834 optab binop;
4835 gimple *srcstmt;
4836 enum tree_code code;
4838 unsigned HOST_WIDE_INT bitsize, bitpos, bitregion_start, bitregion_end;
4839 if (mode1 != VOIDmode
4840 || !pbitsize.is_constant (&bitsize)
4841 || !pbitpos.is_constant (&bitpos)
4842 || !pbitregion_start.is_constant (&bitregion_start)
4843 || !pbitregion_end.is_constant (&bitregion_end)
4844 || bitsize >= BITS_PER_WORD
4845 || !GET_MODE_BITSIZE (str_mode).is_constant (&str_bitsize)
4846 || str_bitsize > BITS_PER_WORD
4847 || TREE_SIDE_EFFECTS (to)
4848 || TREE_THIS_VOLATILE (to))
4849 return false;
4851 STRIP_NOPS (src);
4852 if (TREE_CODE (src) != SSA_NAME)
4853 return false;
4854 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4855 return false;
4857 srcstmt = get_gimple_for_ssa_name (src);
4858 if (!srcstmt
4859 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4860 return false;
4862 code = gimple_assign_rhs_code (srcstmt);
4864 op0 = gimple_assign_rhs1 (srcstmt);
4866 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4867 to find its initialization. Hopefully the initialization will
4868 be from a bitfield load. */
4869 if (TREE_CODE (op0) == SSA_NAME)
4871 gimple *op0stmt = get_gimple_for_ssa_name (op0);
4873 /* We want to eventually have OP0 be the same as TO, which
4874 should be a bitfield. */
4875 if (!op0stmt
4876 || !is_gimple_assign (op0stmt)
4877 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4878 return false;
4879 op0 = gimple_assign_rhs1 (op0stmt);
4882 op1 = gimple_assign_rhs2 (srcstmt);
4884 if (!operand_equal_p (to, op0, 0))
4885 return false;
4887 if (MEM_P (str_rtx))
4889 unsigned HOST_WIDE_INT offset1;
4891 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4892 str_bitsize = BITS_PER_WORD;
4894 scalar_int_mode best_mode;
4895 if (!get_best_mode (bitsize, bitpos, bitregion_start, bitregion_end,
4896 MEM_ALIGN (str_rtx), str_bitsize, false, &best_mode))
4897 return false;
4898 str_mode = best_mode;
4899 str_bitsize = GET_MODE_BITSIZE (best_mode);
4901 offset1 = bitpos;
4902 bitpos %= str_bitsize;
4903 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4904 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4906 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4907 return false;
4909 /* If the bit field covers the whole REG/MEM, store_field
4910 will likely generate better code. */
4911 if (bitsize >= str_bitsize)
4912 return false;
4914 /* We can't handle fields split across multiple entities. */
4915 if (bitpos + bitsize > str_bitsize)
4916 return false;
4918 if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4919 bitpos = str_bitsize - bitpos - bitsize;
4921 switch (code)
4923 case PLUS_EXPR:
4924 case MINUS_EXPR:
4925 /* For now, just optimize the case of the topmost bitfield
4926 where we don't need to do any masking and also
4927 1 bit bitfields where xor can be used.
4928 We might win by one instruction for the other bitfields
4929 too if insv/extv instructions aren't used, so that
4930 can be added later. */
4931 if ((reverse || bitpos + bitsize != str_bitsize)
4932 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4933 break;
4935 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4936 value = convert_modes (str_mode,
4937 TYPE_MODE (TREE_TYPE (op1)), value,
4938 TYPE_UNSIGNED (TREE_TYPE (op1)));
4940 /* We may be accessing data outside the field, which means
4941 we can alias adjacent data. */
4942 if (MEM_P (str_rtx))
4944 str_rtx = shallow_copy_rtx (str_rtx);
4945 set_mem_alias_set (str_rtx, 0);
4946 set_mem_expr (str_rtx, 0);
4949 if (bitsize == 1 && (reverse || bitpos + bitsize != str_bitsize))
4951 value = expand_and (str_mode, value, const1_rtx, NULL);
4952 binop = xor_optab;
4954 else
4955 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4957 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4958 if (reverse)
4959 value = flip_storage_order (str_mode, value);
4960 result = expand_binop (str_mode, binop, str_rtx,
4961 value, str_rtx, 1, OPTAB_WIDEN);
4962 if (result != str_rtx)
4963 emit_move_insn (str_rtx, result);
4964 return true;
4966 case BIT_IOR_EXPR:
4967 case BIT_XOR_EXPR:
4968 if (TREE_CODE (op1) != INTEGER_CST)
4969 break;
4970 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4971 value = convert_modes (str_mode,
4972 TYPE_MODE (TREE_TYPE (op1)), value,
4973 TYPE_UNSIGNED (TREE_TYPE (op1)));
4975 /* We may be accessing data outside the field, which means
4976 we can alias adjacent data. */
4977 if (MEM_P (str_rtx))
4979 str_rtx = shallow_copy_rtx (str_rtx);
4980 set_mem_alias_set (str_rtx, 0);
4981 set_mem_expr (str_rtx, 0);
4984 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4985 if (bitpos + bitsize != str_bitsize)
4987 rtx mask = gen_int_mode ((HOST_WIDE_INT_1U << bitsize) - 1,
4988 str_mode);
4989 value = expand_and (str_mode, value, mask, NULL_RTX);
4991 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4992 if (reverse)
4993 value = flip_storage_order (str_mode, value);
4994 result = expand_binop (str_mode, binop, str_rtx,
4995 value, str_rtx, 1, OPTAB_WIDEN);
4996 if (result != str_rtx)
4997 emit_move_insn (str_rtx, result);
4998 return true;
5000 default:
5001 break;
5004 return false;
5007 /* In the C++ memory model, consecutive bit fields in a structure are
5008 considered one memory location.
5010 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
5011 returns the bit range of consecutive bits in which this COMPONENT_REF
5012 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
5013 and *OFFSET may be adjusted in the process.
5015 If the access does not need to be restricted, 0 is returned in both
5016 *BITSTART and *BITEND. */
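/* Illustrative example: in struct { int a : 7; int b : 9; } the two
   fields typically share one DECL_BIT_FIELD_REPRESENTATIVE, so a store
   to either field is given a bit range covering all 16 bits, keeping
   it within the single memory location the C++ memory model assigns
   to the pair.  */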
5018 void
5019 get_bit_range (poly_uint64_pod *bitstart, poly_uint64_pod *bitend, tree exp,
5020 poly_int64_pod *bitpos, tree *offset)
5022 poly_int64 bitoffset;
5023 tree field, repr;
5025 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
5027 field = TREE_OPERAND (exp, 1);
5028 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
5029 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
5030 need to limit the range we can access. */
5031 if (!repr)
5033 *bitstart = *bitend = 0;
5034 return;
5037 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
5038 part of a larger bit field, then the representative does not serve any
5039 useful purpose. This can occur in Ada. */
5040 if (handled_component_p (TREE_OPERAND (exp, 0)))
5042 machine_mode rmode;
5043 poly_int64 rbitsize, rbitpos;
5044 tree roffset;
5045 int unsignedp, reversep, volatilep = 0;
5046 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
5047 &roffset, &rmode, &unsignedp, &reversep,
5048 &volatilep);
5049 if (!multiple_p (rbitpos, BITS_PER_UNIT))
5051 *bitstart = *bitend = 0;
5052 return;
5056 /* Compute the adjustment to bitpos from the offset of the field
5057 relative to the representative. DECL_FIELD_OFFSET of field and
5058 repr are the same by construction if they are not constants,
5059 see finish_bitfield_layout. */
5060 poly_uint64 field_offset, repr_offset;
5061 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
5062 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
5063 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
5064 else
5065 bitoffset = 0;
5066 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
5067 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
5069 /* If the adjustment is larger than bitpos, we would have a negative bit
5070 position for the lower bound and this may wreak havoc later. Adjust
5071 offset and bitpos to make the lower bound non-negative in that case. */
5072 if (maybe_gt (bitoffset, *bitpos))
5074 poly_int64 adjust_bits = upper_bound (bitoffset, *bitpos) - *bitpos;
5075 poly_int64 adjust_bytes = exact_div (adjust_bits, BITS_PER_UNIT);
5077 *bitpos += adjust_bits;
5078 if (*offset == NULL_TREE)
5079 *offset = size_int (-adjust_bytes);
5080 else
5081 *offset = size_binop (MINUS_EXPR, *offset, size_int (adjust_bytes));
5082 *bitstart = 0;
5084 else
5085 *bitstart = *bitpos - bitoffset;
5087 *bitend = *bitstart + tree_to_poly_uint64 (DECL_SIZE (repr)) - 1;
5090 /* Returns true if BASE is a DECL that does not reside in memory and
5091 has non-BLKmode. DECL_RTL must not be a MEM; if
5092 DECL_RTL was not set yet, return false. */
5094 static inline bool
5095 non_mem_decl_p (tree base)
5097 if (!DECL_P (base)
5098 || TREE_ADDRESSABLE (base)
5099 || DECL_MODE (base) == BLKmode)
5100 return false;
5102 if (!DECL_RTL_SET_P (base))
5103 return false;
5105 return (!MEM_P (DECL_RTL (base)));
5108 /* Returns true if REF refers to an object that does not
5109 reside in memory and has non-BLKmode. */
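/* E.g. (illustrative): for a dereference of &X where the local X is
   not addressable and lives in a pseudo register, the MEM_REF's
   address is an ADDR_EXPR of a non-memory DECL, so this returns true
   and callers can expand the access without forcing X to memory.  */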
5111 static inline bool
5112 mem_ref_refers_to_non_mem_p (tree ref)
5114 tree base;
5116 if (TREE_CODE (ref) == MEM_REF
5117 || TREE_CODE (ref) == TARGET_MEM_REF)
5119 tree addr = TREE_OPERAND (ref, 0);
5121 if (TREE_CODE (addr) != ADDR_EXPR)
5122 return false;
5124 base = TREE_OPERAND (addr, 0);
5126 else
5127 base = ref;
5129 return non_mem_decl_p (base);
5132 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
5133 is true, try generating a nontemporal store. */
5135 void
5136 expand_assignment (tree to, tree from, bool nontemporal)
5138 rtx to_rtx = 0;
5139 rtx result;
5140 machine_mode mode;
5141 unsigned int align;
5142 enum insn_code icode;
5144 /* Don't crash if the lhs of the assignment was erroneous. */
5145 if (TREE_CODE (to) == ERROR_MARK)
5147 expand_normal (from);
5148 return;
5151 /* Optimize away no-op moves without side-effects. */
5152 if (operand_equal_p (to, from, 0))
5153 return;
5155 /* Handle misaligned stores. */
5156 mode = TYPE_MODE (TREE_TYPE (to));
5157 if ((TREE_CODE (to) == MEM_REF
5158 || TREE_CODE (to) == TARGET_MEM_REF
5159 || DECL_P (to))
5160 && mode != BLKmode
5161 && !mem_ref_refers_to_non_mem_p (to)
5162 && ((align = get_object_alignment (to))
5163 < GET_MODE_ALIGNMENT (mode))
5164 && (((icode = optab_handler (movmisalign_optab, mode))
5165 != CODE_FOR_nothing)
5166 || targetm.slow_unaligned_access (mode, align)))
5168 rtx reg, mem;
5170 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
5171 reg = force_not_mem (reg);
5172 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5173 if (TREE_CODE (to) == MEM_REF && REF_REVERSE_STORAGE_ORDER (to))
5174 reg = flip_storage_order (mode, reg);
5176 if (icode != CODE_FOR_nothing)
5178 class expand_operand ops[2];
5180 create_fixed_operand (&ops[0], mem);
5181 create_input_operand (&ops[1], reg, mode);
5182 /* The movmisalign<mode> pattern cannot fail, else the assignment
5183 would silently be omitted. */
5184 expand_insn (icode, 2, ops);
5186 else
5187 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg,
5188 false);
5189 return;
5192 /* Assignment of a structure component needs special treatment
5193 if the structure component's rtx is not simply a MEM.
5194 Assignment of an array element at a constant index, and assignment of
5195 an array element in an unaligned packed structure field, has the same
5196 problem. Same for (partially) storing into a non-memory object. */
5197 if (handled_component_p (to)
5198 || (TREE_CODE (to) == MEM_REF
5199 && (REF_REVERSE_STORAGE_ORDER (to)
5200 || mem_ref_refers_to_non_mem_p (to)))
5201 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
5203 machine_mode mode1;
5204 poly_int64 bitsize, bitpos;
5205 poly_uint64 bitregion_start = 0;
5206 poly_uint64 bitregion_end = 0;
5207 tree offset;
5208 int unsignedp, reversep, volatilep = 0;
5209 tree tem;
5211 push_temp_slots ();
5212 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
5213 &unsignedp, &reversep, &volatilep);
5215 /* Make sure bitpos is not negative; it can wreak havoc later. */
5216 if (maybe_lt (bitpos, 0))
5218 gcc_assert (offset == NULL_TREE);
5219 offset = size_int (bits_to_bytes_round_down (bitpos));
5220 bitpos = num_trailing_bits (bitpos);
5223 if (TREE_CODE (to) == COMPONENT_REF
5224 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
5225 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
5226 /* The C++ memory model naturally applies to byte-aligned fields.
5227 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
5228 BITSIZE are not byte-aligned, there is no need to limit the range
5229 we can access. This can occur with packed structures in Ada. */
5230 else if (maybe_gt (bitsize, 0)
5231 && multiple_p (bitsize, BITS_PER_UNIT)
5232 && multiple_p (bitpos, BITS_PER_UNIT))
5234 bitregion_start = bitpos;
5235 bitregion_end = bitpos + bitsize - 1;
5238 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
5240 /* If the field has a mode, we want to access it in the
5241 field's mode, not the computed mode.
5242 If a MEM has VOIDmode (external with incomplete type),
5243 use BLKmode for it instead. */
5244 if (MEM_P (to_rtx))
5246 if (mode1 != VOIDmode)
5247 to_rtx = adjust_address (to_rtx, mode1, 0);
5248 else if (GET_MODE (to_rtx) == VOIDmode)
5249 to_rtx = adjust_address (to_rtx, BLKmode, 0);
5252 if (offset != 0)
5254 machine_mode address_mode;
5255 rtx offset_rtx;
5257 if (!MEM_P (to_rtx))
5259 /* We can get constant negative offsets into arrays with broken
5260 user code. Translate this to a trap instead of ICEing. */
5261 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
5262 expand_builtin_trap ();
5263 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
5266 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
5267 address_mode = get_address_mode (to_rtx);
5268 if (GET_MODE (offset_rtx) != address_mode)
5270 /* We cannot be sure that the RTL in offset_rtx is valid outside
5271 of a memory address context, so force it into a register
5272 before attempting to convert it to the desired mode. */
5273 offset_rtx = force_operand (offset_rtx, NULL_RTX);
5274 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5277 /* If we have an expression in OFFSET_RTX and a non-zero
5278 byte offset in BITPOS, adding the byte offset before the
5279 OFFSET_RTX results in better intermediate code, which makes
5280 later rtl optimization passes perform better.
5282 We prefer intermediate code like this:
5284 r124:DI=r123:DI+0x18
5285 [r124:DI]=r121:DI
5287 ... instead of ...
5289 r124:DI=r123:DI+0x10
5290 [r124:DI+0x8]=r121:DI
5292 This is only done for aligned data values, as these can
5293 be expected to result in single move instructions. */
5294 poly_int64 bytepos;
5295 if (mode1 != VOIDmode
5296 && maybe_ne (bitpos, 0)
5297 && maybe_gt (bitsize, 0)
5298 && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
5299 && multiple_p (bitpos, bitsize)
5300 && multiple_p (bitsize, GET_MODE_ALIGNMENT (mode1))
5301 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
5303 to_rtx = adjust_address (to_rtx, mode1, bytepos);
5304 bitregion_start = 0;
5305 if (known_ge (bitregion_end, poly_uint64 (bitpos)))
5306 bitregion_end -= bitpos;
5307 bitpos = 0;
5310 to_rtx = offset_address (to_rtx, offset_rtx,
5311 highest_pow2_factor_for_target (to,
5312 offset));
5315 /* No action is needed if the target is not a memory and the field
5316 lies completely outside that target. This can occur if the source
5317 code contains an out-of-bounds access to a small array. */
5318 if (!MEM_P (to_rtx)
5319 && GET_MODE (to_rtx) != BLKmode
5320 && known_ge (bitpos, GET_MODE_PRECISION (GET_MODE (to_rtx))))
5322 expand_normal (from);
5323 result = NULL;
5325 /* Handle expand_expr of a complex value returning a CONCAT. */
5326 else if (GET_CODE (to_rtx) == CONCAT)
5328 machine_mode to_mode = GET_MODE (to_rtx);
5329 gcc_checking_assert (COMPLEX_MODE_P (to_mode));
5330 poly_int64 mode_bitsize = GET_MODE_BITSIZE (to_mode);
5331 unsigned short inner_bitsize = GET_MODE_UNIT_BITSIZE (to_mode);
5332 if (TYPE_MODE (TREE_TYPE (from)) == to_mode
5333 && known_eq (bitpos, 0)
5334 && known_eq (bitsize, mode_bitsize))
5335 result = store_expr (from, to_rtx, false, nontemporal, reversep);
5336 else if (TYPE_MODE (TREE_TYPE (from)) == GET_MODE_INNER (to_mode)
5337 && known_eq (bitsize, inner_bitsize)
5338 && (known_eq (bitpos, 0)
5339 || known_eq (bitpos, inner_bitsize)))
5340 result = store_expr (from, XEXP (to_rtx, maybe_ne (bitpos, 0)),
5341 false, nontemporal, reversep);
5342 else if (known_le (bitpos + bitsize, inner_bitsize))
5343 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
5344 bitregion_start, bitregion_end,
5345 mode1, from, get_alias_set (to),
5346 nontemporal, reversep);
5347 else if (known_ge (bitpos, inner_bitsize))
5348 result = store_field (XEXP (to_rtx, 1), bitsize,
5349 bitpos - inner_bitsize,
5350 bitregion_start, bitregion_end,
5351 mode1, from, get_alias_set (to),
5352 nontemporal, reversep);
5353 else if (known_eq (bitpos, 0) && known_eq (bitsize, mode_bitsize))
5355 result = expand_normal (from);
5356 if (GET_CODE (result) == CONCAT)
5358 to_mode = GET_MODE_INNER (to_mode);
5359 machine_mode from_mode = GET_MODE_INNER (GET_MODE (result));
5360 rtx from_real
5361 = simplify_gen_subreg (to_mode, XEXP (result, 0),
5362 from_mode, 0);
5363 rtx from_imag
5364 = simplify_gen_subreg (to_mode, XEXP (result, 1),
5365 from_mode, 0);
5366 if (!from_real || !from_imag)
5367 goto concat_store_slow;
5368 emit_move_insn (XEXP (to_rtx, 0), from_real);
5369 emit_move_insn (XEXP (to_rtx, 1), from_imag);
5371 else
5373 machine_mode from_mode
5374 = GET_MODE (result) == VOIDmode
5375 ? TYPE_MODE (TREE_TYPE (from))
5376 : GET_MODE (result);
5377 rtx from_rtx;
5378 if (MEM_P (result))
5379 from_rtx = change_address (result, to_mode, NULL_RTX);
5380 else
5381 from_rtx
5382 = simplify_gen_subreg (to_mode, result, from_mode, 0);
5383 if (from_rtx)
5385 emit_move_insn (XEXP (to_rtx, 0),
5386 read_complex_part (from_rtx, false));
5387 emit_move_insn (XEXP (to_rtx, 1),
5388 read_complex_part (from_rtx, true));
5390 else
5392 to_mode = GET_MODE_INNER (to_mode);
5393 rtx from_real
5394 = simplify_gen_subreg (to_mode, result, from_mode, 0);
5395 rtx from_imag
5396 = simplify_gen_subreg (to_mode, result, from_mode,
5397 GET_MODE_SIZE (to_mode));
5398 if (!from_real || !from_imag)
5399 goto concat_store_slow;
5400 emit_move_insn (XEXP (to_rtx, 0), from_real);
5401 emit_move_insn (XEXP (to_rtx, 1), from_imag);
5405 else
5407 concat_store_slow:;
5408 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
5409 GET_MODE_SIZE (GET_MODE (to_rtx)));
5410 write_complex_part (temp, XEXP (to_rtx, 0), false);
5411 write_complex_part (temp, XEXP (to_rtx, 1), true);
5412 result = store_field (temp, bitsize, bitpos,
5413 bitregion_start, bitregion_end,
5414 mode1, from, get_alias_set (to),
5415 nontemporal, reversep);
5416 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
5417 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
5420 /* For calls to functions returning variable length structures, if TO_RTX
5421 is not a MEM, go through a MEM because we must not create temporaries
5422 of the VLA type. */
5423 else if (!MEM_P (to_rtx)
5424 && TREE_CODE (from) == CALL_EXPR
5425 && COMPLETE_TYPE_P (TREE_TYPE (from))
5426 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) != INTEGER_CST)
5428 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
5429 GET_MODE_SIZE (GET_MODE (to_rtx)));
5430 result = store_field (temp, bitsize, bitpos, bitregion_start,
5431 bitregion_end, mode1, from, get_alias_set (to),
5432 nontemporal, reversep);
5433 emit_move_insn (to_rtx, temp);
5435 else
5437 if (MEM_P (to_rtx))
5439 /* If the field is at offset zero, we could have been given the
5440 DECL_RTX of the parent struct. Don't munge it. */
5441 to_rtx = shallow_copy_rtx (to_rtx);
5442 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
5443 if (volatilep)
5444 MEM_VOLATILE_P (to_rtx) = 1;
5447 gcc_checking_assert (known_ge (bitpos, 0));
5448 if (optimize_bitfield_assignment_op (bitsize, bitpos,
5449 bitregion_start, bitregion_end,
5450 mode1, to_rtx, to, from,
5451 reversep))
5452 result = NULL;
5453 else
5454 result = store_field (to_rtx, bitsize, bitpos,
5455 bitregion_start, bitregion_end,
5456 mode1, from, get_alias_set (to),
5457 nontemporal, reversep);
5460 if (result)
5461 preserve_temp_slots (result);
5462 pop_temp_slots ();
5463 return;
5466 /* If the rhs is a function call and its value is not an aggregate,
5467 call the function before we start to compute the lhs.
5468 This is needed for correct code for cases such as
5469 val = setjmp (buf) on machines where reference to val
5470 requires loading up part of an address in a separate insn.
5472 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5473 since it might be a promoted variable where the zero- or sign- extension
5474 needs to be done. Handling this in the normal way is safe because no
5475 computation is done before the call. The same is true for SSA names. */
5476 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5477 && COMPLETE_TYPE_P (TREE_TYPE (from))
5478 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5479 && ! (((VAR_P (to)
5480 || TREE_CODE (to) == PARM_DECL
5481 || TREE_CODE (to) == RESULT_DECL)
5482 && REG_P (DECL_RTL (to)))
5483 || TREE_CODE (to) == SSA_NAME))
5485 rtx value;
5487 push_temp_slots ();
5488 value = expand_normal (from);
5490 if (to_rtx == 0)
5491 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5493 /* Handle calls that return values in multiple non-contiguous locations.
5494 The Irix 6 ABI has examples of this. */
5495 if (GET_CODE (to_rtx) == PARALLEL)
5497 if (GET_CODE (value) == PARALLEL)
5498 emit_group_move (to_rtx, value);
5499 else
5500 emit_group_load (to_rtx, value, TREE_TYPE (from),
5501 int_size_in_bytes (TREE_TYPE (from)));
5503 else if (GET_CODE (value) == PARALLEL)
5504 emit_group_store (to_rtx, value, TREE_TYPE (from),
5505 int_size_in_bytes (TREE_TYPE (from)));
5506 else if (GET_MODE (to_rtx) == BLKmode)
5508 /* Handle calls that return BLKmode values in registers. */
5509 if (REG_P (value))
5510 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5511 else
5512 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5514 else
5516 if (POINTER_TYPE_P (TREE_TYPE (to)))
5517 value = convert_memory_address_addr_space
5518 (as_a <scalar_int_mode> (GET_MODE (to_rtx)), value,
5519 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5521 emit_move_insn (to_rtx, value);
5524 preserve_temp_slots (to_rtx);
5525 pop_temp_slots ();
5526 return;
5529 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5530 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5532 /* Don't move directly into a return register. */
5533 if (TREE_CODE (to) == RESULT_DECL
5534 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5536 rtx temp;
5538 push_temp_slots ();
5540 /* If the source is itself a return value, it is still in a pseudo at
5541 this point, so we can move it back to the return register directly. */
5542 if (REG_P (to_rtx)
5543 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5544 && TREE_CODE (from) != CALL_EXPR)
5545 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5546 else
5547 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5549 /* Handle calls that return values in multiple non-contiguous locations.
5550 The Irix 6 ABI has examples of this. */
5551 if (GET_CODE (to_rtx) == PARALLEL)
5553 if (GET_CODE (temp) == PARALLEL)
5554 emit_group_move (to_rtx, temp);
5555 else
5556 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5557 int_size_in_bytes (TREE_TYPE (from)));
5559 else if (temp)
5560 emit_move_insn (to_rtx, temp);
5562 preserve_temp_slots (to_rtx);
5563 pop_temp_slots ();
5564 return;
5567 /* In case we are returning the contents of an object which overlaps
5568 the place the value is being stored, use a safe function when copying
5569 a value through a pointer into a structure value return block. */
5570 if (TREE_CODE (to) == RESULT_DECL
5571 && TREE_CODE (from) == INDIRECT_REF
5572 && ADDR_SPACE_GENERIC_P
5573 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5574 && refs_may_alias_p (to, from)
5575 && cfun->returns_struct
5576 && !cfun->returns_pcc_struct)
5578 rtx from_rtx, size;
5580 push_temp_slots ();
5581 size = expr_size (from);
5582 from_rtx = expand_normal (from);
5584 emit_block_move_via_libcall (XEXP (to_rtx, 0), XEXP (from_rtx, 0), size);
5586 preserve_temp_slots (to_rtx);
5587 pop_temp_slots ();
5588 return;
5591 /* Compute FROM and store the value in the rtx we got. */
5593 push_temp_slots ();
5594 result = store_expr (from, to_rtx, 0, nontemporal, false);
5595 preserve_temp_slots (result);
5596 pop_temp_slots ();
5597 return;
5600 /* Emit a nontemporal store insn that moves FROM to TO. Return true if
5601 this succeeded, false otherwise. */
5603 bool
5604 emit_storent_insn (rtx to, rtx from)
5606 class expand_operand ops[2];
5607 machine_mode mode = GET_MODE (to);
5608 enum insn_code code = optab_handler (storent_optab, mode);
5610 if (code == CODE_FOR_nothing)
5611 return false;
5613 create_fixed_operand (&ops[0], to);
5614 create_input_operand (&ops[1], from, mode);
5615 return maybe_expand_insn (code, 2, ops);
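/* A minimal usage sketch (illustrative; it mirrors the call in store_expr
   further below): nontemporal stores are best-effort, so a caller falls
   back to a plain move when no storent pattern exists for the mode:

     if (!(nontemporal && emit_storent_insn (target, temp)))
       emit_move_insn (target, temp);
*/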
5618 /* Helper function for store_expr when storing a STRING_CST. */
5620 static rtx
5621 string_cst_read_str (void *data, HOST_WIDE_INT offset, scalar_int_mode mode)
5623 tree str = (tree) data;
5625 gcc_assert (offset >= 0);
5626 if (offset >= TREE_STRING_LENGTH (str))
5627 return const0_rtx;
5629 if ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
5630 > (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (str))
5632 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
5633 size_t l = TREE_STRING_LENGTH (str) - offset;
5634 memcpy (p, TREE_STRING_POINTER (str) + offset, l);
5635 memset (p + l, '\0', GET_MODE_SIZE (mode) - l);
5636 return c_readstr (p, mode, false);
5639 return c_readstr (TREE_STRING_POINTER (str) + offset, mode, false);
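/* Example of the padding behaviour above (illustrative only): for the
   STRING_CST "ab" -- TREE_STRING_LENGTH is 3, counting the trailing
   NUL -- a 4-byte SImode read at offset 0 copies the three string bytes
   and zero-fills the fourth before c_readstr builds the constant, while
   any read whose OFFSET is at or past the string yields const0_rtx. */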
5642 /* Generate code for computing expression EXP,
5643 and storing the value into TARGET.
5645 If the mode is BLKmode then we may return TARGET itself.
5646 It turns out that in BLKmode it doesn't cause a problem,
5647 because C has no operators that could combine two different
5648 assignments into the same BLKmode object with different values
5649 with no sequence point. Will other languages need this to
5650 be more thorough?
5652 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5653 stack, and block moves may need to be treated specially.
5655 If NONTEMPORAL is true, try using a nontemporal store instruction.
5657 If REVERSE is true, the store is to be done in reverse order. */
5659 rtx
5660 store_expr (tree exp, rtx target, int call_param_p,
5661 bool nontemporal, bool reverse)
5663 rtx temp;
5664 rtx alt_rtl = NULL_RTX;
5665 location_t loc = curr_insn_location ();
5666 bool shortened_string_cst = false;
5668 if (VOID_TYPE_P (TREE_TYPE (exp)))
5670 /* C++ can generate ?: expressions with a throw expression in one
5671 branch and an rvalue in the other. Here, we resolve attempts to
5672 store the throw expression's nonexistent result. */
5673 gcc_assert (!call_param_p);
5674 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5675 return NULL_RTX;
5677 if (TREE_CODE (exp) == COMPOUND_EXPR)
5679 /* Perform first part of compound expression, then assign from second
5680 part. */
5681 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5682 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5683 return store_expr (TREE_OPERAND (exp, 1), target,
5684 call_param_p, nontemporal, reverse);
5686 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5688 /* For conditional expression, get safe form of the target. Then
5689 test the condition, doing the appropriate assignment on either
5690 side. This avoids the creation of unnecessary temporaries.
5691 For non-BLKmode, it is more efficient not to do this. */
5693 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5695 do_pending_stack_adjust ();
5696 NO_DEFER_POP;
5697 jumpifnot (TREE_OPERAND (exp, 0), lab1,
5698 profile_probability::uninitialized ());
5699 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5700 nontemporal, reverse);
5701 emit_jump_insn (targetm.gen_jump (lab2));
5702 emit_barrier ();
5703 emit_label (lab1);
5704 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5705 nontemporal, reverse);
5706 emit_label (lab2);
5707 OK_DEFER_POP;
5709 return NULL_RTX;
5711 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5712 /* If this is a scalar in a register that is stored in a wider mode
5713 than the declared mode, compute the result into its declared mode
5714 and then convert to the wider mode. Our value is the computed
5715 expression. */
5717 rtx inner_target = 0;
5718 scalar_int_mode outer_mode = subreg_unpromoted_mode (target);
5719 scalar_int_mode inner_mode = subreg_promoted_mode (target);
5721 /* We can do the conversion inside EXP, which will often result
5722 in some optimizations. Do the conversion in two steps: first
5723 change the signedness, if needed, then the extend. But don't
5724 do this if the type of EXP is a subtype of something else
5725 since then the conversion might involve more than just
5726 converting modes. */
5727 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5728 && TREE_TYPE (TREE_TYPE (exp)) == 0
5729 && GET_MODE_PRECISION (outer_mode)
5730 == TYPE_PRECISION (TREE_TYPE (exp)))
5732 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5733 TYPE_UNSIGNED (TREE_TYPE (exp))))
5735 /* Some types, e.g. Fortran's logical*4, won't have a signed
5736 version, so use the mode instead. */
5737 tree ntype
5738 = (signed_or_unsigned_type_for
5739 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5740 if (ntype == NULL)
5741 ntype = lang_hooks.types.type_for_mode
5742 (TYPE_MODE (TREE_TYPE (exp)),
5743 SUBREG_PROMOTED_SIGN (target));
5745 exp = fold_convert_loc (loc, ntype, exp);
5748 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5749 (inner_mode, SUBREG_PROMOTED_SIGN (target)),
5750 exp);
5752 inner_target = SUBREG_REG (target);
5755 temp = expand_expr (exp, inner_target, VOIDmode,
5756 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5759 /* If TEMP is a VOIDmode constant, use convert_modes to make
5760 sure that we properly convert it. */
5761 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5763 temp = convert_modes (outer_mode, TYPE_MODE (TREE_TYPE (exp)),
5764 temp, SUBREG_PROMOTED_SIGN (target));
5765 temp = convert_modes (inner_mode, outer_mode, temp,
5766 SUBREG_PROMOTED_SIGN (target));
5769 convert_move (SUBREG_REG (target), temp,
5770 SUBREG_PROMOTED_SIGN (target));
5772 return NULL_RTX;
5774 else if ((TREE_CODE (exp) == STRING_CST
5775 || (TREE_CODE (exp) == MEM_REF
5776 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5777 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5778 == STRING_CST
5779 && integer_zerop (TREE_OPERAND (exp, 1))))
5780 && !nontemporal && !call_param_p
5781 && MEM_P (target))
5783 /* Optimize initialization of an array with a STRING_CST. */
5784 HOST_WIDE_INT exp_len, str_copy_len;
5785 rtx dest_mem;
5786 tree str = TREE_CODE (exp) == STRING_CST
5787 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5789 exp_len = int_expr_size (exp);
5790 if (exp_len <= 0)
5791 goto normal_expr;
5793 if (TREE_STRING_LENGTH (str) <= 0)
5794 goto normal_expr;
5796 if (can_store_by_pieces (exp_len, string_cst_read_str, (void *) str,
5797 MEM_ALIGN (target), false))
5799 store_by_pieces (target, exp_len, string_cst_read_str, (void *) str,
5800 MEM_ALIGN (target), false, RETURN_BEGIN);
5801 return NULL_RTX;
5804 str_copy_len = TREE_STRING_LENGTH (str);
5805 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
5807 str_copy_len += STORE_MAX_PIECES - 1;
5808 str_copy_len &= ~(STORE_MAX_PIECES - 1);
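/* The two statements above are the usual power-of-two round-up idiom,
   valid because the enclosing condition checked that STORE_MAX_PIECES is
   a power of two: (x + (P - 1)) & ~(P - 1) rounds x up to the next
   multiple of P. For example, with STORE_MAX_PIECES == 16:

     str_copy_len = 21;
     str_copy_len += 16 - 1;     // 36
     str_copy_len &= ~(16 - 1);  // 32, the next multiple of 16
*/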
5810 if (str_copy_len >= exp_len)
5811 goto normal_expr;
5813 if (!can_store_by_pieces (str_copy_len, string_cst_read_str,
5814 (void *) str, MEM_ALIGN (target), false))
5815 goto normal_expr;
5817 dest_mem = store_by_pieces (target, str_copy_len, string_cst_read_str,
5818 (void *) str, MEM_ALIGN (target), false,
5819 RETURN_END);
5820 clear_storage (adjust_address_1 (dest_mem, BLKmode, 0, 1, 1, 0,
5821 exp_len - str_copy_len),
5822 GEN_INT (exp_len - str_copy_len), BLOCK_OP_NORMAL);
5823 return NULL_RTX;
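/* Illustrative effect of the path above (assuming STORE_MAX_PIECES is 16
   on the target and the copy can be done by pieces): for

     char buf[40] = "abcd";

   EXP_LEN is 40 but STR_COPY_LEN is rounded up to 16, so the first 16
   bytes (the 5 string bytes plus zero padding) are stored by pieces and
   the remaining 24 bytes are zeroed by a single clear_storage call,
   instead of materializing the full 40-byte image. */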
5825 else
5827 rtx tmp_target;
5829 normal_expr:
5830 /* If we want to use a nontemporal or a reverse order store, force the
5831 value into a register first. */
5832 tmp_target = nontemporal || reverse ? NULL_RTX : target;
5833 tree rexp = exp;
5834 if (TREE_CODE (exp) == STRING_CST
5835 && tmp_target == target
5836 && GET_MODE (target) == BLKmode
5837 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
5839 rtx size = expr_size (exp);
5840 if (CONST_INT_P (size)
5841 && size != const0_rtx
5842 && (UINTVAL (size)
5843 > ((unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (exp) + 32)))
5845 /* If the STRING_CST has a much larger array type than
5846 TREE_STRING_LENGTH, only emit the TREE_STRING_LENGTH part of
5847 it into the rodata section, as the code later on will use
5848 memset zero for the remainder anyway. See PR95052. */
5849 tmp_target = NULL_RTX;
5850 rexp = copy_node (exp);
5851 tree index
5852 = build_index_type (size_int (TREE_STRING_LENGTH (exp) - 1));
5853 TREE_TYPE (rexp) = build_array_type (TREE_TYPE (TREE_TYPE (exp)),
5854 index);
5855 shortened_string_cst = true;
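/* Illustrative case for the shortening above (see PR95052): given

     char a[1000000] = "x";

   only the two bytes "x\0" of the STRING_CST need to reach .rodata; the
   expansion below emits that shortened constant and the later clearing
   code handles the other 999998 bytes, instead of a megabyte of zeros. */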
5858 temp = expand_expr_real (rexp, tmp_target, GET_MODE (target),
5859 (call_param_p
5860 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5861 &alt_rtl, false);
5862 if (shortened_string_cst)
5864 gcc_assert (MEM_P (temp));
5865 temp = change_address (temp, BLKmode, NULL_RTX);
5869 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5870 the same as that of TARGET, adjust the constant. This is needed, for
5871 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5872 only a word-sized value. */
5873 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5874 && TREE_CODE (exp) != ERROR_MARK
5875 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5877 gcc_assert (!shortened_string_cst);
5878 if (GET_MODE_CLASS (GET_MODE (target))
5879 != GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp)))
5880 && known_eq (GET_MODE_BITSIZE (GET_MODE (target)),
5881 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)))))
5883 rtx t = simplify_gen_subreg (GET_MODE (target), temp,
5884 TYPE_MODE (TREE_TYPE (exp)), 0);
5885 if (t)
5886 temp = t;
5888 if (GET_MODE (temp) == VOIDmode)
5889 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5890 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5893 /* If value was not generated in the target, store it there.
5894 Convert the value to TARGET's type first if necessary and emit the
5895 pending incrementations that have been queued when expanding EXP.
5896 Note that we cannot emit the whole queue blindly because this will
5897 effectively disable the POST_INC optimization later.
5899 If TEMP and TARGET compare equal according to rtx_equal_p, but
5900 one or both of them are volatile memory refs, we have to distinguish
5901 two cases:
5902 - expand_expr has used TARGET. In this case, we must not generate
5903 another copy. This can be detected by TEMP and TARGET being equal
5904 under pointer equality (==).
5905 - expand_expr has not used TARGET - that means that the source just
5906 happens to have the same RTX form. Since temp will have been created
5907 by expand_expr, it will compare unequal under pointer equality (==).
5908 We must generate a copy in this case, to reach the correct number
5909 of volatile memory references. */
5911 if ((! rtx_equal_p (temp, target)
5912 || (temp != target && (side_effects_p (temp)
5913 || side_effects_p (target))))
5914 && TREE_CODE (exp) != ERROR_MARK
5915 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5916 but TARGET is not a valid memory reference, TEMP will differ
5917 from TARGET although it is really the same location. */
5918 && !(alt_rtl
5919 && rtx_equal_p (alt_rtl, target)
5920 && !side_effects_p (alt_rtl)
5921 && !side_effects_p (target))
5922 /* If there's nothing to copy, don't bother. Don't call
5923 expr_size unless necessary, because some front-ends' (C++)
5924 expr_size hook must not be given objects that are not
5925 supposed to be bit-copied or bit-initialized. */
5926 && expr_size (exp) != const0_rtx)
5928 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5930 gcc_assert (!shortened_string_cst);
5931 if (GET_MODE (target) == BLKmode)
5933 /* Handle calls that return BLKmode values in registers. */
5934 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5935 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5936 else
5937 store_bit_field (target,
5938 rtx_to_poly_int64 (expr_size (exp))
5939 * BITS_PER_UNIT,
5940 0, 0, 0, GET_MODE (temp), temp, reverse);
5942 else
5943 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5946 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5948 /* Handle copying a string constant into an array. The string
5949 constant may be shorter than the array, so copy just the string's
5950 actual length and clear the rest. First get the size of the data
5951 type of the string, which is actually the size of the target. */
5952 rtx size = expr_size (exp);
5954 if (CONST_INT_P (size)
5955 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5956 emit_block_move (target, temp, size,
5957 (call_param_p
5958 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5959 else
5961 machine_mode pointer_mode
5962 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5963 machine_mode address_mode = get_address_mode (target);
5965 /* Compute the size of the data to copy from the string. */
5966 tree copy_size
5967 = size_binop_loc (loc, MIN_EXPR,
5968 make_tree (sizetype, size),
5969 size_int (TREE_STRING_LENGTH (exp)));
5970 rtx copy_size_rtx
5971 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5972 (call_param_p
5973 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5974 rtx_code_label *label = 0;
5976 /* Copy that much. */
5977 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5978 TYPE_UNSIGNED (sizetype));
5979 emit_block_move (target, temp, copy_size_rtx,
5980 (call_param_p
5981 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5983 /* Figure out how much is left in TARGET that we have to clear.
5984 Do all calculations in pointer_mode. */
5985 poly_int64 const_copy_size;
5986 if (poly_int_rtx_p (copy_size_rtx, &const_copy_size))
5988 size = plus_constant (address_mode, size, -const_copy_size);
5989 target = adjust_address (target, BLKmode, const_copy_size);
5991 else
5993 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5994 copy_size_rtx, NULL_RTX, 0,
5995 OPTAB_LIB_WIDEN);
5997 if (GET_MODE (copy_size_rtx) != address_mode)
5998 copy_size_rtx = convert_to_mode (address_mode,
5999 copy_size_rtx,
6000 TYPE_UNSIGNED (sizetype));
6002 target = offset_address (target, copy_size_rtx,
6003 highest_pow2_factor (copy_size));
6004 label = gen_label_rtx ();
6005 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
6006 GET_MODE (size), 0, label);
6009 if (size != const0_rtx)
6010 clear_storage (target, size, BLOCK_OP_NORMAL);
6012 if (label)
6013 emit_label (label);
6016 else if (shortened_string_cst)
6017 gcc_unreachable ();
6018 /* Handle calls that return values in multiple non-contiguous locations.
6019 The Irix 6 ABI has examples of this. */
6020 else if (GET_CODE (target) == PARALLEL)
6022 if (GET_CODE (temp) == PARALLEL)
6023 emit_group_move (target, temp);
6024 else
6025 emit_group_load (target, temp, TREE_TYPE (exp),
6026 int_size_in_bytes (TREE_TYPE (exp)));
6028 else if (GET_CODE (temp) == PARALLEL)
6029 emit_group_store (target, temp, TREE_TYPE (exp),
6030 int_size_in_bytes (TREE_TYPE (exp)));
6031 else if (GET_MODE (temp) == BLKmode)
6032 emit_block_move (target, temp, expr_size (exp),
6033 (call_param_p
6034 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
6035 /* If we emit a nontemporal store, there is nothing else to do. */
6036 else if (nontemporal && emit_storent_insn (target, temp))
6038 else
6040 if (reverse)
6041 temp = flip_storage_order (GET_MODE (target), temp);
6042 temp = force_operand (temp, target);
6043 if (temp != target)
6044 emit_move_insn (target, temp);
6047 else
6048 gcc_assert (!shortened_string_cst);
6050 return NULL_RTX;
6053 /* Return true if field F of structure TYPE is a flexible array. */
6055 static bool
6056 flexible_array_member_p (const_tree f, const_tree type)
6058 const_tree tf;
6060 tf = TREE_TYPE (f);
6061 return (DECL_CHAIN (f) == NULL
6062 && TREE_CODE (tf) == ARRAY_TYPE
6063 && TYPE_DOMAIN (tf)
6064 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
6065 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
6066 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
6067 && int_size_in_bytes (type) >= 0);
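/* A source-level example (illustrative only):

     struct S { int n; char data[]; };

   DATA's FIELD_DECL is last in the chain, its array type has a domain
   with minimum 0 and no maximum, and S itself has a known constant size,
   so flexible_array_member_p returns true; a constructor for S is then
   considered complete without initializing DATA. */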
6070 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
6071 must have in order for it to completely initialize a value of type TYPE.
6072 Return -1 if the number isn't known.
6074 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
6076 static HOST_WIDE_INT
6077 count_type_elements (const_tree type, bool for_ctor_p)
6079 switch (TREE_CODE (type))
6081 case ARRAY_TYPE:
6083 tree nelts;
6085 nelts = array_type_nelts (type);
6086 if (nelts && tree_fits_uhwi_p (nelts))
6088 unsigned HOST_WIDE_INT n;
6090 n = tree_to_uhwi (nelts) + 1;
6091 if (n == 0 || for_ctor_p)
6092 return n;
6093 else
6094 return n * count_type_elements (TREE_TYPE (type), false);
6096 return for_ctor_p ? -1 : 1;
6099 case RECORD_TYPE:
6101 unsigned HOST_WIDE_INT n;
6102 tree f;
6104 n = 0;
6105 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
6106 if (TREE_CODE (f) == FIELD_DECL)
6108 if (!for_ctor_p)
6109 n += count_type_elements (TREE_TYPE (f), false);
6110 else if (!flexible_array_member_p (f, type))
6111 /* Don't count flexible arrays, which are not supposed
6112 to be initialized. */
6113 n += 1;
6116 return n;
6119 case UNION_TYPE:
6120 case QUAL_UNION_TYPE:
6122 tree f;
6123 HOST_WIDE_INT n, m;
6125 gcc_assert (!for_ctor_p);
6126 /* Estimate the number of scalars in each field and pick the
6127 maximum. Other estimates would do instead; the idea is simply
6128 to make sure that the estimate is not sensitive to the ordering
6129 of the fields. */
6130 n = 1;
6131 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
6132 if (TREE_CODE (f) == FIELD_DECL)
6134 m = count_type_elements (TREE_TYPE (f), false);
6135 /* If the field doesn't span the whole union, add an extra
6136 scalar for the rest. */
6137 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
6138 TYPE_SIZE (type)) != 1)
6139 m++;
6140 if (n < m)
6141 n = m;
6143 return n;
6146 case COMPLEX_TYPE:
6147 return 2;
6149 case VECTOR_TYPE:
6151 unsigned HOST_WIDE_INT nelts;
6152 if (TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
6153 return nelts;
6154 else
6155 return -1;
6158 case INTEGER_TYPE:
6159 case REAL_TYPE:
6160 case FIXED_POINT_TYPE:
6161 case ENUMERAL_TYPE:
6162 case BOOLEAN_TYPE:
6163 case POINTER_TYPE:
6164 case OFFSET_TYPE:
6165 case REFERENCE_TYPE:
6166 case NULLPTR_TYPE:
6167 return 1;
6169 case ERROR_MARK:
6170 return 0;
6172 case VOID_TYPE:
6173 case METHOD_TYPE:
6174 case FUNCTION_TYPE:
6175 case LANG_TYPE:
6176 default:
6177 gcc_unreachable ();
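/* Worked example (illustrative only): for

     struct P { int x; short v[3]; };

   count_type_elements (P, true) returns 2, since a complete constructor
   needs one top-level element per field, while count_type_elements
   (P, false) returns 1 + 3 * 1 == 4, an estimate of the number of
   scalars in the type. */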
6181 /* Helper for categorize_ctor_elements. Identical interface. */
6183 static bool
6184 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
6185 HOST_WIDE_INT *p_unique_nz_elts,
6186 HOST_WIDE_INT *p_init_elts, bool *p_complete)
6188 unsigned HOST_WIDE_INT idx;
6189 HOST_WIDE_INT nz_elts, unique_nz_elts, init_elts, num_fields;
6190 tree value, purpose, elt_type;
6192 /* Whether CTOR is a valid constant initializer, in accordance with what
6193 initializer_constant_valid_p does. If inferred from the constructor
6194 elements, true until proven otherwise. */
6195 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
6196 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
6198 nz_elts = 0;
6199 unique_nz_elts = 0;
6200 init_elts = 0;
6201 num_fields = 0;
6202 elt_type = NULL_TREE;
6204 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
6206 HOST_WIDE_INT mult = 1;
6208 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
6210 tree lo_index = TREE_OPERAND (purpose, 0);
6211 tree hi_index = TREE_OPERAND (purpose, 1);
6213 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
6214 mult = (tree_to_uhwi (hi_index)
6215 - tree_to_uhwi (lo_index) + 1);
6217 num_fields += mult;
6218 elt_type = TREE_TYPE (value);
6220 switch (TREE_CODE (value))
6222 case CONSTRUCTOR:
6224 HOST_WIDE_INT nz = 0, unz = 0, ic = 0;
6226 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &unz,
6227 &ic, p_complete);
6229 nz_elts += mult * nz;
6230 unique_nz_elts += unz;
6231 init_elts += mult * ic;
6233 if (const_from_elts_p && const_p)
6234 const_p = const_elt_p;
6236 break;
6238 case INTEGER_CST:
6239 case REAL_CST:
6240 case FIXED_CST:
6241 if (!initializer_zerop (value))
6243 nz_elts += mult;
6244 unique_nz_elts++;
6246 init_elts += mult;
6247 break;
6249 case STRING_CST:
6250 nz_elts += mult * TREE_STRING_LENGTH (value);
6251 unique_nz_elts += TREE_STRING_LENGTH (value);
6252 init_elts += mult * TREE_STRING_LENGTH (value);
6253 break;
6255 case COMPLEX_CST:
6256 if (!initializer_zerop (TREE_REALPART (value)))
6258 nz_elts += mult;
6259 unique_nz_elts++;
6261 if (!initializer_zerop (TREE_IMAGPART (value)))
6263 nz_elts += mult;
6264 unique_nz_elts++;
6266 init_elts += 2 * mult;
6267 break;
6269 case VECTOR_CST:
6271 /* We can only construct constant-length vectors using
6272 CONSTRUCTOR. */
6273 unsigned int nunits = VECTOR_CST_NELTS (value).to_constant ();
6274 for (unsigned int i = 0; i < nunits; ++i)
6276 tree v = VECTOR_CST_ELT (value, i);
6277 if (!initializer_zerop (v))
6279 nz_elts += mult;
6280 unique_nz_elts++;
6282 init_elts += mult;
6285 break;
6287 default:
6289 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
6290 nz_elts += mult * tc;
6291 unique_nz_elts += tc;
6292 init_elts += mult * tc;
6294 if (const_from_elts_p && const_p)
6295 const_p
6296 = initializer_constant_valid_p (value,
6297 elt_type,
6298 TYPE_REVERSE_STORAGE_ORDER
6299 (TREE_TYPE (ctor)))
6300 != NULL_TREE;
6302 break;
6306 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
6307 num_fields, elt_type))
6308 *p_complete = false;
6310 *p_nz_elts += nz_elts;
6311 *p_unique_nz_elts += unique_nz_elts;
6312 *p_init_elts += init_elts;
6314 return const_p;
6317 /* Examine CTOR to discover:
6318 * how many scalar fields are set to nonzero values,
6319 and place it in *P_NZ_ELTS;
6320 * the same, but counting each RANGE_EXPR with a multiplier of 1 instead
6321 of high - low + 1 (useful for callers to spot ctors that could be
6322 cheaply initialized with loops, perhaps nested, rather than copied
6323 from huge read-only data),
6324 and place it in *P_UNIQUE_NZ_ELTS;
6325 * how many scalar fields in total are in CTOR,
6326 and place it in *P_INIT_ELTS;
6327 * whether the constructor is complete -- in the sense that every
6328 meaningful byte is explicitly given a value --
6329 and place it in *P_COMPLETE.
6331 Return whether or not CTOR is a valid static constant initializer, the same
6332 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
6334 bool
6335 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
6336 HOST_WIDE_INT *p_unique_nz_elts,
6337 HOST_WIDE_INT *p_init_elts, bool *p_complete)
6339 *p_nz_elts = 0;
6340 *p_unique_nz_elts = 0;
6341 *p_init_elts = 0;
6342 *p_complete = true;
6344 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_unique_nz_elts,
6345 p_init_elts, p_complete);
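/* Worked example (illustrative, assuming all four elements appear in the
   CONSTRUCTOR): for

     int a[4] = { 5, 0, 7, 0 };

   categorize_ctor_elements would report *P_NZ_ELTS == 2,
   *P_UNIQUE_NZ_ELTS == 2, *P_INIT_ELTS == 4 and *P_COMPLETE == true, and
   would return true, every element being a valid static initializer. */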
6348 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
6349 of which had type LAST_TYPE. Each element was itself a complete
6350 initializer, in the sense that every meaningful byte was explicitly
6351 given a value. Return true if the same is true for the constructor
6352 as a whole. */
6354 bool
6355 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
6356 const_tree last_type)
6358 if (TREE_CODE (type) == UNION_TYPE
6359 || TREE_CODE (type) == QUAL_UNION_TYPE)
6361 if (num_elts == 0)
6362 return false;
6364 gcc_assert (num_elts == 1 && last_type);
6366 /* ??? We could look at each element of the union, and find the
6367 largest element. Which would avoid comparing the size of the
6368 initialized element against any tail padding in the union.
6369 Doesn't seem worth the effort... */
6370 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
6373 return count_type_elements (type, true) == num_elts;
6376 /* Return 1 if EXP contains mostly (more than 3/4) zeros. */
6378 static int
6379 mostly_zeros_p (const_tree exp)
6381 if (TREE_CODE (exp) == CONSTRUCTOR)
6383 HOST_WIDE_INT nz_elts, unz_elts, init_elts;
6384 bool complete_p;
6386 categorize_ctor_elements (exp, &nz_elts, &unz_elts, &init_elts,
6387 &complete_p);
6388 return !complete_p || nz_elts < init_elts / 4;
6391 return initializer_zerop (exp);
6394 /* Return 1 if EXP contains all zeros. */
6396 static int
6397 all_zeros_p (const_tree exp)
6399 if (TREE_CODE (exp) == CONSTRUCTOR)
6401 HOST_WIDE_INT nz_elts, unz_elts, init_elts;
6402 bool complete_p;
6404 categorize_ctor_elements (exp, &nz_elts, &unz_elts, &init_elts,
6405 &complete_p);
6406 return nz_elts == 0;
6409 return initializer_zerop (exp);
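/* Illustrative thresholds for the two predicates above (assuming every
   element appears in the CONSTRUCTOR): in an 8-element initializer with
   one nonzero entry, nz_elts == 1 is below init_elts / 4 == 2, so
   mostly_zeros_p returns nonzero and store_constructor will clear the
   whole object before storing the single nonzero element; with three
   nonzero entries it returns 0. An incomplete constructor always counts
   as mostly zeros, and all_zeros_p instead requires nz_elts == 0. */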
6412 /* Helper function for store_constructor.
6413 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
6414 CLEARED is as for store_constructor.
6415 ALIAS_SET is the alias set to use for any stores.
6416 If REVERSE is true, the store is to be done in reverse order.
6418 This provides a recursive shortcut back to store_constructor when it isn't
6419 necessary to go through store_field. This is so that we can pass through
6420 the cleared field to let store_constructor know that we may not have to
6421 clear a substructure if the outer structure has already been cleared. */
6423 static void
6424 store_constructor_field (rtx target, poly_uint64 bitsize, poly_int64 bitpos,
6425 poly_uint64 bitregion_start,
6426 poly_uint64 bitregion_end,
6427 machine_mode mode,
6428 tree exp, int cleared,
6429 alias_set_type alias_set, bool reverse)
6431 poly_int64 bytepos;
6432 poly_uint64 bytesize;
6433 if (TREE_CODE (exp) == CONSTRUCTOR
6434 /* We can only call store_constructor recursively if the size and
6435 bit position are on a byte boundary. */
6436 && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
6437 && maybe_ne (bitsize, 0U)
6438 && multiple_p (bitsize, BITS_PER_UNIT, &bytesize)
6439 /* If we have a nonzero bitpos for a register target, then we just
6440 let store_field do the bitfield handling. This is unlikely to
6441 generate unnecessary clear instructions anyways. */
6442 && (known_eq (bitpos, 0) || MEM_P (target)))
6444 if (MEM_P (target))
6446 machine_mode target_mode = GET_MODE (target);
6447 if (target_mode != BLKmode
6448 && !multiple_p (bitpos, GET_MODE_ALIGNMENT (target_mode)))
6449 target_mode = BLKmode;
6450 target = adjust_address (target, target_mode, bytepos);
6454 /* Update the alias set, if required. */
6455 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
6456 && MEM_ALIAS_SET (target) != 0)
6458 target = copy_rtx (target);
6459 set_mem_alias_set (target, alias_set);
6462 store_constructor (exp, target, cleared, bytesize, reverse);
6464 else
6465 store_field (target, bitsize, bitpos, bitregion_start, bitregion_end, mode,
6466 exp, alias_set, false, reverse);
6470 /* Returns the number of FIELD_DECLs in TYPE. */
6472 static int
6473 fields_length (const_tree type)
6475 tree t = TYPE_FIELDS (type);
6476 int count = 0;
6478 for (; t; t = DECL_CHAIN (t))
6479 if (TREE_CODE (t) == FIELD_DECL)
6480 ++count;
6482 return count;
6486 /* Store the value of constructor EXP into the rtx TARGET.
6487 TARGET is either a REG or a MEM; we know it cannot conflict, since
6488 safe_from_p has been called.
6489 CLEARED is true if TARGET is known to have been zero'd.
6490 SIZE is the number of bytes of TARGET we are allowed to modify: this
6491 may not be the same as the size of EXP if we are assigning to a field
6492 which has been packed to exclude padding bits.
6493 If REVERSE is true, the store is to be done in reverse order. */
6495 static void
6496 store_constructor (tree exp, rtx target, int cleared, poly_int64 size,
6497 bool reverse)
6499 tree type = TREE_TYPE (exp);
6500 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
6501 poly_int64 bitregion_end = known_gt (size, 0) ? size * BITS_PER_UNIT - 1 : 0;
6503 switch (TREE_CODE (type))
6505 case RECORD_TYPE:
6506 case UNION_TYPE:
6507 case QUAL_UNION_TYPE:
6509 unsigned HOST_WIDE_INT idx;
6510 tree field, value;
6512 /* The storage order is specified for every aggregate type. */
6513 reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6515 /* If size is zero or the target is already cleared, do nothing. */
6516 if (known_eq (size, 0) || cleared)
6517 cleared = 1;
6518 /* We either clear the aggregate or indicate the value is dead. */
6519 else if ((TREE_CODE (type) == UNION_TYPE
6520 || TREE_CODE (type) == QUAL_UNION_TYPE)
6521 && ! CONSTRUCTOR_ELTS (exp))
6522 /* If the constructor is empty, clear the union. */
6524 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6525 cleared = 1;
6528 /* If we are building a static constructor into a register,
6529 set the initial value to zero so we can fold the value into
6530 a constant. But if more than one register is involved,
6531 this probably loses. */
6532 else if (REG_P (target) && TREE_STATIC (exp)
6533 && known_le (GET_MODE_SIZE (GET_MODE (target)),
6534 REGMODE_NATURAL_SIZE (GET_MODE (target))))
6536 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6537 cleared = 1;
6540 /* If the constructor has fewer fields than the structure or
6541 if we are initializing the structure to mostly zeros, clear
6542 the whole structure first. Don't do this if TARGET is a
6543 register whose mode size isn't equal to SIZE since
6544 clear_storage can't handle this case. */
6545 else if (known_size_p (size)
6546 && (((int) CONSTRUCTOR_NELTS (exp) != fields_length (type))
6547 || mostly_zeros_p (exp))
6548 && (!REG_P (target)
6549 || known_eq (GET_MODE_SIZE (GET_MODE (target)), size)))
6551 clear_storage (target, gen_int_mode (size, Pmode),
6552 BLOCK_OP_NORMAL);
6553 cleared = 1;
6556 if (REG_P (target) && !cleared)
6557 emit_clobber (target);
6559 /* Store each element of the constructor into the
6560 corresponding field of TARGET. */
6561 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6563 machine_mode mode;
6564 HOST_WIDE_INT bitsize;
6565 HOST_WIDE_INT bitpos = 0;
6566 tree offset;
6567 rtx to_rtx = target;
6569 /* Just ignore missing fields. We cleared the whole
6570 structure, above, if any fields are missing. */
6571 if (field == 0)
6572 continue;
6574 if (cleared && initializer_zerop (value))
6575 continue;
6577 if (tree_fits_uhwi_p (DECL_SIZE (field)))
6578 bitsize = tree_to_uhwi (DECL_SIZE (field));
6579 else
6580 gcc_unreachable ();
6582 mode = DECL_MODE (field);
6583 if (DECL_BIT_FIELD (field))
6584 mode = VOIDmode;
6586 offset = DECL_FIELD_OFFSET (field);
6587 if (tree_fits_shwi_p (offset)
6588 && tree_fits_shwi_p (bit_position (field)))
6590 bitpos = int_bit_position (field);
6591 offset = NULL_TREE;
6593 else
6594 gcc_unreachable ();
6596 /* If this initializes a field that is smaller than a
6597 word, at the start of a word, try to widen it to a full
6598 word. This special case allows us to output C++ member
6599 function initializations in a form that the optimizers
6600 can understand. */
6601 if (WORD_REGISTER_OPERATIONS
6602 && REG_P (target)
6603 && bitsize < BITS_PER_WORD
6604 && bitpos % BITS_PER_WORD == 0
6605 && GET_MODE_CLASS (mode) == MODE_INT
6606 && TREE_CODE (value) == INTEGER_CST
6607 && exp_size >= 0
6608 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6610 type = TREE_TYPE (value);
6612 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6614 type = lang_hooks.types.type_for_mode
6615 (word_mode, TYPE_UNSIGNED (type));
6616 value = fold_convert (type, value);
6617 /* Make sure the bits beyond the original bitsize are zero
6618 so that we can correctly avoid extra zeroing stores in
6619 later constructor elements. */
6620 tree bitsize_mask
6621 = wide_int_to_tree (type, wi::mask (bitsize, false,
6622 BITS_PER_WORD));
6623 value = fold_build2 (BIT_AND_EXPR, type, value, bitsize_mask);
6626 if (BYTES_BIG_ENDIAN)
6627 value
6628 = fold_build2 (LSHIFT_EXPR, type, value,
6629 build_int_cst (type,
6630 BITS_PER_WORD - bitsize));
6631 bitsize = BITS_PER_WORD;
6632 mode = word_mode;
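/* Illustrative effect of the widening above (WORD_REGISTER_OPERATIONS
   targets only): a constant initializer such as

     struct W { unsigned char tag; unsigned char pad[3]; };
     struct W w = { 7, { 0, 0, 0 } };

   whose TAG field starts a word of a REG target may be rewritten as one
   full word_mode store of the masked (and, on big-endian targets,
   left-shifted) constant, a form the RTL optimizers handle much better
   than a bit-field insertion. */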
6635 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6636 && DECL_NONADDRESSABLE_P (field))
6638 to_rtx = copy_rtx (to_rtx);
6639 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6642 store_constructor_field (to_rtx, bitsize, bitpos,
6643 0, bitregion_end, mode,
6644 value, cleared,
6645 get_alias_set (TREE_TYPE (field)),
6646 reverse);
6648 break;
6650 case ARRAY_TYPE:
6652 tree value, index;
6653 unsigned HOST_WIDE_INT i;
6654 int need_to_clear;
6655 tree domain;
6656 tree elttype = TREE_TYPE (type);
6657 int const_bounds_p;
6658 HOST_WIDE_INT minelt = 0;
6659 HOST_WIDE_INT maxelt = 0;
6661 /* The storage order is specified for every aggregate type. */
6662 reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6664 domain = TYPE_DOMAIN (type);
6665 const_bounds_p = (TYPE_MIN_VALUE (domain)
6666 && TYPE_MAX_VALUE (domain)
6667 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6668 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6670 /* If we have constant bounds for the range of the type, get them. */
6671 if (const_bounds_p)
6673 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6674 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6677 /* If the constructor has fewer elements than the array, clear
6678 the whole array first. Similarly if this is a static
6679 constructor of a non-BLKmode object. */
6680 if (cleared)
6681 need_to_clear = 0;
6682 else if (REG_P (target) && TREE_STATIC (exp))
6683 need_to_clear = 1;
6684 else
6686 unsigned HOST_WIDE_INT idx;
6687 HOST_WIDE_INT count = 0, zero_count = 0;
6688 need_to_clear = ! const_bounds_p;
6690 /* This loop is a more accurate version of the loop in
6691 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6692 is also needed to check for missing elements. */
6693 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6695 HOST_WIDE_INT this_node_count;
6697 if (need_to_clear)
6698 break;
6700 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6702 tree lo_index = TREE_OPERAND (index, 0);
6703 tree hi_index = TREE_OPERAND (index, 1);
6705 if (! tree_fits_uhwi_p (lo_index)
6706 || ! tree_fits_uhwi_p (hi_index))
6708 need_to_clear = 1;
6709 break;
6712 this_node_count = (tree_to_uhwi (hi_index)
6713 - tree_to_uhwi (lo_index) + 1);
6715 else
6716 this_node_count = 1;
6718 count += this_node_count;
6719 if (mostly_zeros_p (value))
6720 zero_count += this_node_count;
6723 /* Clear the entire array first if there are any missing
6724 elements, or if the incidence of zero elements is >=
6725 75%. */
6726 if (! need_to_clear
6727 && (count < maxelt - minelt + 1
6728 || 4 * zero_count >= 3 * count))
6729 need_to_clear = 1;
6732 if (need_to_clear && maybe_gt (size, 0))
6734 if (REG_P (target))
6735 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6736 else
6737 clear_storage (target, gen_int_mode (size, Pmode),
6738 BLOCK_OP_NORMAL);
6739 cleared = 1;
6742 if (!cleared && REG_P (target))
6743 /* Inform later passes that the old value is dead. */
6744 emit_clobber (target);
6746 /* Store each element of the constructor into the
6747 corresponding element of TARGET, determined by counting the
6748 elements. */
6749 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6751 machine_mode mode;
6752 poly_int64 bitsize;
6753 HOST_WIDE_INT bitpos;
6754 rtx xtarget = target;
6756 if (cleared && initializer_zerop (value))
6757 continue;
6759 mode = TYPE_MODE (elttype);
6760 if (mode != BLKmode)
6761 bitsize = GET_MODE_BITSIZE (mode);
6762 else if (!poly_int_tree_p (TYPE_SIZE (elttype), &bitsize))
6763 bitsize = -1;
6765 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6767 tree lo_index = TREE_OPERAND (index, 0);
6768 tree hi_index = TREE_OPERAND (index, 1);
6769 rtx index_r, pos_rtx;
6770 HOST_WIDE_INT lo, hi, count;
6771 tree position;
6773 /* If the range is constant and "small", unroll the loop. */
6774 if (const_bounds_p
6775 && tree_fits_shwi_p (lo_index)
6776 && tree_fits_shwi_p (hi_index)
6777 && (lo = tree_to_shwi (lo_index),
6778 hi = tree_to_shwi (hi_index),
6779 count = hi - lo + 1,
6780 (!MEM_P (target)
6781 || count <= 2
6782 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6783 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6784 <= 40 * 8)))))
6786 lo -= minelt; hi -= minelt;
6787 for (; lo <= hi; lo++)
6789 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6791 if (MEM_P (target)
6792 && !MEM_KEEP_ALIAS_SET_P (target)
6793 && TREE_CODE (type) == ARRAY_TYPE
6794 && TYPE_NONALIASED_COMPONENT (type))
6796 target = copy_rtx (target);
6797 MEM_KEEP_ALIAS_SET_P (target) = 1;
6800 store_constructor_field
6801 (target, bitsize, bitpos, 0, bitregion_end,
6802 mode, value, cleared,
6803 get_alias_set (elttype), reverse);
6806 else
6808 rtx_code_label *loop_start = gen_label_rtx ();
6809 rtx_code_label *loop_end = gen_label_rtx ();
6810 tree exit_cond;
6812 expand_normal (hi_index);
6814 index = build_decl (EXPR_LOCATION (exp),
6815 VAR_DECL, NULL_TREE, domain);
6816 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6817 SET_DECL_RTL (index, index_r);
6818 store_expr (lo_index, index_r, 0, false, reverse);
6820 /* Build the head of the loop. */
6821 do_pending_stack_adjust ();
6822 emit_label (loop_start);
6824 /* Assign value to element index. */
6825 position =
6826 fold_convert (ssizetype,
6827 fold_build2 (MINUS_EXPR,
6828 TREE_TYPE (index),
6829 index,
6830 TYPE_MIN_VALUE (domain)));
6832 position =
6833 size_binop (MULT_EXPR, position,
6834 fold_convert (ssizetype,
6835 TYPE_SIZE_UNIT (elttype)));
6837 pos_rtx = expand_normal (position);
6838 xtarget = offset_address (target, pos_rtx,
6839 highest_pow2_factor (position));
6840 xtarget = adjust_address (xtarget, mode, 0);
6841 if (TREE_CODE (value) == CONSTRUCTOR)
6842 store_constructor (value, xtarget, cleared,
6843 exact_div (bitsize, BITS_PER_UNIT),
6844 reverse);
6845 else
6846 store_expr (value, xtarget, 0, false, reverse);
6848 /* Generate a conditional jump to exit the loop. */
6849 exit_cond = build2 (LT_EXPR, integer_type_node,
6850 index, hi_index);
6851 jumpif (exit_cond, loop_end,
6852 profile_probability::uninitialized ());
6854 /* Update the loop counter, and jump to the head of
6855 the loop. */
6856 expand_assignment (index,
6857 build2 (PLUS_EXPR, TREE_TYPE (index),
6858 index, integer_one_node),
6859 false);
6861 emit_jump (loop_start);
6863 /* Build the end of the loop. */
6864 emit_label (loop_end);
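/* Source-level illustration of the looping path above (a GNU C
   designated range initializer):

     int a[1000] = { [0 ... 999] = x };

   a range this large fails the unrolling heuristic, so the code above
   expands it as a counted loop: an index variable initialized from
   LO_INDEX, a body that stores X at the element position computed from
   the index, an increment, and a conditional branch. */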
6867 else if ((index != 0 && ! tree_fits_shwi_p (index))
6868 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6870 tree position;
6872 if (index == 0)
6873 index = ssize_int (1);
6875 if (minelt)
6876 index = fold_convert (ssizetype,
6877 fold_build2 (MINUS_EXPR,
6878 TREE_TYPE (index),
6879 index,
6880 TYPE_MIN_VALUE (domain)));
6882 position =
6883 size_binop (MULT_EXPR, index,
6884 fold_convert (ssizetype,
6885 TYPE_SIZE_UNIT (elttype)));
6886 xtarget = offset_address (target,
6887 expand_normal (position),
6888 highest_pow2_factor (position));
6889 xtarget = adjust_address (xtarget, mode, 0);
6890 store_expr (value, xtarget, 0, false, reverse);
6892 else
6894 if (index != 0)
6895 bitpos = ((tree_to_shwi (index) - minelt)
6896 * tree_to_uhwi (TYPE_SIZE (elttype)));
6897 else
6898 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6900 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6901 && TREE_CODE (type) == ARRAY_TYPE
6902 && TYPE_NONALIASED_COMPONENT (type))
6904 target = copy_rtx (target);
6905 MEM_KEEP_ALIAS_SET_P (target) = 1;
6907 store_constructor_field (target, bitsize, bitpos, 0,
6908 bitregion_end, mode, value,
6909 cleared, get_alias_set (elttype),
6910 reverse);
6913 break;
6916 case VECTOR_TYPE:
6918 unsigned HOST_WIDE_INT idx;
6919 constructor_elt *ce;
6920 int i;
6921 int need_to_clear;
6922 insn_code icode = CODE_FOR_nothing;
6923 tree elt;
6924 tree elttype = TREE_TYPE (type);
6925 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6926 machine_mode eltmode = TYPE_MODE (elttype);
6927 HOST_WIDE_INT bitsize;
6928 HOST_WIDE_INT bitpos;
6929 rtvec vector = NULL;
6930 poly_uint64 n_elts;
6931 unsigned HOST_WIDE_INT const_n_elts;
6932 alias_set_type alias;
6933 bool vec_vec_init_p = false;
6934 machine_mode mode = GET_MODE (target);
6936 gcc_assert (eltmode != BLKmode);
6938 /* Try using vec_duplicate_optab for uniform vectors. */
6939 if (!TREE_SIDE_EFFECTS (exp)
6940 && VECTOR_MODE_P (mode)
6941 && eltmode == GET_MODE_INNER (mode)
6942 && ((icode = optab_handler (vec_duplicate_optab, mode))
6943 != CODE_FOR_nothing)
6944 && (elt = uniform_vector_p (exp)))
6946 class expand_operand ops[2];
6947 create_output_operand (&ops[0], target, mode);
6948 create_input_operand (&ops[1], expand_normal (elt), eltmode);
6949 expand_insn (icode, 2, ops);
6950 if (!rtx_equal_p (target, ops[0].value))
6951 emit_move_insn (target, ops[0].value);
6952 break;
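/* Illustrative case for the vec_duplicate path above: a uniform
   constructor such as (GNU C vector extension)

     typedef int v4si __attribute__ ((vector_size (16)));
     v4si v = { x, x, x, x };

   satisfies uniform_vector_p, so a single vec_duplicate instruction
   (e.g. a broadcast) replaces four element stores, provided the target
   implements the optab for V4SImode. */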
6955 n_elts = TYPE_VECTOR_SUBPARTS (type);
6956 if (REG_P (target)
6957 && VECTOR_MODE_P (mode)
6958 && n_elts.is_constant (&const_n_elts))
6960 machine_mode emode = eltmode;
6961 bool vector_typed_elts_p = false;
6963 if (CONSTRUCTOR_NELTS (exp)
6964 && (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value))
6965 == VECTOR_TYPE))
6967 tree etype = TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value);
6968 gcc_assert (known_eq (CONSTRUCTOR_NELTS (exp)
6969 * TYPE_VECTOR_SUBPARTS (etype),
6970 n_elts));
6971 emode = TYPE_MODE (etype);
6972 vector_typed_elts_p = true;
6974 icode = convert_optab_handler (vec_init_optab, mode, emode);
6975 if (icode != CODE_FOR_nothing)
6977 unsigned int n = const_n_elts;
6979 if (vector_typed_elts_p)
6981 n = CONSTRUCTOR_NELTS (exp);
6982 vec_vec_init_p = true;
6984 vector = rtvec_alloc (n);
6985 for (unsigned int k = 0; k < n; k++)
6986 RTVEC_ELT (vector, k) = CONST0_RTX (emode);
6990 /* If the constructor has fewer elements than the vector,
6991 clear the whole vector first. Similarly if this is a static
6992 constructor of a non-BLKmode object. */
6993 if (cleared)
6994 need_to_clear = 0;
6995 else if (REG_P (target) && TREE_STATIC (exp))
6996 need_to_clear = 1;
6997 else
6999 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
7000 tree value;
7002 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7004 tree sz = TYPE_SIZE (TREE_TYPE (value));
7005 int n_elts_here
7006 = tree_to_uhwi (int_const_binop (TRUNC_DIV_EXPR, sz,
7007 TYPE_SIZE (elttype)));
7009 count += n_elts_here;
7010 if (mostly_zeros_p (value))
7011 zero_count += n_elts_here;
7014 /* Clear the entire vector first if there are any missing elements,
7015 or if the incidence of zero elements is >= 75%. */
7016 need_to_clear = (maybe_lt (count, n_elts)
7017 || 4 * zero_count >= 3 * count);
7020 if (need_to_clear && maybe_gt (size, 0) && !vector)
7022 if (REG_P (target))
7023 emit_move_insn (target, CONST0_RTX (mode));
7024 else
7025 clear_storage (target, gen_int_mode (size, Pmode),
7026 BLOCK_OP_NORMAL);
7027 cleared = 1;
7030 /* Inform later passes that the old value is dead. */
7031 if (!cleared && !vector && REG_P (target))
7032 emit_move_insn (target, CONST0_RTX (mode));
7034 if (MEM_P (target))
7035 alias = MEM_ALIAS_SET (target);
7036 else
7037 alias = get_alias_set (elttype);
7039 /* Store each element of the constructor into the corresponding
7040 element of TARGET, determined by counting the elements. */
7041 for (idx = 0, i = 0;
7042 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
7043 idx++, i += bitsize / elt_size)
7045 HOST_WIDE_INT eltpos;
7046 tree value = ce->value;
7048 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
7049 if (cleared && initializer_zerop (value))
7050 continue;
7052 if (ce->index)
7053 eltpos = tree_to_uhwi (ce->index);
7054 else
7055 eltpos = i;
7057 if (vector)
7059 if (vec_vec_init_p)
7061 gcc_assert (ce->index == NULL_TREE);
7062 gcc_assert (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE);
7063 eltpos = idx;
7065 else
7066 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
7067 RTVEC_ELT (vector, eltpos) = expand_normal (value);
7069 else
7071 machine_mode value_mode
7072 = (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
7073 ? TYPE_MODE (TREE_TYPE (value)) : eltmode);
7074 bitpos = eltpos * elt_size;
7075 store_constructor_field (target, bitsize, bitpos, 0,
7076 bitregion_end, value_mode,
7077 value, cleared, alias, reverse);
7081 if (vector)
7082 emit_insn (GEN_FCN (icode) (target,
7083 gen_rtx_PARALLEL (mode, vector)));
7084 break;
7087 default:
7088 gcc_unreachable ();
7092 /* Store the value of EXP (an expression tree)
7093 into a subfield of TARGET which has mode MODE and occupies
7094 BITSIZE bits, starting BITPOS bits from the start of TARGET.
7095 If MODE is VOIDmode, it means that we are storing into a bit-field.
7097 BITREGION_START is bitpos of the first bitfield in this region.
7098 BITREGION_END is the bitpos of the ending bitfield in this region.
7099 These two fields are 0 if the C++ memory model does not apply,
7100 or we are not interested in keeping track of bitfield regions.
7102 Always return const0_rtx unless we have something particular to
7103 return.
7105 ALIAS_SET is the alias set for the destination. This value will
7106 (in general) be different from that for TARGET, since TARGET is a
7107 reference to the containing structure.
7109 If NONTEMPORAL is true, try generating a nontemporal store.
7111 If REVERSE is true, the store is to be done in reverse order. */
7113 static rtx
7114 store_field (rtx target, poly_int64 bitsize, poly_int64 bitpos,
7115 poly_uint64 bitregion_start, poly_uint64 bitregion_end,
7116 machine_mode mode, tree exp,
7117 alias_set_type alias_set, bool nontemporal, bool reverse)
7119 if (TREE_CODE (exp) == ERROR_MARK)
7120 return const0_rtx;
7122 /* If we have nothing to store, do nothing unless the expression has
7123 side-effects. Don't do that for a zero-sized addressable lhs of
7124 calls. */
7125 if (known_eq (bitsize, 0)
7126 && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
7127 || TREE_CODE (exp) != CALL_EXPR))
7128 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
7130 if (GET_CODE (target) == CONCAT)
7132 /* We're storing into a struct containing a single __complex. */
7134 gcc_assert (known_eq (bitpos, 0));
7135 return store_expr (exp, target, 0, nontemporal, reverse);
7138 /* If the structure is in a register or if the component
7139 is a bit field, we cannot use addressing to access it.
7140 Use bit-field techniques or SUBREG to store in it. */
7142 poly_int64 decl_bitsize;
7143 if (mode == VOIDmode
7144 || (mode != BLKmode && ! direct_store[(int) mode]
7145 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7146 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
7147 || REG_P (target)
7148 || GET_CODE (target) == SUBREG
7149 /* If the field isn't aligned enough to store as an ordinary memref,
7150 store it as a bit field. */
7151 || (mode != BLKmode
7152 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
7153 || !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode)))
7154 && targetm.slow_unaligned_access (mode, MEM_ALIGN (target)))
7155 || !multiple_p (bitpos, BITS_PER_UNIT)))
7156 || (known_size_p (bitsize)
7157 && mode != BLKmode
7158 && maybe_gt (GET_MODE_BITSIZE (mode), bitsize))
7159 /* If the RHS and field are a constant size and the size of the
7160 RHS isn't the same size as the bitfield, we must use bitfield
7161 operations. */
7162 || (known_size_p (bitsize)
7163 && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
7164 && maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp))),
7165 bitsize)
7166 /* Except for initialization of full bytes from a CONSTRUCTOR, which
7167 we will handle specially below. */
7168 && !(TREE_CODE (exp) == CONSTRUCTOR
7169 && multiple_p (bitsize, BITS_PER_UNIT))
7170 /* And except for bitwise copying of TREE_ADDRESSABLE types,
7171 where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp)
7172 includes some extra padding. store_expr / expand_expr will in
7173 that case call get_inner_reference that will have the bitsize
7174 we check here and thus the block move will not clobber the
7175 padding that shouldn't be clobbered. In the future we could
7176 replace the TREE_ADDRESSABLE check with a check that
7177 get_base_address needs to live in memory. */
7178 && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
7179 || TREE_CODE (exp) != COMPONENT_REF
7180 || !multiple_p (bitsize, BITS_PER_UNIT)
7181 || !multiple_p (bitpos, BITS_PER_UNIT)
7182 || !poly_int_tree_p (DECL_SIZE (TREE_OPERAND (exp, 1)),
7183 &decl_bitsize)
7184 || maybe_ne (decl_bitsize, bitsize)))
7185 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
7186 decl we must use bitfield operations. */
7187 || (known_size_p (bitsize)
7188 && TREE_CODE (exp) == MEM_REF
7189 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7190 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7191 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7192 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
7194 rtx temp;
7195 gimple *nop_def;
7197 /* If EXP is a NOP_EXPR of precision less than its mode, then that
7198 implies a mask operation. If the precision is the same size as
7199 the field we're storing into, that mask is redundant. This is
7200 particularly common with bit field assignments generated by the
7201 C front end. */
7202 nop_def = get_def_for_expr (exp, NOP_EXPR);
7203 if (nop_def)
7205 tree type = TREE_TYPE (exp);
7206 if (INTEGRAL_TYPE_P (type)
7207 && maybe_ne (TYPE_PRECISION (type),
7208 GET_MODE_BITSIZE (TYPE_MODE (type)))
7209 && known_eq (bitsize, TYPE_PRECISION (type)))
7211 tree op = gimple_assign_rhs1 (nop_def);
7212 type = TREE_TYPE (op);
7213 if (INTEGRAL_TYPE_P (type)
7214 && known_ge (TYPE_PRECISION (type), bitsize))
7215 exp = op;
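/* Illustrative case for the mask elision above: for

     struct B { unsigned b : 5; } *p;
     void f (unsigned x) { p->b = x; }

   the C front end wraps X in a conversion to the 5-bit field type; its
   precision (5) differs from QImode's size but equals the field's
   bitsize, so the masking the conversion implies is redundant and X can
   be stored directly by the bit-field insertion below. */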
7219 temp = expand_normal (exp);
7221 /* We don't support variable-sized BLKmode bitfields, since our
7222 handling of BLKmode is bound up with the ability to break
7223 things into words. */
7224 gcc_assert (mode != BLKmode || bitsize.is_constant ());
7226 /* Handle calls that return values in multiple non-contiguous locations.
7227 The Irix 6 ABI has examples of this. */
7228 if (GET_CODE (temp) == PARALLEL)
7230 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
7231 machine_mode temp_mode = GET_MODE (temp);
7232 if (temp_mode == BLKmode || temp_mode == VOIDmode)
7233 temp_mode = smallest_int_mode_for_size (size * BITS_PER_UNIT);
7234 rtx temp_target = gen_reg_rtx (temp_mode);
7235 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
7236 temp = temp_target;
7239 /* Handle calls that return BLKmode values in registers. */
7240 else if (mode == BLKmode && REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
7242 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
7243 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
7244 temp = temp_target;
7247 /* If the value has aggregate type and an integral mode then, if BITSIZE
7248 is narrower than this mode and this is for big-endian data, we first
7249 need to put the value into the low-order bits for store_bit_field,
7250 except when MODE is BLKmode and BITSIZE is larger than the word size
7251 (see the handling of fields larger than a word in store_bit_field).
7252 Moreover, the field may be not aligned on a byte boundary; in this
7253 case, if it has reverse storage order, it needs to be accessed as a
7254 scalar field with reverse storage order and we must first put the
7255 value into target order. */
7256 scalar_int_mode temp_mode;
7257 if (AGGREGATE_TYPE_P (TREE_TYPE (exp))
7258 && is_int_mode (GET_MODE (temp), &temp_mode))
7260 HOST_WIDE_INT size = GET_MODE_BITSIZE (temp_mode);
7262 reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp));
7264 if (reverse)
7265 temp = flip_storage_order (temp_mode, temp);
7267 gcc_checking_assert (known_le (bitsize, size));
7268 if (maybe_lt (bitsize, size)
7269 && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN
7270 /* Use of to_constant for BLKmode was checked above. */
7271 && !(mode == BLKmode && bitsize.to_constant () > BITS_PER_WORD))
7272 temp = expand_shift (RSHIFT_EXPR, temp_mode, temp,
7273 size - bitsize, NULL_RTX, 1);
7276 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
7277 if (mode != VOIDmode && mode != BLKmode
7278 && mode != TYPE_MODE (TREE_TYPE (exp)))
7279 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
7281 /* If the mode of TEMP and TARGET is BLKmode, both must be in memory
7282 and BITPOS must be aligned on a byte boundary. If so, we simply do
7283 a block copy. Likewise for a BLKmode-like TARGET. */
7284 if (GET_MODE (temp) == BLKmode
7285 && (GET_MODE (target) == BLKmode
7286 || (MEM_P (target)
7287 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
7288 && multiple_p (bitpos, BITS_PER_UNIT)
7289 && multiple_p (bitsize, BITS_PER_UNIT))))
7291 gcc_assert (MEM_P (target) && MEM_P (temp));
7292 poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT);
7293 poly_int64 bytesize = bits_to_bytes_round_up (bitsize);
7295 target = adjust_address (target, VOIDmode, bytepos);
7296 emit_block_move (target, temp,
7297 gen_int_mode (bytesize, Pmode),
7298 BLOCK_OP_NORMAL);
7300 return const0_rtx;
7303 /* If the mode of TEMP is still BLKmode and BITSIZE is not larger than the
7304 word size, we need to load the value (see again store_bit_field). */
7305 if (GET_MODE (temp) == BLKmode && known_le (bitsize, BITS_PER_WORD))
7307 temp_mode = smallest_int_mode_for_size (bitsize);
7308 temp = extract_bit_field (temp, bitsize, 0, 1, NULL_RTX, temp_mode,
7309 temp_mode, false, NULL);
7312 /* Store the value in the bitfield. */
7313 gcc_checking_assert (known_ge (bitpos, 0));
7314 store_bit_field (target, bitsize, bitpos,
7315 bitregion_start, bitregion_end,
7316 mode, temp, reverse);
7318 return const0_rtx;
7320 else
7322 /* Now build a reference to just the desired component. */
7323 rtx to_rtx = adjust_address (target, mode,
7324 exact_div (bitpos, BITS_PER_UNIT));
7326 if (to_rtx == target)
7327 to_rtx = copy_rtx (to_rtx);
7329 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
7330 set_mem_alias_set (to_rtx, alias_set);
7332 /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
7333 into a target smaller than its type; handle that case now. */
7334 if (TREE_CODE (exp) == CONSTRUCTOR && known_size_p (bitsize))
7336 poly_int64 bytesize = exact_div (bitsize, BITS_PER_UNIT);
7337 store_constructor (exp, to_rtx, 0, bytesize, reverse);
7338 return to_rtx;
7341 return store_expr (exp, to_rtx, 0, nontemporal, reverse);
7345 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
7346 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
7347 codes and find the ultimate containing object, which we return.
7349 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
7350 bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
7351 storage order of the field.
7352 If the position of the field is variable, we store a tree
7353 giving the variable offset (in units) in *POFFSET.
7354 This offset is in addition to the bit position.
7355 If the position is not variable, we store 0 in *POFFSET.
7357 If any of the extraction expressions is volatile,
7358 we store 1 in *PVOLATILEP. Otherwise we don't change that.
7360 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
7361 Otherwise, it is a mode that can be used to access the field.
7363 If the field describes a variable-sized object, *PMODE is set to
7364 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
7365 this case, but the address of the object can be found. */
7367 tree
7368 get_inner_reference (tree exp, poly_int64_pod *pbitsize,
7369 poly_int64_pod *pbitpos, tree *poffset,
7370 machine_mode *pmode, int *punsignedp,
7371 int *preversep, int *pvolatilep)
7373 tree size_tree = 0;
7374 machine_mode mode = VOIDmode;
7375 bool blkmode_bitfield = false;
7376 tree offset = size_zero_node;
7377 poly_offset_int bit_offset = 0;
7379 /* First get the mode, signedness, storage order and size. We do this from
7380 just the outermost expression. */
7381 *pbitsize = -1;
7382 if (TREE_CODE (exp) == COMPONENT_REF)
7384 tree field = TREE_OPERAND (exp, 1);
7385 size_tree = DECL_SIZE (field);
7386 if (flag_strict_volatile_bitfields > 0
7387 && TREE_THIS_VOLATILE (exp)
7388 && DECL_BIT_FIELD_TYPE (field)
7389 && DECL_MODE (field) != BLKmode)
7390 /* Volatile bitfields should be accessed in the mode of the
7391 field's type, not the mode computed based on the bit
7392 size. */
7393 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
7394 else if (!DECL_BIT_FIELD (field))
7396 mode = DECL_MODE (field);
7397 /* For vector fields re-check the target flags, as DECL_MODE
7398 could have been set with different target flags than
7399 the current function has. */
7400 if (mode == BLKmode
7401 && VECTOR_TYPE_P (TREE_TYPE (field))
7402 && VECTOR_MODE_P (TYPE_MODE_RAW (TREE_TYPE (field))))
7403 mode = TYPE_MODE (TREE_TYPE (field));
7405 else if (DECL_MODE (field) == BLKmode)
7406 blkmode_bitfield = true;
7408 *punsignedp = DECL_UNSIGNED (field);
7410 else if (TREE_CODE (exp) == BIT_FIELD_REF)
7412 size_tree = TREE_OPERAND (exp, 1);
7413 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
7414 || TYPE_UNSIGNED (TREE_TYPE (exp)));
7416 /* For vector element types with the correct size of access or for
7417 vector typed accesses use the mode of the access type. */
7418 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
7419 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
7420 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
7421 || VECTOR_TYPE_P (TREE_TYPE (exp)))
7422 mode = TYPE_MODE (TREE_TYPE (exp));
7424 else
7426 mode = TYPE_MODE (TREE_TYPE (exp));
7427 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
7429 if (mode == BLKmode)
7430 size_tree = TYPE_SIZE (TREE_TYPE (exp));
7431 else
7432 *pbitsize = GET_MODE_BITSIZE (mode);
7435 if (size_tree != 0)
7437 if (! tree_fits_uhwi_p (size_tree))
7438 mode = BLKmode, *pbitsize = -1;
7439 else
7440 *pbitsize = tree_to_uhwi (size_tree);
7443 *preversep = reverse_storage_order_for_component_p (exp);
7445 /* Compute cumulative bit-offset for nested component-refs and array-refs,
7446 and find the ultimate containing object. */
7447 while (1)
7449 switch (TREE_CODE (exp))
7451 case BIT_FIELD_REF:
7452 bit_offset += wi::to_poly_offset (TREE_OPERAND (exp, 2));
7453 break;
7455 case COMPONENT_REF:
7457 tree field = TREE_OPERAND (exp, 1);
7458 tree this_offset = component_ref_field_offset (exp);
7460 /* If this field hasn't been filled in yet, don't go past it.
7461 This should only happen when folding expressions made during
7462 type construction. */
7463 if (this_offset == 0)
7464 break;
7466 offset = size_binop (PLUS_EXPR, offset, this_offset);
7467 bit_offset += wi::to_poly_offset (DECL_FIELD_BIT_OFFSET (field));
7469 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
7471 break;
7473 case ARRAY_REF:
7474 case ARRAY_RANGE_REF:
7476 tree index = TREE_OPERAND (exp, 1);
7477 tree low_bound = array_ref_low_bound (exp);
7478 tree unit_size = array_ref_element_size (exp);
7480 /* We assume all arrays have sizes that are a multiple of a byte.
7481 First subtract the lower bound, if any, in the type of the
7482 index, then convert to sizetype and multiply by the size of
7483 the array element. */
7484 if (! integer_zerop (low_bound))
7485 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
7486 index, low_bound);
7488 offset = size_binop (PLUS_EXPR, offset,
7489 size_binop (MULT_EXPR,
7490 fold_convert (sizetype, index),
7491 unit_size));
7493 break;
7495 case REALPART_EXPR:
7496 break;
7498 case IMAGPART_EXPR:
7499 bit_offset += *pbitsize;
7500 break;
7502 case VIEW_CONVERT_EXPR:
7503 break;
7505 case MEM_REF:
7506 /* Hand back the decl for MEM[&decl, off]. */
7507 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
7509 tree off = TREE_OPERAND (exp, 1);
7510 if (!integer_zerop (off))
7512 poly_offset_int boff = mem_ref_offset (exp);
7513 boff <<= LOG2_BITS_PER_UNIT;
7514 bit_offset += boff;
7516 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7518 goto done;
7520 default:
7521 goto done;
7524 /* If any reference in the chain is volatile, the effect is volatile. */
7525 if (TREE_THIS_VOLATILE (exp))
7526 *pvolatilep = 1;
7528 exp = TREE_OPERAND (exp, 0);
7530 done:
7532 /* If OFFSET is constant, see if we can return the whole thing as a
7533 constant bit position. Make sure to handle overflow during
7534 this conversion. */
7535 if (poly_int_tree_p (offset))
7537 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset),
7538 TYPE_PRECISION (sizetype));
7539 tem <<= LOG2_BITS_PER_UNIT;
7540 tem += bit_offset;
7541 if (tem.to_shwi (pbitpos))
7542 *poffset = offset = NULL_TREE;
7545 /* Otherwise, split it up. */
7546 if (offset)
7548 /* Avoid returning a negative bitpos as this may wreak havoc later. */
7549 if (!bit_offset.to_shwi (pbitpos) || maybe_lt (*pbitpos, 0))
7551 *pbitpos = num_trailing_bits (bit_offset.force_shwi ());
7552 poly_offset_int bytes = bits_to_bytes_round_down (bit_offset);
7553 offset = size_binop (PLUS_EXPR, offset,
7554 build_int_cst (sizetype, bytes.force_shwi ()));
7557 *poffset = offset;
7560 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
7561 if (mode == VOIDmode
7562 && blkmode_bitfield
7563 && multiple_p (*pbitpos, BITS_PER_UNIT)
7564 && multiple_p (*pbitsize, BITS_PER_UNIT))
7565 *pmode = BLKmode;
7566 else
7567 *pmode = mode;
7569 return exp;
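
/* Illustrative sketch (not part of GCC): a typical consumer of
   get_inner_reference, mirroring the calls made elsewhere in this file.
   The helper name is hypothetical.  It asks whether REF sits at a
   compile-time-constant, byte-aligned bit position.  */

static bool
example_constant_byte_aligned_ref_p (tree ref)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;

  get_inner_reference (ref, &bitsize, &bitpos, &offset,
                       &mode, &unsignedp, &reversep, &volatilep);

  /* *POFFSET is null exactly when the variable part folded away, so
     the whole position is the constant BITPOS.  */
  return offset == NULL_TREE && multiple_p (bitpos, BITS_PER_UNIT);
}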
7572 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7574 static unsigned HOST_WIDE_INT
7575 target_align (const_tree target)
7577 /* We might have a chain of nested references with intermediate misaligned
7578 bitfield components, so we need to recurse to find out. */
7580 unsigned HOST_WIDE_INT this_align, outer_align;
7582 switch (TREE_CODE (target))
7584 case BIT_FIELD_REF:
7585 return 1;
7587 case COMPONENT_REF:
7588 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7589 outer_align = target_align (TREE_OPERAND (target, 0));
7590 return MIN (this_align, outer_align);
7592 case ARRAY_REF:
7593 case ARRAY_RANGE_REF:
7594 this_align = TYPE_ALIGN (TREE_TYPE (target));
7595 outer_align = target_align (TREE_OPERAND (target, 0));
7596 return MIN (this_align, outer_align);
7598 CASE_CONVERT:
7599 case NON_LVALUE_EXPR:
7600 case VIEW_CONVERT_EXPR:
7601 this_align = TYPE_ALIGN (TREE_TYPE (target));
7602 outer_align = target_align (TREE_OPERAND (target, 0));
7603 return MAX (this_align, outer_align);
7605 default:
7606 return TYPE_ALIGN (TREE_TYPE (target));
7611 /* Given an rtx VALUE that may contain additions and multiplications, return
7612 an equivalent value that just refers to a register, memory, or constant.
7613 This is done by generating instructions to perform the arithmetic and
7614 returning a pseudo-register containing the value.
7616 The returned value may be a REG, SUBREG, MEM or constant. */
7618 rtx
7619 force_operand (rtx value, rtx target)
7621 rtx op1, op2;
7622 /* Use subtarget as the target for operand 0 of a binary operation. */
7623 rtx subtarget = get_subtarget (target);
7624 enum rtx_code code = GET_CODE (value);
7626 /* Check for subreg applied to an expression produced by loop optimizer. */
7627 if (code == SUBREG
7628 && !REG_P (SUBREG_REG (value))
7629 && !MEM_P (SUBREG_REG (value)))
7631 value
7632 = simplify_gen_subreg (GET_MODE (value),
7633 force_reg (GET_MODE (SUBREG_REG (value)),
7634 force_operand (SUBREG_REG (value),
7635 NULL_RTX)),
7636 GET_MODE (SUBREG_REG (value)),
7637 SUBREG_BYTE (value));
7638 code = GET_CODE (value);
7641 /* Check for a PIC address load. */
7642 if ((code == PLUS || code == MINUS)
7643 && XEXP (value, 0) == pic_offset_table_rtx
7644 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7645 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7646 || GET_CODE (XEXP (value, 1)) == CONST))
7648 if (!subtarget)
7649 subtarget = gen_reg_rtx (GET_MODE (value));
7650 emit_move_insn (subtarget, value);
7651 return subtarget;
7654 if (ARITHMETIC_P (value))
7656 op2 = XEXP (value, 1);
7657 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7658 subtarget = 0;
7659 if (code == MINUS && CONST_INT_P (op2))
7661 code = PLUS;
7662 op2 = negate_rtx (GET_MODE (value), op2);
7665 /* Check for an addition with OP2 a constant integer and our first
7666 operand a PLUS of a virtual register and something else. In that
7667 case, we want to emit the sum of the virtual register and the
7668 constant first and then add the other value. This allows virtual
7669 register instantiation to simply modify the constant rather than
7670 creating another one around this addition. */
7671 if (code == PLUS && CONST_INT_P (op2)
7672 && GET_CODE (XEXP (value, 0)) == PLUS
7673 && REG_P (XEXP (XEXP (value, 0), 0))
7674 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7675 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7677 rtx temp = expand_simple_binop (GET_MODE (value), code,
7678 XEXP (XEXP (value, 0), 0), op2,
7679 subtarget, 0, OPTAB_LIB_WIDEN);
7680 return expand_simple_binop (GET_MODE (value), code, temp,
7681 force_operand (XEXP (XEXP (value,
7682 0), 1), 0),
7683 target, 0, OPTAB_LIB_WIDEN);
7686 op1 = force_operand (XEXP (value, 0), subtarget);
7687 op2 = force_operand (op2, NULL_RTX);
7688 switch (code)
7690 case MULT:
7691 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7692 case DIV:
7693 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7694 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7695 target, 1, OPTAB_LIB_WIDEN);
7696 else
7697 return expand_divmod (0,
7698 FLOAT_MODE_P (GET_MODE (value))
7699 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7700 GET_MODE (value), op1, op2, target, 0);
7701 case MOD:
7702 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7703 target, 0);
7704 case UDIV:
7705 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7706 target, 1);
7707 case UMOD:
7708 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7709 target, 1);
7710 case ASHIFTRT:
7711 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7712 target, 0, OPTAB_LIB_WIDEN);
7713 default:
7714 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7715 target, 1, OPTAB_LIB_WIDEN);
7718 if (UNARY_P (value))
7720 if (!target)
7721 target = gen_reg_rtx (GET_MODE (value));
7722 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7723 switch (code)
7725 case ZERO_EXTEND:
7726 case SIGN_EXTEND:
7727 case TRUNCATE:
7728 case FLOAT_EXTEND:
7729 case FLOAT_TRUNCATE:
7730 convert_move (target, op1, code == ZERO_EXTEND);
7731 return target;
7733 case FIX:
7734 case UNSIGNED_FIX:
7735 expand_fix (target, op1, code == UNSIGNED_FIX);
7736 return target;
7738 case FLOAT:
7739 case UNSIGNED_FLOAT:
7740 expand_float (target, op1, code == UNSIGNED_FLOAT);
7741 return target;
7743 default:
7744 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7748 #ifdef INSN_SCHEDULING
7749 /* On machines that have insn scheduling, we want all memory references to be
7750 explicit, so we need to deal with such paradoxical SUBREGs. */
7751 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7752 value
7753 = simplify_gen_subreg (GET_MODE (value),
7754 force_reg (GET_MODE (SUBREG_REG (value)),
7755 force_operand (SUBREG_REG (value),
7756 NULL_RTX)),
7757 GET_MODE (SUBREG_REG (value)),
7758 SUBREG_BYTE (value));
7759 #endif
7761 return value;
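
/* Illustrative sketch (hypothetical helper, not from the GCC sources):
   force_operand is typically used to legitimize a hand-built compound
   rtx, here (plus REG C), emitting whatever insns are needed and
   returning something a recognizer will accept.  */

static rtx
example_force_reg_plus_const (rtx reg, HOST_WIDE_INT c)
{
  machine_mode mode = GET_MODE (reg);
  rtx sum = gen_rtx_PLUS (mode, reg, gen_int_mode (c, mode));
  return force_operand (sum, NULL_RTX);
}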
7764 /* Subroutine of expand_expr: return nonzero iff there is no way that
7765 EXP can reference X, which is being modified. TOP_P is nonzero if this
7766 call is going to be used to determine whether we need a temporary
7767 for EXP, as opposed to a recursive call to this function.
7769 It is always safe for this routine to return zero since it merely
7770 searches for optimization opportunities. */
7772 int
7773 safe_from_p (const_rtx x, tree exp, int top_p)
7775 rtx exp_rtl = 0;
7776 int i, nops;
7778 if (x == 0
7779 /* If EXP has varying size, we MUST use a target since we currently
7780 have no way of allocating temporaries of variable size
7781 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7782 So we assume here that something at a higher level has prevented a
7783 clash. This is somewhat bogus, but the best we can do. Only
7784 do this when X is BLKmode and when we are at the top level. */
7785 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7786 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7787 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7788 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7789 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7790 != INTEGER_CST)
7791 && GET_MODE (x) == BLKmode)
7792 /* If X is in the outgoing argument area, it is always safe. */
7793 || (MEM_P (x)
7794 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7795 || (GET_CODE (XEXP (x, 0)) == PLUS
7796 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7797 return 1;
7799 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7800 find the underlying pseudo. */
7801 if (GET_CODE (x) == SUBREG)
7803 x = SUBREG_REG (x);
7804 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7805 return 0;
7808 /* Now look at our tree code and possibly recurse. */
7809 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7811 case tcc_declaration:
7812 exp_rtl = DECL_RTL_IF_SET (exp);
7813 break;
7815 case tcc_constant:
7816 return 1;
7818 case tcc_exceptional:
7819 if (TREE_CODE (exp) == TREE_LIST)
7821 while (1)
7823 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7824 return 0;
7825 exp = TREE_CHAIN (exp);
7826 if (!exp)
7827 return 1;
7828 if (TREE_CODE (exp) != TREE_LIST)
7829 return safe_from_p (x, exp, 0);
7832 else if (TREE_CODE (exp) == CONSTRUCTOR)
7834 constructor_elt *ce;
7835 unsigned HOST_WIDE_INT idx;
7837 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7838 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7839 || !safe_from_p (x, ce->value, 0))
7840 return 0;
7841 return 1;
7843 else if (TREE_CODE (exp) == ERROR_MARK)
7844 return 1; /* An already-visited SAVE_EXPR? */
7845 else
7846 return 0;
7848 case tcc_statement:
7849 /* The only case we look at here is the DECL_INITIAL inside a
7850 DECL_EXPR. */
7851 return (TREE_CODE (exp) != DECL_EXPR
7852 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7853 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7854 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7856 case tcc_binary:
7857 case tcc_comparison:
7858 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7859 return 0;
7860 /* Fall through. */
7862 case tcc_unary:
7863 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7865 case tcc_expression:
7866 case tcc_reference:
7867 case tcc_vl_exp:
7868 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7869 the expression. If it is set, we conflict iff we are that rtx or
7870 both are in memory. Otherwise, we check all operands of the
7871 expression recursively. */
7873 switch (TREE_CODE (exp))
7875 case ADDR_EXPR:
7876 /* If the operand is static or we are static, we can't conflict.
7877 Likewise if we don't conflict with the operand at all. */
7878 if (staticp (TREE_OPERAND (exp, 0))
7879 || TREE_STATIC (exp)
7880 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7881 return 1;
7883 /* Otherwise, the only way this can conflict is if we are taking
7884 the address of a DECL whose address is part of X, which is
7885 very rare. */
7886 exp = TREE_OPERAND (exp, 0);
7887 if (DECL_P (exp))
7889 if (!DECL_RTL_SET_P (exp)
7890 || !MEM_P (DECL_RTL (exp)))
7891 return 0;
7892 else
7893 exp_rtl = XEXP (DECL_RTL (exp), 0);
7895 break;
7897 case MEM_REF:
7898 if (MEM_P (x)
7899 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7900 get_alias_set (exp)))
7901 return 0;
7902 break;
7904 case CALL_EXPR:
7905 /* Assume that the call will clobber all hard registers and
7906 all of memory. */
7907 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7908 || MEM_P (x))
7909 return 0;
7910 break;
7912 case WITH_CLEANUP_EXPR:
7913 case CLEANUP_POINT_EXPR:
7914 /* Lowered by gimplify.c. */
7915 gcc_unreachable ();
7917 case SAVE_EXPR:
7918 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7920 default:
7921 break;
7924 /* If we have an rtx, we do not need to scan our operands. */
7925 if (exp_rtl)
7926 break;
7928 nops = TREE_OPERAND_LENGTH (exp);
7929 for (i = 0; i < nops; i++)
7930 if (TREE_OPERAND (exp, i) != 0
7931 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7932 return 0;
7934 break;
7936 case tcc_type:
7937 /* Should never get a type here. */
7938 gcc_unreachable ();
7941 /* If we have an rtl, find any enclosed object. Then see if we conflict
7942 with it. */
7943 if (exp_rtl)
7945 if (GET_CODE (exp_rtl) == SUBREG)
7947 exp_rtl = SUBREG_REG (exp_rtl);
7948 if (REG_P (exp_rtl)
7949 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7950 return 0;
7953 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7954 are memory and they conflict. */
7955 return ! (rtx_equal_p (x, exp_rtl)
7956 || (MEM_P (x) && MEM_P (exp_rtl)
7957 && true_dependence (exp_rtl, VOIDmode, x)));
7960 /* If we reach here, it is safe. */
7961 return 1;
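
/* Illustration (hypothetical fragment): the usual idiom is to demote
   TARGET to a fresh temporary when it is not provably safe from an
   operand that will be evaluated afterwards, e.g.

     if (! safe_from_p (target, exp1, 1))
       target = 0;

   which is exactly what expand_operands does below.  */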
7965 /* Return the highest power of two that EXP is known to be a multiple of.
7966 This is used in updating alignment of MEMs in array references. */
7968 unsigned HOST_WIDE_INT
7969 highest_pow2_factor (const_tree exp)
7971 unsigned HOST_WIDE_INT ret;
7972 int trailing_zeros = tree_ctz (exp);
7973 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7974 return BIGGEST_ALIGNMENT;
7975 ret = HOST_WIDE_INT_1U << trailing_zeros;
7976 if (ret > BIGGEST_ALIGNMENT)
7977 return BIGGEST_ALIGNMENT;
7978 return ret;
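
/* Worked example (illustrative): for EXP equal to the constant 24
   (binary 11000), tree_ctz returns 3, so the result is 1 << 3 == 8,
   the largest power of two dividing 24.  Any odd constant yields 1,
   and the result is always capped at BIGGEST_ALIGNMENT.  */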
7981 /* Similar, except that the alignment requirements of TARGET are
7982 taken into account. Assume it is at least as aligned as its
7983 type, unless it is a COMPONENT_REF in which case the layout of
7984 the structure gives the alignment. */
7986 static unsigned HOST_WIDE_INT
7987 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7989 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7990 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7992 return MAX (factor, talign);
7995 /* Convert the tree comparison code TCODE to the rtl one where the
7996 signedness is UNSIGNEDP. */
7998 static enum rtx_code
7999 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
8001 enum rtx_code code;
8002 switch (tcode)
8004 case EQ_EXPR:
8005 code = EQ;
8006 break;
8007 case NE_EXPR:
8008 code = NE;
8009 break;
8010 case LT_EXPR:
8011 code = unsignedp ? LTU : LT;
8012 break;
8013 case LE_EXPR:
8014 code = unsignedp ? LEU : LE;
8015 break;
8016 case GT_EXPR:
8017 code = unsignedp ? GTU : GT;
8018 break;
8019 case GE_EXPR:
8020 code = unsignedp ? GEU : GE;
8021 break;
8022 case UNORDERED_EXPR:
8023 code = UNORDERED;
8024 break;
8025 case ORDERED_EXPR:
8026 code = ORDERED;
8027 break;
8028 case UNLT_EXPR:
8029 code = UNLT;
8030 break;
8031 case UNLE_EXPR:
8032 code = UNLE;
8033 break;
8034 case UNGT_EXPR:
8035 code = UNGT;
8036 break;
8037 case UNGE_EXPR:
8038 code = UNGE;
8039 break;
8040 case UNEQ_EXPR:
8041 code = UNEQ;
8042 break;
8043 case LTGT_EXPR:
8044 code = LTGT;
8045 break;
8047 default:
8048 gcc_unreachable ();
8050 return code;
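
/* Examples (illustrative): convert_tree_comp_to_rtx (LT_EXPR, 1) yields
   LTU and convert_tree_comp_to_rtx (LT_EXPR, 0) yields LT, while codes
   without a signed/unsigned distinction, e.g. EQ_EXPR -> EQ, ignore
   UNSIGNEDP entirely.  */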
8053 /* Subroutine of expand_expr. Expand the two operands of a binary
8054 expression EXP0 and EXP1 placing the results in OP0 and OP1.
8055 The value may be stored in TARGET if TARGET is nonzero. The
8056 MODIFIER argument is as documented by expand_expr. */
8058 void
8059 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
8060 enum expand_modifier modifier)
8062 if (! safe_from_p (target, exp1, 1))
8063 target = 0;
8064 if (operand_equal_p (exp0, exp1, 0))
8066 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
8067 *op1 = copy_rtx (*op0);
8069 else
8071 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
8072 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
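
/* Illustration (hypothetical caller): a binary expander typically does

     rtx op0, op1;
     expand_operands (treeop0, treeop1, subtarget, &op0, &op1,
                      EXPAND_NORMAL);

   relying on the operand_equal_p shortcut above to expand identical
   trees only once.  */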
8077 /* Return a MEM that contains constant EXP. DEFER is as for
8078 output_constant_def and MODIFIER is as for expand_expr. */
8080 static rtx
8081 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
8083 rtx mem;
8085 mem = output_constant_def (exp, defer);
8086 if (modifier != EXPAND_INITIALIZER)
8087 mem = use_anchored_address (mem);
8088 return mem;
8091 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
8092 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
8094 static rtx
8095 expand_expr_addr_expr_1 (tree exp, rtx target, scalar_int_mode tmode,
8096 enum expand_modifier modifier, addr_space_t as)
8098 rtx result, subtarget;
8099 tree inner, offset;
8100 poly_int64 bitsize, bitpos;
8101 int unsignedp, reversep, volatilep = 0;
8102 machine_mode mode1;
8104 /* If we are taking the address of a constant and are at the top level,
8105 we have to use output_constant_def since we can't call force_const_mem
8106 at top level. */
8107 /* ??? This should be considered a front-end bug. We should not be
8108 generating ADDR_EXPR of something that isn't an LVALUE. The only
8109 exception here is STRING_CST. */
8110 if (CONSTANT_CLASS_P (exp))
8112 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
8113 if (modifier < EXPAND_SUM)
8114 result = force_operand (result, target);
8115 return result;
8118 /* Everything must be something allowed by is_gimple_addressable. */
8119 switch (TREE_CODE (exp))
8121 case INDIRECT_REF:
8122 /* This case will happen via recursion for &a->b. */
8123 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
8125 case MEM_REF:
8127 tree tem = TREE_OPERAND (exp, 0);
8128 if (!integer_zerop (TREE_OPERAND (exp, 1)))
8129 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
8130 return expand_expr (tem, target, tmode, modifier);
8133 case TARGET_MEM_REF:
8134 return addr_for_mem_ref (exp, as, true);
8136 case CONST_DECL:
8137 /* Expand the initializer like constants above. */
8138 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
8139 0, modifier), 0);
8140 if (modifier < EXPAND_SUM)
8141 result = force_operand (result, target);
8142 return result;
8144 case REALPART_EXPR:
8145 /* The real part of the complex number is always first, therefore
8146 the address is the same as the address of the parent object. */
8147 offset = 0;
8148 bitpos = 0;
8149 inner = TREE_OPERAND (exp, 0);
8150 break;
8152 case IMAGPART_EXPR:
8153 /* The imaginary part of the complex number is always second.
8154 The expression is therefore always offset by the size of the
8155 scalar type. */
8156 offset = 0;
8157 bitpos = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (exp)));
8158 inner = TREE_OPERAND (exp, 0);
8159 break;
8161 case COMPOUND_LITERAL_EXPR:
8162 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
8163 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
8164 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
8165 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
8166 the initializers aren't gimplified. */
8167 if (COMPOUND_LITERAL_EXPR_DECL (exp)
8168 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
8169 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
8170 target, tmode, modifier, as);
8171 /* FALLTHRU */
8172 default:
8173 /* If the object is a DECL, then expand it for its rtl. Don't bypass
8174 expand_expr, as that can have various side effects; LABEL_DECLs for
8175 example, may not have their DECL_RTL set yet. Expand the rtl of
8176 CONSTRUCTORs too, which should yield a memory reference for the
8177 constructor's contents. Assume language specific tree nodes can
8178 be expanded in some interesting way. */
8179 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
8180 if (DECL_P (exp)
8181 || TREE_CODE (exp) == CONSTRUCTOR
8182 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
8184 result = expand_expr (exp, target, tmode,
8185 modifier == EXPAND_INITIALIZER
8186 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
8188 /* If the DECL isn't in memory, then the DECL wasn't properly
8189 marked TREE_ADDRESSABLE, which will be either a front-end
8190 or a tree optimizer bug. */
8192 gcc_assert (MEM_P (result));
8193 result = XEXP (result, 0);
8195 /* ??? Is this needed anymore? */
8196 if (DECL_P (exp))
8197 TREE_USED (exp) = 1;
8199 if (modifier != EXPAND_INITIALIZER
8200 && modifier != EXPAND_CONST_ADDRESS
8201 && modifier != EXPAND_SUM)
8202 result = force_operand (result, target);
8203 return result;
8206 /* Although we are expanding to RTL, it is safe to use
8207 get_inner_reference here: we know how to handle "aligning
8208 nodes", namely we can just bypass them because they won't
8209 change the final object whose address will be returned (they
8210 actually exist only for that purpose). */
8211 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
8212 &unsignedp, &reversep, &volatilep);
8213 break;
8216 /* We must have made progress. */
8217 gcc_assert (inner != exp);
8219 subtarget = offset || maybe_ne (bitpos, 0) ? NULL_RTX : target;
8220 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
8221 inner alignment, force the inner to be sufficiently aligned. */
8222 if (CONSTANT_CLASS_P (inner)
8223 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
8225 inner = copy_node (inner);
8226 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
8227 SET_TYPE_ALIGN (TREE_TYPE (inner), TYPE_ALIGN (TREE_TYPE (exp)));
8228 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
8230 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
8232 if (offset)
8234 rtx tmp;
8236 if (modifier != EXPAND_NORMAL)
8237 result = force_operand (result, NULL);
8238 tmp = expand_expr (offset, NULL_RTX, tmode,
8239 modifier == EXPAND_INITIALIZER
8240 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
8242 /* expand_expr is allowed to return an object in a mode other
8243 than TMODE. If it did, we need to convert. */
8244 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
8245 tmp = convert_modes (tmode, GET_MODE (tmp),
8246 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
8247 result = convert_memory_address_addr_space (tmode, result, as);
8248 tmp = convert_memory_address_addr_space (tmode, tmp, as);
8250 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8251 result = simplify_gen_binary (PLUS, tmode, result, tmp);
8252 else
8254 subtarget = maybe_ne (bitpos, 0) ? NULL_RTX : target;
8255 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
8256 1, OPTAB_LIB_WIDEN);
8260 if (maybe_ne (bitpos, 0))
8262 /* Someone beforehand should have rejected taking the address
8263 of an object that isn't byte-aligned. */
8264 poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT);
8265 result = convert_memory_address_addr_space (tmode, result, as);
8266 result = plus_constant (tmode, result, bytepos);
8267 if (modifier < EXPAND_SUM)
8268 result = force_operand (result, target);
8271 return result;
8274 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
8275 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
8277 static rtx
8278 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
8279 enum expand_modifier modifier)
8281 addr_space_t as = ADDR_SPACE_GENERIC;
8282 scalar_int_mode address_mode = Pmode;
8283 scalar_int_mode pointer_mode = ptr_mode;
8284 machine_mode rmode;
8285 rtx result;
8287 /* Target mode of VOIDmode says "whatever's natural". */
8288 if (tmode == VOIDmode)
8289 tmode = TYPE_MODE (TREE_TYPE (exp));
8291 if (POINTER_TYPE_P (TREE_TYPE (exp)))
8293 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
8294 address_mode = targetm.addr_space.address_mode (as);
8295 pointer_mode = targetm.addr_space.pointer_mode (as);
8298 /* We can get called with some Weird Things if the user does silliness
8299 like "(short) &a". In that case, convert_memory_address won't do
8300 the right thing, so ignore the given target mode. */
8301 scalar_int_mode new_tmode = (tmode == pointer_mode
8302 ? pointer_mode
8303 : address_mode);
8305 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
8306 new_tmode, modifier, as);
8308 /* Despite expand_expr's claims concerning ignoring TMODE when not
8309 strictly convenient, stuff breaks if we don't honor it. Note
8310 that combined with the above, we only do this for pointer modes. */
8311 rmode = GET_MODE (result);
8312 if (rmode == VOIDmode)
8313 rmode = new_tmode;
8314 if (rmode != new_tmode)
8315 result = convert_memory_address_addr_space (new_tmode, result, as);
8317 return result;
8320 /* Generate code for computing CONSTRUCTOR EXP.
8321 An rtx for the computed value is returned. If AVOID_TEMP_MEM
8322 is TRUE, instead of creating a temporary variable in memory,
8323 NULL is returned and the caller needs to handle it differently. */
8325 static rtx
8326 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
8327 bool avoid_temp_mem)
8329 tree type = TREE_TYPE (exp);
8330 machine_mode mode = TYPE_MODE (type);
8332 /* Try to avoid creating a temporary at all. This is possible
8333 if all of the initializer is zero.
8334 FIXME: try to handle all [0..255] initializers we can handle
8335 with memset. */
8336 if (TREE_STATIC (exp)
8337 && !TREE_ADDRESSABLE (exp)
8338 && target != 0 && mode == BLKmode
8339 && all_zeros_p (exp))
8341 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
8342 return target;
8345 /* All elts simple constants => refer to a constant in memory. But
8346 if this is a non-BLKmode mode, let it store a field at a time
8347 since that should make a CONST_INT, CONST_WIDE_INT or
8348 CONST_DOUBLE when we fold. Likewise, if we have a target we can
8349 use, it is best to store directly into the target unless the type
8350 is large enough that memcpy will be used. If we are making an
8351 initializer and all operands are constant, put it in memory as
8352 well.
8354 FIXME: Avoid trying to fill vector constructors piece-meal.
8355 Output them with output_constant_def below unless we're sure
8356 they're zeros. This should go away when vector initializers
8357 are treated like VECTOR_CST instead of arrays. */
8358 if ((TREE_STATIC (exp)
8359 && ((mode == BLKmode
8360 && ! (target != 0 && safe_from_p (target, exp, 1)))
8361 || TREE_ADDRESSABLE (exp)
8362 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
8363 && (! can_move_by_pieces
8364 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
8365 TYPE_ALIGN (type)))
8366 && ! mostly_zeros_p (exp))))
8367 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
8368 && TREE_CONSTANT (exp)))
8370 rtx constructor;
8372 if (avoid_temp_mem)
8373 return NULL_RTX;
8375 constructor = expand_expr_constant (exp, 1, modifier);
8377 if (modifier != EXPAND_CONST_ADDRESS
8378 && modifier != EXPAND_INITIALIZER
8379 && modifier != EXPAND_SUM)
8380 constructor = validize_mem (constructor);
8382 return constructor;
8385 /* Handle calls that pass values in multiple non-contiguous
8386 locations. The Irix 6 ABI has examples of this. */
8387 if (target == 0 || ! safe_from_p (target, exp, 1)
8388 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM
8389 /* Also make a temporary if the store is to volatile memory, to
8390 avoid individual accesses to aggregate members. */
8391 || (GET_CODE (target) == MEM
8392 && MEM_VOLATILE_P (target)
8393 && !TREE_ADDRESSABLE (TREE_TYPE (exp))))
8395 if (avoid_temp_mem)
8396 return NULL_RTX;
8398 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
8401 store_constructor (exp, target, 0, int_expr_size (exp), false);
8402 return target;
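
/* Illustration: for an all-zero constant initializer, roughly
   "struct S s = { 0 };" with S in BLKmode, the all_zeros_p fast path
   above collapses to a single clear_storage call instead of a series
   of per-field stores.  */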
8406 /* expand_expr: generate code for computing expression EXP.
8407 An rtx for the computed value is returned. The value is never null.
8408 In the case of a void EXP, const0_rtx is returned.
8410 The value may be stored in TARGET if TARGET is nonzero.
8411 TARGET is just a suggestion; callers must assume that
8412 the rtx returned may not be the same as TARGET.
8414 If TARGET is CONST0_RTX, it means that the value will be ignored.
8416 If TMODE is not VOIDmode, it suggests generating the
8417 result in mode TMODE. But this is done only when convenient.
8418 Otherwise, TMODE is ignored and the value is generated in its natural mode.
8419 TMODE is just a suggestion; callers must assume that
8420 the rtx returned may not have mode TMODE.
8422 Note that TARGET may have neither TMODE nor MODE. In that case, it
8423 probably will not be used.
8425 If MODIFIER is EXPAND_SUM then when EXP is an addition
8426 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
8427 or a nest of (PLUS ...) and (MINUS ...) where the terms are
8428 products as above, or REG or MEM, or constant.
8429 Ordinarily in such cases we would output mul or add instructions
8430 and then return a pseudo reg containing the sum.
8432 EXPAND_INITIALIZER is much like EXPAND_SUM except that
8433 it also marks a label as absolutely required (it can't be dead).
8434 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
8435 This is used for outputting expressions used in initializers.
8437 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
8438 with a constant address even if that address is not normally legitimate.
8439 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
8441 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
8442 a call parameter. Such targets require special care as we haven't yet
8443 marked TARGET so that it's safe from being trashed by libcalls. We
8444 don't want to use TARGET for anything but the final result;
8445 Intermediate values must go elsewhere. Additionally, calls to
8446 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
8448 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
8449 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
8450 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
8451 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
8452 recursively.
8453 If the result can be stored at TARGET, and ALT_RTL is non-NULL,
8454 then *ALT_RTL is set to TARGET (before legitimization).
8456 If INNER_REFERENCE_P is true, we are expanding an inner reference.
8457 In this case, we don't adjust a returned MEM rtx that wouldn't be
8458 sufficiently aligned for its mode; instead, it's up to the caller
8459 to deal with it afterwards. This is used to make sure that unaligned
8460 base objects for which out-of-bounds accesses are supported, for
8461 example record types with trailing arrays, aren't realigned behind
8462 the back of the caller.
8463 The normal operating mode is to pass FALSE for this parameter. */
8465 rtx
8466 expand_expr_real (tree exp, rtx target, machine_mode tmode,
8467 enum expand_modifier modifier, rtx *alt_rtl,
8468 bool inner_reference_p)
8470 rtx ret;
8472 /* Handle ERROR_MARK before anybody tries to access its type. */
8473 if (TREE_CODE (exp) == ERROR_MARK
8474 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
8476 ret = CONST0_RTX (tmode);
8477 return ret ? ret : const0_rtx;
8480 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
8481 inner_reference_p);
8482 return ret;
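
/* Illustration (the wrappers live in expr.h, shown here for
   orientation): most callers reach this function through

     rtx x = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   or expand_normal (exp), both of which forward here with a null
   ALT_RTL and INNER_REFERENCE_P false.  */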
8485 /* Try to expand the conditional expression which is represented by
8486 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
8487 return the rtl reg which represents the result. Otherwise return
8488 NULL_RTX. */
8490 static rtx
8491 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
8492 tree treeop1 ATTRIBUTE_UNUSED,
8493 tree treeop2 ATTRIBUTE_UNUSED)
8495 rtx insn;
8496 rtx op00, op01, op1, op2;
8497 enum rtx_code comparison_code;
8498 machine_mode comparison_mode;
8499 gimple *srcstmt;
8500 rtx temp;
8501 tree type = TREE_TYPE (treeop1);
8502 int unsignedp = TYPE_UNSIGNED (type);
8503 machine_mode mode = TYPE_MODE (type);
8504 machine_mode orig_mode = mode;
8505 static bool expanding_cond_expr_using_cmove = false;
8507 /* Conditional move expansion can end up TERing two operands which,
8508 when they recursively hit conditional expressions, can result in
8509 exponential behavior if the cmove expansion ultimately fails.
8510 It's hardly profitable to TER a cmove into a cmove, so avoid doing
8511 that by failing early if we end up recursing. */
8512 if (expanding_cond_expr_using_cmove)
8513 return NULL_RTX;
8515 /* If we cannot do a conditional move on the mode, try doing it
8516 with the promoted mode. */
8517 if (!can_conditionally_move_p (mode))
8519 mode = promote_mode (type, mode, &unsignedp);
8520 if (!can_conditionally_move_p (mode))
8521 return NULL_RTX;
8522 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
8524 else
8525 temp = assign_temp (type, 0, 1);
8527 expanding_cond_expr_using_cmove = true;
8528 start_sequence ();
8529 expand_operands (treeop1, treeop2,
8530 temp, &op1, &op2, EXPAND_NORMAL);
8532 if (TREE_CODE (treeop0) == SSA_NAME
8533 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
8535 type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
8536 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
8537 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
8538 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
8539 comparison_mode = TYPE_MODE (type);
8540 unsignedp = TYPE_UNSIGNED (type);
8541 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8543 else if (COMPARISON_CLASS_P (treeop0))
8545 type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8546 enum tree_code cmpcode = TREE_CODE (treeop0);
8547 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8548 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8549 unsignedp = TYPE_UNSIGNED (type);
8550 comparison_mode = TYPE_MODE (type);
8551 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8553 else
8555 op00 = expand_normal (treeop0);
8556 op01 = const0_rtx;
8557 comparison_code = NE;
8558 comparison_mode = GET_MODE (op00);
8559 if (comparison_mode == VOIDmode)
8560 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8562 expanding_cond_expr_using_cmove = false;
8564 if (GET_MODE (op1) != mode)
8565 op1 = gen_lowpart (mode, op1);
8567 if (GET_MODE (op2) != mode)
8568 op2 = gen_lowpart (mode, op2);
8570 /* Try to emit the conditional move. */
8571 insn = emit_conditional_move (temp, comparison_code,
8572 op00, op01, comparison_mode,
8573 op1, op2, mode,
8574 unsignedp);
8576 /* If we could do the conditional move, emit the sequence,
8577 and return. */
8578 if (insn)
8580 rtx_insn *seq = get_insns ();
8581 end_sequence ();
8582 emit_insn (seq);
8583 return convert_modes (orig_mode, mode, temp, 0);
8586 /* Otherwise discard the sequence and fall back to code with
8587 branches. */
8588 end_sequence ();
8589 return NULL_RTX;
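
/* Illustration: for gimple like

     _1 = a_2 < b_3 ? c_4 : d_5;

   the code above expands the comparison operands, maps LT_EXPR to
   LT/LTU via convert_tree_comp_to_rtx, and tries emit_conditional_move,
   falling back to branchy code only if that fails.  */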
8592 /* A helper function for expand_expr_real_2 to be used with a
8593 misaligned mem_ref TEMP. Assume an unsigned type if UNSIGNEDP
8594 is nonzero, with alignment ALIGN in bits.
8595 Store the value at TARGET if possible (if TARGET is nonzero).
8596 Regardless of TARGET, we return the rtx for where the value is placed.
8597 If the result can be stored at TARGET, and ALT_RTL is non-NULL,
8598 then *ALT_RTL is set to TARGET (before legitimization). */
8600 static rtx
8601 expand_misaligned_mem_ref (rtx temp, machine_mode mode, int unsignedp,
8602 unsigned int align, rtx target, rtx *alt_rtl)
8604 enum insn_code icode;
8606 if ((icode = optab_handler (movmisalign_optab, mode))
8607 != CODE_FOR_nothing)
8609 class expand_operand ops[2];
8611 /* We've already validated the memory, and we're creating a
8612 new pseudo destination. The predicates really can't fail,
8613 nor can the generator. */
8614 create_output_operand (&ops[0], NULL_RTX, mode);
8615 create_fixed_operand (&ops[1], temp);
8616 expand_insn (icode, 2, ops);
8617 temp = ops[0].value;
8619 else if (targetm.slow_unaligned_access (mode, align))
8620 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
8621 0, unsignedp, target,
8622 mode, mode, false, alt_rtl);
8623 return temp;
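
/* Illustration (hypothetical numbers): to load a 32-bit scalar from a
   MEM known to be only 16-bit aligned on a strict-alignment target,
   the movmisalign pattern is used when the backend provides one;
   otherwise extract_bit_field reads the value as a 32-bit bit-field at
   offset 0.  */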
8626 rtx
8627 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8628 enum expand_modifier modifier)
8630 rtx op0, op1, op2, temp;
8631 rtx_code_label *lab;
8632 tree type;
8633 int unsignedp;
8634 machine_mode mode;
8635 scalar_int_mode int_mode;
8636 enum tree_code code = ops->code;
8637 optab this_optab;
8638 rtx subtarget, original_target;
8639 int ignore;
8640 bool reduce_bit_field;
8641 location_t loc = ops->location;
8642 tree treeop0, treeop1, treeop2;
8643 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8644 ? reduce_to_bit_field_precision ((expr), \
8645 target, \
8646 type) \
8647 : (expr))
8649 type = ops->type;
8650 mode = TYPE_MODE (type);
8651 unsignedp = TYPE_UNSIGNED (type);
8653 treeop0 = ops->op0;
8654 treeop1 = ops->op1;
8655 treeop2 = ops->op2;
8657 /* We should be called only on simple (binary or unary) expressions,
8658 exactly those that are valid in gimple expressions that aren't
8659 GIMPLE_SINGLE_RHS (or invalid). */
8660 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8661 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8662 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8664 ignore = (target == const0_rtx
8665 || ((CONVERT_EXPR_CODE_P (code)
8666 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8667 && TREE_CODE (type) == VOID_TYPE));
8669 /* We should be called only if we need the result. */
8670 gcc_assert (!ignore);
8672 /* An operation in what may be a bit-field type needs the
8673 result to be reduced to the precision of the bit-field type,
8674 which is narrower than that of the type's mode. */
8675 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8676 && !type_has_mode_precision_p (type));
8678 if (reduce_bit_field
8679 && (modifier == EXPAND_STACK_PARM
8680 || (target && GET_MODE (target) != mode)))
8681 target = 0;
8683 /* Use subtarget as the target for operand 0 of a binary operation. */
8684 subtarget = get_subtarget (target);
8685 original_target = target;
8687 switch (code)
8689 case NON_LVALUE_EXPR:
8690 case PAREN_EXPR:
8691 CASE_CONVERT:
8692 if (treeop0 == error_mark_node)
8693 return const0_rtx;
8695 if (TREE_CODE (type) == UNION_TYPE)
8697 tree valtype = TREE_TYPE (treeop0);
8699 /* If both input and output are BLKmode, this conversion isn't doing
8700 anything except possibly changing memory attribute. */
8701 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8703 rtx result = expand_expr (treeop0, target, tmode,
8704 modifier);
8706 result = copy_rtx (result);
8707 set_mem_attributes (result, type, 0);
8708 return result;
8711 if (target == 0)
8713 if (TYPE_MODE (type) != BLKmode)
8714 target = gen_reg_rtx (TYPE_MODE (type));
8715 else
8716 target = assign_temp (type, 1, 1);
8719 if (MEM_P (target))
8720 /* Store data into beginning of memory target. */
8721 store_expr (treeop0,
8722 adjust_address (target, TYPE_MODE (valtype), 0),
8723 modifier == EXPAND_STACK_PARM,
8724 false, TYPE_REVERSE_STORAGE_ORDER (type));
8726 else
8728 gcc_assert (REG_P (target)
8729 && !TYPE_REVERSE_STORAGE_ORDER (type));
8731 /* Store this field into a union of the proper type. */
8732 poly_uint64 op0_size
8733 = tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (treeop0)));
8734 poly_uint64 union_size = GET_MODE_BITSIZE (mode);
8735 store_field (target,
8736 /* The conversion must be constructed so that
8737 we know at compile time how many bits
8738 to preserve. */
8739 ordered_min (op0_size, union_size),
8740 0, 0, 0, TYPE_MODE (valtype), treeop0, 0,
8741 false, false);
8744 /* Return the entire union. */
8745 return target;
8748 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8750 op0 = expand_expr (treeop0, target, VOIDmode,
8751 modifier);
8753 /* If the signedness of the conversion differs and OP0 is
8754 a promoted SUBREG, clear that indication since we now
8755 have to do the proper extension. */
8756 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8757 && GET_CODE (op0) == SUBREG)
8758 SUBREG_PROMOTED_VAR_P (op0) = 0;
8760 return REDUCE_BIT_FIELD (op0);
8763 op0 = expand_expr (treeop0, NULL_RTX, mode,
8764 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8765 if (GET_MODE (op0) == mode)
8768 /* If OP0 is a constant, just convert it into the proper mode. */
8769 else if (CONSTANT_P (op0))
8771 tree inner_type = TREE_TYPE (treeop0);
8772 machine_mode inner_mode = GET_MODE (op0);
8774 if (inner_mode == VOIDmode)
8775 inner_mode = TYPE_MODE (inner_type);
8777 if (modifier == EXPAND_INITIALIZER)
8778 op0 = lowpart_subreg (mode, op0, inner_mode);
8779 else
8780 op0 = convert_modes (mode, inner_mode, op0,
8781 TYPE_UNSIGNED (inner_type));
8784 else if (modifier == EXPAND_INITIALIZER)
8785 op0 = gen_rtx_fmt_e (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8786 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8788 else if (target == 0)
8789 op0 = convert_to_mode (mode, op0,
8790 TYPE_UNSIGNED (TREE_TYPE
8791 (treeop0)));
8792 else
8794 convert_move (target, op0,
8795 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8796 op0 = target;
8799 return REDUCE_BIT_FIELD (op0);
8801 case ADDR_SPACE_CONVERT_EXPR:
8803 tree treeop0_type = TREE_TYPE (treeop0);
8805 gcc_assert (POINTER_TYPE_P (type));
8806 gcc_assert (POINTER_TYPE_P (treeop0_type));
8808 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8809 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8811 /* Conversions between pointers to the same address space should
8812 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8813 gcc_assert (as_to != as_from);
8815 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8817 /* Ask target code to handle conversion between pointers
8818 to overlapping address spaces. */
8819 if (targetm.addr_space.subset_p (as_to, as_from)
8820 || targetm.addr_space.subset_p (as_from, as_to))
8822 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8824 else
8826 /* For disjoint address spaces, converting anything but a null
8827 pointer invokes undefined behavior. We truncate or extend the
8828 value as if we'd converted via integers, which handles 0 as
8829 required, and all others as the programmer likely expects. */
8830 #ifndef POINTERS_EXTEND_UNSIGNED
8831 const int POINTERS_EXTEND_UNSIGNED = 1;
8832 #endif
8833 op0 = convert_modes (mode, TYPE_MODE (treeop0_type),
8834 op0, POINTERS_EXTEND_UNSIGNED);
8836 gcc_assert (op0);
8837 return op0;
8840 case POINTER_PLUS_EXPR:
8841 /* Even though the sizetype mode and the pointer's mode can be different,
8842 expand is able to handle this correctly and get the correct result out
8843 of the PLUS_EXPR code. */
8844 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8845 if sizetype precision is smaller than pointer precision. */
8846 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8847 treeop1 = fold_convert_loc (loc, type,
8848 fold_convert_loc (loc, ssizetype,
8849 treeop1));
8850 /* If sizetype precision is larger than pointer precision, truncate the
8851 offset to have matching modes. */
8852 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8853 treeop1 = fold_convert_loc (loc, type, treeop1);
8854 /* FALLTHRU */
8856 case PLUS_EXPR:
8857 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8858 something else, make sure we add the register to the constant and
8859 then to the other thing. This case can occur during strength
8860 reduction and doing it this way will produce better code if the
8861 frame pointer or argument pointer is eliminated.
8863 fold-const.c will ensure that the constant is always in the inner
8864 PLUS_EXPR, so the only case we need to do anything about is if
8865 sp, ap, or fp is our second argument, in which case we must swap
8866 the innermost first argument and our second argument. */
8868 if (TREE_CODE (treeop0) == PLUS_EXPR
8869 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8870 && VAR_P (treeop1)
8871 && (DECL_RTL (treeop1) == frame_pointer_rtx
8872 || DECL_RTL (treeop1) == stack_pointer_rtx
8873 || DECL_RTL (treeop1) == arg_pointer_rtx))
8875 gcc_unreachable ();
8878 /* If the result is to be ptr_mode and we are adding an integer to
8879 something, we might be forming a constant. So try to use
8880 plus_constant. If it produces a sum and we can't accept it,
8881 use force_operand. This allows P = &ARR[const] to generate
8882 efficient code on machines where a SYMBOL_REF is not a valid
8883 address.
8885 If this is an EXPAND_SUM call, always return the sum. */
8886 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8887 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8889 if (modifier == EXPAND_STACK_PARM)
8890 target = 0;
8891 if (TREE_CODE (treeop0) == INTEGER_CST
8892 && HWI_COMPUTABLE_MODE_P (mode)
8893 && TREE_CONSTANT (treeop1))
8895 rtx constant_part;
8896 HOST_WIDE_INT wc;
8897 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8899 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8900 EXPAND_SUM);
8901 /* Use wi::shwi to ensure that the constant is
8902 truncated according to the mode of OP1, then sign extended
8903 to a HOST_WIDE_INT. Using the constant directly can result
8904 in non-canonical RTL in a 64x32 cross compile. */
8905 wc = TREE_INT_CST_LOW (treeop0);
8906 constant_part =
8907 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8908 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8909 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8910 op1 = force_operand (op1, target);
8911 return REDUCE_BIT_FIELD (op1);
8914 else if (TREE_CODE (treeop1) == INTEGER_CST
8915 && HWI_COMPUTABLE_MODE_P (mode)
8916 && TREE_CONSTANT (treeop0))
8918 rtx constant_part;
8919 HOST_WIDE_INT wc;
8920 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8922 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8923 (modifier == EXPAND_INITIALIZER
8924 ? EXPAND_INITIALIZER : EXPAND_SUM));
8925 if (! CONSTANT_P (op0))
8927 op1 = expand_expr (treeop1, NULL_RTX,
8928 VOIDmode, modifier);
8929 /* Return a PLUS if modifier says it's OK. */
8930 if (modifier == EXPAND_SUM
8931 || modifier == EXPAND_INITIALIZER)
8932 return simplify_gen_binary (PLUS, mode, op0, op1);
8933 goto binop2;
8935 /* Use wi::shwi to ensure that the constant is
8936 truncated according to the mode of OP1, then sign extended
8937 to a HOST_WIDE_INT. Using the constant directly can result
8938 in non-canonical RTL in a 64x32 cross compile. */
8939 wc = TREE_INT_CST_LOW (treeop1);
8940 constant_part
8941 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8942 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8943 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8944 op0 = force_operand (op0, target);
8945 return REDUCE_BIT_FIELD (op0);
8949 /* Use TER to expand pointer addition of a negated value
8950 as pointer subtraction. */
8951 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8952 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8953 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8954 && TREE_CODE (treeop1) == SSA_NAME
8955 && TYPE_MODE (TREE_TYPE (treeop0))
8956 == TYPE_MODE (TREE_TYPE (treeop1)))
8958 gimple *def = get_def_for_expr (treeop1, NEGATE_EXPR);
8959 if (def)
8961 treeop1 = gimple_assign_rhs1 (def);
8962 code = MINUS_EXPR;
8963 goto do_minus;
8967 /* No sense saving up arithmetic to be done
8968 if it's all in the wrong mode to form part of an address.
8969 And force_operand won't know whether to sign-extend or
8970 zero-extend. */
8971 if (modifier != EXPAND_INITIALIZER
8972 && (modifier != EXPAND_SUM || mode != ptr_mode))
8974 expand_operands (treeop0, treeop1,
8975 subtarget, &op0, &op1, modifier);
8976 if (op0 == const0_rtx)
8977 return op1;
8978 if (op1 == const0_rtx)
8979 return op0;
8980 goto binop2;
8983 expand_operands (treeop0, treeop1,
8984 subtarget, &op0, &op1, modifier);
8985 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8987 case MINUS_EXPR:
8988 case POINTER_DIFF_EXPR:
8989 do_minus:
8990 /* For initializers, we are allowed to return a MINUS of two
8991 symbolic constants. Here we handle all cases when both operands
8992 are constant. */
8993 /* Handle difference of two symbolic constants,
8994 for the sake of an initializer. */
8995 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8996 && really_constant_p (treeop0)
8997 && really_constant_p (treeop1))
8999 expand_operands (treeop0, treeop1,
9000 NULL_RTX, &op0, &op1, modifier);
9001 return simplify_gen_binary (MINUS, mode, op0, op1);
9004 /* No sense saving up arithmetic to be done
9005 if it's all in the wrong mode to form part of an address.
9006 And force_operand won't know whether to sign-extend or
9007 zero-extend. */
9008 if (modifier != EXPAND_INITIALIZER
9009 && (modifier != EXPAND_SUM || mode != ptr_mode))
9010 goto binop;
9012 expand_operands (treeop0, treeop1,
9013 subtarget, &op0, &op1, modifier);
9015 /* Convert A - const to A + (-const). */
9016 if (CONST_INT_P (op1))
9018 op1 = negate_rtx (mode, op1);
9019 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
9022 goto binop2;
9024 case WIDEN_MULT_PLUS_EXPR:
9025 case WIDEN_MULT_MINUS_EXPR:
9026 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9027 op2 = expand_normal (treeop2);
9028 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9029 target, unsignedp);
9030 return target;
9032 case WIDEN_MULT_EXPR:
9033 /* If the first operand is constant, swap them.
9034 Thus the following special case checks need only
9035 check the second operand. */
9036 if (TREE_CODE (treeop0) == INTEGER_CST)
9037 std::swap (treeop0, treeop1);
9039 /* First, check if we have a multiplication of one signed and one
9040 unsigned operand. */
9041 if (TREE_CODE (treeop1) != INTEGER_CST
9042 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
9043 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
9045 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
9046 this_optab = usmul_widen_optab;
9047 if (find_widening_optab_handler (this_optab, mode, innermode)
9048 != CODE_FOR_nothing)
9050 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
9051 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
9052 EXPAND_NORMAL);
9053 else
9054 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
9055 EXPAND_NORMAL);
9056 /* op0 and op1 might still be constant, despite the above
9057 != INTEGER_CST check. Handle it. */
9058 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
9060 op0 = convert_modes (mode, innermode, op0, true);
9061 op1 = convert_modes (mode, innermode, op1, false);
9062 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
9063 target, unsignedp));
9065 goto binop3;
9068 /* Check for a multiplication with matching signedness. */
9069 else if ((TREE_CODE (treeop1) == INTEGER_CST
9070 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
9071 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
9072 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
9074 tree op0type = TREE_TYPE (treeop0);
9075 machine_mode innermode = TYPE_MODE (op0type);
9076 bool zextend_p = TYPE_UNSIGNED (op0type);
9077 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
9078 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
9080 if (TREE_CODE (treeop0) != INTEGER_CST)
9082 if (find_widening_optab_handler (this_optab, mode, innermode)
9083 != CODE_FOR_nothing)
9085 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
9086 EXPAND_NORMAL);
9087 /* op0 and op1 might still be constant, despite the above
9088 != INTEGER_CST check. Handle it. */
9089 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
9091 widen_mult_const:
9092 op0 = convert_modes (mode, innermode, op0, zextend_p);
9093 op1
9094 = convert_modes (mode, innermode, op1,
9095 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
9096 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
9097 target,
9098 unsignedp));
9100 temp = expand_widening_mult (mode, op0, op1, target,
9101 unsignedp, this_optab);
9102 return REDUCE_BIT_FIELD (temp);
9104 if (find_widening_optab_handler (other_optab, mode, innermode)
9105 != CODE_FOR_nothing
9106 && innermode == word_mode)
9108 rtx htem, hipart;
9109 op0 = expand_normal (treeop0);
9110 op1 = expand_normal (treeop1);
9111 /* op0 and op1 might be constants, despite the above
9112 != INTEGER_CST check. Handle it. */
9113 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
9114 goto widen_mult_const;
9115 temp = expand_binop (mode, other_optab, op0, op1, target,
9116 unsignedp, OPTAB_LIB_WIDEN);
9117 hipart = gen_highpart (word_mode, temp);
9118 htem = expand_mult_highpart_adjust (word_mode, hipart,
9119 op0, op1, hipart,
9120 zextend_p);
9121 if (htem != hipart)
9122 emit_move_insn (hipart, htem);
9123 return REDUCE_BIT_FIELD (temp);
9127 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
9128 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
9129 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
9130 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
9132 case MULT_EXPR:
9133 /* If this is a fixed-point operation, then we cannot use the code
9134 below because "expand_mult" doesn't support sat/no-sat fixed-point
9135 multiplications. */
9136 if (ALL_FIXED_POINT_MODE_P (mode))
9137 goto binop;
9139 /* If first operand is constant, swap them.
9140 Thus the following special case checks need only
9141 check the second operand. */
9142 if (TREE_CODE (treeop0) == INTEGER_CST)
9143 std::swap (treeop0, treeop1);
9145 /* Attempt to return something suitable for generating an
9146 indexed address, for machines that support that. */
9148 if (modifier == EXPAND_SUM && mode == ptr_mode
9149 && tree_fits_shwi_p (treeop1))
9151 tree exp1 = treeop1;
9153 op0 = expand_expr (treeop0, subtarget, VOIDmode,
9154 EXPAND_SUM);
9156 if (!REG_P (op0))
9157 op0 = force_operand (op0, NULL_RTX);
9158 if (!REG_P (op0))
9159 op0 = copy_to_mode_reg (mode, op0);
9161 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
9162 gen_int_mode (tree_to_shwi (exp1),
9163 TYPE_MODE (TREE_TYPE (exp1)))));
9166 if (modifier == EXPAND_STACK_PARM)
9167 target = 0;
9169 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
9170 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
9172 case TRUNC_MOD_EXPR:
9173 case FLOOR_MOD_EXPR:
9174 case CEIL_MOD_EXPR:
9175 case ROUND_MOD_EXPR:
9177 case TRUNC_DIV_EXPR:
9178 case FLOOR_DIV_EXPR:
9179 case CEIL_DIV_EXPR:
9180 case ROUND_DIV_EXPR:
9181 case EXACT_DIV_EXPR:
9183 /* If this is a fixed-point operation, then we cannot use the code
9184 below because "expand_divmod" doesn't support sat/no-sat fixed-point
9185 divisions. */
9186 if (ALL_FIXED_POINT_MODE_P (mode))
9187 goto binop;
9189 if (modifier == EXPAND_STACK_PARM)
9190 target = 0;
9191 /* Possible optimization: compute the dividend with EXPAND_SUM,
9192 then if the divisor is constant we can optimize the case
9193 where some terms of the dividend have coefficients divisible by it. */
9194 expand_operands (treeop0, treeop1,
9195 subtarget, &op0, &op1, EXPAND_NORMAL);
9196 bool mod_p = code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR
9197 || code == CEIL_MOD_EXPR || code == ROUND_MOD_EXPR;
9198 if (SCALAR_INT_MODE_P (mode)
9199 && optimize >= 2
9200 && get_range_pos_neg (treeop0) == 1
9201 && get_range_pos_neg (treeop1) == 1)
9203 /* If both arguments are known to be positive when interpreted
9204 as signed, we can expand it as both signed and unsigned
9205 division or modulo. Choose the cheaper sequence in that case. */
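/* Illustrative example: for x / 8 with x known to be non-negative,
   the unsigned expansion is a single logical right shift, while the
   signed expansion needs an extra bias-and-adjust sequence for
   negative dividends; the cost comparison below picks the shift. */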
9206 bool speed_p = optimize_insn_for_speed_p ();
9207 do_pending_stack_adjust ();
9208 start_sequence ();
9209 rtx uns_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 1);
9210 rtx_insn *uns_insns = get_insns ();
9211 end_sequence ();
9212 start_sequence ();
9213 rtx sgn_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 0);
9214 rtx_insn *sgn_insns = get_insns ();
9215 end_sequence ();
9216 unsigned uns_cost = seq_cost (uns_insns, speed_p);
9217 unsigned sgn_cost = seq_cost (sgn_insns, speed_p);
9219 /* If the costs are the same then use the other factor as a
9220 tie breaker. */
9221 if (uns_cost == sgn_cost)
9223 uns_cost = seq_cost (uns_insns, !speed_p);
9224 sgn_cost = seq_cost (sgn_insns, !speed_p);
9227 if (uns_cost < sgn_cost || (uns_cost == sgn_cost && unsignedp))
9229 emit_insn (uns_insns);
9230 return uns_ret;
9232 emit_insn (sgn_insns);
9233 return sgn_ret;
9235 return expand_divmod (mod_p, code, mode, op0, op1, target, unsignedp);
9237 case RDIV_EXPR:
9238 goto binop;
9240 case MULT_HIGHPART_EXPR:
9241 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
9242 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
9243 gcc_assert (temp);
9244 return temp;
9246 case FIXED_CONVERT_EXPR:
9247 op0 = expand_normal (treeop0);
9248 if (target == 0 || modifier == EXPAND_STACK_PARM)
9249 target = gen_reg_rtx (mode);
9251 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
9252 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
9253 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
9254 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
9255 else
9256 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
9257 return target;
9259 case FIX_TRUNC_EXPR:
9260 op0 = expand_normal (treeop0);
9261 if (target == 0 || modifier == EXPAND_STACK_PARM)
9262 target = gen_reg_rtx (mode);
9263 expand_fix (target, op0, unsignedp);
9264 return target;
9266 case FLOAT_EXPR:
9267 op0 = expand_normal (treeop0);
9268 if (target == 0 || modifier == EXPAND_STACK_PARM)
9269 target = gen_reg_rtx (mode);
9270 /* expand_float can't figure out what to do if FROM has VOIDmode.
9271 So give it the correct mode. With -O, cse will optimize this. */
9272 if (GET_MODE (op0) == VOIDmode)
9273 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
9274 op0);
9275 expand_float (target, op0,
9276 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9277 return target;
9279 case NEGATE_EXPR:
9280 op0 = expand_expr (treeop0, subtarget,
9281 VOIDmode, EXPAND_NORMAL);
9282 if (modifier == EXPAND_STACK_PARM)
9283 target = 0;
9284 temp = expand_unop (mode,
9285 optab_for_tree_code (NEGATE_EXPR, type,
9286 optab_default),
9287 op0, target, 0);
9288 gcc_assert (temp);
9289 return REDUCE_BIT_FIELD (temp);
9291 case ABS_EXPR:
9292 case ABSU_EXPR:
9293 op0 = expand_expr (treeop0, subtarget,
9294 VOIDmode, EXPAND_NORMAL);
9295 if (modifier == EXPAND_STACK_PARM)
9296 target = 0;
9298 /* ABS_EXPR is not valid for complex arguments. */
9299 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9300 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
9302 /* Unsigned abs is simply the operand. Testing here means we don't
9303 risk generating incorrect code below. */
9304 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
9305 return op0;
9307 return expand_abs (mode, op0, target, unsignedp,
9308 safe_from_p (target, treeop0, 1));
9310 case MAX_EXPR:
9311 case MIN_EXPR:
9312 target = original_target;
9313 if (target == 0
9314 || modifier == EXPAND_STACK_PARM
9315 || (MEM_P (target) && MEM_VOLATILE_P (target))
9316 || GET_MODE (target) != mode
9317 || (REG_P (target)
9318 && REGNO (target) < FIRST_PSEUDO_REGISTER))
9319 target = gen_reg_rtx (mode);
9320 expand_operands (treeop0, treeop1,
9321 target, &op0, &op1, EXPAND_NORMAL);
9323 /* First try to do it with a special MIN or MAX instruction.
9324 If that does not win, use a conditional jump to select the proper
9325 value. */
9326 this_optab = optab_for_tree_code (code, type, optab_default);
9327 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9328 OPTAB_WIDEN);
9329 if (temp != 0)
9330 return temp;
9332 if (VECTOR_TYPE_P (type))
9333 gcc_unreachable ();
9335 /* At this point, a MEM target is no longer useful; we will get better
9336 code without it. */
9338 if (! REG_P (target))
9339 target = gen_reg_rtx (mode);
9341 /* If op1 was placed in target, swap op0 and op1. */
9342 if (target != op0 && target == op1)
9343 std::swap (op0, op1);
9345 /* We generate better code and avoid problems with op1 mentioning
9346 target by forcing op1 into a pseudo if it isn't a constant. */
9347 if (! CONSTANT_P (op1))
9348 op1 = force_reg (mode, op1);
9351 enum rtx_code comparison_code;
9352 rtx cmpop1 = op1;
9354 if (code == MAX_EXPR)
9355 comparison_code = unsignedp ? GEU : GE;
9356 else
9357 comparison_code = unsignedp ? LEU : LE;
9359 /* Canonicalize to comparisons against 0. */
9360 if (op1 == const1_rtx)
9362 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
9363 or (a != 0 ? a : 1) for unsigned.
9364 For MIN we are safe converting (a <= 1 ? a : 1)
9365 into (a <= 0 ? a : 1) */
9366 cmpop1 = const0_rtx;
9367 if (code == MAX_EXPR)
9368 comparison_code = unsignedp ? NE : GT;
9370 if (op1 == constm1_rtx && !unsignedp)
9372 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
9373 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
9374 cmpop1 = const0_rtx;
9375 if (code == MIN_EXPR)
9376 comparison_code = LT;
9379 /* Use a conditional move if possible. */
9380 if (can_conditionally_move_p (mode))
9382 rtx insn;
9384 start_sequence ();
9386 /* Try to emit the conditional move. */
9387 insn = emit_conditional_move (target, comparison_code,
9388 op0, cmpop1, mode,
9389 op0, op1, mode,
9390 unsignedp);
9392 /* If we could do the conditional move, emit the sequence,
9393 and return. */
9394 if (insn)
9396 rtx_insn *seq = get_insns ();
9397 end_sequence ();
9398 emit_insn (seq);
9399 return target;
9402 /* Otherwise discard the sequence and fall back to code with
9403 branches. */
9404 end_sequence ();
9407 if (target != op0)
9408 emit_move_insn (target, op0);
9410 lab = gen_label_rtx ();
9411 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
9412 unsignedp, mode, NULL_RTX, NULL, lab,
9413 profile_probability::uninitialized ());
9415 emit_move_insn (target, op1);
9416 emit_label (lab);
9417 return target;
9419 case BIT_NOT_EXPR:
9420 op0 = expand_expr (treeop0, subtarget,
9421 VOIDmode, EXPAND_NORMAL);
9422 if (modifier == EXPAND_STACK_PARM)
9423 target = 0;
9424 /* In case we have to reduce the result to bit-field precision
9425 for an unsigned bit-field, expand this as XOR with a proper
9426 constant instead. */
9427 if (reduce_bit_field && TYPE_UNSIGNED (type))
9429 int_mode = SCALAR_INT_TYPE_MODE (type);
9430 wide_int mask = wi::mask (TYPE_PRECISION (type),
9431 false, GET_MODE_PRECISION (int_mode));
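/* Illustrative example: for a 3-bit unsigned bit-field type, MASK is
   0b111, so ~X is computed as X ^ 7 and the result already has the
   field's precision. */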
9433 temp = expand_binop (int_mode, xor_optab, op0,
9434 immed_wide_int_const (mask, int_mode),
9435 target, 1, OPTAB_LIB_WIDEN);
9437 else
9438 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
9439 gcc_assert (temp);
9440 return temp;
9442 /* ??? Can optimize bitwise operations with one arg constant.
9443 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
9444 and (a bitwise1 b) bitwise2 b (etc)
9445 but that is probably not worth while. */
9447 case BIT_AND_EXPR:
9448 case BIT_IOR_EXPR:
9449 case BIT_XOR_EXPR:
9450 goto binop;
9452 case LROTATE_EXPR:
9453 case RROTATE_EXPR:
9454 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
9455 || type_has_mode_precision_p (type));
9456 /* fall through */
9458 case LSHIFT_EXPR:
9459 case RSHIFT_EXPR:
9461 /* If this is a fixed-point operation, then we cannot use the code
9462 below because "expand_shift" doesn't support sat/no-sat fixed-point
9463 shifts. */
9464 if (ALL_FIXED_POINT_MODE_P (mode))
9465 goto binop;
9467 if (! safe_from_p (subtarget, treeop1, 1))
9468 subtarget = 0;
9469 if (modifier == EXPAND_STACK_PARM)
9470 target = 0;
9471 op0 = expand_expr (treeop0, subtarget,
9472 VOIDmode, EXPAND_NORMAL);
9474 /* Left shift optimization when shifting across the word_size boundary.
9476 If mode == GET_MODE_WIDER_MODE (word_mode), then there is normally
9477 no native instruction to support a left shift in this wide mode.
9478 Consider the following scenario:
9480 Type A = (Type) B << C
9482 |<          T          >|
9483 | dest_high  | dest_low |
9485 | word_size  |
9487 If the shift amount C causes B to be shifted across the word-size
9488 boundary, i.e. part of B is shifted into the high half of the
9489 destination register while part of B remains in the low
9490 half, then GCC uses the following left-shift expansion
9491 logic:
9493 1. Initialize dest_low to B.
9494 2. Initialize every bit of dest_high to the sign bit of B.
9495 3. Logical left shift dest_low by C bits to finalize dest_low.
9496 The value of dest_low before this shift is kept in a temp D.
9497 4. Logical left shift dest_high by C.
9498 5. Logical right shift D by (word_size - C).
9499 6. Or the results of 4 and 5 to finalize dest_high.
9501 However, by inspecting the gimple statements, if operand B
9502 comes from a sign extension, we can simplify the above
9503 expansion logic into:
9505 1. dest_high = src_low >> (word_size - C).
9506 2. dest_low = src_low << C.
9508 A single arithmetic right shift accomplishes the purpose of
9509 steps 2, 4, 5 and 6, reducing the number of steps needed
9510 from 6 to 2.
9512 The case is similar for zero extension, except that we
9513 initialize dest_high to zero rather than to copies of the sign
9514 bit of B. Furthermore, we need to use a logical right shift
9515 in this case.
9517 The choice of sign-extension versus zero-extension is
9518 determined entirely by whether or not B is signed and is
9519 independent of the current setting of unsignedp. */
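/* Worked example (hypothetical, for illustration): on a 32-bit
   target with word_mode == SImode and mode == DImode,
   short s;
   long long x = (long long) s << 20;
   has C = 20 with B sign-extended from 16 bits, so C + 16 >= 32 and
   the shifted value straddles the word boundary. The simplified
   expansion is just
   dest_high = (int) s >> (32 - 20); // arithmetic shift
   dest_low = (int) s << 20;
   instead of the six-step double-word sequence above. */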
9521 temp = NULL_RTX;
9522 if (code == LSHIFT_EXPR
9523 && target
9524 && REG_P (target)
9525 && GET_MODE_2XWIDER_MODE (word_mode).exists (&int_mode)
9526 && mode == int_mode
9527 && TREE_CONSTANT (treeop1)
9528 && TREE_CODE (treeop0) == SSA_NAME)
9530 gimple *def = SSA_NAME_DEF_STMT (treeop0);
9531 if (is_gimple_assign (def)
9532 && gimple_assign_rhs_code (def) == NOP_EXPR)
9534 scalar_int_mode rmode = SCALAR_INT_TYPE_MODE
9535 (TREE_TYPE (gimple_assign_rhs1 (def)));
9537 if (GET_MODE_SIZE (rmode) < GET_MODE_SIZE (int_mode)
9538 && TREE_INT_CST_LOW (treeop1) < GET_MODE_BITSIZE (word_mode)
9539 && ((TREE_INT_CST_LOW (treeop1) + GET_MODE_BITSIZE (rmode))
9540 >= GET_MODE_BITSIZE (word_mode)))
9542 rtx_insn *seq, *seq_old;
9543 poly_uint64 high_off = subreg_highpart_offset (word_mode,
9544 int_mode);
9545 bool extend_unsigned
9546 = TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (def)));
9547 rtx low = lowpart_subreg (word_mode, op0, int_mode);
9548 rtx dest_low = lowpart_subreg (word_mode, target, int_mode);
9549 rtx dest_high = simplify_gen_subreg (word_mode, target,
9550 int_mode, high_off);
9551 HOST_WIDE_INT ramount = (BITS_PER_WORD
9552 - TREE_INT_CST_LOW (treeop1));
9553 tree rshift = build_int_cst (TREE_TYPE (treeop1), ramount);
9555 start_sequence ();
9556 /* dest_high = src_low >> (word_size - C). */
9557 temp = expand_variable_shift (RSHIFT_EXPR, word_mode, low,
9558 rshift, dest_high,
9559 extend_unsigned);
9560 if (temp != dest_high)
9561 emit_move_insn (dest_high, temp);
9563 /* dest_low = src_low << C. */
9564 temp = expand_variable_shift (LSHIFT_EXPR, word_mode, low,
9565 treeop1, dest_low, unsignedp);
9566 if (temp != dest_low)
9567 emit_move_insn (dest_low, temp);
9569 seq = get_insns ();
9570 end_sequence ();
9571 temp = target;
9573 if (have_insn_for (ASHIFT, int_mode))
9575 bool speed_p = optimize_insn_for_speed_p ();
9576 start_sequence ();
9577 rtx ret_old = expand_variable_shift (code, int_mode,
9578 op0, treeop1,
9579 target,
9580 unsignedp);
9582 seq_old = get_insns ();
9583 end_sequence ();
9584 if (seq_cost (seq, speed_p)
9585 >= seq_cost (seq_old, speed_p))
9587 seq = seq_old;
9588 temp = ret_old;
9591 emit_insn (seq);
9596 if (temp == NULL_RTX)
9597 temp = expand_variable_shift (code, mode, op0, treeop1, target,
9598 unsignedp);
9599 if (code == LSHIFT_EXPR)
9600 temp = REDUCE_BIT_FIELD (temp);
9601 return temp;
9604 /* Could determine the answer when only additive constants differ. Also,
9605 the addition of one can be handled by changing the condition. */
9606 case LT_EXPR:
9607 case LE_EXPR:
9608 case GT_EXPR:
9609 case GE_EXPR:
9610 case EQ_EXPR:
9611 case NE_EXPR:
9612 case UNORDERED_EXPR:
9613 case ORDERED_EXPR:
9614 case UNLT_EXPR:
9615 case UNLE_EXPR:
9616 case UNGT_EXPR:
9617 case UNGE_EXPR:
9618 case UNEQ_EXPR:
9619 case LTGT_EXPR:
9621 temp = do_store_flag (ops,
9622 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9623 tmode != VOIDmode ? tmode : mode);
9624 if (temp)
9625 return temp;
9627 /* Use a compare and a jump for BLKmode comparisons, or for function
9628 type comparisons if have_canonicalize_funcptr_for_compare. */
9630 if ((target == 0
9631 || modifier == EXPAND_STACK_PARM
9632 || ! safe_from_p (target, treeop0, 1)
9633 || ! safe_from_p (target, treeop1, 1)
9634 /* Make sure we don't have a hard reg (such as function's return
9635 value) live across basic blocks, if not optimizing. */
9636 || (!optimize && REG_P (target)
9637 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9638 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9640 emit_move_insn (target, const0_rtx);
9642 rtx_code_label *lab1 = gen_label_rtx ();
9643 jumpifnot_1 (code, treeop0, treeop1, lab1,
9644 profile_probability::uninitialized ());
9646 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
9647 emit_move_insn (target, constm1_rtx);
9648 else
9649 emit_move_insn (target, const1_rtx);
9651 emit_label (lab1);
9652 return target;
9654 case COMPLEX_EXPR:
9655 /* Get the rtx for the operands. */
9656 op0 = expand_normal (treeop0);
9657 op1 = expand_normal (treeop1);
9659 if (!target)
9660 target = gen_reg_rtx (TYPE_MODE (type));
9661 else
9662 /* If target overlaps with op1, then either we need to force
9663 op1 into a pseudo (if target also overlaps with op0),
9664 or write the complex parts in reverse order. */
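/* Illustrative example: with target = (concat:CM r i) and op1 == r,
   writing the real part first would clobber r while it is still
   needed as the imaginary-part source; if op0 does not also overlap
   i, writing the parts in reverse order avoids a temporary. */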
9665 switch (GET_CODE (target))
9667 case CONCAT:
9668 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
9670 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
9672 complex_expr_force_op1:
9673 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
9674 emit_move_insn (temp, op1);
9675 op1 = temp;
9676 break;
9678 complex_expr_swap_order:
9679 /* Move the imaginary (op1) and real (op0) parts to their
9680 location. */
9681 write_complex_part (target, op1, true);
9682 write_complex_part (target, op0, false);
9684 return target;
9686 break;
9687 case MEM:
9688 temp = adjust_address_nv (target,
9689 GET_MODE_INNER (GET_MODE (target)), 0);
9690 if (reg_overlap_mentioned_p (temp, op1))
9692 scalar_mode imode = GET_MODE_INNER (GET_MODE (target));
9693 temp = adjust_address_nv (target, imode,
9694 GET_MODE_SIZE (imode));
9695 if (reg_overlap_mentioned_p (temp, op0))
9696 goto complex_expr_force_op1;
9697 goto complex_expr_swap_order;
9699 break;
9700 default:
9701 if (reg_overlap_mentioned_p (target, op1))
9703 if (reg_overlap_mentioned_p (target, op0))
9704 goto complex_expr_force_op1;
9705 goto complex_expr_swap_order;
9707 break;
9710 /* Move the real (op0) and imaginary (op1) parts to their location. */
9711 write_complex_part (target, op0, false);
9712 write_complex_part (target, op1, true);
9714 return target;
9716 case WIDEN_SUM_EXPR:
9718 tree oprnd0 = treeop0;
9719 tree oprnd1 = treeop1;
9721 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9722 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9723 target, unsignedp);
9724 return target;
9727 case VEC_UNPACK_HI_EXPR:
9728 case VEC_UNPACK_LO_EXPR:
9729 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
9730 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
9732 op0 = expand_normal (treeop0);
9733 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9734 target, unsignedp);
9735 gcc_assert (temp);
9736 return temp;
9739 case VEC_UNPACK_FLOAT_HI_EXPR:
9740 case VEC_UNPACK_FLOAT_LO_EXPR:
9742 op0 = expand_normal (treeop0);
9743 /* The signedness is determined from the input operand. */
9744 temp = expand_widen_pattern_expr
9745 (ops, op0, NULL_RTX, NULL_RTX,
9746 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9748 gcc_assert (temp);
9749 return temp;
9752 case VEC_WIDEN_MULT_HI_EXPR:
9753 case VEC_WIDEN_MULT_LO_EXPR:
9754 case VEC_WIDEN_MULT_EVEN_EXPR:
9755 case VEC_WIDEN_MULT_ODD_EXPR:
9756 case VEC_WIDEN_LSHIFT_HI_EXPR:
9757 case VEC_WIDEN_LSHIFT_LO_EXPR:
9758 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9759 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9760 target, unsignedp);
9761 gcc_assert (target);
9762 return target;
9764 case VEC_PACK_SAT_EXPR:
9765 case VEC_PACK_FIX_TRUNC_EXPR:
9766 mode = TYPE_MODE (TREE_TYPE (treeop0));
9767 goto binop;
9769 case VEC_PACK_TRUNC_EXPR:
9770 if (VECTOR_BOOLEAN_TYPE_P (type)
9771 && VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (treeop0))
9772 && mode == TYPE_MODE (TREE_TYPE (treeop0))
9773 && SCALAR_INT_MODE_P (mode))
9775 class expand_operand eops[4];
9776 machine_mode imode = TYPE_MODE (TREE_TYPE (treeop0));
9777 expand_operands (treeop0, treeop1,
9778 subtarget, &op0, &op1, EXPAND_NORMAL);
9779 this_optab = vec_pack_sbool_trunc_optab;
9780 enum insn_code icode = optab_handler (this_optab, imode);
9781 create_output_operand (&eops[0], target, mode);
9782 create_convert_operand_from (&eops[1], op0, imode, false);
9783 create_convert_operand_from (&eops[2], op1, imode, false);
9784 temp = GEN_INT (TYPE_VECTOR_SUBPARTS (type).to_constant ());
9785 create_input_operand (&eops[3], temp, imode);
9786 expand_insn (icode, 4, eops);
9787 return eops[0].value;
9789 mode = TYPE_MODE (TREE_TYPE (treeop0));
9790 goto binop;
9792 case VEC_PACK_FLOAT_EXPR:
9793 mode = TYPE_MODE (TREE_TYPE (treeop0));
9794 expand_operands (treeop0, treeop1,
9795 subtarget, &op0, &op1, EXPAND_NORMAL);
9796 this_optab = optab_for_tree_code (code, TREE_TYPE (treeop0),
9797 optab_default);
9798 target = expand_binop (mode, this_optab, op0, op1, target,
9799 TYPE_UNSIGNED (TREE_TYPE (treeop0)),
9800 OPTAB_LIB_WIDEN);
9801 gcc_assert (target);
9802 return target;
9804 case VEC_PERM_EXPR:
9806 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9807 vec_perm_builder sel;
9808 if (TREE_CODE (treeop2) == VECTOR_CST
9809 && tree_to_vec_perm_builder (&sel, treeop2))
9811 machine_mode sel_mode = TYPE_MODE (TREE_TYPE (treeop2));
9812 temp = expand_vec_perm_const (mode, op0, op1, sel,
9813 sel_mode, target);
9815 else
9817 op2 = expand_normal (treeop2);
9818 temp = expand_vec_perm_var (mode, op0, op1, op2, target);
9820 gcc_assert (temp);
9821 return temp;
9824 case DOT_PROD_EXPR:
9826 tree oprnd0 = treeop0;
9827 tree oprnd1 = treeop1;
9828 tree oprnd2 = treeop2;
9830 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9831 op2 = expand_normal (oprnd2);
9832 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9833 target, unsignedp);
9834 return target;
9837 case SAD_EXPR:
9839 tree oprnd0 = treeop0;
9840 tree oprnd1 = treeop1;
9841 tree oprnd2 = treeop2;
9843 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9844 op2 = expand_normal (oprnd2);
9845 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9846 target, unsignedp);
9847 return target;
9850 case REALIGN_LOAD_EXPR:
9852 tree oprnd0 = treeop0;
9853 tree oprnd1 = treeop1;
9854 tree oprnd2 = treeop2;
9856 this_optab = optab_for_tree_code (code, type, optab_default);
9857 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9858 op2 = expand_normal (oprnd2);
9859 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9860 target, unsignedp);
9861 gcc_assert (temp);
9862 return temp;
9865 case COND_EXPR:
9867 /* A COND_EXPR with its type being VOID_TYPE represents a
9868 conditional jump and is handled in
9869 expand_gimple_cond_expr. */
9870 gcc_assert (!VOID_TYPE_P (type));
9872 /* Note that COND_EXPRs whose type is a structure or union
9873 are required to be constructed to contain assignments of
9874 a temporary variable, so that we can evaluate them here
9875 for side effect only. If type is void, we must do likewise. */
9877 gcc_assert (!TREE_ADDRESSABLE (type)
9878 && !ignore
9879 && TREE_TYPE (treeop1) != void_type_node
9880 && TREE_TYPE (treeop2) != void_type_node);
9882 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9883 if (temp)
9884 return temp;
9886 /* If we are not to produce a result, we have no target. Otherwise,
9887 if a target was specified use it; it will not be used as an
9888 intermediate target unless it is safe. If no target, use a
9889 temporary. */
9891 if (modifier != EXPAND_STACK_PARM
9892 && original_target
9893 && safe_from_p (original_target, treeop0, 1)
9894 && GET_MODE (original_target) == mode
9895 && !MEM_P (original_target))
9896 temp = original_target;
9897 else
9898 temp = assign_temp (type, 0, 1);
9900 do_pending_stack_adjust ();
9901 NO_DEFER_POP;
9902 rtx_code_label *lab0 = gen_label_rtx ();
9903 rtx_code_label *lab1 = gen_label_rtx ();
9904 jumpifnot (treeop0, lab0,
9905 profile_probability::uninitialized ());
9906 store_expr (treeop1, temp,
9907 modifier == EXPAND_STACK_PARM,
9908 false, false);
9910 emit_jump_insn (targetm.gen_jump (lab1));
9911 emit_barrier ();
9912 emit_label (lab0);
9913 store_expr (treeop2, temp,
9914 modifier == EXPAND_STACK_PARM,
9915 false, false);
9917 emit_label (lab1);
9918 OK_DEFER_POP;
9919 return temp;
9922 case VEC_DUPLICATE_EXPR:
9923 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
9924 target = expand_vector_broadcast (mode, op0);
9925 gcc_assert (target);
9926 return target;
9928 case VEC_SERIES_EXPR:
9929 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, modifier);
9930 return expand_vec_series_expr (mode, op0, op1, target);
9932 case BIT_INSERT_EXPR:
9934 unsigned bitpos = tree_to_uhwi (treeop2);
9935 unsigned bitsize;
9936 if (INTEGRAL_TYPE_P (TREE_TYPE (treeop1)))
9937 bitsize = TYPE_PRECISION (TREE_TYPE (treeop1));
9938 else
9939 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (treeop1)));
9940 op0 = expand_normal (treeop0);
9941 op1 = expand_normal (treeop1);
9942 rtx dst = gen_reg_rtx (mode);
9943 emit_move_insn (dst, op0);
9944 store_bit_field (dst, bitsize, bitpos, 0, 0,
9945 TYPE_MODE (TREE_TYPE (treeop1)), op1, false);
9946 return dst;
9949 default:
9950 gcc_unreachable ();
9953 /* Here to do an ordinary binary operator. */
9954 binop:
9955 expand_operands (treeop0, treeop1,
9956 subtarget, &op0, &op1, EXPAND_NORMAL);
9957 binop2:
9958 this_optab = optab_for_tree_code (code, type, optab_default);
9959 binop3:
9960 if (modifier == EXPAND_STACK_PARM)
9961 target = 0;
9962 temp = expand_binop (mode, this_optab, op0, op1, target,
9963 unsignedp, OPTAB_LIB_WIDEN);
9964 gcc_assert (temp);
9965 /* Bitwise operations do not need bitfield reduction as we expect
9966 their operands to be properly truncated. */
9967 if (code == BIT_XOR_EXPR
9968 || code == BIT_AND_EXPR
9969 || code == BIT_IOR_EXPR)
9970 return temp;
9971 return REDUCE_BIT_FIELD (temp);
9973 #undef REDUCE_BIT_FIELD
9976 /* Return TRUE if expression STMT is suitable for replacement.
9977 Never consider memory loads as replaceable, because those never
9978 lead to constant expressions. */
9980 static bool
9981 stmt_is_replaceable_p (gimple *stmt)
9983 if (ssa_is_replaceable_p (stmt))
9985 /* Don't move around loads. */
9986 if (!gimple_assign_single_p (stmt)
9987 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9988 return true;
9990 return false;
9994 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9995 enum expand_modifier modifier, rtx *alt_rtl,
9996 bool inner_reference_p)
9998 rtx op0, op1, temp, decl_rtl;
9999 tree type;
10000 int unsignedp;
10001 machine_mode mode, dmode;
10002 enum tree_code code = TREE_CODE (exp);
10003 rtx subtarget, original_target;
10004 int ignore;
10005 tree context;
10006 bool reduce_bit_field;
10007 location_t loc = EXPR_LOCATION (exp);
10008 struct separate_ops ops;
10009 tree treeop0, treeop1, treeop2;
10010 tree ssa_name = NULL_TREE;
10011 gimple *g;
10013 type = TREE_TYPE (exp);
10014 mode = TYPE_MODE (type);
10015 unsignedp = TYPE_UNSIGNED (type);
10017 treeop0 = treeop1 = treeop2 = NULL_TREE;
10018 if (!VL_EXP_CLASS_P (exp))
10019 switch (TREE_CODE_LENGTH (code))
10021 default:
10022 case 3: treeop2 = TREE_OPERAND (exp, 2); /* FALLTHRU */
10023 case 2: treeop1 = TREE_OPERAND (exp, 1); /* FALLTHRU */
10024 case 1: treeop0 = TREE_OPERAND (exp, 0); /* FALLTHRU */
10025 case 0: break;
10027 ops.code = code;
10028 ops.type = type;
10029 ops.op0 = treeop0;
10030 ops.op1 = treeop1;
10031 ops.op2 = treeop2;
10032 ops.location = loc;
10034 ignore = (target == const0_rtx
10035 || ((CONVERT_EXPR_CODE_P (code)
10036 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
10037 && TREE_CODE (type) == VOID_TYPE));
10039 /* An operation in what may be a bit-field type needs the
10040 result to be reduced to the precision of the bit-field type,
10041 which is narrower than that of the type's mode. */
10042 reduce_bit_field = (!ignore
10043 && INTEGRAL_TYPE_P (type)
10044 && !type_has_mode_precision_p (type));
10046 /* If we are going to ignore this result, we need only do something
10047 if there is a side-effect somewhere in the expression. If there
10048 is, short-circuit the most common cases here. Note that we must
10049 not call expand_expr with anything but const0_rtx in case this
10050 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
10052 if (ignore)
10054 if (! TREE_SIDE_EFFECTS (exp))
10055 return const0_rtx;
10057 /* Ensure we reference a volatile object even if value is ignored, but
10058 don't do this if all we are doing is taking its address. */
10059 if (TREE_THIS_VOLATILE (exp)
10060 && TREE_CODE (exp) != FUNCTION_DECL
10061 && mode != VOIDmode && mode != BLKmode
10062 && modifier != EXPAND_CONST_ADDRESS)
10064 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
10065 if (MEM_P (temp))
10066 copy_to_reg (temp);
10067 return const0_rtx;
10070 if (TREE_CODE_CLASS (code) == tcc_unary
10071 || code == BIT_FIELD_REF
10072 || code == COMPONENT_REF
10073 || code == INDIRECT_REF)
10074 return expand_expr (treeop0, const0_rtx, VOIDmode,
10075 modifier);
10077 else if (TREE_CODE_CLASS (code) == tcc_binary
10078 || TREE_CODE_CLASS (code) == tcc_comparison
10079 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
10081 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
10082 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
10083 return const0_rtx;
10086 target = 0;
10089 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
10090 target = 0;
10092 /* Use subtarget as the target for operand 0 of a binary operation. */
10093 subtarget = get_subtarget (target);
10094 original_target = target;
10096 switch (code)
10098 case LABEL_DECL:
10100 tree function = decl_function_context (exp);
10102 temp = label_rtx (exp);
10103 temp = gen_rtx_LABEL_REF (Pmode, temp);
10105 if (function != current_function_decl
10106 && function != 0)
10107 LABEL_REF_NONLOCAL_P (temp) = 1;
10109 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
10110 return temp;
10113 case SSA_NAME:
10114 /* ??? ivopts calls expander, without any preparation from
10115 out-of-ssa. So fake instructions as if this was an access to the
10116 base variable. This unnecessarily allocates a pseudo, see how we can
10117 reuse it, if partition base vars have it set already. */
10118 if (!currently_expanding_to_rtl)
10120 tree var = SSA_NAME_VAR (exp);
10121 if (var && DECL_RTL_SET_P (var))
10122 return DECL_RTL (var);
10123 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
10124 LAST_VIRTUAL_REGISTER + 1);
10127 g = get_gimple_for_ssa_name (exp);
10128 /* For EXPAND_INITIALIZER try harder to get something simpler. */
10129 if (g == NULL
10130 && modifier == EXPAND_INITIALIZER
10131 && !SSA_NAME_IS_DEFAULT_DEF (exp)
10132 && (optimize || !SSA_NAME_VAR (exp)
10133 || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
10134 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
10135 g = SSA_NAME_DEF_STMT (exp);
10136 if (g)
10138 rtx r;
10139 location_t saved_loc = curr_insn_location ();
10140 loc = gimple_location (g);
10141 if (loc != UNKNOWN_LOCATION)
10142 set_curr_insn_location (loc);
10143 ops.code = gimple_assign_rhs_code (g);
10144 switch (get_gimple_rhs_class (ops.code))
10146 case GIMPLE_TERNARY_RHS:
10147 ops.op2 = gimple_assign_rhs3 (g);
10148 /* Fallthru */
10149 case GIMPLE_BINARY_RHS:
10150 ops.op1 = gimple_assign_rhs2 (g);
10152 /* Try to expand conditional compare. */
10153 if (targetm.gen_ccmp_first)
10155 gcc_checking_assert (targetm.gen_ccmp_next != NULL);
10156 r = expand_ccmp_expr (g, mode);
10157 if (r)
10158 break;
10160 /* Fallthru */
10161 case GIMPLE_UNARY_RHS:
10162 ops.op0 = gimple_assign_rhs1 (g);
10163 ops.type = TREE_TYPE (gimple_assign_lhs (g));
10164 ops.location = loc;
10165 r = expand_expr_real_2 (&ops, target, tmode, modifier);
10166 break;
10167 case GIMPLE_SINGLE_RHS:
10169 r = expand_expr_real (gimple_assign_rhs1 (g), target,
10170 tmode, modifier, alt_rtl,
10171 inner_reference_p);
10172 break;
10174 default:
10175 gcc_unreachable ();
10177 set_curr_insn_location (saved_loc);
10178 if (REG_P (r) && !REG_EXPR (r))
10179 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
10180 return r;
10183 ssa_name = exp;
10184 decl_rtl = get_rtx_for_ssa_name (ssa_name);
10185 exp = SSA_NAME_VAR (ssa_name);
10186 goto expand_decl_rtl;
10188 case PARM_DECL:
10189 case VAR_DECL:
10190 /* If a static var's type was incomplete when the decl was written,
10191 but the type is complete now, lay out the decl now. */
10192 if (DECL_SIZE (exp) == 0
10193 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
10194 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
10195 layout_decl (exp, 0);
10197 /* fall through */
10199 case FUNCTION_DECL:
10200 case RESULT_DECL:
10201 decl_rtl = DECL_RTL (exp);
10202 expand_decl_rtl:
10203 gcc_assert (decl_rtl);
10205 /* DECL_MODE might change when TYPE_MODE depends on attribute target
10206 settings for VECTOR_TYPE_P that might switch for the function. */
10207 if (currently_expanding_to_rtl
10208 && code == VAR_DECL && MEM_P (decl_rtl)
10209 && VECTOR_TYPE_P (type) && exp && DECL_MODE (exp) != mode)
10210 decl_rtl = change_address (decl_rtl, TYPE_MODE (type), 0);
10211 else
10212 decl_rtl = copy_rtx (decl_rtl);
10214 /* Record writes to register variables. */
10215 if (modifier == EXPAND_WRITE
10216 && REG_P (decl_rtl)
10217 && HARD_REGISTER_P (decl_rtl))
10218 add_to_hard_reg_set (&crtl->asm_clobbers,
10219 GET_MODE (decl_rtl), REGNO (decl_rtl));
10221 /* Ensure the variable is marked as used even if it doesn't go
10222 through a parser. If it hasn't been used yet, write out an
10223 external definition. */
10224 if (exp)
10225 TREE_USED (exp) = 1;
10227 /* Show we haven't gotten RTL for this yet. */
10228 temp = 0;
10230 /* Variables inherited from containing functions should have
10231 been lowered by this point. */
10232 if (exp)
10233 context = decl_function_context (exp);
10234 gcc_assert (!exp
10235 || SCOPE_FILE_SCOPE_P (context)
10236 || context == current_function_decl
10237 || TREE_STATIC (exp)
10238 || DECL_EXTERNAL (exp)
10239 /* ??? C++ creates functions that are not TREE_STATIC. */
10240 || TREE_CODE (exp) == FUNCTION_DECL);
10242 /* This is the case of an array whose size is to be determined
10243 from its initializer, while the initializer is still being parsed.
10244 ??? We aren't parsing while expanding anymore. */
10246 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
10247 temp = validize_mem (decl_rtl);
10249 /* If DECL_RTL is memory, we are in the normal case; if the
10250 address is not valid, get the address into a register. */
10252 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
10254 if (alt_rtl)
10255 *alt_rtl = decl_rtl;
10256 decl_rtl = use_anchored_address (decl_rtl);
10257 if (modifier != EXPAND_CONST_ADDRESS
10258 && modifier != EXPAND_SUM
10259 && !memory_address_addr_space_p (exp ? DECL_MODE (exp)
10260 : GET_MODE (decl_rtl),
10261 XEXP (decl_rtl, 0),
10262 MEM_ADDR_SPACE (decl_rtl)))
10263 temp = replace_equiv_address (decl_rtl,
10264 copy_rtx (XEXP (decl_rtl, 0)));
10267 /* If we got something, return it. But first, set the alignment
10268 if the address is a register. */
10269 if (temp != 0)
10271 if (exp && MEM_P (temp) && REG_P (XEXP (temp, 0)))
10272 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
10274 else if (MEM_P (decl_rtl))
10275 temp = decl_rtl;
10277 if (temp != 0)
10279 if (MEM_P (temp)
10280 && modifier != EXPAND_WRITE
10281 && modifier != EXPAND_MEMORY
10282 && modifier != EXPAND_INITIALIZER
10283 && modifier != EXPAND_CONST_ADDRESS
10284 && modifier != EXPAND_SUM
10285 && !inner_reference_p
10286 && mode != BLKmode
10287 && MEM_ALIGN (temp) < GET_MODE_ALIGNMENT (mode))
10288 temp = expand_misaligned_mem_ref (temp, mode, unsignedp,
10289 MEM_ALIGN (temp), NULL_RTX, NULL);
10291 return temp;
10294 if (exp)
10295 dmode = DECL_MODE (exp);
10296 else
10297 dmode = TYPE_MODE (TREE_TYPE (ssa_name));
10299 /* If the mode of DECL_RTL does not match that of the decl,
10300 there are two cases: we are dealing with a BLKmode value
10301 that is returned in a register, or we are dealing with
10302 a promoted value. In the latter case, return a SUBREG
10303 of the wanted mode, but mark it so that we know that it
10304 was already extended. */
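/* Illustrative example: on a target that promotes SImode values to
   DImode registers, DECL_RTL is a DImode REG while the decl's mode
   is SImode; the code below returns (subreg:SI (reg:DI ...)) with
   SUBREG_PROMOTED_VAR_P set so later users can omit the redundant
   extension. */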
10305 if (REG_P (decl_rtl)
10306 && dmode != BLKmode
10307 && GET_MODE (decl_rtl) != dmode)
10309 machine_mode pmode;
10311 /* Get the signedness to be used for this variable. Ensure we get
10312 the same mode we got when the variable was declared. */
10313 if (code != SSA_NAME)
10314 pmode = promote_decl_mode (exp, &unsignedp);
10315 else if ((g = SSA_NAME_DEF_STMT (ssa_name))
10316 && gimple_code (g) == GIMPLE_CALL
10317 && !gimple_call_internal_p (g))
10318 pmode = promote_function_mode (type, mode, &unsignedp,
10319 gimple_call_fntype (g),
10320 2);
10321 else
10322 pmode = promote_ssa_mode (ssa_name, &unsignedp);
10323 gcc_assert (GET_MODE (decl_rtl) == pmode);
10325 temp = gen_lowpart_SUBREG (mode, decl_rtl);
10326 SUBREG_PROMOTED_VAR_P (temp) = 1;
10327 SUBREG_PROMOTED_SET (temp, unsignedp);
10328 return temp;
10331 return decl_rtl;
10333 case INTEGER_CST:
10335 /* Given that TYPE_PRECISION (type) is not always equal to
10336 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
10337 the former to the latter according to the signedness of the
10338 type. */
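/* Illustrative example: a signed 1-bit bit-field type holding -1 has
   TYPE_PRECISION 1 but QImode precision 8; wi::to_wide sign-extends
   the value, yielding (const_int -1) rather than (const_int 1). */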
10339 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (type);
10340 temp = immed_wide_int_const
10341 (wi::to_wide (exp, GET_MODE_PRECISION (int_mode)), int_mode);
10342 return temp;
10345 case VECTOR_CST:
10347 tree tmp = NULL_TREE;
10348 if (VECTOR_MODE_P (mode))
10349 return const_vector_from_tree (exp);
10350 scalar_int_mode int_mode;
10351 if (is_int_mode (mode, &int_mode))
10353 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
10354 return const_scalar_mask_from_tree (int_mode, exp);
10355 else
10357 tree type_for_mode
10358 = lang_hooks.types.type_for_mode (int_mode, 1);
10359 if (type_for_mode)
10360 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR,
10361 type_for_mode, exp);
10364 if (!tmp)
10366 vec<constructor_elt, va_gc> *v;
10367 /* Constructors need to be fixed-length. FIXME. */
10368 unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
10369 vec_alloc (v, nunits);
10370 for (unsigned int i = 0; i < nunits; ++i)
10371 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
10372 tmp = build_constructor (type, v);
10374 return expand_expr (tmp, ignore ? const0_rtx : target,
10375 tmode, modifier);
10378 case CONST_DECL:
10379 if (modifier == EXPAND_WRITE)
10381 /* Writing into CONST_DECL is always invalid, but handle it
10382 gracefully. */
10383 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
10384 scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
10385 op0 = expand_expr_addr_expr_1 (exp, NULL_RTX, address_mode,
10386 EXPAND_NORMAL, as);
10387 op0 = memory_address_addr_space (mode, op0, as);
10388 temp = gen_rtx_MEM (mode, op0);
10389 set_mem_addr_space (temp, as);
10390 return temp;
10392 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
10394 case REAL_CST:
10395 /* If optimized, generate immediate CONST_DOUBLE
10396 which will be turned into memory by reload if necessary.
10398 We used to force a register so that loop.c could see it. But
10399 this does not allow gen_* patterns to perform optimizations with
10400 the constants. It also produces two insns in cases like "x = 1.0;".
10401 On most machines, floating-point constants are not permitted in
10402 many insns, so we'd end up copying it to a register in any case.
10404 Now, we do the copying in expand_binop, if appropriate. */
10405 return const_double_from_real_value (TREE_REAL_CST (exp),
10406 TYPE_MODE (TREE_TYPE (exp)));
10408 case FIXED_CST:
10409 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
10410 TYPE_MODE (TREE_TYPE (exp)));
10412 case COMPLEX_CST:
10413 /* Handle evaluating a complex constant in a CONCAT target. */
10414 if (original_target && GET_CODE (original_target) == CONCAT)
10416 rtx rtarg, itarg;
10418 mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
10419 rtarg = XEXP (original_target, 0);
10420 itarg = XEXP (original_target, 1);
10422 /* Move the real and imaginary parts separately. */
10423 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
10424 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
10426 if (op0 != rtarg)
10427 emit_move_insn (rtarg, op0);
10428 if (op1 != itarg)
10429 emit_move_insn (itarg, op1);
10431 return original_target;
10434 /* fall through */
10436 case STRING_CST:
10437 temp = expand_expr_constant (exp, 1, modifier);
10439 /* temp contains a constant address.
10440 On RISC machines where a constant address isn't valid,
10441 make some insns to get that address into a register. */
10442 if (modifier != EXPAND_CONST_ADDRESS
10443 && modifier != EXPAND_INITIALIZER
10444 && modifier != EXPAND_SUM
10445 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
10446 MEM_ADDR_SPACE (temp)))
10447 return replace_equiv_address (temp,
10448 copy_rtx (XEXP (temp, 0)));
10449 return temp;
10451 case POLY_INT_CST:
10452 return immed_wide_int_const (poly_int_cst_value (exp), mode);
10454 case SAVE_EXPR:
10456 tree val = treeop0;
10457 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
10458 inner_reference_p);
10460 if (!SAVE_EXPR_RESOLVED_P (exp))
10462 /* We can indeed still hit this case, typically via builtin
10463 expanders calling save_expr immediately before expanding
10464 something. Assume this means that we only have to deal
10465 with non-BLKmode values. */
10466 gcc_assert (GET_MODE (ret) != BLKmode);
10468 val = build_decl (curr_insn_location (),
10469 VAR_DECL, NULL, TREE_TYPE (exp));
10470 DECL_ARTIFICIAL (val) = 1;
10471 DECL_IGNORED_P (val) = 1;
10472 treeop0 = val;
10473 TREE_OPERAND (exp, 0) = treeop0;
10474 SAVE_EXPR_RESOLVED_P (exp) = 1;
10476 if (!CONSTANT_P (ret))
10477 ret = copy_to_reg (ret);
10478 SET_DECL_RTL (val, ret);
10481 return ret;
10485 case CONSTRUCTOR:
10486 /* If we don't need the result, just ensure we evaluate any
10487 subexpressions. */
10488 if (ignore)
10490 unsigned HOST_WIDE_INT idx;
10491 tree value;
10493 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
10494 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
10496 return const0_rtx;
10499 return expand_constructor (exp, target, modifier, false);
10501 case TARGET_MEM_REF:
10503 addr_space_t as
10504 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
10505 unsigned int align;
10507 op0 = addr_for_mem_ref (exp, as, true);
10508 op0 = memory_address_addr_space (mode, op0, as);
10509 temp = gen_rtx_MEM (mode, op0);
10510 set_mem_attributes (temp, exp, 0);
10511 set_mem_addr_space (temp, as);
10512 align = get_object_alignment (exp);
10513 if (modifier != EXPAND_WRITE
10514 && modifier != EXPAND_MEMORY
10515 && mode != BLKmode
10516 && align < GET_MODE_ALIGNMENT (mode))
10517 temp = expand_misaligned_mem_ref (temp, mode, unsignedp,
10518 align, NULL_RTX, NULL);
10519 return temp;
10522 case MEM_REF:
10524 const bool reverse = REF_REVERSE_STORAGE_ORDER (exp);
10525 addr_space_t as
10526 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
10527 machine_mode address_mode;
10528 tree base = TREE_OPERAND (exp, 0);
10529 gimple *def_stmt;
10530 unsigned align;
10531 /* Handle expansion of non-aliased memory with non-BLKmode. That
10532 might end up in a register. */
10533 if (mem_ref_refers_to_non_mem_p (exp))
10535 poly_int64 offset = mem_ref_offset (exp).force_shwi ();
10536 base = TREE_OPERAND (base, 0);
10537 poly_uint64 type_size;
10538 if (known_eq (offset, 0)
10539 && !reverse
10540 && poly_int_tree_p (TYPE_SIZE (type), &type_size)
10541 && known_eq (GET_MODE_BITSIZE (DECL_MODE (base)), type_size))
10542 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
10543 target, tmode, modifier);
10544 if (TYPE_MODE (type) == BLKmode)
10546 temp = assign_stack_temp (DECL_MODE (base),
10547 GET_MODE_SIZE (DECL_MODE (base)));
10548 store_expr (base, temp, 0, false, false);
10549 temp = adjust_address (temp, BLKmode, offset);
10550 set_mem_size (temp, int_size_in_bytes (type));
10551 return temp;
10553 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
10554 bitsize_int (offset * BITS_PER_UNIT));
10555 REF_REVERSE_STORAGE_ORDER (exp) = reverse;
10556 return expand_expr (exp, target, tmode, modifier);
10558 address_mode = targetm.addr_space.address_mode (as);
10559 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
10561 tree mask = gimple_assign_rhs2 (def_stmt);
10562 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
10563 gimple_assign_rhs1 (def_stmt), mask);
10564 TREE_OPERAND (exp, 0) = base;
10566 align = get_object_alignment (exp);
10567 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
10568 op0 = memory_address_addr_space (mode, op0, as);
10569 if (!integer_zerop (TREE_OPERAND (exp, 1)))
10571 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
10572 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
10573 op0 = memory_address_addr_space (mode, op0, as);
10575 temp = gen_rtx_MEM (mode, op0);
10576 set_mem_attributes (temp, exp, 0);
10577 set_mem_addr_space (temp, as);
10578 if (TREE_THIS_VOLATILE (exp))
10579 MEM_VOLATILE_P (temp) = 1;
10580 if (modifier != EXPAND_WRITE
10581 && modifier != EXPAND_MEMORY
10582 && !inner_reference_p
10583 && mode != BLKmode
10584 && align < GET_MODE_ALIGNMENT (mode))
10585 temp = expand_misaligned_mem_ref (temp, mode, unsignedp, align,
10586 modifier == EXPAND_STACK_PARM
10587 ? NULL_RTX : target, alt_rtl);
10588 if (reverse
10589 && modifier != EXPAND_MEMORY
10590 && modifier != EXPAND_WRITE)
10591 temp = flip_storage_order (mode, temp);
10592 return temp;
10595 case ARRAY_REF:
10598 tree array = treeop0;
10599 tree index = treeop1;
10600 tree init;
10602 /* Fold an expression like: "foo"[2].
10603 This is not done in fold so it won't happen inside &.
10604 Don't fold if this is for wide characters since it's too
10605 difficult to do correctly and this is a very rare case. */
10607 if (modifier != EXPAND_CONST_ADDRESS
10608 && modifier != EXPAND_INITIALIZER
10609 && modifier != EXPAND_MEMORY)
10611 tree t = fold_read_from_constant_string (exp);
10613 if (t)
10614 return expand_expr (t, target, tmode, modifier);
10617 /* If this is a constant index into a constant array,
10618 just get the value from the array. Handle both the cases when
10619 we have an explicit constructor and when our operand is a variable
10620 that was declared const. */
10622 if (modifier != EXPAND_CONST_ADDRESS
10623 && modifier != EXPAND_INITIALIZER
10624 && modifier != EXPAND_MEMORY
10625 && TREE_CODE (array) == CONSTRUCTOR
10626 && ! TREE_SIDE_EFFECTS (array)
10627 && TREE_CODE (index) == INTEGER_CST)
10629 unsigned HOST_WIDE_INT ix;
10630 tree field, value;
10632 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
10633 field, value)
10634 if (tree_int_cst_equal (field, index))
10636 if (!TREE_SIDE_EFFECTS (value))
10637 return expand_expr (fold (value), target, tmode, modifier);
10638 break;
10642 else if (optimize >= 1
10643 && modifier != EXPAND_CONST_ADDRESS
10644 && modifier != EXPAND_INITIALIZER
10645 && modifier != EXPAND_MEMORY
10646 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
10647 && TREE_CODE (index) == INTEGER_CST
10648 && (VAR_P (array) || TREE_CODE (array) == CONST_DECL)
10649 && (init = ctor_for_folding (array)) != error_mark_node)
10651 if (init == NULL_TREE)
10653 tree value = build_zero_cst (type);
10654 if (TREE_CODE (value) == CONSTRUCTOR)
10656 /* If VALUE is a CONSTRUCTOR, this optimization is only
10657 useful if this doesn't store the CONSTRUCTOR into
10658 memory. If it does, it is more efficient to just
10659 load the data from the array directly. */
10660 rtx ret = expand_constructor (value, target,
10661 modifier, true);
10662 if (ret == NULL_RTX)
10663 value = NULL_TREE;
10666 if (value)
10667 return expand_expr (value, target, tmode, modifier);
10669 else if (TREE_CODE (init) == CONSTRUCTOR)
10671 unsigned HOST_WIDE_INT ix;
10672 tree field, value;
10674 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
10675 field, value)
10676 if (tree_int_cst_equal (field, index))
10678 if (TREE_SIDE_EFFECTS (value))
10679 break;
10681 if (TREE_CODE (value) == CONSTRUCTOR)
10683 /* If VALUE is a CONSTRUCTOR, this
10684 optimization is only useful if
10685 this doesn't store the CONSTRUCTOR
10686 into memory. If it does, it is more
10687 efficient to just load the data from
10688 the array directly. */
10689 rtx ret = expand_constructor (value, target,
10690 modifier, true);
10691 if (ret == NULL_RTX)
10692 break;
10695 return
10696 expand_expr (fold (value), target, tmode, modifier);
10699 else if (TREE_CODE (init) == STRING_CST)
10701 tree low_bound = array_ref_low_bound (exp);
10702 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10704 /* Optimize the special case of a zero lower bound.
10706 We convert the lower bound to sizetype to avoid problems
10707 with constant folding. E.g. suppose the lower bound is
10708 1 and its mode is QI. Without the conversion
10709 (ARRAY + (INDEX - (unsigned char)1))
10710 becomes
10711 (ARRAY + (-(unsigned char)1) + INDEX)
10712 which becomes
10713 (ARRAY + 255 + INDEX). Oops! */
10714 if (!integer_zerop (low_bound))
10715 index1 = size_diffop_loc (loc, index1,
10716 fold_convert_loc (loc, sizetype,
10717 low_bound));
10719 if (tree_fits_uhwi_p (index1)
10720 && compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10722 tree char_type = TREE_TYPE (TREE_TYPE (init));
10723 scalar_int_mode char_mode;
10725 if (is_int_mode (TYPE_MODE (char_type), &char_mode)
10726 && GET_MODE_SIZE (char_mode) == 1)
10727 return gen_int_mode (TREE_STRING_POINTER (init)
10728 [TREE_INT_CST_LOW (index1)],
10729 char_mode);
10734 goto normal_inner_ref;
10736 case COMPONENT_REF:
10737 /* If the operand is a CONSTRUCTOR, we can just extract the
10738 appropriate field if it is present. */
10739 if (TREE_CODE (treeop0) == CONSTRUCTOR)
10741 unsigned HOST_WIDE_INT idx;
10742 tree field, value;
10743 scalar_int_mode field_mode;
10745 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10746 idx, field, value)
10747 if (field == treeop1
10748 /* We can normally use the value of the field in the
10749 CONSTRUCTOR. However, if this is a bitfield in
10750 an integral mode that we can fit in a HOST_WIDE_INT,
10751 we must mask only the number of bits in the bitfield,
10752 since this is done implicitly by the constructor. If
10753 the bitfield does not meet either of those conditions,
10754 we can't do this optimization. */
10755 && (! DECL_BIT_FIELD (field)
10756 || (is_int_mode (DECL_MODE (field), &field_mode)
10757 && (GET_MODE_PRECISION (field_mode)
10758 <= HOST_BITS_PER_WIDE_INT))))
10760 if (DECL_BIT_FIELD (field)
10761 && modifier == EXPAND_STACK_PARM)
10762 target = 0;
10763 op0 = expand_expr (value, target, tmode, modifier);
10764 if (DECL_BIT_FIELD (field))
10766 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10767 scalar_int_mode imode
10768 = SCALAR_INT_TYPE_MODE (TREE_TYPE (field));
10770 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10772 op1 = gen_int_mode ((HOST_WIDE_INT_1 << bitsize) - 1,
10773 imode);
10774 op0 = expand_and (imode, op0, op1, target);
10776 else
10778 int count = GET_MODE_PRECISION (imode) - bitsize;
10780 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10781 target, 0);
10782 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10783 target, 0);
10787 return op0;
10790 goto normal_inner_ref;
10792 case BIT_FIELD_REF:
10793 case ARRAY_RANGE_REF:
10794 normal_inner_ref:
10796 machine_mode mode1, mode2;
10797 poly_int64 bitsize, bitpos, bytepos;
10798 tree offset;
10799 int reversep, volatilep = 0, must_force_mem;
10800 tree tem
10801 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
10802 &unsignedp, &reversep, &volatilep);
10803 rtx orig_op0, memloc;
10804 bool clear_mem_expr = false;
10806 /* If we got back the original object, something is wrong. Perhaps
10807 we are evaluating an expression too early. In any event, don't
10808 infinitely recurse. */
10809 gcc_assert (tem != exp);
10811 /* If TEM's type is a union of variable size, pass TARGET to the inner
10812 computation, since it will need a temporary and TARGET is known
10813 to suffice. This occurs in unchecked conversion in Ada. */
10814 orig_op0 = op0
10815 = expand_expr_real (tem,
10816 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10817 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10818 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10819 != INTEGER_CST)
10820 && modifier != EXPAND_STACK_PARM
10821 ? target : NULL_RTX),
10822 VOIDmode,
10823 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10824 NULL, true);
10826 /* If the field has a mode, we want to access it in the
10827 field's mode, not the computed mode.
10828 If a MEM has VOIDmode (external with incomplete type),
10829 use BLKmode for it instead. */
10830 if (MEM_P (op0))
10832 if (mode1 != VOIDmode)
10833 op0 = adjust_address (op0, mode1, 0);
10834 else if (GET_MODE (op0) == VOIDmode)
10835 op0 = adjust_address (op0, BLKmode, 0);
10838 mode2
10839 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10841 /* Make sure bitpos is not negative; it can wreak havoc later. */
10842 if (maybe_lt (bitpos, 0))
10844 gcc_checking_assert (offset == NULL_TREE);
10845 offset = size_int (bits_to_bytes_round_down (bitpos));
10846 bitpos = num_trailing_bits (bitpos);
10849 /* If we have either an offset, a BLKmode result, or a reference
10850 outside the underlying object, we must force it to memory.
10851 Such a case can occur in Ada if we have unchecked conversion
10852 of an expression from a scalar type to an aggregate type or
10853 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10854 passed a partially uninitialized object or a view-conversion
10855 to a larger size. */
10856 must_force_mem = (offset
10857 || mode1 == BLKmode
10858 || (mode == BLKmode
10859 && !int_mode_for_size (bitsize, 1).exists ())
10860 || maybe_gt (bitpos + bitsize,
10861 GET_MODE_BITSIZE (mode2)));
10863 /* Handle CONCAT first. */
10864 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10866 if (known_eq (bitpos, 0)
10867 && known_eq (bitsize, GET_MODE_BITSIZE (GET_MODE (op0)))
10868 && COMPLEX_MODE_P (mode1)
10869 && COMPLEX_MODE_P (GET_MODE (op0))
10870 && (GET_MODE_PRECISION (GET_MODE_INNER (mode1))
10871 == GET_MODE_PRECISION (GET_MODE_INNER (GET_MODE (op0)))))
10873 if (reversep)
10874 op0 = flip_storage_order (GET_MODE (op0), op0);
10875 if (mode1 != GET_MODE (op0))
10877 rtx parts[2];
10878 for (int i = 0; i < 2; i++)
10880 rtx op = read_complex_part (op0, i != 0);
10881 if (GET_CODE (op) == SUBREG)
10882 op = force_reg (GET_MODE (op), op);
10883 temp = gen_lowpart_common (GET_MODE_INNER (mode1), op);
10884 if (temp)
10885 op = temp;
10886 else
10888 if (!REG_P (op) && !MEM_P (op))
10889 op = force_reg (GET_MODE (op), op);
10890 op = gen_lowpart (GET_MODE_INNER (mode1), op);
10892 parts[i] = op;
10894 op0 = gen_rtx_CONCAT (mode1, parts[0], parts[1]);
10896 return op0;
10898 if (known_eq (bitpos, 0)
10899 && known_eq (bitsize,
10900 GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))))
10901 && maybe_ne (bitsize, 0))
10903 op0 = XEXP (op0, 0);
10904 mode2 = GET_MODE (op0);
10906 else if (known_eq (bitpos,
10907 GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))))
10908 && known_eq (bitsize,
10909 GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1))))
10910 && maybe_ne (bitpos, 0)
10911 && maybe_ne (bitsize, 0))
10913 op0 = XEXP (op0, 1);
10914 bitpos = 0;
10915 mode2 = GET_MODE (op0);
10917 else
10918 /* Otherwise force into memory. */
10919 must_force_mem = 1;
10922 /* If this is a constant, put it in a register if it is a legitimate
10923 constant and we don't need a memory reference. */
10924 if (CONSTANT_P (op0)
10925 && mode2 != BLKmode
10926 && targetm.legitimate_constant_p (mode2, op0)
10927 && !must_force_mem)
10928 op0 = force_reg (mode2, op0);
10930 /* Otherwise, if this is a constant, try to force it to the constant
10931 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10932 is a legitimate constant. */
10933 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10934 op0 = validize_mem (memloc);
10936 /* Otherwise, if this is a constant or the object is not in memory
10937 but needs to be, put it there. */
10938 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10940 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10941 emit_move_insn (memloc, op0);
10942 op0 = memloc;
10943 clear_mem_expr = true;
10946 if (offset)
10948 machine_mode address_mode;
10949 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10950 EXPAND_SUM);
10952 gcc_assert (MEM_P (op0));
10954 address_mode = get_address_mode (op0);
10955 if (GET_MODE (offset_rtx) != address_mode)
10957 /* We cannot be sure that the RTL in offset_rtx is valid outside
10958 of a memory address context, so force it into a register
10959 before attempting to convert it to the desired mode. */
10960 offset_rtx = force_operand (offset_rtx, NULL_RTX);
10961 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10964 /* See the comment in expand_assignment for the rationale. */
10965 if (mode1 != VOIDmode
10966 && maybe_ne (bitpos, 0)
10967 && maybe_gt (bitsize, 0)
10968 && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
10969 && multiple_p (bitpos, bitsize)
10970 && multiple_p (bitsize, GET_MODE_ALIGNMENT (mode1))
10971 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10973 op0 = adjust_address (op0, mode1, bytepos);
10974 bitpos = 0;
10977 op0 = offset_address (op0, offset_rtx,
10978 highest_pow2_factor (offset));
10981 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10982 record its alignment as BIGGEST_ALIGNMENT. */
10983 if (MEM_P (op0)
10984 && known_eq (bitpos, 0)
10985 && offset != 0
10986 && is_aligning_offset (offset, tem))
10987 set_mem_align (op0, BIGGEST_ALIGNMENT);
10989 /* Don't forget about volatility even if this is a bitfield. */
10990 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10992 if (op0 == orig_op0)
10993 op0 = copy_rtx (op0);
10995 MEM_VOLATILE_P (op0) = 1;
10998 if (MEM_P (op0) && TREE_CODE (tem) == FUNCTION_DECL)
11000 if (op0 == orig_op0)
11001 op0 = copy_rtx (op0);
11003 set_mem_align (op0, BITS_PER_UNIT);
11006 /* In cases where an aligned union has an unaligned object
11007 as a field, we might be extracting a BLKmode value from
11008 an integer-mode (e.g., SImode) object. Handle this case
11009 by doing the extract into an object as wide as the field
11010 (which we know to be the width of a basic mode), then
11011 storing into memory, and changing the mode to BLKmode. */
11012 if (mode1 == VOIDmode
11013 || REG_P (op0) || GET_CODE (op0) == SUBREG
11014 || (mode1 != BLKmode && ! direct_load[(int) mode1]
11015 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
11016 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
11017 && modifier != EXPAND_CONST_ADDRESS
11018 && modifier != EXPAND_INITIALIZER
11019 && modifier != EXPAND_MEMORY)
11020 /* If the bitfield is volatile and the bitsize
11021 is narrower than the access size of the bitfield,
11022 we need to extract bitfields from the access. */
11023 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
11024 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
11025 && mode1 != BLKmode
11026 && maybe_lt (bitsize, GET_MODE_SIZE (mode1) * BITS_PER_UNIT))
11027 /* If the field isn't aligned enough to fetch as a memref,
11028 fetch it as a bit field. */
11029 || (mode1 != BLKmode
11030 && (((MEM_P (op0)
11031 ? MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
11032 || !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode1))
11033 : TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
11034 || !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode)))
11035 && modifier != EXPAND_MEMORY
11036 && ((modifier == EXPAND_CONST_ADDRESS
11037 || modifier == EXPAND_INITIALIZER)
11038 ? STRICT_ALIGNMENT
11039 : targetm.slow_unaligned_access (mode1,
11040 MEM_ALIGN (op0))))
11041 || !multiple_p (bitpos, BITS_PER_UNIT)))
11042 /* If the type and the field are a constant size and the
11043 size of the type isn't the same size as the bitfield,
11044 we must use bitfield operations. */
11045 || (known_size_p (bitsize)
11046 && TYPE_SIZE (TREE_TYPE (exp))
11047 && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
11048 && maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp))),
11049 bitsize)))
11051 machine_mode ext_mode = mode;
11053 if (ext_mode == BLKmode
11054 && ! (target != 0 && MEM_P (op0)
11055 && MEM_P (target)
11056 && multiple_p (bitpos, BITS_PER_UNIT)))
11057 ext_mode = int_mode_for_size (bitsize, 1).else_blk ();
11059 if (ext_mode == BLKmode)
11061 if (target == 0)
11062 target = assign_temp (type, 1, 1);
11064 /* ??? Unlike the similar test a few lines below, this one is
11065 very likely obsolete. */
11066 if (known_eq (bitsize, 0))
11067 return target;
11069 /* In this case, BITPOS must start at a byte boundary and
11070 TARGET, if specified, must be a MEM. */
11071 gcc_assert (MEM_P (op0)
11072 && (!target || MEM_P (target)));
11074 bytepos = exact_div (bitpos, BITS_PER_UNIT);
11075 poly_int64 bytesize = bits_to_bytes_round_up (bitsize);
11076 emit_block_move (target,
11077 adjust_address (op0, VOIDmode, bytepos),
11078 gen_int_mode (bytesize, Pmode),
11079 (modifier == EXPAND_STACK_PARM
11080 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
11082 return target;
11085 /* If we have nothing to extract, the result will be 0 for targets
11086 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
11087 return 0 for the sake of consistency, as reading a zero-sized
11088 bitfield is valid in Ada and the value is fully specified. */
11089 if (known_eq (bitsize, 0))
11090 return const0_rtx;
11092 op0 = validize_mem (op0);
11094 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
11095 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
11097 /* If the result has aggregate type and the extraction is done in
11098 an integral mode, then the field may not be aligned on a byte
11099 boundary; in this case, if it has reverse storage order, it
11100 needs to be extracted as a scalar field with reverse storage
11101 order and put back into memory order afterwards. */
11102 if (AGGREGATE_TYPE_P (type)
11103 && GET_MODE_CLASS (ext_mode) == MODE_INT)
11104 reversep = TYPE_REVERSE_STORAGE_ORDER (type);
11106 gcc_checking_assert (known_ge (bitpos, 0));
11107 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
11108 (modifier == EXPAND_STACK_PARM
11109 ? NULL_RTX : target),
11110 ext_mode, ext_mode, reversep, alt_rtl);
11112 /* If the result has aggregate type and the mode of OP0 is an
11113 integral mode then, if BITSIZE is narrower than this mode
11114 and this is for big-endian data, we must put the field
11115 into the high-order bits. And we must also put it back
11116 into memory order if it has been previously reversed. */
11117 scalar_int_mode op0_mode;
11118 if (AGGREGATE_TYPE_P (type)
11119 && is_int_mode (GET_MODE (op0), &op0_mode))
11121 HOST_WIDE_INT size = GET_MODE_BITSIZE (op0_mode);
11123 gcc_checking_assert (known_le (bitsize, size));
11124 if (maybe_lt (bitsize, size)
11125 && reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
11126 op0 = expand_shift (LSHIFT_EXPR, op0_mode, op0,
11127 size - bitsize, op0, 1);
11129 if (reversep)
11130 op0 = flip_storage_order (op0_mode, op0);
11133 /* If the result type is BLKmode, store the data into a temporary
11134 of the appropriate type, but with the mode corresponding to the
11135 mode for the data we have (op0's mode). */
11136 if (mode == BLKmode)
11138 rtx new_rtx
11139 = assign_stack_temp_for_type (ext_mode,
11140 GET_MODE_BITSIZE (ext_mode),
11141 type);
11142 emit_move_insn (new_rtx, op0);
11143 op0 = copy_rtx (new_rtx);
11144 PUT_MODE (op0, BLKmode);
11147 return op0;
11150 /* If the result is BLKmode, use that to access the object
11151 now as well. */
11152 if (mode == BLKmode)
11153 mode1 = BLKmode;
11155 /* Get a reference to just this component. */
11156 bytepos = bits_to_bytes_round_down (bitpos);
11157 if (modifier == EXPAND_CONST_ADDRESS
11158 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
11159 op0 = adjust_address_nv (op0, mode1, bytepos);
11160 else
11161 op0 = adjust_address (op0, mode1, bytepos);
11163 if (op0 == orig_op0)
11164 op0 = copy_rtx (op0);
11166 /* Don't set memory attributes if the base expression is an
11167 SSA_NAME that got expanded as a MEM or a CONSTANT. In that case,
11168 we should just honor its original memory attributes. */
11169 if (!(TREE_CODE (tem) == SSA_NAME
11170 && (MEM_P (orig_op0) || CONSTANT_P (orig_op0))))
11171 set_mem_attributes (op0, exp, 0);
11173 if (REG_P (XEXP (op0, 0)))
11174 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
11176 /* If op0 is a temporary because the original expression was forced
11177 to memory, clear MEM_EXPR so that the original expression cannot
11178 be marked as addressable through MEM_EXPR of the temporary. */
11179 if (clear_mem_expr)
11180 set_mem_expr (op0, NULL_TREE);
11182 MEM_VOLATILE_P (op0) |= volatilep;
11184 if (reversep
11185 && modifier != EXPAND_MEMORY
11186 && modifier != EXPAND_WRITE)
11187 op0 = flip_storage_order (mode1, op0);
11189 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
11190 || modifier == EXPAND_CONST_ADDRESS
11191 || modifier == EXPAND_INITIALIZER)
11192 return op0;
11194 if (target == 0)
11195 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
11197 convert_move (target, op0, unsignedp);
11198 return target;
11201 case OBJ_TYPE_REF:
11202 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
11204 case CALL_EXPR:
11205 /* All valid uses of __builtin_va_arg_pack () are removed during
11206 inlining. */
11207 if (CALL_EXPR_VA_ARG_PACK (exp))
11208 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
11210 tree fndecl = get_callee_fndecl (exp), attr;
11212 if (fndecl
11213 /* Don't diagnose the error attribute in thunks; those are
11214 artificially created. */
11215 && !CALL_FROM_THUNK_P (exp)
11216 && (attr = lookup_attribute ("error",
11217 DECL_ATTRIBUTES (fndecl))) != NULL)
11219 const char *ident = lang_hooks.decl_printable_name (fndecl, 1);
11220 error ("%Kcall to %qs declared with attribute error: %s", exp,
11221 identifier_to_locale (ident),
11222 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
11224 if (fndecl
11225 /* Don't diagnose the warning attribute in thunks; those are
11226 artificially created. */
11227 && !CALL_FROM_THUNK_P (exp)
11228 && (attr = lookup_attribute ("warning",
11229 DECL_ATTRIBUTES (fndecl))) != NULL)
11231 const char *ident = lang_hooks.decl_printable_name (fndecl, 1);
11232 warning_at (tree_nonartificial_location (exp),
11233 OPT_Wattribute_warning,
11234 "%Kcall to %qs declared with attribute warning: %s",
11235 exp, identifier_to_locale (ident),
11236 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
11239 /* Check for a built-in function. */
11240 if (fndecl && fndecl_built_in_p (fndecl))
11242 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
11243 return expand_builtin (exp, target, subtarget, tmode, ignore);
11246 return expand_call (exp, target, ignore);
11248 case VIEW_CONVERT_EXPR:
11249 op0 = NULL_RTX;
11251 /* If we are converting to BLKmode, try to avoid an intermediate
11252 temporary by fetching an inner memory reference. */
11253 if (mode == BLKmode
11254 && poly_int_tree_p (TYPE_SIZE (type))
11255 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
11256 && handled_component_p (treeop0))
11258 machine_mode mode1;
11259 poly_int64 bitsize, bitpos, bytepos;
11260 tree offset;
11261 int reversep, volatilep = 0;
11262 tree tem
11263 = get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1,
11264 &unsignedp, &reversep, &volatilep);
11266 /* ??? We should work harder and deal with non-zero offsets. */
11267 if (!offset
11268 && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
11269 && !reversep
11270 && known_size_p (bitsize)
11271 && known_eq (wi::to_poly_offset (TYPE_SIZE (type)), bitsize))
11273 /* See the normal_inner_ref case for the rationale. */
11274 rtx orig_op0
11275 = expand_expr_real (tem,
11276 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
11277 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
11278 != INTEGER_CST)
11279 && modifier != EXPAND_STACK_PARM
11280 ? target : NULL_RTX),
11281 VOIDmode,
11282 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
11283 NULL, true);
11285 if (MEM_P (orig_op0))
11287 op0 = orig_op0;
11289 /* Get a reference to just this component. */
11290 if (modifier == EXPAND_CONST_ADDRESS
11291 || modifier == EXPAND_SUM
11292 || modifier == EXPAND_INITIALIZER)
11293 op0 = adjust_address_nv (op0, mode, bytepos);
11294 else
11295 op0 = adjust_address (op0, mode, bytepos);
11297 if (op0 == orig_op0)
11298 op0 = copy_rtx (op0);
11300 set_mem_attributes (op0, treeop0, 0);
11301 if (REG_P (XEXP (op0, 0)))
11302 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
11304 MEM_VOLATILE_P (op0) |= volatilep;
11309 if (!op0)
11310 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
11311 NULL, inner_reference_p);
11313 /* If the input and output modes are both the same, we are done. */
11314 if (mode == GET_MODE (op0))
11316 /* If neither mode is BLKmode and both modes are the same size,
11317 then we can use gen_lowpart. */
11318 else if (mode != BLKmode
11319 && GET_MODE (op0) != BLKmode
11320 && known_eq (GET_MODE_PRECISION (mode),
11321 GET_MODE_PRECISION (GET_MODE (op0)))
11322 && !COMPLEX_MODE_P (GET_MODE (op0)))
11324 if (GET_CODE (op0) == SUBREG)
11325 op0 = force_reg (GET_MODE (op0), op0);
11326 temp = gen_lowpart_common (mode, op0);
11327 if (temp)
11328 op0 = temp;
11329 else
11331 if (!REG_P (op0) && !MEM_P (op0))
11332 op0 = force_reg (GET_MODE (op0), op0);
11333 op0 = gen_lowpart (mode, op0);
11336 /* If both types are integral, convert from one mode to the other. */
11337 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
11338 op0 = convert_modes (mode, GET_MODE (op0), op0,
11339 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
11340 /* If the output type is a bit-field type, do an extraction. */
11341 else if (reduce_bit_field)
11342 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
11343 TYPE_UNSIGNED (type), NULL_RTX,
11344 mode, mode, false, NULL);
11345 /* As a last resort, spill op0 to memory, and reload it in a
11346 different mode. */
11347 else if (!MEM_P (op0))
11349 /* If the operand is not a MEM, force it into memory. Since we
11350 are going to be changing the mode of the MEM, don't call
11351 force_const_mem for constants because we don't allow pool
11352 constants to change mode. */
11353 tree inner_type = TREE_TYPE (treeop0);
11355 gcc_assert (!TREE_ADDRESSABLE (exp));
11357 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
11358 target
11359 = assign_stack_temp_for_type
11360 (TYPE_MODE (inner_type),
11361 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
11363 emit_move_insn (target, op0);
11364 op0 = target;
11367 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
11368 output type is such that the operand is known to be aligned, indicate
11369 that it is. Otherwise, we need only be concerned about alignment for
11370 non-BLKmode results. */
11371 if (MEM_P (op0))
11373 enum insn_code icode;
11375 if (modifier != EXPAND_WRITE
11376 && modifier != EXPAND_MEMORY
11377 && !inner_reference_p
11378 && mode != BLKmode
11379 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
11381 /* If the target does have special handling for unaligned
11382 loads of this mode, then use it. */
11383 if ((icode = optab_handler (movmisalign_optab, mode))
11384 != CODE_FOR_nothing)
11386 rtx reg;
11388 op0 = adjust_address (op0, mode, 0);
11389 /* We've already validated the memory, and we're creating a
11390 new pseudo destination. The predicates really can't
11391 fail. */
11392 reg = gen_reg_rtx (mode);
11394 /* Nor can the insn generator. */
11395 rtx_insn *insn = GEN_FCN (icode) (reg, op0);
11396 emit_insn (insn);
11397 return reg;
11399 else if (STRICT_ALIGNMENT)
11401 poly_uint64 mode_size = GET_MODE_SIZE (mode);
11402 poly_uint64 temp_size = mode_size;
11403 if (GET_MODE (op0) != BLKmode)
11404 temp_size = upper_bound (temp_size,
11405 GET_MODE_SIZE (GET_MODE (op0)));
11406 rtx new_rtx
11407 = assign_stack_temp_for_type (mode, temp_size, type);
11408 rtx new_with_op0_mode
11409 = adjust_address (new_rtx, GET_MODE (op0), 0);
11411 gcc_assert (!TREE_ADDRESSABLE (exp));
11413 if (GET_MODE (op0) == BLKmode)
11415 rtx size_rtx = gen_int_mode (mode_size, Pmode);
11416 emit_block_move (new_with_op0_mode, op0, size_rtx,
11417 (modifier == EXPAND_STACK_PARM
11418 ? BLOCK_OP_CALL_PARM
11419 : BLOCK_OP_NORMAL));
11421 else
11422 emit_move_insn (new_with_op0_mode, op0);
11424 op0 = new_rtx;
11428 op0 = adjust_address (op0, mode, 0);
11431 return op0;
11433 case MODIFY_EXPR:
11435 tree lhs = treeop0;
11436 tree rhs = treeop1;
11437 gcc_assert (ignore);
11439 /* Check for |= or &= of a bitfield of size one into another bitfield
11440 of size one. In this case, (unless we need the result of the
11441 assignment) we can do this more efficiently with a
11442 test followed by an assignment, if necessary.
11444 ??? At this point, we can't get a BIT_FIELD_REF here. But if
11445 things change so we do, this code should be enhanced to
11446 support it. */
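/* Concretely, for one-bit fields this turns "s.a |= s.b" into
   "if (s.b) s.a = 1;" and "s.a &= s.b" into "if (!s.b) s.a = 0;",
   replacing a read-modify-write with a test and a conditional
   store.  */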
11447 if (TREE_CODE (lhs) == COMPONENT_REF
11448 && (TREE_CODE (rhs) == BIT_IOR_EXPR
11449 || TREE_CODE (rhs) == BIT_AND_EXPR)
11450 && TREE_OPERAND (rhs, 0) == lhs
11451 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
11452 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
11453 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
11455 rtx_code_label *label = gen_label_rtx ();
11456 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
11457 profile_probability prob = profile_probability::uninitialized ();
11458 if (value)
11459 jumpifnot (TREE_OPERAND (rhs, 1), label, prob);
11460 else
11461 jumpif (TREE_OPERAND (rhs, 1), label, prob);
11462 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
11463 false);
11464 do_pending_stack_adjust ();
11465 emit_label (label);
11466 return const0_rtx;
11469 expand_assignment (lhs, rhs, false);
11470 return const0_rtx;
11473 case ADDR_EXPR:
11474 return expand_expr_addr_expr (exp, target, tmode, modifier);
11476 case REALPART_EXPR:
11477 op0 = expand_normal (treeop0);
11478 return read_complex_part (op0, false);
11480 case IMAGPART_EXPR:
11481 op0 = expand_normal (treeop0);
11482 return read_complex_part (op0, true);
11484 case RETURN_EXPR:
11485 case LABEL_EXPR:
11486 case GOTO_EXPR:
11487 case SWITCH_EXPR:
11488 case ASM_EXPR:
11489 /* Expanded in cfgexpand.c. */
11490 gcc_unreachable ();
11492 case TRY_CATCH_EXPR:
11493 case CATCH_EXPR:
11494 case EH_FILTER_EXPR:
11495 case TRY_FINALLY_EXPR:
11496 case EH_ELSE_EXPR:
11497 /* Lowered by tree-eh.c. */
11498 gcc_unreachable ();
11500 case WITH_CLEANUP_EXPR:
11501 case CLEANUP_POINT_EXPR:
11502 case TARGET_EXPR:
11503 case CASE_LABEL_EXPR:
11504 case VA_ARG_EXPR:
11505 case BIND_EXPR:
11506 case INIT_EXPR:
11507 case CONJ_EXPR:
11508 case COMPOUND_EXPR:
11509 case PREINCREMENT_EXPR:
11510 case PREDECREMENT_EXPR:
11511 case POSTINCREMENT_EXPR:
11512 case POSTDECREMENT_EXPR:
11513 case LOOP_EXPR:
11514 case EXIT_EXPR:
11515 case COMPOUND_LITERAL_EXPR:
11516 /* Lowered by gimplify.c. */
11517 gcc_unreachable ();
11519 case FDESC_EXPR:
11520 /* Function descriptors are not valid except as
11521 initialization constants, and should not be expanded. */
11522 gcc_unreachable ();
11524 case WITH_SIZE_EXPR:
11525 /* WITH_SIZE_EXPR expands to its first argument. The caller should
11526 have pulled out the size to use in whatever context it needed. */
11527 return expand_expr_real (treeop0, original_target, tmode,
11528 modifier, alt_rtl, inner_reference_p);
11530 default:
11531 return expand_expr_real_2 (&ops, target, tmode, modifier);
11535 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
11536 signedness of TYPE), possibly returning the result in TARGET.
11537 TYPE is known to be a partial integer type. */
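/* For example, reducing to an unsigned 6-bit type held in a 32-bit
   mode masks the value with 0x3f, while the signed variant shifts
   left by 26 bits and then arithmetically right by 26 bits to
   replicate the sign bit.  */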
11538 static rtx
11539 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
11541 scalar_int_mode mode = SCALAR_INT_TYPE_MODE (type);
11542 HOST_WIDE_INT prec = TYPE_PRECISION (type);
11543 gcc_assert ((GET_MODE (exp) == VOIDmode || GET_MODE (exp) == mode)
11544 && (!target || GET_MODE (target) == mode));
11546 /* For constant values, reduce using wide_int_to_tree. */
11547 if (poly_int_rtx_p (exp))
11549 auto value = wi::to_poly_wide (exp, mode);
11550 tree t = wide_int_to_tree (type, value);
11551 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
11553 else if (TYPE_UNSIGNED (type))
11555 rtx mask = immed_wide_int_const
11556 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
11557 return expand_and (mode, exp, mask, target);
11559 else
11561 int count = GET_MODE_PRECISION (mode) - prec;
11562 exp = expand_shift (LSHIFT_EXPR, mode, exp, count, target, 0);
11563 return expand_shift (RSHIFT_EXPR, mode, exp, count, target, 0);
11567 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
11568 when applied to the address of EXP produces an address known to be
11569 aligned more than BIGGEST_ALIGNMENT. */
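/* The shape being matched, modulo conversions, is
   OFFSET = (- (sizetype) &EXP) & (ALIGN - 1) for a power-of-2 ALIGN
   exceeding BIGGEST_ALIGNMENT, as produced when an object is
   dynamically over-aligned.  */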
11571 static int
11572 is_aligning_offset (const_tree offset, const_tree exp)
11574 /* Strip off any conversions. */
11575 while (CONVERT_EXPR_P (offset))
11576 offset = TREE_OPERAND (offset, 0);
11578 /* We must now have a BIT_AND_EXPR with a constant that is one less than
11579 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
11580 if (TREE_CODE (offset) != BIT_AND_EXPR
11581 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
11582 || compare_tree_int (TREE_OPERAND (offset, 1),
11583 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
11584 || !pow2p_hwi (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1))
11585 return 0;
11587 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
11588 It must be NEGATE_EXPR. Then strip any more conversions. */
11589 offset = TREE_OPERAND (offset, 0);
11590 while (CONVERT_EXPR_P (offset))
11591 offset = TREE_OPERAND (offset, 0);
11593 if (TREE_CODE (offset) != NEGATE_EXPR)
11594 return 0;
11596 offset = TREE_OPERAND (offset, 0);
11597 while (CONVERT_EXPR_P (offset))
11598 offset = TREE_OPERAND (offset, 0);
11600 /* This must now be the address of EXP. */
11601 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
11604 /* If EXPR is a constant initializer (either an expression or CONSTRUCTOR),
11605 attempt to obtain its native representation as an array of nonzero BYTES.
11606 Return true on success and false on failure (the latter without modifying
11607 BYTES). */
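/* As an illustration, for "struct { char c; int i; } x = { 'a', 1 };"
   on a typical target with 4-byte, 4-byte-aligned int, BYTES receives
   'a', three zero padding bytes, and then the four bytes of 1 in
   target byte order.  */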
11609 static bool
11610 convert_to_bytes (tree type, tree expr, vec<unsigned char> *bytes)
11612 if (TREE_CODE (expr) == CONSTRUCTOR)
11614 /* Set to the size of the CONSTRUCTOR elements. */
11615 unsigned HOST_WIDE_INT ctor_size = bytes->length ();
11617 if (TREE_CODE (type) == ARRAY_TYPE)
11619 tree val, idx;
11620 tree eltype = TREE_TYPE (type);
11621 unsigned HOST_WIDE_INT elsize =
11622 tree_to_uhwi (TYPE_SIZE_UNIT (eltype));
11624 /* Jump through hoops to determine the lower bound for languages
11625 like Ada that can set it to an (almost) arbitrary value. */
11626 tree dom = TYPE_DOMAIN (type);
11627 if (!dom)
11628 return false;
11629 tree min = TYPE_MIN_VALUE (dom);
11630 if (!min || !tree_fits_uhwi_p (min))
11631 return false;
11632 unsigned HOST_WIDE_INT i, last_idx = tree_to_uhwi (min) - 1;
11633 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, idx, val)
11635 /* Append zeros for elements with no initializers. */
11636 if (!tree_fits_uhwi_p (idx))
11637 return false;
11638 unsigned HOST_WIDE_INT cur_idx = tree_to_uhwi (idx);
11639 if (unsigned HOST_WIDE_INT size = cur_idx - (last_idx + 1))
11641 size = size * elsize + bytes->length ();
11642 bytes->safe_grow_cleared (size, true);
11645 if (!convert_to_bytes (eltype, val, bytes))
11646 return false;
11648 last_idx = cur_idx;
11651 else if (TREE_CODE (type) == RECORD_TYPE)
11653 tree val, fld;
11654 unsigned HOST_WIDE_INT i;
11655 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, fld, val)
11657 /* Append zeros for members with no initializers and
11658 any padding. */
11659 unsigned HOST_WIDE_INT cur_off = int_byte_position (fld);
11660 if (bytes->length () < cur_off)
11661 bytes->safe_grow_cleared (cur_off, true);
11663 if (!convert_to_bytes (TREE_TYPE (val), val, bytes))
11664 return false;
11667 else
11668 return false;
11670 /* Compute the size of the CONSTRUCTOR elements. */
11671 ctor_size = bytes->length () - ctor_size;
11673 /* Append zeros to the byte vector to the full size of the type.
11674 The type size can be less than the size of the CONSTRUCTOR
11675 if the latter contains initializers for a flexible array
11676 member. */
11677 tree size = TYPE_SIZE_UNIT (type);
11678 unsigned HOST_WIDE_INT type_size = tree_to_uhwi (size);
11679 if (ctor_size < type_size)
11680 if (unsigned HOST_WIDE_INT size_grow = type_size - ctor_size)
11681 bytes->safe_grow_cleared (bytes->length () + size_grow, true);
11683 return true;
11686 /* Except for RECORD_TYPE which may have an initialized flexible array
11687 member, the size of a type is the same as the size of the initializer
11688 (including any implicitly zeroed out members and padding). Allocate
11689 just enough for that many bytes. */
11690 tree expr_size = TYPE_SIZE_UNIT (TREE_TYPE (expr));
11691 if (!expr_size || !tree_fits_uhwi_p (expr_size))
11692 return false;
11693 const unsigned HOST_WIDE_INT expr_bytes = tree_to_uhwi (expr_size);
11694 const unsigned bytes_sofar = bytes->length ();
11695 /* native_encode_expr can convert at most INT_MAX bytes. vec is limited
11696 to at most UINT_MAX. */
11697 if (bytes_sofar + expr_bytes > INT_MAX)
11698 return false;
11700 /* Unlike for RECORD_TYPE, there is no need to clear the memory since
11701 it's completely overwritten by native_encode_expr. */
11702 bytes->safe_grow (bytes_sofar + expr_bytes, true);
11703 unsigned char *pnext = bytes->begin () + bytes_sofar;
11704 int nbytes = native_encode_expr (expr, pnext, expr_bytes, 0);
11705 /* NBYTES is zero on failure. Otherwise it should equal EXPR_BYTES. */
11706 return (unsigned HOST_WIDE_INT) nbytes == expr_bytes;
11709 /* Return a STRING_CST corresponding to ARG's constant initializer either
11710 if it's a string constant, or, when VALREP is set, any other constant,
11711 or null otherwise.
11712 On success, set *PTR_OFFSET to the (possibly non-constant) byte offset
11713 within the byte string that ARG references. If nonnull, set *MEM_SIZE
11714 to the size of the byte string. If nonnull, set *DECL to the constant
11715 declaration ARG refers to. */
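/* For instance, for ARG == &"abc"[1] this returns the STRING_CST
   "abc" with *PTR_OFFSET set to 1 and, if requested, *MEM_SIZE set
   to 4 (the array size including the terminating nul).  */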
11717 static tree
11718 constant_byte_string (tree arg, tree *ptr_offset, tree *mem_size, tree *decl,
11719 bool valrep = false)
11721 tree dummy = NULL_TREE;
11722 if (!mem_size)
11723 mem_size = &dummy;
11725 /* Store the type of the original expression before conversions
11726 via NOP_EXPR or POINTER_PLUS_EXPR to other types have been
11727 removed. */
11728 tree argtype = TREE_TYPE (arg);
11730 tree array;
11731 STRIP_NOPS (arg);
11733 /* Non-constant index into the character array in an ARRAY_REF
11734 expression or null. */
11735 tree varidx = NULL_TREE;
11737 poly_int64 base_off = 0;
11739 if (TREE_CODE (arg) == ADDR_EXPR)
11741 arg = TREE_OPERAND (arg, 0);
11742 tree ref = arg;
11743 if (TREE_CODE (arg) == ARRAY_REF)
11745 tree idx = TREE_OPERAND (arg, 1);
11746 if (TREE_CODE (idx) != INTEGER_CST)
11748 /* From a pointer (but not array) argument extract the variable
11749 index to prevent get_addr_base_and_unit_offset() from failing
11750 due to it. Use it later to compute the non-constant offset
11751 into the string and return it to the caller. */
11752 varidx = idx;
11753 ref = TREE_OPERAND (arg, 0);
11755 if (TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE)
11756 return NULL_TREE;
11758 if (!integer_zerop (array_ref_low_bound (arg)))
11759 return NULL_TREE;
11761 if (!integer_onep (array_ref_element_size (arg)))
11762 return NULL_TREE;
11765 array = get_addr_base_and_unit_offset (ref, &base_off);
11766 if (!array
11767 || (TREE_CODE (array) != VAR_DECL
11768 && TREE_CODE (array) != CONST_DECL
11769 && TREE_CODE (array) != STRING_CST))
11770 return NULL_TREE;
11772 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
11774 tree arg0 = TREE_OPERAND (arg, 0);
11775 tree arg1 = TREE_OPERAND (arg, 1);
11777 tree offset;
11778 tree str = string_constant (arg0, &offset, mem_size, decl);
11779 if (!str)
11781 str = string_constant (arg1, &offset, mem_size, decl);
11782 arg1 = arg0;
11785 if (str)
11787 /* Avoid pointers to arrays (see bug 86622). */
11788 if (POINTER_TYPE_P (TREE_TYPE (arg))
11789 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == ARRAY_TYPE
11790 && !(decl && !*decl)
11791 && !(decl && tree_fits_uhwi_p (DECL_SIZE_UNIT (*decl))
11792 && tree_fits_uhwi_p (*mem_size)
11793 && tree_int_cst_equal (*mem_size, DECL_SIZE_UNIT (*decl))))
11794 return NULL_TREE;
11796 tree type = TREE_TYPE (offset);
11797 arg1 = fold_convert (type, arg1);
11798 *ptr_offset = fold_build2 (PLUS_EXPR, type, offset, arg1);
11799 return str;
11801 return NULL_TREE;
11803 else if (TREE_CODE (arg) == SSA_NAME)
11805 gimple *stmt = SSA_NAME_DEF_STMT (arg);
11806 if (!is_gimple_assign (stmt))
11807 return NULL_TREE;
11809 tree rhs1 = gimple_assign_rhs1 (stmt);
11810 tree_code code = gimple_assign_rhs_code (stmt);
11811 if (code == ADDR_EXPR)
11812 return string_constant (rhs1, ptr_offset, mem_size, decl);
11813 else if (code != POINTER_PLUS_EXPR)
11814 return NULL_TREE;
11816 tree offset;
11817 if (tree str = string_constant (rhs1, &offset, mem_size, decl))
11819 /* Avoid pointers to arrays (see bug 86622). */
11820 if (POINTER_TYPE_P (TREE_TYPE (rhs1))
11821 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs1))) == ARRAY_TYPE
11822 && !(decl && !*decl)
11823 && !(decl && tree_fits_uhwi_p (DECL_SIZE_UNIT (*decl))
11824 && tree_fits_uhwi_p (*mem_size)
11825 && tree_int_cst_equal (*mem_size, DECL_SIZE_UNIT (*decl))))
11826 return NULL_TREE;
11828 tree rhs2 = gimple_assign_rhs2 (stmt);
11829 tree type = TREE_TYPE (offset);
11830 rhs2 = fold_convert (type, rhs2);
11831 *ptr_offset = fold_build2 (PLUS_EXPR, type, offset, rhs2);
11832 return str;
11834 return NULL_TREE;
11836 else if (DECL_P (arg))
11837 array = arg;
11838 else
11839 return NULL_TREE;
11841 tree offset = wide_int_to_tree (sizetype, base_off);
11842 if (varidx)
11844 if (TREE_CODE (TREE_TYPE (array)) != ARRAY_TYPE)
11845 return NULL_TREE;
11847 gcc_assert (TREE_CODE (arg) == ARRAY_REF);
11848 tree chartype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (arg, 0)));
11849 if (TREE_CODE (chartype) != INTEGER_TYPE)
11850 return NULL;
11852 offset = fold_convert (sizetype, varidx);
11855 if (TREE_CODE (array) == STRING_CST)
11857 *ptr_offset = fold_convert (sizetype, offset);
11858 *mem_size = TYPE_SIZE_UNIT (TREE_TYPE (array));
11859 if (decl)
11860 *decl = NULL_TREE;
11861 gcc_checking_assert (tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (array)))
11862 >= TREE_STRING_LENGTH (array));
11863 return array;
11866 tree init = ctor_for_folding (array);
11867 if (!init || init == error_mark_node)
11868 return NULL_TREE;
11870 if (valrep)
11872 HOST_WIDE_INT cstoff;
11873 if (!base_off.is_constant (&cstoff))
11874 return NULL_TREE;
11876 /* If value representation was requested, convert the initializer
11877 for the whole array or object into a string of bytes forming
11878 its value representation and return it. */
11879 auto_vec<unsigned char> bytes;
11880 if (!convert_to_bytes (TREE_TYPE (init), init, &bytes))
11881 return NULL_TREE;
11883 unsigned n = bytes.length ();
11884 const char *p = reinterpret_cast<const char *>(bytes.address ());
11885 init = build_string_literal (n, p, char_type_node);
11886 init = TREE_OPERAND (init, 0);
11887 init = TREE_OPERAND (init, 0);
11889 *mem_size = size_int (TREE_STRING_LENGTH (init));
11890 *ptr_offset = wide_int_to_tree (ssizetype, base_off);
11892 if (decl)
11893 *decl = array;
11895 return init;
11898 if (TREE_CODE (init) == CONSTRUCTOR)
11900 /* Convert the 64-bit constant offset to a wider type to avoid
11901 overflow and use it to obtain the initializer for the subobject
11902 it points into. */
11903 offset_int wioff;
11904 if (!base_off.is_constant (&wioff))
11905 return NULL_TREE;
11907 wioff *= BITS_PER_UNIT;
11908 if (!wi::fits_uhwi_p (wioff))
11909 return NULL_TREE;
11911 base_off = wioff.to_uhwi ();
11912 unsigned HOST_WIDE_INT fieldoff = 0;
11913 init = fold_ctor_reference (TREE_TYPE (arg), init, base_off, 0, array,
11914 &fieldoff);
11915 if (!init || init == error_mark_node)
11916 return NULL_TREE;
11918 HOST_WIDE_INT cstoff;
11919 if (!base_off.is_constant (&cstoff))
11920 return NULL_TREE;
11922 cstoff = (cstoff - fieldoff) / BITS_PER_UNIT;
11923 tree off = build_int_cst (sizetype, cstoff);
11924 if (varidx)
11925 offset = fold_build2 (PLUS_EXPR, TREE_TYPE (offset), offset, off);
11926 else
11927 offset = off;
11930 *ptr_offset = offset;
11932 tree inittype = TREE_TYPE (init);
11934 if (TREE_CODE (init) == INTEGER_CST
11935 && (TREE_CODE (TREE_TYPE (array)) == INTEGER_TYPE
11936 || TYPE_MAIN_VARIANT (inittype) == char_type_node))
11938 /* For a reference to (address of) a single constant character,
11939 store the native representation of the character in CHARBUF.
11940 If the reference is to an element of an array or a member
11941 of a struct, only consider narrow characters until ctors
11942 for wide character arrays are transformed to STRING_CSTs
11943 like those for narrow arrays. */
11944 unsigned char charbuf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11945 int len = native_encode_expr (init, charbuf, sizeof charbuf, 0);
11946 if (len > 0)
11948 /* Construct a string literal with elements of INITTYPE and
11949 the representation above. Then strip
11950 the ADDR_EXPR (ARRAY_REF (...)) around the STRING_CST. */
11951 init = build_string_literal (len, (char *)charbuf, inittype);
11952 init = TREE_OPERAND (TREE_OPERAND (init, 0), 0);
11956 tree initsize = TYPE_SIZE_UNIT (inittype);
11958 if (TREE_CODE (init) == CONSTRUCTOR && initializer_zerop (init))
11960 /* Fold an empty/zero constructor for an implicitly initialized
11961 object or subobject into the empty string. */
11963 /* Determine the character type from that of the original
11964 expression. */
11965 tree chartype = argtype;
11966 if (POINTER_TYPE_P (chartype))
11967 chartype = TREE_TYPE (chartype);
11968 while (TREE_CODE (chartype) == ARRAY_TYPE)
11969 chartype = TREE_TYPE (chartype);
11971 if (INTEGRAL_TYPE_P (chartype)
11972 && TYPE_PRECISION (chartype) == TYPE_PRECISION (char_type_node))
11974 /* Convert a char array to an empty STRING_CST having an array
11975 of the expected type and size. */
11976 if (!initsize)
11977 initsize = integer_zero_node;
11979 unsigned HOST_WIDE_INT size = tree_to_uhwi (initsize);
11980 init = build_string_literal (size, NULL, chartype, size);
11981 init = TREE_OPERAND (init, 0);
11982 init = TREE_OPERAND (init, 0);
11984 *ptr_offset = integer_zero_node;
11988 if (decl)
11989 *decl = array;
11991 if (TREE_CODE (init) != STRING_CST)
11992 return NULL_TREE;
11994 *mem_size = initsize;
11996 gcc_checking_assert (tree_to_shwi (initsize) >= TREE_STRING_LENGTH (init));
11998 return init;
12001 /* Return a STRING_CST if ARG corresponds to a string constant, or zero
12002 if it doesn't. If we return nonzero, set *PTR_OFFSET to the (possibly
12003 non-constant) offset in bytes within the string that ARG is accessing.
12004 If MEM_SIZE is non-zero the storage size of the memory is returned.
12005 If DECL is non-zero the constant declaration is returned if available. */
12007 tree
12008 string_constant (tree arg, tree *ptr_offset, tree *mem_size, tree *decl)
12010 return constant_byte_string (arg, ptr_offset, mem_size, decl, false);
12013 /* Similar to string_constant, return a STRING_CST corresponding
12014 to the value representation of the first argument if it's
12015 a constant. */
12017 tree
12018 byte_representation (tree arg, tree *ptr_offset, tree *mem_size, tree *decl)
12020 return constant_byte_string (arg, ptr_offset, mem_size, decl, true);
12023 /* Optimize x % C1 == C2 for signed modulo if C1 is a power of two and C2
12024 is non-zero, with C3 = ((1<<(prec-1)) | (C1 - 1)):
12025 for C2 > 0 to x & C3 == C2
12026 for C2 < 0 to x & C3 == (C2 & C3). */
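/* For example, with 32-bit int, x % 8 == 3 becomes
   (x & 0x80000007) == 3 and x % 8 == -3 becomes
   (x & 0x80000007) == 0x80000005: C3 keeps the sign bit together
   with the low bits that determine the remainder, so a single mask
   and compare replaces the modulo.  */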
12027 enum tree_code
12028 maybe_optimize_pow2p_mod_cmp (enum tree_code code, tree *arg0, tree *arg1)
12030 gimple *stmt = get_def_for_expr (*arg0, TRUNC_MOD_EXPR);
12031 tree treeop0 = gimple_assign_rhs1 (stmt);
12032 tree treeop1 = gimple_assign_rhs2 (stmt);
12033 tree type = TREE_TYPE (*arg0);
12034 scalar_int_mode mode;
12035 if (!is_a <scalar_int_mode> (TYPE_MODE (type), &mode))
12036 return code;
12037 if (GET_MODE_BITSIZE (mode) != TYPE_PRECISION (type)
12038 || TYPE_PRECISION (type) <= 1
12039 || TYPE_UNSIGNED (type)
12040 /* Signed x % c == 0 should have been optimized into unsigned modulo
12041 earlier. */
12042 || integer_zerop (*arg1)
12043 /* If x is known to be non-negative, modulo will be expanded as unsigned
12044 modulo. */
12045 || get_range_pos_neg (treeop0) == 1)
12046 return code;
12048 /* x % c == d where d < 0 && d <= -c should always be false. */
12049 if (tree_int_cst_sgn (*arg1) == -1
12050 && -wi::to_widest (treeop1) >= wi::to_widest (*arg1))
12051 return code;
12053 int prec = TYPE_PRECISION (type);
12054 wide_int w = wi::to_wide (treeop1) - 1;
12055 w |= wi::shifted_mask (0, prec - 1, true, prec);
12056 tree c3 = wide_int_to_tree (type, w);
12057 tree c4 = *arg1;
12058 if (tree_int_cst_sgn (*arg1) == -1)
12059 c4 = wide_int_to_tree (type, w & wi::to_wide (*arg1));
12061 rtx op0 = expand_normal (treeop0);
12062 treeop0 = make_tree (TREE_TYPE (treeop0), op0);
12064 bool speed_p = optimize_insn_for_speed_p ();
12066 do_pending_stack_adjust ();
12068 location_t loc = gimple_location (stmt);
12069 struct separate_ops ops;
12070 ops.code = TRUNC_MOD_EXPR;
12071 ops.location = loc;
12072 ops.type = TREE_TYPE (treeop0);
12073 ops.op0 = treeop0;
12074 ops.op1 = treeop1;
12075 ops.op2 = NULL_TREE;
12076 start_sequence ();
12077 rtx mor = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (ops.type),
12078 EXPAND_NORMAL);
12079 rtx_insn *moinsns = get_insns ();
12080 end_sequence ();
12082 unsigned mocost = seq_cost (moinsns, speed_p);
12083 mocost += rtx_cost (mor, mode, EQ, 0, speed_p);
12084 mocost += rtx_cost (expand_normal (*arg1), mode, EQ, 1, speed_p);
12086 ops.code = BIT_AND_EXPR;
12087 ops.location = loc;
12088 ops.type = TREE_TYPE (treeop0);
12089 ops.op0 = treeop0;
12090 ops.op1 = c3;
12091 ops.op2 = NULL_TREE;
12092 start_sequence ();
12093 rtx mur = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (ops.type),
12094 EXPAND_NORMAL);
12095 rtx_insn *muinsns = get_insns ();
12096 end_sequence ();
12098 unsigned mucost = seq_cost (muinsns, speed_p);
12099 mucost += rtx_cost (mur, mode, EQ, 0, speed_p);
12100 mucost += rtx_cost (expand_normal (c4), mode, EQ, 1, speed_p);
12102 if (mocost <= mucost)
12104 emit_insn (moinsns);
12105 *arg0 = make_tree (TREE_TYPE (*arg0), mor);
12106 return code;
12109 emit_insn (muinsns);
12110 *arg0 = make_tree (TREE_TYPE (*arg0), mur);
12111 *arg1 = c4;
12112 return code;
12115 /* Attempt to optimize unsigned (X % C1) == C2 (or (X % C1) != C2).
12116 If C1 is odd to:
12117 (X - C2) * C3 <= C4 (or >), where
12118 C3 is modular multiplicative inverse of C1 and 1<<prec and
12119 C4 is ((1<<prec) - 1) / C1 or ((1<<prec) - 1) / C1 - 1 (the latter
12120 if C2 > ((1<<prec) - 1) % C1).
12121 If C1 is even, S = ctz (C1) and C2 is 0, use
12122 ((X * C3) r>> S) <= C4, where C3 is modular multiplicative
12123 inverse of C1>>S and 1<<prec and C4 is (((1<<prec) - 1) / (C1>>S)) >> S.
12125 For signed (X % C1) == 0 if C1 is odd to (all operations in it
12126 unsigned):
12127 (X * C3) + C4 <= 2 * C4, where
12128 C3 is modular multiplicative inverse of (unsigned) C1 and 1<<prec and
12129 C4 is ((1<<(prec - 1) - 1) / C1).
12130 If C1 is even, S = ctz(C1), use
12131 ((X * C3) + C4) r>> S <= (C4 >> (S - 1))
12132 where C3 is modular multiplicative inverse of (unsigned)(C1>>S) and 1<<prec
12133 and C4 is ((1<<(prec - 1) - 1) / (C1>>S)) & (-1<<S).
12135 See the Hacker's Delight book, section 10-17. */
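/* As a worked instance of the even unsigned case: for 32-bit
   x % 6 == 0 we have S = 1, C1>>S = 3 with modular inverse
   0xaaaaaaab, and C4 = (0xffffffff / 3) >> 1 = 0x2aaaaaaa, so the
   test becomes ((x * 0xaaaaaaab) r>> 1) <= 0x2aaaaaaa.  */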
12136 enum tree_code
12137 maybe_optimize_mod_cmp (enum tree_code code, tree *arg0, tree *arg1)
12139 gcc_checking_assert (code == EQ_EXPR || code == NE_EXPR);
12140 gcc_checking_assert (TREE_CODE (*arg1) == INTEGER_CST);
12142 if (optimize < 2)
12143 return code;
12145 gimple *stmt = get_def_for_expr (*arg0, TRUNC_MOD_EXPR);
12146 if (stmt == NULL)
12147 return code;
12149 tree treeop0 = gimple_assign_rhs1 (stmt);
12150 tree treeop1 = gimple_assign_rhs2 (stmt);
12151 if (TREE_CODE (treeop0) != SSA_NAME
12152 || TREE_CODE (treeop1) != INTEGER_CST
12153 /* Don't optimize the undefined behavior case x % 0;
12154 x % 1 should have been optimized into zero, punt if
12155 it makes it here for whatever reason;
12156 x % -c should have been optimized into x % c. */
12157 || compare_tree_int (treeop1, 2) <= 0
12158 /* Likewise x % c == d where d >= c should always be false. */
12159 || tree_int_cst_le (treeop1, *arg1))
12160 return code;
12162 /* Unsigned x % pow2 is handled right already, for signed
12163 modulo handle it in maybe_optimize_pow2p_mod_cmp. */
12164 if (integer_pow2p (treeop1))
12165 return maybe_optimize_pow2p_mod_cmp (code, arg0, arg1);
12167 tree type = TREE_TYPE (*arg0);
12168 scalar_int_mode mode;
12169 if (!is_a <scalar_int_mode> (TYPE_MODE (type), &mode))
12170 return code;
12171 if (GET_MODE_BITSIZE (mode) != TYPE_PRECISION (type)
12172 || TYPE_PRECISION (type) <= 1)
12173 return code;
12175 signop sgn = UNSIGNED;
12176 /* If both operands are known to have the sign bit clear, handle
12177 even the signed modulo case as unsigned. treeop1 is always
12178 positive >= 2, checked above. */
12179 if (!TYPE_UNSIGNED (type) && get_range_pos_neg (treeop0) != 1)
12180 sgn = SIGNED;
12182 if (!TYPE_UNSIGNED (type))
12184 if (tree_int_cst_sgn (*arg1) == -1)
12185 return code;
12186 type = unsigned_type_for (type);
12187 if (!type || TYPE_MODE (type) != TYPE_MODE (TREE_TYPE (*arg0)))
12188 return code;
12191 int prec = TYPE_PRECISION (type);
12192 wide_int w = wi::to_wide (treeop1);
12193 int shift = wi::ctz (w);
12194 /* Unsigned (X % C1) == C2 is equivalent to (X - C2) % C1 == 0 if
12195 C2 <= -1U % C1, because for any Z >= 0U - C2 in that case (Z % C1) != 0.
12196 If C1 is odd, we can handle all cases by subtracting
12197 C4 below. We could handle even the even C1 and C2 > -1U % C1 cases
12198 e.g. by testing for overflow on the subtraction, punt on that for now
12199 though. */
12200 if ((sgn == SIGNED || shift) && !integer_zerop (*arg1))
12202 if (sgn == SIGNED)
12203 return code;
12204 wide_int x = wi::umod_trunc (wi::mask (prec, false, prec), w);
12205 if (wi::gtu_p (wi::to_wide (*arg1), x))
12206 return code;
12209 imm_use_iterator imm_iter;
12210 use_operand_p use_p;
12211 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, treeop0)
12213 gimple *use_stmt = USE_STMT (use_p);
12214 /* Punt if treeop0 is used in the same bb in a division
12215 or another modulo with the same divisor. We expect the
12216 division and modulo to be combined together. */
12217 if (use_stmt == stmt
12218 || gimple_bb (use_stmt) != gimple_bb (stmt))
12219 continue;
12220 if (!is_gimple_assign (use_stmt)
12221 || (gimple_assign_rhs_code (use_stmt) != TRUNC_DIV_EXPR
12222 && gimple_assign_rhs_code (use_stmt) != TRUNC_MOD_EXPR))
12223 continue;
12224 if (gimple_assign_rhs1 (use_stmt) != treeop0
12225 || !operand_equal_p (gimple_assign_rhs2 (use_stmt), treeop1, 0))
12226 continue;
12227 return code;
12230 w = wi::lrshift (w, shift);
12231 wide_int a = wide_int::from (w, prec + 1, UNSIGNED);
12232 wide_int b = wi::shifted_mask (prec, 1, false, prec + 1);
12233 wide_int m = wide_int::from (wi::mod_inv (a, b), prec, UNSIGNED);
12234 tree c3 = wide_int_to_tree (type, m);
12235 tree c5 = NULL_TREE;
12236 wide_int d, e;
12237 if (sgn == UNSIGNED)
12239 d = wi::divmod_trunc (wi::mask (prec, false, prec), w, UNSIGNED, &e);
12240 /* Use <= floor ((1<<prec) - 1) / C1 only if C2 <= ((1<<prec) - 1) % C1,
12241 otherwise use < or subtract one from C4. E.g. for
12242 x % 3U == 0 we transform this into x * 0xaaaaaaab <= 0x55555555, but
12243 x % 3U == 1 already needs to be
12244 (x - 1) * 0xaaaaaaabU <= 0x55555554. */
12245 if (!shift && wi::gtu_p (wi::to_wide (*arg1), e))
12246 d -= 1;
12247 if (shift)
12248 d = wi::lrshift (d, shift);
12250 else
12252 e = wi::udiv_trunc (wi::mask (prec - 1, false, prec), w);
12253 if (!shift)
12254 d = wi::lshift (e, 1);
12255 else
12257 e = wi::bit_and (e, wi::mask (shift, true, prec));
12258 d = wi::lrshift (e, shift - 1);
12260 c5 = wide_int_to_tree (type, e);
12262 tree c4 = wide_int_to_tree (type, d);
12264 rtx op0 = expand_normal (treeop0);
12265 treeop0 = make_tree (TREE_TYPE (treeop0), op0);
12267 bool speed_p = optimize_insn_for_speed_p ();
12269 do_pending_stack_adjust ();
12271 location_t loc = gimple_location (stmt);
12272 struct separate_ops ops;
12273 ops.code = TRUNC_MOD_EXPR;
12274 ops.location = loc;
12275 ops.type = TREE_TYPE (treeop0);
12276 ops.op0 = treeop0;
12277 ops.op1 = treeop1;
12278 ops.op2 = NULL_TREE;
12279 start_sequence ();
12280 rtx mor = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (ops.type),
12281 EXPAND_NORMAL);
12282 rtx_insn *moinsns = get_insns ();
12283 end_sequence ();
12285 unsigned mocost = seq_cost (moinsns, speed_p);
12286 mocost += rtx_cost (mor, mode, EQ, 0, speed_p);
12287 mocost += rtx_cost (expand_normal (*arg1), mode, EQ, 1, speed_p);
12289 tree t = fold_convert_loc (loc, type, treeop0);
12290 if (!integer_zerop (*arg1))
12291 t = fold_build2_loc (loc, MINUS_EXPR, type, t, fold_convert (type, *arg1));
12292 t = fold_build2_loc (loc, MULT_EXPR, type, t, c3);
12293 if (sgn == SIGNED)
12294 t = fold_build2_loc (loc, PLUS_EXPR, type, t, c5);
12295 if (shift)
12297 tree s = build_int_cst (NULL_TREE, shift);
12298 t = fold_build2_loc (loc, RROTATE_EXPR, type, t, s);
12301 start_sequence ();
12302 rtx mur = expand_normal (t);
12303 rtx_insn *muinsns = get_insns ();
12304 end_sequence ();
12306 unsigned mucost = seq_cost (muinsns, speed_p);
12307 mucost += rtx_cost (mur, mode, LE, 0, speed_p);
12308 mucost += rtx_cost (expand_normal (c4), mode, LE, 1, speed_p);
12310 if (mocost <= mucost)
12312 emit_insn (moinsns);
12313 *arg0 = make_tree (TREE_TYPE (*arg0), mor);
12314 return code;
12317 emit_insn (muinsns);
12318 *arg0 = make_tree (type, mur);
12319 *arg1 = c4;
12320 return code == EQ_EXPR ? LE_EXPR : GT_EXPR;
12323 /* Generate code to calculate the expression exploded in OPS
12324 using a store-flag instruction, and return an rtx for the result.
12325 OPS reflects a comparison.
12327 If TARGET is nonzero, store the result there if convenient.
12329 Return zero if there is no suitable set-flag instruction
12330 available on this machine.
12332 Once expand_expr has been called on the arguments of the comparison,
12333 we are committed to doing the store flag, since it is not safe to
12334 re-evaluate the expression. We emit the store-flag insn by calling
12335 emit_store_flag, but only expand the arguments if we have a reason
12336 to believe that emit_store_flag will be successful. If we think that
12337 it will, but it isn't, we have to simulate the store-flag with a
12338 set/jump/set sequence. */
12340 static rtx
12341 do_store_flag (sepops ops, rtx target, machine_mode mode)
12343 enum rtx_code code;
12344 tree arg0, arg1, type;
12345 machine_mode operand_mode;
12346 int unsignedp;
12347 rtx op0, op1;
12348 rtx subtarget = target;
12349 location_t loc = ops->location;
12351 arg0 = ops->op0;
12352 arg1 = ops->op1;
12354 /* Don't crash if the comparison was erroneous. */
12355 if (arg0 == error_mark_node || arg1 == error_mark_node)
12356 return const0_rtx;
12358 type = TREE_TYPE (arg0);
12359 operand_mode = TYPE_MODE (type);
12360 unsignedp = TYPE_UNSIGNED (type);
12362 /* We won't bother with BLKmode store-flag operations because it would mean
12363 passing a lot of information to emit_store_flag. */
12364 if (operand_mode == BLKmode)
12365 return 0;
12367 /* We won't bother with store-flag operations involving function pointers
12368 when function pointers must be canonicalized before comparisons. */
12369 if (targetm.have_canonicalize_funcptr_for_compare ()
12370 && ((POINTER_TYPE_P (TREE_TYPE (arg0))
12371 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (arg0))))
12372 || (POINTER_TYPE_P (TREE_TYPE (arg1))
12373 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (arg1))))))
12374 return 0;
12376 STRIP_NOPS (arg0);
12377 STRIP_NOPS (arg1);
12379 /* For vector-typed comparisons, emit code to generate the desired
12380 all-ones or all-zeros mask. */
12381 if (TREE_CODE (ops->type) == VECTOR_TYPE)
12383 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
12384 if (VECTOR_BOOLEAN_TYPE_P (ops->type)
12385 && expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type, ops->code))
12386 return expand_vec_cmp_expr (ops->type, ifexp, target);
12387 else
12388 gcc_unreachable ();
12391 /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
12392 into (x - C2) * C3 < C4. */
12393 if ((ops->code == EQ_EXPR || ops->code == NE_EXPR)
12394 && TREE_CODE (arg0) == SSA_NAME
12395 && TREE_CODE (arg1) == INTEGER_CST)
12397 enum tree_code new_code = maybe_optimize_mod_cmp (ops->code,
12398 &arg0, &arg1);
12399 if (new_code != ops->code)
12401 struct separate_ops nops = *ops;
12402 nops.code = ops->code = new_code;
12403 nops.op0 = arg0;
12404 nops.op1 = arg1;
12405 nops.type = TREE_TYPE (arg0);
12406 return do_store_flag (&nops, target, mode);
12410 /* Get the rtx comparison code to use. We know that EXP is a comparison
12411 operation of some type. Some comparisons against 1 and -1 can be
12412 converted to comparisons with zero. Do so here so that the tests
12413 below will be aware that we have a comparison with zero. These
12414 tests will not catch constants in the first operand, but constants
12415 are rarely passed as the first operand. */
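/* For example, signed x < 1 becomes x <= 0 and signed x > -1
   becomes x >= 0, matching the compare-with-zero patterns that most
   targets handle directly.  */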
12417 switch (ops->code)
12419 case EQ_EXPR:
12420 code = EQ;
12421 break;
12422 case NE_EXPR:
12423 code = NE;
12424 break;
12425 case LT_EXPR:
12426 if (integer_onep (arg1))
12427 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
12428 else
12429 code = unsignedp ? LTU : LT;
12430 break;
12431 case LE_EXPR:
12432 if (! unsignedp && integer_all_onesp (arg1))
12433 arg1 = integer_zero_node, code = LT;
12434 else
12435 code = unsignedp ? LEU : LE;
12436 break;
12437 case GT_EXPR:
12438 if (! unsignedp && integer_all_onesp (arg1))
12439 arg1 = integer_zero_node, code = GE;
12440 else
12441 code = unsignedp ? GTU : GT;
12442 break;
12443 case GE_EXPR:
12444 if (integer_onep (arg1))
12445 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
12446 else
12447 code = unsignedp ? GEU : GE;
12448 break;
12450 case UNORDERED_EXPR:
12451 code = UNORDERED;
12452 break;
12453 case ORDERED_EXPR:
12454 code = ORDERED;
12455 break;
12456 case UNLT_EXPR:
12457 code = UNLT;
12458 break;
12459 case UNLE_EXPR:
12460 code = UNLE;
12461 break;
12462 case UNGT_EXPR:
12463 code = UNGT;
12464 break;
12465 case UNGE_EXPR:
12466 code = UNGE;
12467 break;
12468 case UNEQ_EXPR:
12469 code = UNEQ;
12470 break;
12471 case LTGT_EXPR:
12472 code = LTGT;
12473 break;
12475 default:
12476 gcc_unreachable ();
12479 /* Put a constant second. */
12480 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
12481 || TREE_CODE (arg0) == FIXED_CST)
12483 std::swap (arg0, arg1);
12484 code = swap_condition (code);
12487 /* If this is an equality or inequality test of a single bit, we can
12488 do this by shifting the bit being tested to the low-order bit and
12489 masking the result with the constant 1. If the condition was EQ,
12490 we xor it with 1. This does not require an scc insn and is faster
12491 than an scc insn even if we have it.
12493 The code to make this transformation was moved into fold_single_bit_test,
12494 so we just call into the folder and expand its result. */
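/* E.g. (x & 8) != 0 can become (x >> 3) & 1, and (x & 8) == 0 can
   become ((x >> 3) & 1) ^ 1, subject to fold_single_bit_test's own
   checks.  */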
12496 if ((code == NE || code == EQ)
12497 && integer_zerop (arg1)
12498 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
12500 gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
12501 if (srcstmt
12502 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
12504 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
12505 type = lang_hooks.types.type_for_mode (mode, unsignedp);
12506 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
12507 gimple_assign_rhs1 (srcstmt),
12508 gimple_assign_rhs2 (srcstmt));
12509 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
12510 if (temp)
12511 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
12515 if (! get_subtarget (target)
12516 || GET_MODE (subtarget) != operand_mode)
12517 subtarget = 0;
12519 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
12521 if (target == 0)
12522 target = gen_reg_rtx (mode);
12524 /* Try a cstore if possible. */
12525 return emit_store_flag_force (target, code, op0, op1,
12526 operand_mode, unsignedp,
12527 (TYPE_PRECISION (ops->type) == 1
12528 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
12531 /* Attempt to generate a casesi instruction. Returns 1 if successful,
12532 0 otherwise (i.e. if there is no casesi instruction).
12534 DEFAULT_PROBABILITY is the probability of jumping to the default
12535 label. */
12536 int
12537 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
12538 rtx table_label, rtx default_label, rtx fallback_label,
12539 profile_probability default_probability)
12541 class expand_operand ops[5];
12542 scalar_int_mode index_mode = SImode;
12543 rtx op1, op2, index;
12545 if (! targetm.have_casesi ())
12546 return 0;
12548 /* The index must be some form of integer. Convert it to SImode. */
12549 scalar_int_mode omode = SCALAR_INT_TYPE_MODE (index_type);
12550 if (GET_MODE_BITSIZE (omode) > GET_MODE_BITSIZE (index_mode))
12552 rtx rangertx = expand_normal (range);
12554 /* We must handle the endpoints in the original mode. */
12555 index_expr = build2 (MINUS_EXPR, index_type,
12556 index_expr, minval);
12557 minval = integer_zero_node;
12558 index = expand_normal (index_expr);
12559 if (default_label)
12560 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
12561 omode, 1, default_label,
12562 default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (omode != index_mode)
        {
          index_type = lang_hooks.types.type_for_mode (index_mode, 0);
          index_expr = fold_convert (index_type, index_expr);
        }

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
                                  ? default_label
                                  : fallback_label));
  expand_jump_insn (targetm.code_for_casesi, 5, ops);
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */

static void
do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
              rtx default_label, profile_probability default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
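  /* For example, for case values 5 through 10, INDEX has already had 5
     subtracted and RANGE is 5; an original value of 4 wraps around to a
     large unsigned number, so the single GTU test below rejects values
     on both sides of the range.  */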
  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                             default_label, default_probability);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    {
      unsigned int width;

      /* We know the value of INDEX is between 0 and RANGE.  If we have a
         sign-extended subreg, and RANGE does not have the sign bit set, then
         we have a value that is valid for both sign and zero extension.  In
         this case, we get better code if we sign extend.  */
      if (GET_CODE (index) == SUBREG
          && SUBREG_PROMOTED_VAR_P (index)
          && SUBREG_PROMOTED_SIGNED_P (index)
          && ((width = GET_MODE_PRECISION (as_a <scalar_int_mode> (mode)))
              <= HOST_BITS_PER_WIDE_INT)
          && ! (UINTVAL (range) & (HOST_WIDE_INT_1U << (width - 1))))
        index = convert_to_mode (Pmode, index, 0);
      else
        index = convert_to_mode (Pmode, index, 1);
    }

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = simplify_gen_binary (MULT, Pmode, index,
                               gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
                                             Pmode));
  index = simplify_gen_binary (PLUS, Pmode, index,
                               gen_rtx_LABEL_REF (Pmode, table_label));
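  /* At this point INDEX is the address of the selected table entry:
     TABLE_LABEL plus INDEX times the size of one entry.  */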
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (targetm.gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label,
               profile_probability default_probability)
{
  rtx index;

  if (! targetm.have_tablejump ())
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
                            fold_convert (index_type, index_expr),
                            fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_normal (range),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label, default_probability);
  return 1;
}
/* Return a CONST_VECTOR rtx representing vector mask for
   a VECTOR_CST of booleans.  */
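/* For example, a boolean vector {0, 1, 0, 1} is encoded as a CONST_VECTOR
   whose elements are [0, -1, 0, -1] in the vector's inner mode.  */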
static rtx
const_vector_mask_from_tree (tree exp)
{
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  machine_mode inner = GET_MODE_INNER (mode);

  rtx_vector_builder builder (mode, VECTOR_CST_NPATTERNS (exp),
                              VECTOR_CST_NELTS_PER_PATTERN (exp));
  unsigned int count = builder.encoded_nelts ();
  for (unsigned int i = 0; i < count; ++i)
    {
      tree elt = VECTOR_CST_ELT (exp, i);
      gcc_assert (TREE_CODE (elt) == INTEGER_CST);
      if (integer_zerop (elt))
        builder.quick_push (CONST0_RTX (inner));
      else if (integer_onep (elt)
               || integer_minus_onep (elt))
        builder.quick_push (CONSTM1_RTX (inner));
      else
        gcc_unreachable ();
    }

  return builder.build ();
}
/* EXP is a VECTOR_CST in which each element is either all-zeros or all-ones.
   Return a constant scalar rtx of mode MODE in which bit X is set if element
   X of EXP is nonzero.  */
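/* For example, elements {-1, 0, 0, -1} yield a scalar constant with
   bits 0 and 3 set, i.e. the value 9 in MODE.  */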
static rtx
const_scalar_mask_from_tree (scalar_int_mode mode, tree exp)
{
  wide_int res = wi::zero (GET_MODE_PRECISION (mode));
  tree elt;

  /* The result has a fixed number of bits so the input must too.  */
  unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
  for (unsigned int i = 0; i < nunits; ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);
      gcc_assert (TREE_CODE (elt) == INTEGER_CST);
      if (integer_all_onesp (elt))
        res = wi::set_bit (res, i);
      else
        gcc_assert (integer_zerop (elt));
    }

  return immed_wide_int_const (res, mode);
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
    return const_vector_mask_from_tree (exp);

  machine_mode inner = GET_MODE_INNER (mode);

  rtx_vector_builder builder (mode, VECTOR_CST_NPATTERNS (exp),
                              VECTOR_CST_NELTS_PER_PATTERN (exp));
  unsigned int count = builder.encoded_nelts ();
  for (unsigned int i = 0; i < count; ++i)
    {
      tree elt = VECTOR_CST_ELT (exp, i);
      if (TREE_CODE (elt) == REAL_CST)
        builder.quick_push (const_double_from_real_value (TREE_REAL_CST (elt),
                                                          inner));
      else if (TREE_CODE (elt) == FIXED_CST)
        builder.quick_push (CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
                                                          inner));
      else
        builder.quick_push (immed_wide_int_const (wi::to_poly_wide (elt),
                                                  inner));
    }
  return builder.build ();
}
/* Build a decl for a personality function given a language prefix.  */
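/* For example, a LANG of "gxx" with DWARF2 unwind info produces a decl
   named "__gxx_personality_v0".  */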
tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (unsigned_type_node,
                                   integer_type_node, integer_type_node,
                                   long_long_unsigned_type_node,
                                   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
                     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}
/* Returns a tree for the size of EXP in bytes.  */

static tree
tree_expr_size (const_tree exp)
{
  if (DECL_P (exp)
      && DECL_SIZE_UNIT (exp) != 0)
    return DECL_SIZE_UNIT (exp);
  else
    return size_in_bytes (TREE_TYPE (exp));
}

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}
/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */
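/* For example, the size of a variable-length array is not a compile-time
   constant, so -1 is returned for it.  */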
static HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
    }

  if (size == 0 || !tree_fits_shwi_p (size))
    return -1;

  return tree_to_shwi (size);
}