gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2020 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "predict.h"
29 #include "memmodel.h"
30 #include "tm_p.h"
31 #include "ssa.h"
32 #include "expmed.h"
33 #include "optabs.h"
34 #include "regs.h"
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "cgraph.h"
38 #include "diagnostic.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "stor-layout.h"
42 #include "attribs.h"
43 #include "varasm.h"
44 #include "except.h"
45 #include "insn-attr.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "stmt.h"
50 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
51 #include "expr.h"
52 #include "optabs-tree.h"
53 #include "libfuncs.h"
54 #include "reload.h"
55 #include "langhooks.h"
56 #include "common/common-target.h"
57 #include "tree-dfa.h"
58 #include "tree-ssa-live.h"
59 #include "tree-outof-ssa.h"
60 #include "tree-ssa-address.h"
61 #include "builtins.h"
62 #include "ccmp.h"
63 #include "gimple-fold.h"
64 #include "rtx-vector-builder.h"
67 /* If this is nonzero, we do not bother generating VOLATILE
68 around volatile memory references, and we are willing to
69 output indirect addresses. If cse is to follow, we reject
70 indirect addresses so a useful potential cse is generated;
71 if it is used only once, instruction combination will produce
72 the same indirect address eventually. */
73 int cse_not_expected;
75 static bool block_move_libcall_safe_for_call_parm (void);
76 static bool emit_block_move_via_pattern (rtx, rtx, rtx, unsigned, unsigned,
77 HOST_WIDE_INT, unsigned HOST_WIDE_INT,
78 unsigned HOST_WIDE_INT,
79 unsigned HOST_WIDE_INT, bool);
80 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
81 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
82 static rtx_insn *compress_float_constant (rtx, rtx);
83 static rtx get_subtarget (rtx);
84 static void store_constructor (tree, rtx, int, poly_int64, bool);
85 static rtx store_field (rtx, poly_int64, poly_int64, poly_uint64, poly_uint64,
86 machine_mode, tree, alias_set_type, bool, bool);
88 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
90 static int is_aligning_offset (const_tree, const_tree);
91 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
92 static rtx do_store_flag (sepops, rtx, machine_mode);
93 #ifdef PUSH_ROUNDING
94 static void emit_single_push_insn (machine_mode, rtx, tree);
95 #endif
96 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx,
97 profile_probability);
98 static rtx const_vector_from_tree (tree);
99 static rtx const_scalar_mask_from_tree (scalar_int_mode, tree);
100 static tree tree_expr_size (const_tree);
101 static HOST_WIDE_INT int_expr_size (tree);
102 static void convert_mode_scalar (rtx, rtx, int);
105 /* This is run to set up which modes can be used
106 directly in memory and to initialize the block move optab. It is run
107 at the beginning of compilation and when the target is reinitialized. */
109 void
110 init_expr_target (void)
112 rtx pat;
113 int num_clobbers;
114 rtx mem, mem1;
115 rtx reg;
117 /* Try indexing by frame ptr and try by stack ptr.
118 It is known that on the Convex the stack ptr isn't a valid index.
119 With luck, one or the other is valid on any machine. */
120 mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
121 mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);
123 /* A scratch register we can modify in-place below to avoid
124 useless RTL allocations. */
125 reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
127 rtx_insn *insn = as_a<rtx_insn *> (rtx_alloc (INSN));
128 pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
129 PATTERN (insn) = pat;
131 for (machine_mode mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
132 mode = (machine_mode) ((int) mode + 1))
134 int regno;
136 direct_load[(int) mode] = direct_store[(int) mode] = 0;
137 PUT_MODE (mem, mode);
138 PUT_MODE (mem1, mode);
140 /* See if there is some register that can be used in this mode and
141 directly loaded or stored from memory. */
143 if (mode != VOIDmode && mode != BLKmode)
144 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
145 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
146 regno++)
148 if (!targetm.hard_regno_mode_ok (regno, mode))
149 continue;
151 set_mode_and_regno (reg, mode, regno);
153 SET_SRC (pat) = mem;
154 SET_DEST (pat) = reg;
155 if (recog (pat, insn, &num_clobbers) >= 0)
156 direct_load[(int) mode] = 1;
158 SET_SRC (pat) = mem1;
159 SET_DEST (pat) = reg;
160 if (recog (pat, insn, &num_clobbers) >= 0)
161 direct_load[(int) mode] = 1;
163 SET_SRC (pat) = reg;
164 SET_DEST (pat) = mem;
165 if (recog (pat, insn, &num_clobbers) >= 0)
166 direct_store[(int) mode] = 1;
168 SET_SRC (pat) = reg;
169 SET_DEST (pat) = mem1;
170 if (recog (pat, insn, &num_clobbers) >= 0)
171 direct_store[(int) mode] = 1;
175 mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));
177 opt_scalar_float_mode mode_iter;
178 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_FLOAT)
180 scalar_float_mode mode = mode_iter.require ();
181 scalar_float_mode srcmode;
182 FOR_EACH_MODE_UNTIL (srcmode, mode)
184 enum insn_code ic;
186 ic = can_extend_p (mode, srcmode, 0);
187 if (ic == CODE_FOR_nothing)
188 continue;
190 PUT_MODE (mem, srcmode);
192 if (insn_operand_matches (ic, 1, mem))
193 float_extend_from_mem[mode][srcmode] = true;
198 /* This is run at the start of compiling a function. */
200 void
201 init_expr (void)
203 memset (&crtl->expr, 0, sizeof (crtl->expr));
206 /* Copy data from FROM to TO, where the machine modes are not the same.
207 Both modes may be integer, or both may be floating, or both may be
208 fixed-point.
209 UNSIGNEDP should be nonzero if FROM is an unsigned type.
210 This causes zero-extension instead of sign-extension. */
212 void
213 convert_move (rtx to, rtx from, int unsignedp)
215 machine_mode to_mode = GET_MODE (to);
216 machine_mode from_mode = GET_MODE (from);
218 gcc_assert (to_mode != BLKmode);
219 gcc_assert (from_mode != BLKmode);
221 /* If the source and destination are already the same, then there's
222 nothing to do. */
223 if (to == from)
224 return;
226 /* If FROM is a SUBREG that indicates that we have already done at least
227 the required extension, strip it. We don't handle such SUBREGs as
228 TO here. */
230 scalar_int_mode to_int_mode;
231 if (GET_CODE (from) == SUBREG
232 && SUBREG_PROMOTED_VAR_P (from)
233 && is_a <scalar_int_mode> (to_mode, &to_int_mode)
234 && (GET_MODE_PRECISION (subreg_promoted_mode (from))
235 >= GET_MODE_PRECISION (to_int_mode))
236 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
238 from = gen_lowpart (to_int_mode, SUBREG_REG (from));
239 from_mode = to_int_mode;
242 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
244 if (to_mode == from_mode
245 || (from_mode == VOIDmode && CONSTANT_P (from)))
247 emit_move_insn (to, from);
248 return;
251 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
253 if (GET_MODE_UNIT_PRECISION (to_mode)
254 > GET_MODE_UNIT_PRECISION (from_mode))
256 optab op = unsignedp ? zext_optab : sext_optab;
257 insn_code icode = convert_optab_handler (op, to_mode, from_mode);
258 if (icode != CODE_FOR_nothing)
260 emit_unop_insn (icode, to, from,
261 unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
262 return;
266 if (GET_MODE_UNIT_PRECISION (to_mode)
267 < GET_MODE_UNIT_PRECISION (from_mode))
269 insn_code icode = convert_optab_handler (trunc_optab,
270 to_mode, from_mode);
271 if (icode != CODE_FOR_nothing)
273 emit_unop_insn (icode, to, from, TRUNCATE);
274 return;
278 gcc_assert (known_eq (GET_MODE_BITSIZE (from_mode),
279 GET_MODE_BITSIZE (to_mode)));
281 if (VECTOR_MODE_P (to_mode))
282 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
283 else
284 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
286 emit_move_insn (to, from);
287 return;
290 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
292 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
293 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
294 return;
297 convert_mode_scalar (to, from, unsignedp);
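/* Illustrative sketch (editor addition, not part of expr.c): a minimal,
   hypothetical caller of convert_move from an expansion context.  TO_SI is
   assumed to be an SImode rtx (e.g. a fresh pseudo) and FROM_QI a QImode
   rtx; the helper name is an example only.  */
static void
example_sign_extend_qi_to_si (rtx to_si, rtx from_qi)
{
  /* UNSIGNEDP == 0 requests sign extension rather than zero extension.  */
  convert_move (to_si, from_qi, 0);
}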
300 /* Like convert_move, but deals only with scalar modes. */
302 static void
303 convert_mode_scalar (rtx to, rtx from, int unsignedp)
305 /* Both modes should be scalar types. */
306 scalar_mode from_mode = as_a <scalar_mode> (GET_MODE (from));
307 scalar_mode to_mode = as_a <scalar_mode> (GET_MODE (to));
308 bool to_real = SCALAR_FLOAT_MODE_P (to_mode);
309 bool from_real = SCALAR_FLOAT_MODE_P (from_mode);
310 enum insn_code code;
311 rtx libcall;
313 gcc_assert (to_real == from_real);
315 /* rtx code for making an equivalent value. */
316 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
317 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
319 if (to_real)
321 rtx value;
322 rtx_insn *insns;
323 convert_optab tab;
325 gcc_assert ((GET_MODE_PRECISION (from_mode)
326 != GET_MODE_PRECISION (to_mode))
327 || (DECIMAL_FLOAT_MODE_P (from_mode)
328 != DECIMAL_FLOAT_MODE_P (to_mode)));
330 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
331 /* Conversion between decimal float and binary float, same size. */
332 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
333 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
334 tab = sext_optab;
335 else
336 tab = trunc_optab;
338 /* Try converting directly if the insn is supported. */
340 code = convert_optab_handler (tab, to_mode, from_mode);
341 if (code != CODE_FOR_nothing)
343 emit_unop_insn (code, to, from,
344 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
345 return;
348 /* Otherwise use a libcall. */
349 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
351 /* Is this conversion implemented yet? */
352 gcc_assert (libcall);
354 start_sequence ();
355 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
356 from, from_mode);
357 insns = get_insns ();
358 end_sequence ();
359 emit_libcall_block (insns, to, value,
360 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
361 from)
362 : gen_rtx_FLOAT_EXTEND (to_mode, from));
363 return;
366 /* Handle pointer conversion. */ /* SPEE 900220. */
367 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
369 convert_optab ctab;
371 if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
372 ctab = trunc_optab;
373 else if (unsignedp)
374 ctab = zext_optab;
375 else
376 ctab = sext_optab;
378 if (convert_optab_handler (ctab, to_mode, from_mode)
379 != CODE_FOR_nothing)
381 emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
382 to, from, UNKNOWN);
383 return;
387 /* Targets are expected to provide conversion insns between PxImode and
388 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
389 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
391 scalar_int_mode full_mode
392 = smallest_int_mode_for_size (GET_MODE_BITSIZE (to_mode));
394 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
395 != CODE_FOR_nothing);
397 if (full_mode != from_mode)
398 from = convert_to_mode (full_mode, from, unsignedp);
399 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
400 to, from, UNKNOWN);
401 return;
403 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
405 rtx new_from;
406 scalar_int_mode full_mode
407 = smallest_int_mode_for_size (GET_MODE_BITSIZE (from_mode));
408 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
409 enum insn_code icode;
411 icode = convert_optab_handler (ctab, full_mode, from_mode);
412 gcc_assert (icode != CODE_FOR_nothing);
414 if (to_mode == full_mode)
416 emit_unop_insn (icode, to, from, UNKNOWN);
417 return;
420 new_from = gen_reg_rtx (full_mode);
421 emit_unop_insn (icode, new_from, from, UNKNOWN);
423 /* else proceed to integer conversions below. */
424 from_mode = full_mode;
425 from = new_from;
428 /* Make sure both are fixed-point modes or both are not. */
429 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
430 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
431 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
433 /* If we widen from_mode to to_mode and they are in the same class,
434 we won't saturate the result.
435 Otherwise, always saturate the result to play safe. */
436 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
437 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
438 expand_fixed_convert (to, from, 0, 0);
439 else
440 expand_fixed_convert (to, from, 0, 1);
441 return;
444 /* Now both modes are integers. */
446 /* Handle expanding beyond a word. */
447 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
448 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
450 rtx_insn *insns;
451 rtx lowpart;
452 rtx fill_value;
453 rtx lowfrom;
454 int i;
455 scalar_mode lowpart_mode;
456 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
458 /* Try converting directly if the insn is supported. */
459 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
460 != CODE_FOR_nothing)
462 /* If FROM is a SUBREG, put it into a register. Do this
463 so that we always generate the same set of insns for
464 better cse'ing; if an intermediate assignment occurred,
465 we won't be doing the operation directly on the SUBREG. */
466 if (optimize > 0 && GET_CODE (from) == SUBREG)
467 from = force_reg (from_mode, from);
468 emit_unop_insn (code, to, from, equiv_code);
469 return;
471 /* Next, try converting via full word. */
472 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
473 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
474 != CODE_FOR_nothing))
476 rtx word_to = gen_reg_rtx (word_mode);
477 if (REG_P (to))
479 if (reg_overlap_mentioned_p (to, from))
480 from = force_reg (from_mode, from);
481 emit_clobber (to);
483 convert_move (word_to, from, unsignedp);
484 emit_unop_insn (code, to, word_to, equiv_code);
485 return;
488 /* No special multiword conversion insn; do it by hand. */
489 start_sequence ();
491 /* Since we will turn this into a no conflict block, we must ensure that
492 the source does not overlap the target, so force it into an isolated
493 register if it might.  Likewise for any MEM input, since the
494 conversion sequence might require several references to it and we
495 must ensure we're getting the same value every time. */
497 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
498 from = force_reg (from_mode, from);
500 /* Get a copy of FROM widened to a word, if necessary. */
501 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
502 lowpart_mode = word_mode;
503 else
504 lowpart_mode = from_mode;
506 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
508 lowpart = gen_lowpart (lowpart_mode, to);
509 emit_move_insn (lowpart, lowfrom);
511 /* Compute the value to put in each remaining word. */
512 if (unsignedp)
513 fill_value = const0_rtx;
514 else
515 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
516 LT, lowfrom, const0_rtx,
517 lowpart_mode, 0, -1);
519 /* Fill the remaining words. */
520 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
522 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
523 rtx subword = operand_subword (to, index, 1, to_mode);
525 gcc_assert (subword);
527 if (fill_value != subword)
528 emit_move_insn (subword, fill_value);
531 insns = get_insns ();
532 end_sequence ();
534 emit_insn (insns);
535 return;
538 /* Truncating multi-word to a word or less. */
539 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
540 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
542 if (!((MEM_P (from)
543 && ! MEM_VOLATILE_P (from)
544 && direct_load[(int) to_mode]
545 && ! mode_dependent_address_p (XEXP (from, 0),
546 MEM_ADDR_SPACE (from)))
547 || REG_P (from)
548 || GET_CODE (from) == SUBREG))
549 from = force_reg (from_mode, from);
550 convert_move (to, gen_lowpart (word_mode, from), 0);
551 return;
554 /* Now follow all the conversions between integers
555 no more than a word long. */
557 /* For truncation, usually we can just refer to FROM in a narrower mode. */
558 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
559 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
561 if (!((MEM_P (from)
562 && ! MEM_VOLATILE_P (from)
563 && direct_load[(int) to_mode]
564 && ! mode_dependent_address_p (XEXP (from, 0),
565 MEM_ADDR_SPACE (from)))
566 || REG_P (from)
567 || GET_CODE (from) == SUBREG))
568 from = force_reg (from_mode, from);
569 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
570 && !targetm.hard_regno_mode_ok (REGNO (from), to_mode))
571 from = copy_to_reg (from);
572 emit_move_insn (to, gen_lowpart (to_mode, from));
573 return;
576 /* Handle extension. */
577 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
579 /* Convert directly if that works. */
580 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
581 != CODE_FOR_nothing)
583 emit_unop_insn (code, to, from, equiv_code);
584 return;
586 else
588 rtx tmp;
589 int shift_amount;
591 /* Search for a mode to convert via. */
592 opt_scalar_mode intermediate_iter;
593 FOR_EACH_MODE_FROM (intermediate_iter, from_mode)
595 scalar_mode intermediate = intermediate_iter.require ();
596 if (((can_extend_p (to_mode, intermediate, unsignedp)
597 != CODE_FOR_nothing)
598 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
599 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode,
600 intermediate)))
601 && (can_extend_p (intermediate, from_mode, unsignedp)
602 != CODE_FOR_nothing))
604 convert_move (to, convert_to_mode (intermediate, from,
605 unsignedp), unsignedp);
606 return;
610 /* No suitable intermediate mode.
611 Generate what we need with shifts. */
612 shift_amount = (GET_MODE_PRECISION (to_mode)
613 - GET_MODE_PRECISION (from_mode));
614 from = gen_lowpart (to_mode, force_reg (from_mode, from));
615 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
616 to, unsignedp);
617 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
618 to, unsignedp);
619 if (tmp != to)
620 emit_move_insn (to, tmp);
621 return;
625 /* Support special truncate insns for certain modes. */
626 if (convert_optab_handler (trunc_optab, to_mode,
627 from_mode) != CODE_FOR_nothing)
629 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
630 to, from, UNKNOWN);
631 return;
634 /* Handle truncation of volatile memrefs, and so on;
635 the things that couldn't be truncated directly,
636 and for which there was no special instruction.
638 ??? Code above formerly short-circuited this, for most integer
639 mode pairs, with a force_reg in from_mode followed by a recursive
640 call to this routine. Appears always to have been wrong. */
641 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
643 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
644 emit_move_insn (to, temp);
645 return;
648 /* Mode combination is not recognized. */
649 gcc_unreachable ();
652 /* Return an rtx for a value that would result
653 from converting X to mode MODE.
654 Both X and MODE may be floating, or both integer.
655 UNSIGNEDP is nonzero if X is an unsigned value.
656 This can be done by referring to a part of X in place
657 or by copying to a new temporary with conversion. */
660 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
662 return convert_modes (mode, VOIDmode, x, unsignedp);
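/* Illustrative sketch (editor addition, not part of expr.c): convert_to_mode
   either refers to X in place or copies it to a fresh pseudo in MODE.  A
   hypothetical caller wanting an unsigned widening to word_mode might write
   the following; the helper name is an example only.  */
static rtx
example_widen_to_word (rtx x)
{
  /* Nonzero UNSIGNEDP means X is treated as unsigned, so any widening
     zero-extends.  */
  return convert_to_mode (word_mode, x, 1);
}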
665 /* Return an rtx for a value that would result
666 from converting X from mode OLDMODE to mode MODE.
667 Both modes may be floating, or both integer.
668 UNSIGNEDP is nonzero if X is an unsigned value.
670 This can be done by referring to a part of X in place
671 or by copying to a new temporary with conversion.
673 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
676 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
678 rtx temp;
679 scalar_int_mode int_mode;
681 /* If FROM is a SUBREG that indicates that we have already done at least
682 the required extension, strip it. */
684 if (GET_CODE (x) == SUBREG
685 && SUBREG_PROMOTED_VAR_P (x)
686 && is_a <scalar_int_mode> (mode, &int_mode)
687 && (GET_MODE_PRECISION (subreg_promoted_mode (x))
688 >= GET_MODE_PRECISION (int_mode))
689 && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
690 x = gen_lowpart (int_mode, SUBREG_REG (x));
692 if (GET_MODE (x) != VOIDmode)
693 oldmode = GET_MODE (x);
695 if (mode == oldmode)
696 return x;
698 if (CONST_SCALAR_INT_P (x)
699 && is_a <scalar_int_mode> (mode, &int_mode))
701 /* If the caller did not tell us the old mode, then there is not
702 much to do with respect to canonicalization. We have to
703 assume that all the bits are significant. */
704 if (!is_a <scalar_int_mode> (oldmode))
705 oldmode = MAX_MODE_INT;
706 wide_int w = wide_int::from (rtx_mode_t (x, oldmode),
707 GET_MODE_PRECISION (int_mode),
708 unsignedp ? UNSIGNED : SIGNED);
709 return immed_wide_int_const (w, int_mode);
712 /* We can do this with a gen_lowpart if both desired and current modes
713 are integer, and this is either a constant integer, a register, or a
714 non-volatile MEM. */
715 scalar_int_mode int_oldmode;
716 if (is_int_mode (mode, &int_mode)
717 && is_int_mode (oldmode, &int_oldmode)
718 && GET_MODE_PRECISION (int_mode) <= GET_MODE_PRECISION (int_oldmode)
719 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) int_mode])
720 || CONST_POLY_INT_P (x)
721 || (REG_P (x)
722 && (!HARD_REGISTER_P (x)
723 || targetm.hard_regno_mode_ok (REGNO (x), int_mode))
724 && TRULY_NOOP_TRUNCATION_MODES_P (int_mode, GET_MODE (x)))))
725 return gen_lowpart (int_mode, x);
727 /* Converting from an integer constant into MODE is always equivalent to a
728 subreg operation. */
729 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
731 gcc_assert (known_eq (GET_MODE_BITSIZE (mode),
732 GET_MODE_BITSIZE (oldmode)));
733 return simplify_gen_subreg (mode, x, oldmode, 0);
736 temp = gen_reg_rtx (mode);
737 convert_move (temp, x, unsignedp);
738 return temp;
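/* Illustrative sketch (editor addition, not part of expr.c): because
   convert_modes folds CONST_SCALAR_INT_P values directly, a hypothetical
   caller can canonicalize a VOIDmode integer constant into a given integer
   mode without emitting any insns; the helper name is an example only.  */
static rtx
example_canonicalize_const (rtx cst)
{
  /* With OLDMODE == VOIDmode, convert_modes assumes all bits of the
     constant are significant.  */
  return convert_modes (SImode, VOIDmode, cst, 0);
}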
741 /* Return the largest alignment we can use for doing a move (or store)
742 of MAX_PIECES. ALIGN is the largest alignment we could use. */
744 static unsigned int
745 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
747 scalar_int_mode tmode
748 = int_mode_for_size (max_pieces * BITS_PER_UNIT, 1).require ();
750 if (align >= GET_MODE_ALIGNMENT (tmode))
751 align = GET_MODE_ALIGNMENT (tmode);
752 else
754 scalar_int_mode xmode = NARROWEST_INT_MODE;
755 opt_scalar_int_mode mode_iter;
756 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
758 tmode = mode_iter.require ();
759 if (GET_MODE_SIZE (tmode) > max_pieces
760 || targetm.slow_unaligned_access (tmode, align))
761 break;
762 xmode = tmode;
765 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
768 return align;
771 /* Return the widest integer mode that is narrower than SIZE bytes. */
773 static scalar_int_mode
774 widest_int_mode_for_size (unsigned int size)
776 scalar_int_mode result = NARROWEST_INT_MODE;
778 gcc_checking_assert (size > 1);
780 opt_scalar_int_mode tmode;
781 FOR_EACH_MODE_IN_CLASS (tmode, MODE_INT)
782 if (GET_MODE_SIZE (tmode.require ()) < size)
783 result = tmode.require ();
785 return result;
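/* Illustrative sketch (editor addition, not part of expr.c): the by-pieces
   loops below repeatedly call widest_int_mode_for_size on the bytes that
   remain, walking from wide to narrow pieces.  A hypothetical direct query
   might look like this; the helper name is an example only.  */
static scalar_int_mode
example_first_piece_mode (unsigned int remaining_bytes)
{
  /* Callers must pass at least 2 bytes, matching the checking assert in
     widest_int_mode_for_size.  */
  return widest_int_mode_for_size (remaining_bytes);
}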
788 /* Determine whether an operation OP on LEN bytes with alignment ALIGN can
789 and should be performed piecewise. */
791 static bool
792 can_do_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align,
793 enum by_pieces_operation op)
795 return targetm.use_by_pieces_infrastructure_p (len, align, op,
796 optimize_insn_for_speed_p ());
799 /* Determine whether the LEN bytes can be moved by using several move
800 instructions. Return nonzero if a call to move_by_pieces should
801 succeed. */
803 bool
804 can_move_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align)
806 return can_do_by_pieces (len, align, MOVE_BY_PIECES);
809 /* Return number of insns required to perform operation OP by pieces
810 for L bytes. ALIGN (in bits) is maximum alignment we can assume. */
812 unsigned HOST_WIDE_INT
813 by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
814 unsigned int max_size, by_pieces_operation op)
816 unsigned HOST_WIDE_INT n_insns = 0;
818 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
820 while (max_size > 1 && l > 0)
822 scalar_int_mode mode = widest_int_mode_for_size (max_size);
823 enum insn_code icode;
825 unsigned int modesize = GET_MODE_SIZE (mode);
827 icode = optab_handler (mov_optab, mode);
828 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
830 unsigned HOST_WIDE_INT n_pieces = l / modesize;
831 l %= modesize;
832 switch (op)
834 default:
835 n_insns += n_pieces;
836 break;
838 case COMPARE_BY_PIECES:
839 int batch = targetm.compare_by_pieces_branch_ratio (mode);
840 int batch_ops = 4 * batch - 1;
841 unsigned HOST_WIDE_INT full = n_pieces / batch;
842 n_insns += full * batch_ops;
843 if (n_pieces % batch != 0)
844 n_insns++;
845 break;
849 max_size = modesize;
852 gcc_assert (!l);
853 return n_insns;
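/* Illustrative sketch (editor addition, not part of expr.c): a hypothetical
   cost query using by_pieces_ninsns, e.g. to estimate how many insns a
   16-byte piecewise copy would need at alignment ALIGN.  The 16-byte length
   and the helper name are example values only.  */
static unsigned HOST_WIDE_INT
example_copy_cost_in_insns (unsigned int align)
{
  return by_pieces_ninsns (16, align, MOVE_MAX_PIECES + 1, MOVE_BY_PIECES);
}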
856 /* Used when performing piecewise block operations, holds information
857 about one of the memory objects involved. The member functions
858 can be used to generate code for loading from the object and
859 updating the address when iterating. */
861 class pieces_addr
863 /* The object being referenced, a MEM. Can be NULL_RTX to indicate
864 stack pushes. */
865 rtx m_obj;
866 /* The address of the object. Can differ from that seen in the
867 MEM rtx if we copied the address to a register. */
868 rtx m_addr;
869 /* Nonzero if the address of the object already has an autoincrement; the
870 sign signifies whether that was an increment or a decrement. */
871 signed char m_addr_inc;
872 /* Nonzero if we intend to use autoinc without the address already
873 having autoinc form. We will insert add insns around each memory
874 reference, expecting later passes to form autoinc addressing modes.
875 The only supported options are predecrement and postincrement. */
876 signed char m_explicit_inc;
877 /* True if we have either of the two possible cases of using
878 autoincrement. */
879 bool m_auto;
880 /* True if this is an address to be used for load operations rather
881 than stores. */
882 bool m_is_load;
884 /* Optionally, a function to obtain constants for any given offset into
885 the objects, and data associated with it. */
886 by_pieces_constfn m_constfn;
887 void *m_cfndata;
888 public:
889 pieces_addr (rtx, bool, by_pieces_constfn, void *);
890 rtx adjust (scalar_int_mode, HOST_WIDE_INT);
891 void increment_address (HOST_WIDE_INT);
892 void maybe_predec (HOST_WIDE_INT);
893 void maybe_postinc (HOST_WIDE_INT);
894 void decide_autoinc (machine_mode, bool, HOST_WIDE_INT);
895 int get_addr_inc ()
897 return m_addr_inc;
901 /* Initialize a pieces_addr structure from an object OBJ. IS_LOAD is
902 true if the operation to be performed on this object is a load
903 rather than a store. For stores, OBJ can be NULL, in which case we
904 assume the operation is a stack push. For loads, the optional
905 CONSTFN and its associated CFNDATA can be used in place of the
906 memory load. */
908 pieces_addr::pieces_addr (rtx obj, bool is_load, by_pieces_constfn constfn,
909 void *cfndata)
910 : m_obj (obj), m_is_load (is_load), m_constfn (constfn), m_cfndata (cfndata)
912 m_addr_inc = 0;
913 m_auto = false;
914 if (obj)
916 rtx addr = XEXP (obj, 0);
917 rtx_code code = GET_CODE (addr);
918 m_addr = addr;
919 bool dec = code == PRE_DEC || code == POST_DEC;
920 bool inc = code == PRE_INC || code == POST_INC;
921 m_auto = inc || dec;
922 if (m_auto)
923 m_addr_inc = dec ? -1 : 1;
925 /* While we have always looked for these codes here, the code
926 implementing the memory operation has never handled them.
927 Support could be added later if necessary or beneficial. */
928 gcc_assert (code != PRE_INC && code != POST_DEC);
930 else
932 m_addr = NULL_RTX;
933 if (!is_load)
935 m_auto = true;
936 if (STACK_GROWS_DOWNWARD)
937 m_addr_inc = -1;
938 else
939 m_addr_inc = 1;
941 else
942 gcc_assert (constfn != NULL);
944 m_explicit_inc = 0;
945 if (constfn)
946 gcc_assert (is_load);
949 /* Decide whether to use autoinc for an address involved in a memory op.
950 MODE is the mode of the accesses, REVERSE is true if we've decided to
951 perform the operation starting from the end, and LEN is the length of
952 the operation. Don't override an earlier decision to set m_auto. */
954 void
955 pieces_addr::decide_autoinc (machine_mode ARG_UNUSED (mode), bool reverse,
956 HOST_WIDE_INT len)
958 if (m_auto || m_obj == NULL_RTX)
959 return;
961 bool use_predec = (m_is_load
962 ? USE_LOAD_PRE_DECREMENT (mode)
963 : USE_STORE_PRE_DECREMENT (mode));
964 bool use_postinc = (m_is_load
965 ? USE_LOAD_POST_INCREMENT (mode)
966 : USE_STORE_POST_INCREMENT (mode));
967 machine_mode addr_mode = get_address_mode (m_obj);
969 if (use_predec && reverse)
971 m_addr = copy_to_mode_reg (addr_mode,
972 plus_constant (addr_mode,
973 m_addr, len));
974 m_auto = true;
975 m_explicit_inc = -1;
977 else if (use_postinc && !reverse)
979 m_addr = copy_to_mode_reg (addr_mode, m_addr);
980 m_auto = true;
981 m_explicit_inc = 1;
983 else if (CONSTANT_P (m_addr))
984 m_addr = copy_to_mode_reg (addr_mode, m_addr);
987 /* Adjust the address to refer to the data at OFFSET in MODE. If we
988 are using autoincrement for this address, we don't add the offset,
989 but we still modify the MEM's properties. */
992 pieces_addr::adjust (scalar_int_mode mode, HOST_WIDE_INT offset)
994 if (m_constfn)
995 return m_constfn (m_cfndata, offset, mode);
996 if (m_obj == NULL_RTX)
997 return NULL_RTX;
998 if (m_auto)
999 return adjust_automodify_address (m_obj, mode, m_addr, offset);
1000 else
1001 return adjust_address (m_obj, mode, offset);
1004 /* Emit an add instruction to increment the address by SIZE. */
1006 void
1007 pieces_addr::increment_address (HOST_WIDE_INT size)
1009 rtx amount = gen_int_mode (size, GET_MODE (m_addr));
1010 emit_insn (gen_add2_insn (m_addr, amount));
1013 /* If we are supposed to decrement the address before each access, emit code
1014 to do so now.  Increment by SIZE (which should have the correct sign
1015 already). */
1017 void
1018 pieces_addr::maybe_predec (HOST_WIDE_INT size)
1020 if (m_explicit_inc >= 0)
1021 return;
1022 gcc_assert (HAVE_PRE_DECREMENT);
1023 increment_address (size);
1026 /* If we are supposed to increment the address after each access, emit code
1027 to do so now.  Increment by SIZE. */
1029 void
1030 pieces_addr::maybe_postinc (HOST_WIDE_INT size)
1032 if (m_explicit_inc <= 0)
1033 return;
1034 gcc_assert (HAVE_POST_INCREMENT);
1035 increment_address (size);
1038 /* This structure is used by do_op_by_pieces to describe the operation
1039 to be performed. */
1041 class op_by_pieces_d
1043 protected:
1044 pieces_addr m_to, m_from;
1045 unsigned HOST_WIDE_INT m_len;
1046 HOST_WIDE_INT m_offset;
1047 unsigned int m_align;
1048 unsigned int m_max_size;
1049 bool m_reverse;
1051 /* Virtual functions, overriden by derived classes for the specific
1052 operation. */
1053 virtual void generate (rtx, rtx, machine_mode) = 0;
1054 virtual bool prepare_mode (machine_mode, unsigned int) = 0;
1055 virtual void finish_mode (machine_mode)
1059 public:
1060 op_by_pieces_d (rtx, bool, rtx, bool, by_pieces_constfn, void *,
1061 unsigned HOST_WIDE_INT, unsigned int);
1062 void run ();
1065 /* The constructor for an op_by_pieces_d structure. We require two
1066 objects named TO and FROM, which are identified as loads or stores
1067 by TO_LOAD and FROM_LOAD. If FROM is a load, the optional FROM_CFN
1068 and its associated FROM_CFN_DATA can be used to replace loads with
1069 constant values. LEN describes the length of the operation. */
1071 op_by_pieces_d::op_by_pieces_d (rtx to, bool to_load,
1072 rtx from, bool from_load,
1073 by_pieces_constfn from_cfn,
1074 void *from_cfn_data,
1075 unsigned HOST_WIDE_INT len,
1076 unsigned int align)
1077 : m_to (to, to_load, NULL, NULL),
1078 m_from (from, from_load, from_cfn, from_cfn_data),
1079 m_len (len), m_max_size (MOVE_MAX_PIECES + 1)
1081 int toi = m_to.get_addr_inc ();
1082 int fromi = m_from.get_addr_inc ();
1083 if (toi >= 0 && fromi >= 0)
1084 m_reverse = false;
1085 else if (toi <= 0 && fromi <= 0)
1086 m_reverse = true;
1087 else
1088 gcc_unreachable ();
1090 m_offset = m_reverse ? len : 0;
1091 align = MIN (to ? MEM_ALIGN (to) : align,
1092 from ? MEM_ALIGN (from) : align);
1094 /* If copying requires more than two move insns,
1095 copy addresses to registers (to make displacements shorter)
1096 and use post-increment if available. */
1097 if (by_pieces_ninsns (len, align, m_max_size, MOVE_BY_PIECES) > 2)
1099 /* Find the mode of the largest comparison. */
1100 scalar_int_mode mode = widest_int_mode_for_size (m_max_size);
1102 m_from.decide_autoinc (mode, m_reverse, len);
1103 m_to.decide_autoinc (mode, m_reverse, len);
1106 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1107 m_align = align;
1110 /* This function contains the main loop used for expanding a block
1111 operation. First move what we can in the largest integer mode,
1112 then go to successively smaller modes. For every access, call
1113 GENFUN with the two operands and the EXTRA_DATA. */
1115 void
1116 op_by_pieces_d::run ()
1118 while (m_max_size > 1 && m_len > 0)
1120 scalar_int_mode mode = widest_int_mode_for_size (m_max_size);
1122 if (prepare_mode (mode, m_align))
1124 unsigned int size = GET_MODE_SIZE (mode);
1125 rtx to1 = NULL_RTX, from1;
1127 while (m_len >= size)
1129 if (m_reverse)
1130 m_offset -= size;
1132 to1 = m_to.adjust (mode, m_offset);
1133 from1 = m_from.adjust (mode, m_offset);
1135 m_to.maybe_predec (-(HOST_WIDE_INT)size);
1136 m_from.maybe_predec (-(HOST_WIDE_INT)size);
1138 generate (to1, from1, mode);
1140 m_to.maybe_postinc (size);
1141 m_from.maybe_postinc (size);
1143 if (!m_reverse)
1144 m_offset += size;
1146 m_len -= size;
1149 finish_mode (mode);
1152 m_max_size = GET_MODE_SIZE (mode);
1155 /* The code above should have handled everything. */
1156 gcc_assert (!m_len);
1159 /* Derived class from op_by_pieces_d, providing support for block move
1160 operations. */
1162 class move_by_pieces_d : public op_by_pieces_d
1164 insn_gen_fn m_gen_fun;
1165 void generate (rtx, rtx, machine_mode);
1166 bool prepare_mode (machine_mode, unsigned int);
1168 public:
1169 move_by_pieces_d (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1170 unsigned int align)
1171 : op_by_pieces_d (to, false, from, true, NULL, NULL, len, align)
1174 rtx finish_retmode (memop_ret);
1177 /* Return true if MODE can be used for a set of copies, given an
1178 alignment ALIGN. Prepare whatever data is necessary for later
1179 calls to generate. */
1181 bool
1182 move_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
1184 insn_code icode = optab_handler (mov_optab, mode);
1185 m_gen_fun = GEN_FCN (icode);
1186 return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
1189 /* A callback used when iterating for a move_by_pieces operation.  OP1 is
1190 the piece loaded from the source and OP0 the destination piece, both in
1191 MODE.  If OP0 is NULL, generate a push of OP1 instead; otherwise emit the
1192 move using the insn gen function chosen by prepare_mode. */
1195 void
1196 move_by_pieces_d::generate (rtx op0, rtx op1,
1197 machine_mode mode ATTRIBUTE_UNUSED)
1199 #ifdef PUSH_ROUNDING
1200 if (op0 == NULL_RTX)
1202 emit_single_push_insn (mode, op1, NULL);
1203 return;
1205 #endif
1206 emit_insn (m_gen_fun (op0, op1));
1209 /* Perform the final adjustment at the end of a string to obtain the
1210 correct return value for the block operation.
1211 Return value is based on RETMODE argument. */
1214 move_by_pieces_d::finish_retmode (memop_ret retmode)
1216 gcc_assert (!m_reverse);
1217 if (retmode == RETURN_END_MINUS_ONE)
1219 m_to.maybe_postinc (-1);
1220 --m_offset;
1222 return m_to.adjust (QImode, m_offset);
1225 /* Generate several move instructions to copy LEN bytes from block FROM to
1226 block TO. (These are MEM rtx's with BLKmode).
1228 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1229 used to push FROM to the stack.
1231 ALIGN is maximum stack alignment we can assume.
1233 Return value is based on RETMODE argument. */
1236 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1237 unsigned int align, memop_ret retmode)
1239 #ifndef PUSH_ROUNDING
1240 if (to == NULL)
1241 gcc_unreachable ();
1242 #endif
1244 move_by_pieces_d data (to, from, len, align);
1246 data.run ();
1248 if (retmode != RETURN_BEGIN)
1249 return data.finish_retmode (retmode);
1250 else
1251 return to;
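/* Illustrative sketch (editor addition, not part of expr.c): a hypothetical
   caller pairing can_move_by_pieces with move_by_pieces for a compile-time
   length.  X and Y are assumed to be BLKmode MEMs; the helper name is an
   example only.  */
static void
example_copy_if_profitable (rtx x, rtx y, unsigned HOST_WIDE_INT len)
{
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  if (can_move_by_pieces (len, align))
    move_by_pieces (x, y, len, align, RETURN_BEGIN);
}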
1254 /* Derived class from op_by_pieces_d, providing support for block store
1255 operations. */
1257 class store_by_pieces_d : public op_by_pieces_d
1259 insn_gen_fn m_gen_fun;
1260 void generate (rtx, rtx, machine_mode);
1261 bool prepare_mode (machine_mode, unsigned int);
1263 public:
1264 store_by_pieces_d (rtx to, by_pieces_constfn cfn, void *cfn_data,
1265 unsigned HOST_WIDE_INT len, unsigned int align)
1266 : op_by_pieces_d (to, false, NULL_RTX, true, cfn, cfn_data, len, align)
1269 rtx finish_retmode (memop_ret);
1272 /* Return true if MODE can be used for a set of stores, given an
1273 alignment ALIGN. Prepare whatever data is necessary for later
1274 calls to generate. */
1276 bool
1277 store_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
1279 insn_code icode = optab_handler (mov_optab, mode);
1280 m_gen_fun = GEN_FCN (icode);
1281 return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
1284 /* A callback used when iterating for a store_by_pieces operation.  OP0 is
1285 the destination piece and OP1 the constant value to be stored, both in
1286 MODE; emit the store using the insn gen function chosen by prepare_mode. */
1290 void
1291 store_by_pieces_d::generate (rtx op0, rtx op1, machine_mode)
1293 emit_insn (m_gen_fun (op0, op1));
1296 /* Perform the final adjustment at the end of a string to obtain the
1297 correct return value for the block operation.
1298 Return value is based on RETMODE argument. */
1301 store_by_pieces_d::finish_retmode (memop_ret retmode)
1303 gcc_assert (!m_reverse);
1304 if (retmode == RETURN_END_MINUS_ONE)
1306 m_to.maybe_postinc (-1);
1307 --m_offset;
1309 return m_to.adjust (QImode, m_offset);
1312 /* Determine whether the LEN bytes generated by CONSTFUN can be
1313 stored to memory using several move instructions. CONSTFUNDATA is
1314 a pointer which will be passed as argument in every CONSTFUN call.
1315 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
1316 a memset operation and false if it's a copy of a constant string.
1317 Return nonzero if a call to store_by_pieces should succeed. */
1320 can_store_by_pieces (unsigned HOST_WIDE_INT len,
1321 rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
1322 void *constfundata, unsigned int align, bool memsetp)
1324 unsigned HOST_WIDE_INT l;
1325 unsigned int max_size;
1326 HOST_WIDE_INT offset = 0;
1327 enum insn_code icode;
1328 int reverse;
1329 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
1330 rtx cst ATTRIBUTE_UNUSED;
1332 if (len == 0)
1333 return 1;
1335 if (!targetm.use_by_pieces_infrastructure_p (len, align,
1336 memsetp
1337 ? SET_BY_PIECES
1338 : STORE_BY_PIECES,
1339 optimize_insn_for_speed_p ()))
1340 return 0;
1342 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
1344 /* We would first store what we can in the largest integer mode, then go to
1345 successively smaller modes. */
1347 for (reverse = 0;
1348 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
1349 reverse++)
1351 l = len;
1352 max_size = STORE_MAX_PIECES + 1;
1353 while (max_size > 1 && l > 0)
1355 scalar_int_mode mode = widest_int_mode_for_size (max_size);
1357 icode = optab_handler (mov_optab, mode);
1358 if (icode != CODE_FOR_nothing
1359 && align >= GET_MODE_ALIGNMENT (mode))
1361 unsigned int size = GET_MODE_SIZE (mode);
1363 while (l >= size)
1365 if (reverse)
1366 offset -= size;
1368 cst = (*constfun) (constfundata, offset, mode);
1369 if (!targetm.legitimate_constant_p (mode, cst))
1370 return 0;
1372 if (!reverse)
1373 offset += size;
1375 l -= size;
1379 max_size = GET_MODE_SIZE (mode);
1382 /* The code above should have handled everything. */
1383 gcc_assert (!l);
1386 return 1;
1389 /* Generate several move instructions to store LEN bytes generated by
1390 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
1391 pointer which will be passed as argument in every CONSTFUN call.
1392 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
1393 a memset operation and false if it's a copy of a constant string.
1394 Return value is based on RETMODE argument. */
1397 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
1398 rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
1399 void *constfundata, unsigned int align, bool memsetp,
1400 memop_ret retmode)
1402 if (len == 0)
1404 gcc_assert (retmode != RETURN_END_MINUS_ONE);
1405 return to;
1408 gcc_assert (targetm.use_by_pieces_infrastructure_p
1409 (len, align,
1410 memsetp ? SET_BY_PIECES : STORE_BY_PIECES,
1411 optimize_insn_for_speed_p ()));
1413 store_by_pieces_d data (to, constfun, constfundata, len, align);
1414 data.run ();
1416 if (retmode != RETURN_BEGIN)
1417 return data.finish_retmode (retmode);
1418 else
1419 return to;
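/* Illustrative sketch (editor addition, not part of expr.c): pairing
   can_store_by_pieces with store_by_pieces to fill a block with zeros.  The
   constant callback and helper names are hypothetical; clear_by_pieces
   below performs a similar zero-fill internally.  */
static rtx
example_zero_cfn (void *, HOST_WIDE_INT, scalar_int_mode)
{
  return const0_rtx;
}

static void
example_store_zeros (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  if (can_store_by_pieces (len, example_zero_cfn, NULL, align, true))
    store_by_pieces (to, len, example_zero_cfn, NULL, align, true,
		     RETURN_BEGIN);
}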
1422 /* Callback routine for clear_by_pieces.
1423 Return const0_rtx unconditionally. */
1425 static rtx
1426 clear_by_pieces_1 (void *, HOST_WIDE_INT, scalar_int_mode)
1428 return const0_rtx;
1431 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
1432 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
1434 static void
1435 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
1437 if (len == 0)
1438 return;
1440 store_by_pieces_d data (to, clear_by_pieces_1, NULL, len, align);
1441 data.run ();
1444 /* Context used by compare_by_pieces_genfn. It stores the fail label
1445 to jump to in case of miscomparison, and for branch ratios greater than 1,
1446 it stores an accumulator and the current and maximum counts before
1447 emitting another branch. */
1449 class compare_by_pieces_d : public op_by_pieces_d
1451 rtx_code_label *m_fail_label;
1452 rtx m_accumulator;
1453 int m_count, m_batch;
1455 void generate (rtx, rtx, machine_mode);
1456 bool prepare_mode (machine_mode, unsigned int);
1457 void finish_mode (machine_mode);
1458 public:
1459 compare_by_pieces_d (rtx op0, rtx op1, by_pieces_constfn op1_cfn,
1460 void *op1_cfn_data, HOST_WIDE_INT len, int align,
1461 rtx_code_label *fail_label)
1462 : op_by_pieces_d (op0, true, op1, true, op1_cfn, op1_cfn_data, len, align)
1464 m_fail_label = fail_label;
1468 /* A callback used when iterating for a compare_by_pieces_operation.
1469 OP0 and OP1 are the values that have been loaded and should be
1470 compared in MODE. DATA holds a pointer to the compare_by_pieces_data
1471 context structure. */
1473 void
1474 compare_by_pieces_d::generate (rtx op0, rtx op1, machine_mode mode)
1476 if (m_batch > 1)
1478 rtx temp = expand_binop (mode, sub_optab, op0, op1, NULL_RTX,
1479 true, OPTAB_LIB_WIDEN);
1480 if (m_count != 0)
1481 temp = expand_binop (mode, ior_optab, m_accumulator, temp, temp,
1482 true, OPTAB_LIB_WIDEN);
1483 m_accumulator = temp;
1485 if (++m_count < m_batch)
1486 return;
1488 m_count = 0;
1489 op0 = m_accumulator;
1490 op1 = const0_rtx;
1491 m_accumulator = NULL_RTX;
1493 do_compare_rtx_and_jump (op0, op1, NE, true, mode, NULL_RTX, NULL,
1494 m_fail_label, profile_probability::uninitialized ());
1497 /* Return true if MODE can be used for a set of moves and comparisons,
1498 given an alignment ALIGN. Prepare whatever data is necessary for
1499 later calls to generate. */
1501 bool
1502 compare_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
1504 insn_code icode = optab_handler (mov_optab, mode);
1505 if (icode == CODE_FOR_nothing
1506 || align < GET_MODE_ALIGNMENT (mode)
1507 || !can_compare_p (EQ, mode, ccp_jump))
1508 return false;
1509 m_batch = targetm.compare_by_pieces_branch_ratio (mode);
1510 if (m_batch < 0)
1511 return false;
1512 m_accumulator = NULL_RTX;
1513 m_count = 0;
1514 return true;
1517 /* Called after expanding a series of comparisons in MODE. If we have
1518 accumulated results for which we haven't emitted a branch yet, do
1519 so now. */
1521 void
1522 compare_by_pieces_d::finish_mode (machine_mode mode)
1524 if (m_accumulator != NULL_RTX)
1525 do_compare_rtx_and_jump (m_accumulator, const0_rtx, NE, true, mode,
1526 NULL_RTX, NULL, m_fail_label,
1527 profile_probability::uninitialized ());
1530 /* Generate several move instructions to compare LEN bytes from blocks
1531 ARG0 and ARG1. (These are MEM rtx's with BLKmode).
1533 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1534 used to push FROM to the stack.
1536 ALIGN is maximum stack alignment we can assume.
1538 Optionally, the caller can pass a constfn and associated data in A1_CFN
1539 and A1_CFN_DATA, describing that the second operand being compared is a
1540 known constant and how to obtain its data. */
1542 static rtx
1543 compare_by_pieces (rtx arg0, rtx arg1, unsigned HOST_WIDE_INT len,
1544 rtx target, unsigned int align,
1545 by_pieces_constfn a1_cfn, void *a1_cfn_data)
1547 rtx_code_label *fail_label = gen_label_rtx ();
1548 rtx_code_label *end_label = gen_label_rtx ();
1550 if (target == NULL_RTX
1551 || !REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
1552 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
1554 compare_by_pieces_d data (arg0, arg1, a1_cfn, a1_cfn_data, len, align,
1555 fail_label);
1557 data.run ();
1559 emit_move_insn (target, const0_rtx);
1560 emit_jump (end_label);
1561 emit_barrier ();
1562 emit_label (fail_label);
1563 emit_move_insn (target, const1_rtx);
1564 emit_label (end_label);
1566 return target;
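/* Illustrative sketch (editor addition, not part of expr.c): a hypothetical
   wrapper around compare_by_pieces that compares two equal-sized BLKmode
   MEMs and yields a pseudo holding 0 when they match and 1 otherwise.  No
   constant data is supplied for the second operand, so A1_CFN is NULL.  */
static rtx
example_blocks_differ_p (rtx a, rtx b, unsigned HOST_WIDE_INT len)
{
  unsigned int align = MIN (MEM_ALIGN (a), MEM_ALIGN (b));
  return compare_by_pieces (a, b, len, NULL_RTX, align, NULL, NULL);
}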
1569 /* Emit code to move a block Y to a block X. This may be done with
1570 string-move instructions, with multiple scalar move instructions,
1571 or with a library call.
1573 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1574 SIZE is an rtx that says how long they are.
1575 ALIGN is the maximum alignment we can assume they have.
1576 METHOD describes what kind of copy this is, and what mechanisms may be used.
1577 MIN_SIZE is the minimal size of block to move.
1578 MAX_SIZE is the maximal size of block to move; if it cannot be represented
1579 in unsigned HOST_WIDE_INT, then it is the mask of all ones.
1581 Return the address of the new block, if memcpy is called and returns it,
1582 0 otherwise. */
1585 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1586 unsigned int expected_align, HOST_WIDE_INT expected_size,
1587 unsigned HOST_WIDE_INT min_size,
1588 unsigned HOST_WIDE_INT max_size,
1589 unsigned HOST_WIDE_INT probable_max_size,
1590 bool bail_out_libcall, bool *is_move_done,
1591 bool might_overlap)
1593 int may_use_call;
1594 rtx retval = 0;
1595 unsigned int align;
1597 if (is_move_done)
1598 *is_move_done = true;
1600 gcc_assert (size);
1601 if (CONST_INT_P (size) && INTVAL (size) == 0)
1602 return 0;
1604 switch (method)
1606 case BLOCK_OP_NORMAL:
1607 case BLOCK_OP_TAILCALL:
1608 may_use_call = 1;
1609 break;
1611 case BLOCK_OP_CALL_PARM:
1612 may_use_call = block_move_libcall_safe_for_call_parm ();
1614 /* Make inhibit_defer_pop nonzero around the library call
1615 to force it to pop the arguments right away. */
1616 NO_DEFER_POP;
1617 break;
1619 case BLOCK_OP_NO_LIBCALL:
1620 may_use_call = 0;
1621 break;
1623 case BLOCK_OP_NO_LIBCALL_RET:
1624 may_use_call = -1;
1625 break;
1627 default:
1628 gcc_unreachable ();
1631 gcc_assert (MEM_P (x) && MEM_P (y));
1632 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1633 gcc_assert (align >= BITS_PER_UNIT);
1635 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1636 block copy is more efficient for other large modes, e.g. DCmode. */
1637 x = adjust_address (x, BLKmode, 0);
1638 y = adjust_address (y, BLKmode, 0);
1640 /* If source and destination are the same, no need to copy anything. */
1641 if (rtx_equal_p (x, y)
1642 && !MEM_VOLATILE_P (x)
1643 && !MEM_VOLATILE_P (y))
1644 return 0;
1646 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1647 can be incorrect is coming from __builtin_memcpy. */
1648 poly_int64 const_size;
1649 if (poly_int_rtx_p (size, &const_size))
1651 x = shallow_copy_rtx (x);
1652 y = shallow_copy_rtx (y);
1653 set_mem_size (x, const_size);
1654 set_mem_size (y, const_size);
1657 bool pieces_ok = CONST_INT_P (size)
1658 && can_move_by_pieces (INTVAL (size), align);
1659 bool pattern_ok = false;
1661 if (!pieces_ok || might_overlap)
1663 pattern_ok
1664 = emit_block_move_via_pattern (x, y, size, align,
1665 expected_align, expected_size,
1666 min_size, max_size, probable_max_size,
1667 might_overlap);
1668 if (!pattern_ok && might_overlap)
1670 /* Do not try any of the other methods below as they are not safe
1671 for overlapping moves. */
1672 *is_move_done = false;
1673 return retval;
1677 if (pattern_ok)
1679 else if (pieces_ok)
1680 move_by_pieces (x, y, INTVAL (size), align, RETURN_BEGIN);
1681 else if (may_use_call && !might_overlap
1682 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1683 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1685 if (bail_out_libcall)
1687 if (is_move_done)
1688 *is_move_done = false;
1689 return retval;
1692 if (may_use_call < 0)
1693 return pc_rtx;
1695 retval = emit_block_copy_via_libcall (x, y, size,
1696 method == BLOCK_OP_TAILCALL);
1698 else if (might_overlap)
1699 *is_move_done = false;
1700 else
1701 emit_block_move_via_loop (x, y, size, align);
1703 if (method == BLOCK_OP_CALL_PARM)
1704 OK_DEFER_POP;
1706 return retval;
1710 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1712 unsigned HOST_WIDE_INT max, min = 0;
1713 if (GET_CODE (size) == CONST_INT)
1714 min = max = UINTVAL (size);
1715 else
1716 max = GET_MODE_MASK (GET_MODE (size));
1717 return emit_block_move_hints (x, y, size, method, 0, -1,
1718 min, max, max);
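/* Illustrative sketch (editor addition, not part of expr.c): the common
   entry point for a plain block copy of NBYTES bytes, letting
   emit_block_move choose between move insns, a cpymem pattern, or a memcpy
   libcall.  The helper name is an example only.  */
static void
example_copy_block (rtx dst, rtx src, HOST_WIDE_INT nbytes)
{
  emit_block_move (dst, src, GEN_INT (nbytes), BLOCK_OP_NORMAL);
}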
1721 /* A subroutine of emit_block_move. Returns true if calling the
1722 block move libcall will not clobber any parameters which may have
1723 already been placed on the stack. */
1725 static bool
1726 block_move_libcall_safe_for_call_parm (void)
1728 tree fn;
1730 /* If arguments are pushed on the stack, then they're safe. */
1731 if (PUSH_ARGS)
1732 return true;
1734 /* If registers go on the stack anyway, any argument is sure to clobber
1735 an outgoing argument. */
1736 #if defined (REG_PARM_STACK_SPACE)
1737 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1738 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1739 depend on its argument. */
1740 (void) fn;
1741 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1742 && REG_PARM_STACK_SPACE (fn) != 0)
1743 return false;
1744 #endif
1746 /* If any argument goes in memory, then it might clobber an outgoing
1747 argument. */
1749 CUMULATIVE_ARGS args_so_far_v;
1750 cumulative_args_t args_so_far;
1751 tree arg;
1753 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1754 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1755 args_so_far = pack_cumulative_args (&args_so_far_v);
1757 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1758 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1760 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1761 function_arg_info arg_info (mode, /*named=*/true);
1762 rtx tmp = targetm.calls.function_arg (args_so_far, arg_info);
1763 if (!tmp || !REG_P (tmp))
1764 return false;
1765 if (targetm.calls.arg_partial_bytes (args_so_far, arg_info))
1766 return false;
1767 targetm.calls.function_arg_advance (args_so_far, arg_info);
1770 return true;
1773 /* A subroutine of emit_block_move. Expand a cpymem or movmem pattern;
1774 return true if successful.
1776 X is the destination of the copy or move.
1777 Y is the source of the copy or move.
1778 SIZE is the size of the block to be moved.
1780 MIGHT_OVERLAP indicates this originated with expansion of a
1781 builtin_memmove() and the source and destination blocks may
1782 overlap.
1785 static bool
1786 emit_block_move_via_pattern (rtx x, rtx y, rtx size, unsigned int align,
1787 unsigned int expected_align,
1788 HOST_WIDE_INT expected_size,
1789 unsigned HOST_WIDE_INT min_size,
1790 unsigned HOST_WIDE_INT max_size,
1791 unsigned HOST_WIDE_INT probable_max_size,
1792 bool might_overlap)
1794 if (expected_align < align)
1795 expected_align = align;
1796 if (expected_size != -1)
1798 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1799 expected_size = probable_max_size;
1800 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1801 expected_size = min_size;
1804 /* Since this is a move insn, we don't care about volatility. */
1805 temporary_volatile_ok v (true);
1807 /* Try the most limited insn first, because there's no point
1808 including more than one in the machine description unless
1809 the more limited one has some advantage. */
1811 opt_scalar_int_mode mode_iter;
1812 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
1814 scalar_int_mode mode = mode_iter.require ();
1815 enum insn_code code;
1816 if (might_overlap)
1817 code = direct_optab_handler (movmem_optab, mode);
1818 else
1819 code = direct_optab_handler (cpymem_optab, mode);
1821 if (code != CODE_FOR_nothing
1822 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1823 here because if SIZE is less than the mode mask, as it is
1824 returned by the macro, it will definitely be less than the
1825 actual mode mask. Since SIZE is within the Pmode address
1826 space, we limit MODE to Pmode. */
1827 && ((CONST_INT_P (size)
1828 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1829 <= (GET_MODE_MASK (mode) >> 1)))
1830 || max_size <= (GET_MODE_MASK (mode) >> 1)
1831 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1833 class expand_operand ops[9];
1834 unsigned int nops;
1836 /* ??? When called via emit_block_move_for_call, it'd be
1837 nice if there were some way to inform the backend, so
1838 that it doesn't fail the expansion because it thinks
1839 emitting the libcall would be more efficient. */
1840 nops = insn_data[(int) code].n_generator_args;
1841 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1843 create_fixed_operand (&ops[0], x);
1844 create_fixed_operand (&ops[1], y);
1845 /* The check above guarantees that this size conversion is valid. */
1846 create_convert_operand_to (&ops[2], size, mode, true);
1847 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1848 if (nops >= 6)
1850 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1851 create_integer_operand (&ops[5], expected_size);
1853 if (nops >= 8)
1855 create_integer_operand (&ops[6], min_size);
1856 /* If we cannot represent the maximal size,
1857 make parameter NULL. */
1858 if ((HOST_WIDE_INT) max_size != -1)
1859 create_integer_operand (&ops[7], max_size);
1860 else
1861 create_fixed_operand (&ops[7], NULL);
1863 if (nops == 9)
1865 /* If we cannot represent the maximal size,
1866 make parameter NULL. */
1867 if ((HOST_WIDE_INT) probable_max_size != -1)
1868 create_integer_operand (&ops[8], probable_max_size);
1869 else
1870 create_fixed_operand (&ops[8], NULL);
1872 if (maybe_expand_insn (code, nops, ops))
1873 return true;
1877 return false;
1880 /* A subroutine of emit_block_move. Copy the data via an explicit
1881 loop. This is used only when libcalls are forbidden. */
1882 /* ??? It'd be nice to copy in hunks larger than QImode. */
1884 static void
1885 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1886 unsigned int align ATTRIBUTE_UNUSED)
1888 rtx_code_label *cmp_label, *top_label;
1889 rtx iter, x_addr, y_addr, tmp;
1890 machine_mode x_addr_mode = get_address_mode (x);
1891 machine_mode y_addr_mode = get_address_mode (y);
1892 machine_mode iter_mode;
1894 iter_mode = GET_MODE (size);
1895 if (iter_mode == VOIDmode)
1896 iter_mode = word_mode;
1898 top_label = gen_label_rtx ();
1899 cmp_label = gen_label_rtx ();
1900 iter = gen_reg_rtx (iter_mode);
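/* In outline, the emitted loop is:

       iter = 0;
       goto cmp_label;
     top_label:
       *(x + iter) = *(y + iter);   (one QImode unit per iteration)
       iter += 1;
     cmp_label:
       if (iter < size) goto top_label;

   with the backward branch predicted taken about 90% of the time.  */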
1902 emit_move_insn (iter, const0_rtx);
1904 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1905 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1906 do_pending_stack_adjust ();
1908 emit_jump (cmp_label);
1909 emit_label (top_label);
1911 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1912 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1914 if (x_addr_mode != y_addr_mode)
1915 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1916 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1918 x = change_address (x, QImode, x_addr);
1919 y = change_address (y, QImode, y_addr);
1921 emit_move_insn (x, y);
1923 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1924 true, OPTAB_LIB_WIDEN);
1925 if (tmp != iter)
1926 emit_move_insn (iter, tmp);
1928 emit_label (cmp_label);
1930 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1931 true, top_label,
1932 profile_probability::guessed_always ()
1933 .apply_scale (9, 10));
1936 /* Expand a call to memcpy or memmove or memcmp, and return the result.
1937 TAILCALL is true if this is a tail call. */
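/* For instance, expanding a plain memcpy call through this helper looks
   like (argument names here are illustrative only):

     emit_block_op_via_libcall (BUILT_IN_MEMCPY, dst_mem, src_mem,
				len_rtx, false);  */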
1940 emit_block_op_via_libcall (enum built_in_function fncode, rtx dst, rtx src,
1941 rtx size, bool tailcall)
1943 rtx dst_addr, src_addr;
1944 tree call_expr, dst_tree, src_tree, size_tree;
1945 machine_mode size_mode;
1947 /* Since dst and src are passed to a libcall, mark the corresponding
1948 tree EXPR as addressable. */
1949 tree dst_expr = MEM_EXPR (dst);
1950 tree src_expr = MEM_EXPR (src);
1951 if (dst_expr)
1952 mark_addressable (dst_expr);
1953 if (src_expr)
1954 mark_addressable (src_expr);
1956 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1957 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1958 dst_tree = make_tree (ptr_type_node, dst_addr);
1960 src_addr = copy_addr_to_reg (XEXP (src, 0));
1961 src_addr = convert_memory_address (ptr_mode, src_addr);
1962 src_tree = make_tree (ptr_type_node, src_addr);
1964 size_mode = TYPE_MODE (sizetype);
1965 size = convert_to_mode (size_mode, size, 1);
1966 size = copy_to_mode_reg (size_mode, size);
1967 size_tree = make_tree (sizetype, size);
1969 /* It is incorrect to use the libcall calling conventions for calls to
1970 memcpy/memmove/memcmp because they can be provided by the user. */
1971 tree fn = builtin_decl_implicit (fncode);
1972 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1973 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1975 return expand_call (call_expr, NULL_RTX, false);
1978 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
1979 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
1980 otherwise return null. */
1983 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
1984 rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
1985 HOST_WIDE_INT align)
1987 machine_mode insn_mode = insn_data[icode].operand[0].mode;
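/* The result must go in a pseudo register; discard any other suggested
   target so the expander can allocate a fresh one.  */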
1989 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
1990 target = NULL_RTX;
1992 class expand_operand ops[5];
1993 create_output_operand (&ops[0], target, insn_mode);
1994 create_fixed_operand (&ops[1], arg1_rtx);
1995 create_fixed_operand (&ops[2], arg2_rtx);
1996 create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
1997 TYPE_UNSIGNED (arg3_type));
1998 create_integer_operand (&ops[4], align);
1999 if (maybe_expand_insn (icode, 5, ops))
2000 return ops[0].value;
2001 return NULL_RTX;
2004 /* Expand a block compare between X and Y with length LEN using the
2005 cmpmem optab, placing the result in TARGET. LEN_TYPE is the type
2006 of the expression that was used to calculate the length. ALIGN
2007 gives the known minimum common alignment. */
2009 static rtx
2010 emit_block_cmp_via_cmpmem (rtx x, rtx y, rtx len, tree len_type, rtx target,
2011 unsigned align)
2013 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
2014 implementing memcmp because it will stop if it encounters two
2015 zero bytes. */
2016 insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
2018 if (icode == CODE_FOR_nothing)
2019 return NULL_RTX;
2021 return expand_cmpstrn_or_cmpmem (icode, target, x, y, len_type, len, align);
2024 /* Emit code to compare a block Y to a block X. This may be done with
2025 string-compare instructions, with multiple scalar instructions,
2026 or with a library call.
2028 Both X and Y must be MEM rtx's. LEN is an rtx that says how long
2029 they are. LEN_TYPE is the type of the expression that was used to
2030 calculate it.
2032 If EQUALITY_ONLY is true, it means we don't have to return the tri-state
2033 value of a normal memcmp call; instead we can just compare for equality.
2034 If FORCE_LIBCALL is true, we should emit a call to memcmp rather than
2035 returning NULL_RTX.
2037 Optionally, the caller can pass a constfn and associated data in Y_CFN
2038 and Y_CFN_DATA, describing that the second operand being compared is a
2039 known constant and how to obtain its data.
2040 Return the result of the comparison, or NULL_RTX if we failed to
2041 perform the operation. */
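/* A typical use when expanding a memcmp-like builtin, in outline
   (variable names here are illustrative only):

     rtx res = emit_block_cmp_hints (dst_mem, src_mem, len_rtx,
				     size_type_node, target,
				     equality_only, NULL, NULL);
     if (res == NULL_RTX)
       ... fall back to emitting a library call ...  */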
2044 emit_block_cmp_hints (rtx x, rtx y, rtx len, tree len_type, rtx target,
2045 bool equality_only, by_pieces_constfn y_cfn,
2046 void *y_cfndata)
2048 rtx result = 0;
2050 if (CONST_INT_P (len) && INTVAL (len) == 0)
2051 return const0_rtx;
2053 gcc_assert (MEM_P (x) && MEM_P (y));
2054 unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
2055 gcc_assert (align >= BITS_PER_UNIT);
2057 x = adjust_address (x, BLKmode, 0);
2058 y = adjust_address (y, BLKmode, 0);
2060 if (equality_only
2061 && CONST_INT_P (len)
2062 && can_do_by_pieces (INTVAL (len), align, COMPARE_BY_PIECES))
2063 result = compare_by_pieces (x, y, INTVAL (len), target, align,
2064 y_cfn, y_cfndata);
2065 else
2066 result = emit_block_cmp_via_cmpmem (x, y, len, len_type, target, align);
2068 return result;
2071 /* Copy all or part of a value X into registers starting at REGNO.
2072 The number of registers to be filled is NREGS. */
2074 void
2075 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
2077 if (nregs == 0)
2078 return;
2080 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
2081 x = validize_mem (force_const_mem (mode, x));
2083 /* See if the machine can do this with a load multiple insn. */
2084 if (targetm.have_load_multiple ())
2086 rtx_insn *last = get_last_insn ();
2087 rtx first = gen_rtx_REG (word_mode, regno);
2088 if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
2089 GEN_INT (nregs)))
2091 emit_insn (pat);
2092 return;
2094 else
2095 delete_insns_since (last);
2098 for (int i = 0; i < nregs; i++)
2099 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2100 operand_subword_force (x, i, mode));
2103 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2104 The number of registers to be filled is NREGS. */
2106 void
2107 move_block_from_reg (int regno, rtx x, int nregs)
2109 if (nregs == 0)
2110 return;
2112 /* See if the machine can do this with a store multiple insn. */
2113 if (targetm.have_store_multiple ())
2115 rtx_insn *last = get_last_insn ();
2116 rtx first = gen_rtx_REG (word_mode, regno);
2117 if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
2118 GEN_INT (nregs)))
2120 emit_insn (pat);
2121 return;
2123 else
2124 delete_insns_since (last);
2127 for (int i = 0; i < nregs; i++)
2129 rtx tem = operand_subword (x, i, 1, BLKmode);
2131 gcc_assert (tem);
2133 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2137 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2138 ORIG, where ORIG is a non-consecutive group of registers represented by
2139 a PARALLEL. The clone is identical to the original except that the
2140 original set of registers is replaced by a new set of pseudo registers.
2141 The new set has the same modes as the original set. */
2144 gen_group_rtx (rtx orig)
2146 int i, length;
2147 rtx *tmps;
2149 gcc_assert (GET_CODE (orig) == PARALLEL);
2151 length = XVECLEN (orig, 0);
2152 tmps = XALLOCAVEC (rtx, length);
2154 /* Skip a NULL entry in first slot. */
2155 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2157 if (i)
2158 tmps[0] = 0;
2160 for (; i < length; i++)
2162 machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2163 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2165 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2168 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2171 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
2172 except that values are placed in TMPS[i], and must later be moved
2173 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
2175 static void
2176 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type,
2177 poly_int64 ssize)
2179 rtx src;
2180 int start, i;
2181 machine_mode m = GET_MODE (orig_src);
2183 gcc_assert (GET_CODE (dst) == PARALLEL);
2185 if (m != VOIDmode
2186 && !SCALAR_INT_MODE_P (m)
2187 && !MEM_P (orig_src)
2188 && GET_CODE (orig_src) != CONCAT)
2190 scalar_int_mode imode;
2191 if (int_mode_for_mode (GET_MODE (orig_src)).exists (&imode))
2193 src = gen_reg_rtx (imode);
2194 emit_move_insn (gen_lowpart (GET_MODE (orig_src), src), orig_src);
2196 else
2198 src = assign_stack_temp (GET_MODE (orig_src), ssize);
2199 emit_move_insn (src, orig_src);
2201 emit_group_load_1 (tmps, dst, src, type, ssize);
2202 return;
2205 /* Check for a NULL entry, used to indicate that the parameter goes
2206 both on the stack and in registers. */
2207 if (XEXP (XVECEXP (dst, 0, 0), 0))
2208 start = 0;
2209 else
2210 start = 1;
2212 /* Process the pieces. */
2213 for (i = start; i < XVECLEN (dst, 0); i++)
2215 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2216 poly_int64 bytepos = rtx_to_poly_int64 (XEXP (XVECEXP (dst, 0, i), 1));
2217 poly_int64 bytelen = GET_MODE_SIZE (mode);
2218 poly_int64 shift = 0;
2220 /* Handle trailing fragments that run over the size of the struct.
2221 It's the target's responsibility to make sure that the fragment
2222 cannot be strictly smaller in some cases and strictly larger
2223 in others. */
2224 gcc_checking_assert (ordered_p (bytepos + bytelen, ssize));
2225 if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
2227 /* Arrange to shift the fragment to where it belongs.
2228 extract_bit_field loads to the lsb of the reg. */
2229 if (
2230 #ifdef BLOCK_REG_PADDING
2231 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
2232 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)
2233 #else
2234 BYTES_BIG_ENDIAN
2235 #endif
2237 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2238 bytelen = ssize - bytepos;
2239 gcc_assert (maybe_gt (bytelen, 0));
2242 /* If we won't be loading directly from memory, protect the real source
2243 from strange tricks we might play; but make sure that the source can
2244 be loaded directly into the destination. */
2245 src = orig_src;
2246 if (!MEM_P (orig_src)
2247 && (!CONSTANT_P (orig_src)
2248 || (GET_MODE (orig_src) != mode
2249 && GET_MODE (orig_src) != VOIDmode)))
2251 if (GET_MODE (orig_src) == VOIDmode)
2252 src = gen_reg_rtx (mode);
2253 else
2254 src = gen_reg_rtx (GET_MODE (orig_src));
2256 emit_move_insn (src, orig_src);
2259 /* Optimize the access just a bit. */
2260 if (MEM_P (src)
2261 && (! targetm.slow_unaligned_access (mode, MEM_ALIGN (src))
2262 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
2263 && multiple_p (bytepos * BITS_PER_UNIT, GET_MODE_ALIGNMENT (mode))
2264 && known_eq (bytelen, GET_MODE_SIZE (mode)))
2266 tmps[i] = gen_reg_rtx (mode);
2267 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2269 else if (COMPLEX_MODE_P (mode)
2270 && GET_MODE (src) == mode
2271 && known_eq (bytelen, GET_MODE_SIZE (mode)))
2272 /* Let emit_move_complex do the bulk of the work. */
2273 tmps[i] = src;
2274 else if (GET_CODE (src) == CONCAT)
2276 poly_int64 slen = GET_MODE_SIZE (GET_MODE (src));
2277 poly_int64 slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2278 unsigned int elt;
2279 poly_int64 subpos;
2281 if (can_div_trunc_p (bytepos, slen0, &elt, &subpos)
2282 && known_le (subpos + bytelen, slen0))
2284 /* The following assumes that the concatenated objects all
2285 have the same size. In this case, a simple calculation
2286 can be used to determine the object and the bit field
2287 to be extracted. */
2288 tmps[i] = XEXP (src, elt);
2289 if (maybe_ne (subpos, 0)
2290 || maybe_ne (subpos + bytelen, slen0)
2291 || (!CONSTANT_P (tmps[i])
2292 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode)))
2293 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2294 subpos * BITS_PER_UNIT,
2295 1, NULL_RTX, mode, mode, false,
2296 NULL);
2298 else
2300 rtx mem;
2302 gcc_assert (known_eq (bytepos, 0));
2303 mem = assign_stack_temp (GET_MODE (src), slen);
2304 emit_move_insn (mem, src);
2305 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
2306 0, 1, NULL_RTX, mode, mode, false,
2307 NULL);
2310 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2311 SIMD register, which is currently broken. While we get GCC
2312 to emit proper RTL for these cases, let's dump to memory. */
2313 else if (VECTOR_MODE_P (GET_MODE (dst))
2314 && REG_P (src))
2316 poly_uint64 slen = GET_MODE_SIZE (GET_MODE (src));
2317 rtx mem;
2319 mem = assign_stack_temp (GET_MODE (src), slen);
2320 emit_move_insn (mem, src);
2321 tmps[i] = adjust_address (mem, mode, bytepos);
2323 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
2324 && XVECLEN (dst, 0) > 1)
2325 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
2326 else if (CONSTANT_P (src))
2328 if (known_eq (bytelen, ssize))
2329 tmps[i] = src;
2330 else
2332 rtx first, second;
2334 /* TODO: const_wide_int can have sizes other than this... */
2335 gcc_assert (known_eq (2 * bytelen, ssize));
2336 split_double (src, &first, &second);
2337 if (i)
2338 tmps[i] = second;
2339 else
2340 tmps[i] = first;
2343 else if (REG_P (src) && GET_MODE (src) == mode)
2344 tmps[i] = src;
2345 else
2346 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2347 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2348 mode, mode, false, NULL);
2350 if (maybe_ne (shift, 0))
2351 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
2352 shift, tmps[i], 0);
2356 /* Emit code to move a block SRC of type TYPE to a block DST,
2357 where DST is non-consecutive registers represented by a PARALLEL.
2358 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
2359 if not known. */
2361 void
2362 emit_group_load (rtx dst, rtx src, tree type, poly_int64 ssize)
2364 rtx *tmps;
2365 int i;
2367 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
2368 emit_group_load_1 (tmps, dst, src, type, ssize);
2370 /* Copy the extracted pieces into the proper (probable) hard regs. */
2371 for (i = 0; i < XVECLEN (dst, 0); i++)
2373 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
2374 if (d == NULL)
2375 continue;
2376 emit_move_insn (d, tmps[i]);
2380 /* Similar, but load SRC into new pseudos in a format that looks like
2381 PARALLEL. This can later be fed to emit_group_move to get things
2382 in the right place. */
2385 emit_group_load_into_temps (rtx parallel, rtx src, tree type, poly_int64 ssize)
2387 rtvec vec;
2388 int i;
2390 vec = rtvec_alloc (XVECLEN (parallel, 0));
2391 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
2393 /* Convert the vector to look just like the original PARALLEL, except
2394 with the computed values. */
2395 for (i = 0; i < XVECLEN (parallel, 0); i++)
2397 rtx e = XVECEXP (parallel, 0, i);
2398 rtx d = XEXP (e, 0);
2400 if (d)
2402 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
2403 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
2405 RTVEC_ELT (vec, i) = e;
2408 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
2411 /* Emit code to move a block SRC to block DST, where SRC and DST are
2412 non-consecutive groups of registers, each represented by a PARALLEL. */
2414 void
2415 emit_group_move (rtx dst, rtx src)
2417 int i;
2419 gcc_assert (GET_CODE (src) == PARALLEL
2420 && GET_CODE (dst) == PARALLEL
2421 && XVECLEN (src, 0) == XVECLEN (dst, 0));
2423 /* Skip first entry if NULL. */
2424 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2425 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2426 XEXP (XVECEXP (src, 0, i), 0));
2429 /* Move a group of registers represented by a PARALLEL into pseudos. */
2432 emit_group_move_into_temps (rtx src)
2434 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
2435 int i;
2437 for (i = 0; i < XVECLEN (src, 0); i++)
2439 rtx e = XVECEXP (src, 0, i);
2440 rtx d = XEXP (e, 0);
2442 if (d)
2443 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
2444 RTVEC_ELT (vec, i) = e;
2447 return gen_rtx_PARALLEL (GET_MODE (src), vec);
2450 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
2451 where SRC is non-consecutive registers represented by a PARALLEL.
2452 SSIZE represents the total size of block ORIG_DST, or -1 if not
2453 known. */
2455 void
2456 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED,
2457 poly_int64 ssize)
2459 rtx *tmps, dst;
2460 int start, finish, i;
2461 machine_mode m = GET_MODE (orig_dst);
2463 gcc_assert (GET_CODE (src) == PARALLEL);
2465 if (!SCALAR_INT_MODE_P (m)
2466 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
2468 scalar_int_mode imode;
2469 if (int_mode_for_mode (GET_MODE (orig_dst)).exists (&imode))
2471 dst = gen_reg_rtx (imode);
2472 emit_group_store (dst, src, type, ssize);
2473 dst = gen_lowpart (GET_MODE (orig_dst), dst);
2475 else
2477 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
2478 emit_group_store (dst, src, type, ssize);
2480 emit_move_insn (orig_dst, dst);
2481 return;
2484 /* Check for a NULL entry, used to indicate that the parameter goes
2485 both on the stack and in registers. */
2486 if (XEXP (XVECEXP (src, 0, 0), 0))
2487 start = 0;
2488 else
2489 start = 1;
2490 finish = XVECLEN (src, 0);
2492 tmps = XALLOCAVEC (rtx, finish);
2494 /* Copy the (probable) hard regs into pseudos. */
2495 for (i = start; i < finish; i++)
2497 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2498 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
2500 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2501 emit_move_insn (tmps[i], reg);
2503 else
2504 tmps[i] = reg;
2507 /* If we won't be storing directly into memory, protect the real destination
2508 from strange tricks we might play. */
2509 dst = orig_dst;
2510 if (GET_CODE (dst) == PARALLEL)
2512 rtx temp;
2514 /* We can get a PARALLEL dst if there is a conditional expression in
2515 a return statement. In that case, the dst and src are the same,
2516 so no action is necessary. */
2517 if (rtx_equal_p (dst, src))
2518 return;
2520 /* It is unclear if we can ever reach here, but we may as well handle
2521 it. Allocate a temporary, and split this into a store/load to/from
2522 the temporary. */
2523 temp = assign_stack_temp (GET_MODE (dst), ssize);
2524 emit_group_store (temp, src, type, ssize);
2525 emit_group_load (dst, temp, type, ssize);
2526 return;
2528 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
2530 machine_mode outer = GET_MODE (dst);
2531 machine_mode inner;
2532 poly_int64 bytepos;
2533 bool done = false;
2534 rtx temp;
2536 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
2537 dst = gen_reg_rtx (outer);
2539 /* Make life a bit easier for combine. */
2540 /* If the first element of the vector is the low part
2541 of the destination mode, use a paradoxical subreg to
2542 initialize the destination. */
2543 if (start < finish)
2545 inner = GET_MODE (tmps[start]);
2546 bytepos = subreg_lowpart_offset (inner, outer);
2547 if (known_eq (rtx_to_poly_int64 (XEXP (XVECEXP (src, 0, start), 1)),
2548 bytepos))
2550 temp = simplify_gen_subreg (outer, tmps[start],
2551 inner, 0);
2552 if (temp)
2554 emit_move_insn (dst, temp);
2555 done = true;
2556 start++;
2561 /* If the first element wasn't the low part, try the last. */
2562 if (!done
2563 && start < finish - 1)
2565 inner = GET_MODE (tmps[finish - 1]);
2566 bytepos = subreg_lowpart_offset (inner, outer);
2567 if (known_eq (rtx_to_poly_int64 (XEXP (XVECEXP (src, 0,
2568 finish - 1), 1)),
2569 bytepos))
2571 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2572 inner, 0);
2573 if (temp)
2575 emit_move_insn (dst, temp);
2576 done = true;
2577 finish--;
2582 /* Otherwise, simply initialize the result to zero. */
2583 if (!done)
2584 emit_move_insn (dst, CONST0_RTX (outer));
2587 /* Process the pieces. */
2588 for (i = start; i < finish; i++)
2590 poly_int64 bytepos = rtx_to_poly_int64 (XEXP (XVECEXP (src, 0, i), 1));
2591 machine_mode mode = GET_MODE (tmps[i]);
2592 poly_int64 bytelen = GET_MODE_SIZE (mode);
2593 poly_uint64 adj_bytelen;
2594 rtx dest = dst;
2596 /* Handle trailing fragments that run over the size of the struct.
2597 It's the target's responsibility to make sure that the fragment
2598 cannot be strictly smaller in some cases and strictly larger
2599 in others. */
2600 gcc_checking_assert (ordered_p (bytepos + bytelen, ssize));
2601 if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
2602 adj_bytelen = ssize - bytepos;
2603 else
2604 adj_bytelen = bytelen;
2606 if (GET_CODE (dst) == CONCAT)
2608 if (known_le (bytepos + adj_bytelen,
2609 GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))))
2610 dest = XEXP (dst, 0);
2611 else if (known_ge (bytepos, GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))))
2613 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2614 dest = XEXP (dst, 1);
2616 else
2618 machine_mode dest_mode = GET_MODE (dest);
2619 machine_mode tmp_mode = GET_MODE (tmps[i]);
2621 gcc_assert (known_eq (bytepos, 0) && XVECLEN (src, 0));
2623 if (GET_MODE_ALIGNMENT (dest_mode)
2624 >= GET_MODE_ALIGNMENT (tmp_mode))
2626 dest = assign_stack_temp (dest_mode,
2627 GET_MODE_SIZE (dest_mode));
2628 emit_move_insn (adjust_address (dest,
2629 tmp_mode,
2630 bytepos),
2631 tmps[i]);
2632 dst = dest;
2634 else
2636 dest = assign_stack_temp (tmp_mode,
2637 GET_MODE_SIZE (tmp_mode));
2638 emit_move_insn (dest, tmps[i]);
2639 dst = adjust_address (dest, dest_mode, bytepos);
2641 break;
2645 /* Handle trailing fragments that run over the size of the struct. */
2646 if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
2648 /* store_bit_field always takes its value from the lsb.
2649 Move the fragment to the lsb if it's not already there. */
2650 if (
2651 #ifdef BLOCK_REG_PADDING
2652 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2653 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)
2654 #else
2655 BYTES_BIG_ENDIAN
2656 #endif
2659 poly_int64 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2660 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2661 shift, tmps[i], 0);
2664 /* Make sure not to write past the end of the struct. */
2665 store_bit_field (dest,
2666 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2667 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2668 VOIDmode, tmps[i], false);
2671 /* Optimize the access just a bit. */
2672 else if (MEM_P (dest)
2673 && (!targetm.slow_unaligned_access (mode, MEM_ALIGN (dest))
2674 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2675 && multiple_p (bytepos * BITS_PER_UNIT,
2676 GET_MODE_ALIGNMENT (mode))
2677 && known_eq (bytelen, GET_MODE_SIZE (mode)))
2678 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2680 else
2681 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2682 0, 0, mode, tmps[i], false);
2685 /* Copy from the pseudo into the (probable) hard reg. */
2686 if (orig_dst != dst)
2687 emit_move_insn (orig_dst, dst);
2690 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2691 of the value stored in X. */
2694 maybe_emit_group_store (rtx x, tree type)
2696 machine_mode mode = TYPE_MODE (type);
2697 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2698 if (GET_CODE (x) == PARALLEL)
2700 rtx result = gen_reg_rtx (mode);
2701 emit_group_store (result, x, type, int_size_in_bytes (type));
2702 return result;
2704 return x;
2707 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2709 This is used on targets that return BLKmode values in registers. */
2711 static void
2712 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2714 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2715 rtx src = NULL, dst = NULL;
2716 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2717 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2718 /* No current ABI uses variable-sized modes to pass a BLKmode type. */
2719 fixed_size_mode mode = as_a <fixed_size_mode> (GET_MODE (srcreg));
2720 fixed_size_mode tmode = as_a <fixed_size_mode> (GET_MODE (target));
2721 fixed_size_mode copy_mode;
2723 /* BLKmode registers created in the back-end shouldn't have survived. */
2724 gcc_assert (mode != BLKmode);
2726 /* If the structure doesn't take up a whole number of words, see whether
2727 SRCREG is padded on the left or on the right. If it's on the left,
2728 set PADDING_CORRECTION to the number of bits to skip.
2730 In most ABIs, the structure will be returned at the least significant end of
2731 the register, which translates to right padding on little-endian
2732 targets and left padding on big-endian targets. The opposite
2733 holds if the structure is returned at the most significant
2734 end of the register. */
2735 if (bytes % UNITS_PER_WORD != 0
2736 && (targetm.calls.return_in_msb (type)
2737 ? !BYTES_BIG_ENDIAN
2738 : BYTES_BIG_ENDIAN))
2739 padding_correction
2740 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
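/* For example, with 64-bit words a 6-byte structure leaves
   64 - 6*8 = 16 bits of padding to skip.  */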
2742 /* We can use a single move if we have an exact mode for the size. */
2743 else if (MEM_P (target)
2744 && (!targetm.slow_unaligned_access (mode, MEM_ALIGN (target))
2745 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2746 && bytes == GET_MODE_SIZE (mode))
2748 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2749 return;
2752 /* And if we additionally have the same mode for a register. */
2753 else if (REG_P (target)
2754 && GET_MODE (target) == mode
2755 && bytes == GET_MODE_SIZE (mode))
2757 emit_move_insn (target, srcreg);
2758 return;
2761 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2762 into a new pseudo which is a full word. */
2763 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2765 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2766 mode = word_mode;
2769 /* Copy the structure BITSIZE bits at a time. If the target lives in
2770 memory, take care of not reading/writing past its end by selecting
2771 a copy mode suited to BITSIZE. This should always be possible given
2772 how it is computed.
2774 If the target lives in register, make sure not to select a copy mode
2775 larger than the mode of the register.
2777 We could probably emit more efficient code for machines which do not use
2778 strict alignment, but it doesn't seem worth the effort at the current
2779 time. */
2781 copy_mode = word_mode;
2782 if (MEM_P (target))
2784 opt_scalar_int_mode mem_mode = int_mode_for_size (bitsize, 1);
2785 if (mem_mode.exists ())
2786 copy_mode = mem_mode.require ();
2788 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2789 copy_mode = tmode;
2791 for (bitpos = 0, xbitpos = padding_correction;
2792 bitpos < bytes * BITS_PER_UNIT;
2793 bitpos += bitsize, xbitpos += bitsize)
2795 /* We need a new source operand each time xbitpos is on a
2796 word boundary and when xbitpos == padding_correction
2797 (the first time through). */
2798 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2799 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2801 /* We need a new destination operand each time bitpos is on
2802 a word boundary. */
2803 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2804 dst = target;
2805 else if (bitpos % BITS_PER_WORD == 0)
2806 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2808 /* Use xbitpos for the source extraction (right justified) and
2809 bitpos for the destination store (left justified). */
2810 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2811 extract_bit_field (src, bitsize,
2812 xbitpos % BITS_PER_WORD, 1,
2813 NULL_RTX, copy_mode, copy_mode,
2814 false, NULL),
2815 false);
2819 /* Copy BLKmode value SRC into a register of mode MODE_IN. Return the
2820 register if it contains any data, otherwise return null.
2822 This is used on targets that return BLKmode values in registers. */
2825 copy_blkmode_to_reg (machine_mode mode_in, tree src)
2827 int i, n_regs;
2828 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2829 unsigned int bitsize;
2830 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2831 /* No current ABI uses variable-sized modes to pass a BLKmode type. */
2832 fixed_size_mode mode = as_a <fixed_size_mode> (mode_in);
2833 fixed_size_mode dst_mode;
2834 scalar_int_mode min_mode;
2836 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2838 x = expand_normal (src);
2840 bytes = arg_int_size_in_bytes (TREE_TYPE (src));
2841 if (bytes == 0)
2842 return NULL_RTX;
2844 /* If the structure doesn't take up a whole number of words, see
2845 whether the register value should be padded on the left or on
2846 the right. Set PADDING_CORRECTION to the number of padding
2847 bits needed on the left side.
2849 In most ABIs, the structure will be returned at the least significant end of
2850 the register, which translates to right padding on little-endian
2851 targets and left padding on big-endian targets. The opposite
2852 holds if the structure is returned at the most significant
2853 end of the register. */
2854 if (bytes % UNITS_PER_WORD != 0
2855 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2856 ? !BYTES_BIG_ENDIAN
2857 : BYTES_BIG_ENDIAN))
2858 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2859 * BITS_PER_UNIT));
2861 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2862 dst_words = XALLOCAVEC (rtx, n_regs);
2863 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2864 min_mode = smallest_int_mode_for_size (bitsize);
2866 /* Copy the structure BITSIZE bits at a time. */
2867 for (bitpos = 0, xbitpos = padding_correction;
2868 bitpos < bytes * BITS_PER_UNIT;
2869 bitpos += bitsize, xbitpos += bitsize)
2871 /* We need a new destination pseudo each time xbitpos is
2872 on a word boundary and when xbitpos == padding_correction
2873 (the first time through). */
2874 if (xbitpos % BITS_PER_WORD == 0
2875 || xbitpos == padding_correction)
2877 /* Generate an appropriate register. */
2878 dst_word = gen_reg_rtx (word_mode);
2879 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2881 /* Clear the destination before we move anything into it. */
2882 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2885 /* Find the largest integer mode that can be used to copy all or as
2886 many bits as possible of the structure if the target supports larger
2887 copies. There are too many corner cases here w.r.t. alignments on
2888 the reads/writes. So if there is any padding, just use single byte
2889 operations. */
2890 opt_scalar_int_mode mode_iter;
2891 if (padding_correction == 0 && !STRICT_ALIGNMENT)
2893 FOR_EACH_MODE_FROM (mode_iter, min_mode)
2895 unsigned int msize = GET_MODE_BITSIZE (mode_iter.require ());
2896 if (msize <= ((bytes * BITS_PER_UNIT) - bitpos)
2897 && msize <= BITS_PER_WORD)
2898 bitsize = msize;
2899 else
2900 break;
2904 /* We need a new source operand each time bitpos is on a word
2905 boundary. */
2906 if (bitpos % BITS_PER_WORD == 0)
2907 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2909 /* Use bitpos for the source extraction (left justified) and
2910 xbitpos for the destination store (right justified). */
2911 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2912 0, 0, word_mode,
2913 extract_bit_field (src_word, bitsize,
2914 bitpos % BITS_PER_WORD, 1,
2915 NULL_RTX, word_mode, word_mode,
2916 false, NULL),
2917 false);
2920 if (mode == BLKmode)
2922 /* Find the smallest integer mode large enough to hold the
2923 entire structure. */
2924 opt_scalar_int_mode mode_iter;
2925 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
2926 if (GET_MODE_SIZE (mode_iter.require ()) >= bytes)
2927 break;
2929 /* A suitable mode should have been found. */
2930 mode = mode_iter.require ();
2933 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2934 dst_mode = word_mode;
2935 else
2936 dst_mode = mode;
2937 dst = gen_reg_rtx (dst_mode);
2939 for (i = 0; i < n_regs; i++)
2940 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2942 if (mode != dst_mode)
2943 dst = gen_lowpart (mode, dst);
2945 return dst;
2948 /* Add a USE expression for REG to the (possibly empty) list pointed
2949 to by CALL_FUSAGE. REG must denote a hard register. */
2951 void
2952 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2954 gcc_assert (REG_P (reg));
2956 if (!HARD_REGISTER_P (reg))
2957 return;
2959 *call_fusage
2960 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2963 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2964 to by CALL_FUSAGE. REG must denote a hard register. */
2966 void
2967 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2969 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2971 *call_fusage
2972 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2975 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2976 starting at REGNO. All of these registers must be hard registers. */
2978 void
2979 use_regs (rtx *call_fusage, int regno, int nregs)
2981 int i;
2983 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2985 for (i = 0; i < nregs; i++)
2986 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2989 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2990 PARALLEL REGS. This is for calls that pass values in multiple
2991 non-contiguous locations. The Irix 6 ABI has examples of this. */
2993 void
2994 use_group_regs (rtx *call_fusage, rtx regs)
2996 int i;
2998 for (i = 0; i < XVECLEN (regs, 0); i++)
3000 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
3002 /* A NULL entry means the parameter goes both on the stack and in
3003 registers. This can also be a MEM for targets that pass values
3004 partially on the stack and partially in registers. */
3005 if (reg != 0 && REG_P (reg))
3006 use_reg (call_fusage, reg);
3010 /* Return the defining gimple statement for SSA_NAME NAME if it is an
3011 assignment and the code of the expression on the RHS is CODE. Return
3012 NULL otherwise. */
3014 static gimple *
3015 get_def_for_expr (tree name, enum tree_code code)
3017 gimple *def_stmt;
3019 if (TREE_CODE (name) != SSA_NAME)
3020 return NULL;
3022 def_stmt = get_gimple_for_ssa_name (name);
3023 if (!def_stmt
3024 || gimple_assign_rhs_code (def_stmt) != code)
3025 return NULL;
3027 return def_stmt;
3030 /* Return the defining gimple statement for SSA_NAME NAME if it is an
3031 assignment and the class of the expression on the RHS is CLASS. Return
3032 NULL otherwise. */
3034 static gimple *
3035 get_def_for_expr_class (tree name, enum tree_code_class tclass)
3037 gimple *def_stmt;
3039 if (TREE_CODE (name) != SSA_NAME)
3040 return NULL;
3042 def_stmt = get_gimple_for_ssa_name (name);
3043 if (!def_stmt
3044 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
3045 return NULL;
3047 return def_stmt;
3050 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
3051 its length in bytes. */
3054 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
3055 unsigned int expected_align, HOST_WIDE_INT expected_size,
3056 unsigned HOST_WIDE_INT min_size,
3057 unsigned HOST_WIDE_INT max_size,
3058 unsigned HOST_WIDE_INT probable_max_size)
3060 machine_mode mode = GET_MODE (object);
3061 unsigned int align;
3063 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
3065 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
3066 just move a zero. Otherwise, do this a piece at a time. */
3067 poly_int64 size_val;
3068 if (mode != BLKmode
3069 && poly_int_rtx_p (size, &size_val)
3070 && known_eq (size_val, GET_MODE_SIZE (mode)))
3072 rtx zero = CONST0_RTX (mode);
3073 if (zero != NULL)
3075 emit_move_insn (object, zero);
3076 return NULL;
3079 if (COMPLEX_MODE_P (mode))
3081 zero = CONST0_RTX (GET_MODE_INNER (mode));
3082 if (zero != NULL)
3084 write_complex_part (object, zero, 0);
3085 write_complex_part (object, zero, 1);
3086 return NULL;
3091 if (size == const0_rtx)
3092 return NULL;
3094 align = MEM_ALIGN (object);
3096 if (CONST_INT_P (size)
3097 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
3098 CLEAR_BY_PIECES,
3099 optimize_insn_for_speed_p ()))
3100 clear_by_pieces (object, INTVAL (size), align);
3101 else if (set_storage_via_setmem (object, size, const0_rtx, align,
3102 expected_align, expected_size,
3103 min_size, max_size, probable_max_size))
3105 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
3106 return set_storage_via_libcall (object, size, const0_rtx,
3107 method == BLOCK_OP_TAILCALL);
3108 else
3109 gcc_unreachable ();
3111 return NULL;
3115 clear_storage (rtx object, rtx size, enum block_op_methods method)
3117 unsigned HOST_WIDE_INT max, min = 0;
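/* A constant SIZE pins both length hints to its value; otherwise the
   only safe upper bound is the full range of SIZE's mode.  */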
3118 if (GET_CODE (size) == CONST_INT)
3119 min = max = UINTVAL (size);
3120 else
3121 max = GET_MODE_MASK (GET_MODE (size));
3122 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
3126 /* A subroutine of clear_storage. Expand a call to memset.
3127 Return the return value of memset, 0 otherwise. */
3130 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
3132 tree call_expr, fn, object_tree, size_tree, val_tree;
3133 machine_mode size_mode;
3135 object = copy_addr_to_reg (XEXP (object, 0));
3136 object_tree = make_tree (ptr_type_node, object);
3138 if (!CONST_INT_P (val))
3139 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
3140 val_tree = make_tree (integer_type_node, val);
3142 size_mode = TYPE_MODE (sizetype);
3143 size = convert_to_mode (size_mode, size, 1);
3144 size = copy_to_mode_reg (size_mode, size);
3145 size_tree = make_tree (sizetype, size);
3147 /* It is incorrect to use the libcall calling conventions for calls to
3148 memset because it can be provided by the user. */
3149 fn = builtin_decl_implicit (BUILT_IN_MEMSET);
3150 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
3151 CALL_EXPR_TAILCALL (call_expr) = tailcall;
3153 return expand_call (call_expr, NULL_RTX, false);
3156 /* Expand a setmem pattern; return true if successful. */
3158 bool
3159 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
3160 unsigned int expected_align, HOST_WIDE_INT expected_size,
3161 unsigned HOST_WIDE_INT min_size,
3162 unsigned HOST_WIDE_INT max_size,
3163 unsigned HOST_WIDE_INT probable_max_size)
3165 /* Try the most limited insn first, because there's no point
3166 including more than one in the machine description unless
3167 the more limited one has some advantage. */
3169 if (expected_align < align)
3170 expected_align = align;
3171 if (expected_size != -1)
3173 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
3174 expected_size = max_size;
3175 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
3176 expected_size = min_size;
3179 opt_scalar_int_mode mode_iter;
3180 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
3182 scalar_int_mode mode = mode_iter.require ();
3183 enum insn_code code = direct_optab_handler (setmem_optab, mode);
3185 if (code != CODE_FOR_nothing
3186 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
3187 here because if SIZE is less than the mode mask, as it is
3188 returned by the macro, it will definitely be less than the
3189 actual mode mask. Since SIZE is within the Pmode address
3190 space, we limit MODE to Pmode. */
3191 && ((CONST_INT_P (size)
3192 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3193 <= (GET_MODE_MASK (mode) >> 1)))
3194 || max_size <= (GET_MODE_MASK (mode) >> 1)
3195 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
3197 class expand_operand ops[9];
3198 unsigned int nops;
3200 nops = insn_data[(int) code].n_generator_args;
3201 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
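/* Operand layout for setmem: 0 = destination, 1 = length, 2 = value to
   store, 3 = known alignment; operands 4-5 add the expected alignment
   and size hints, 6-7 the minimum and maximum length bounds, and 8 the
   probable maximum length.  */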
3203 create_fixed_operand (&ops[0], object);
3204 /* The check above guarantees that this size conversion is valid. */
3205 create_convert_operand_to (&ops[1], size, mode, true);
3206 create_convert_operand_from (&ops[2], val, byte_mode, true);
3207 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
3208 if (nops >= 6)
3210 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
3211 create_integer_operand (&ops[5], expected_size);
3213 if (nops >= 8)
3215 create_integer_operand (&ops[6], min_size);
3216 /* If we cannot represent the maximal size,
3217 make parameter NULL. */
3218 if ((HOST_WIDE_INT) max_size != -1)
3219 create_integer_operand (&ops[7], max_size);
3220 else
3221 create_fixed_operand (&ops[7], NULL);
3223 if (nops == 9)
3225 /* If we cannot represent the maximal size,
3226 make parameter NULL. */
3227 if ((HOST_WIDE_INT) probable_max_size != -1)
3228 create_integer_operand (&ops[8], probable_max_size);
3229 else
3230 create_fixed_operand (&ops[8], NULL);
3232 if (maybe_expand_insn (code, nops, ops))
3233 return true;
3237 return false;
3241 /* Write to one of the components of the complex value CPLX. Write VAL to
3242 the real part if IMAG_P is false, and the imaginary part if it's true. */
3244 void
3245 write_complex_part (rtx cplx, rtx val, bool imag_p)
3247 machine_mode cmode;
3248 scalar_mode imode;
3249 unsigned ibitsize;
3251 if (GET_CODE (cplx) == CONCAT)
3253 emit_move_insn (XEXP (cplx, imag_p), val);
3254 return;
3257 cmode = GET_MODE (cplx);
3258 imode = GET_MODE_INNER (cmode);
3259 ibitsize = GET_MODE_BITSIZE (imode);
3261 /* For MEMs simplify_gen_subreg may generate an invalid new address
3262 because, e.g., the original address is considered mode-dependent
3263 by the target, which restricts simplify_subreg from invoking
3264 adjust_address_nv. Instead of preparing fallback support for an
3265 invalid address, we call adjust_address_nv directly. */
3266 if (MEM_P (cplx))
3268 emit_move_insn (adjust_address_nv (cplx, imode,
3269 imag_p ? GET_MODE_SIZE (imode) : 0),
3270 val);
3271 return;
3274 /* If the sub-object is at least word sized, then we know that subregging
3275 will work. This special case is important, since store_bit_field
3276 wants to operate on integer modes, and there's rarely an OImode to
3277 correspond to TCmode. */
3278 if (ibitsize >= BITS_PER_WORD
3279 /* For hard regs we have exact predicates. Assume we can split
3280 the original object if it spans an even number of hard regs.
3281 This special case is important for SCmode on 64-bit platforms
3282 where the natural size of floating-point regs is 32-bit. */
3283 || (REG_P (cplx)
3284 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3285 && REG_NREGS (cplx) % 2 == 0))
3287 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3288 imag_p ? GET_MODE_SIZE (imode) : 0);
3289 if (part)
3291 emit_move_insn (part, val);
3292 return;
3294 else
3295 /* simplify_gen_subreg may fail for sub-word MEMs. */
3296 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3299 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val,
3300 false);
3303 /* Extract one of the components of the complex value CPLX. Extract the
3304 real part if IMAG_P is false, and the imaginary part if it's true. */
3307 read_complex_part (rtx cplx, bool imag_p)
3309 machine_mode cmode;
3310 scalar_mode imode;
3311 unsigned ibitsize;
3313 if (GET_CODE (cplx) == CONCAT)
3314 return XEXP (cplx, imag_p);
3316 cmode = GET_MODE (cplx);
3317 imode = GET_MODE_INNER (cmode);
3318 ibitsize = GET_MODE_BITSIZE (imode);
3320 /* Special case reads from complex constants that got spilled to memory. */
3321 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3323 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3324 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3326 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3327 if (CONSTANT_CLASS_P (part))
3328 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3332 /* For MEMs simplify_gen_subreg may generate an invalid new address
3333 because, e.g., the original address is considered mode-dependent
3334 by the target, which restricts simplify_subreg from invoking
3335 adjust_address_nv. Instead of preparing fallback support for an
3336 invalid address, we call adjust_address_nv directly. */
3337 if (MEM_P (cplx))
3338 return adjust_address_nv (cplx, imode,
3339 imag_p ? GET_MODE_SIZE (imode) : 0);
3341 /* If the sub-object is at least word sized, then we know that subregging
3342 will work. This special case is important, since extract_bit_field
3343 wants to operate on integer modes, and there's rarely an OImode to
3344 correspond to TCmode. */
3345 if (ibitsize >= BITS_PER_WORD
3346 /* For hard regs we have exact predicates. Assume we can split
3347 the original object if it spans an even number of hard regs.
3348 This special case is important for SCmode on 64-bit platforms
3349 where the natural size of floating-point regs is 32-bit. */
3350 || (REG_P (cplx)
3351 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3352 && REG_NREGS (cplx) % 2 == 0))
3354 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3355 imag_p ? GET_MODE_SIZE (imode) : 0);
3356 if (ret)
3357 return ret;
3358 else
3359 /* simplify_gen_subreg may fail for sub-word MEMs. */
3360 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3363 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3364 true, NULL_RTX, imode, imode, false, NULL);
3367 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3368 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3369 represented in NEW_MODE. If FORCE is true, this will never happen, as
3370 we'll force-create a SUBREG if needed. */
3372 static rtx
3373 emit_move_change_mode (machine_mode new_mode,
3374 machine_mode old_mode, rtx x, bool force)
3376 rtx ret;
3378 if (push_operand (x, GET_MODE (x)))
3380 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3381 MEM_COPY_ATTRIBUTES (ret, x);
3383 else if (MEM_P (x))
3385 /* We don't have to worry about changing the address since the
3386 size in bytes is supposed to be the same. */
3387 if (reload_in_progress)
3389 /* Copy the MEM to change the mode and move any
3390 substitutions from the old MEM to the new one. */
3391 ret = adjust_address_nv (x, new_mode, 0);
3392 copy_replacements (x, ret);
3394 else
3395 ret = adjust_address (x, new_mode, 0);
3397 else
3399 /* Note that we do want simplify_subreg's behavior of validating
3400 that the new mode is ok for a hard register. If we were to use
3401 simplify_gen_subreg, we would create the subreg, but would
3402 probably run into the target not being able to implement it. */
3403 /* Except, of course, when FORCE is true, when this is exactly what
3404 we want. Which is needed for CCmodes on some targets. */
3405 if (force)
3406 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3407 else
3408 ret = simplify_subreg (new_mode, x, old_mode, 0);
3411 return ret;
3414 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3415 an integer mode of the same size as MODE. Returns the instruction
3416 emitted, or NULL if such a move could not be generated. */
3418 static rtx_insn *
3419 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3421 scalar_int_mode imode;
3422 enum insn_code code;
3424 /* There must exist a mode of the exact size we require. */
3425 if (!int_mode_for_mode (mode).exists (&imode))
3426 return NULL;
3428 /* The target must support moves in this mode. */
3429 code = optab_handler (mov_optab, imode);
3430 if (code == CODE_FOR_nothing)
3431 return NULL;
3433 x = emit_move_change_mode (imode, mode, x, force);
3434 if (x == NULL_RTX)
3435 return NULL;
3436 y = emit_move_change_mode (imode, mode, y, force);
3437 if (y == NULL_RTX)
3438 return NULL;
3439 return emit_insn (GEN_FCN (code) (x, y));
3442 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3443 Return an equivalent MEM that does not use an auto-increment. */
3446 emit_move_resolve_push (machine_mode mode, rtx x)
3448 enum rtx_code code = GET_CODE (XEXP (x, 0));
3449 rtx temp;
3451 poly_int64 adjust = GET_MODE_SIZE (mode);
3452 #ifdef PUSH_ROUNDING
3453 adjust = PUSH_ROUNDING (adjust);
3454 #endif
3455 if (code == PRE_DEC || code == POST_DEC)
3456 adjust = -adjust;
3457 else if (code == PRE_MODIFY || code == POST_MODIFY)
3459 rtx expr = XEXP (XEXP (x, 0), 1);
3461 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3462 poly_int64 val = rtx_to_poly_int64 (XEXP (expr, 1));
3463 if (GET_CODE (expr) == MINUS)
3464 val = -val;
3465 gcc_assert (known_eq (adjust, val) || known_eq (adjust, -val));
3466 adjust = val;
3469 /* Do not use anti_adjust_stack, since we don't want to update
3470 stack_pointer_delta. */
3471 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3472 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3473 0, OPTAB_LIB_WIDEN);
3474 if (temp != stack_pointer_rtx)
3475 emit_move_insn (stack_pointer_rtx, temp);
3477 switch (code)
3479 case PRE_INC:
3480 case PRE_DEC:
3481 case PRE_MODIFY:
3482 temp = stack_pointer_rtx;
3483 break;
3484 case POST_INC:
3485 case POST_DEC:
3486 case POST_MODIFY:
3487 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3488 break;
3489 default:
3490 gcc_unreachable ();
3493 return replace_equiv_address (x, temp);
3496 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3497 X is known to satisfy push_operand, and MODE is known to be complex.
3498 Returns the last instruction emitted. */
3500 rtx_insn *
3501 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3503 scalar_mode submode = GET_MODE_INNER (mode);
3504 bool imag_first;
3506 #ifdef PUSH_ROUNDING
3507 poly_int64 submodesize = GET_MODE_SIZE (submode);
3509 /* In case we output to the stack, but the size is smaller than the
3510 machine can push exactly, we need to use move instructions. */
3511 if (maybe_ne (PUSH_ROUNDING (submodesize), submodesize))
3513 x = emit_move_resolve_push (mode, x);
3514 return emit_move_insn (x, y);
3516 #endif
3518 /* Note that the real part always precedes the imag part in memory
3519 regardless of machine's endianness. */
3520 switch (GET_CODE (XEXP (x, 0)))
3522 case PRE_DEC:
3523 case POST_DEC:
3524 imag_first = true;
3525 break;
3526 case PRE_INC:
3527 case POST_INC:
3528 imag_first = false;
3529 break;
3530 default:
3531 gcc_unreachable ();
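/* With a decrementing push the imaginary part must be stored first so
   that the real part ends up at the lower address; with an incrementing
   push the real part goes first.  */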
3534 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3535 read_complex_part (y, imag_first));
3536 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3537 read_complex_part (y, !imag_first));
3540 /* A subroutine of emit_move_complex. Perform the move from Y to X
3541 via two moves of the parts. Returns the last instruction emitted. */
3543 rtx_insn *
3544 emit_move_complex_parts (rtx x, rtx y)
3546 /* Show the output dies here. This is necessary for SUBREGs
3547 of pseudos since we cannot track their lifetimes correctly;
3548 hard regs shouldn't appear here except as return values. */
3549 if (!reload_completed && !reload_in_progress
3550 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3551 emit_clobber (x);
3553 write_complex_part (x, read_complex_part (y, false), false);
3554 write_complex_part (x, read_complex_part (y, true), true);
3556 return get_last_insn ();
3559 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3560 MODE is known to be complex. Returns the last instruction emitted. */
3562 static rtx_insn *
3563 emit_move_complex (machine_mode mode, rtx x, rtx y)
3565 bool try_int;
3567 /* Need to take special care for pushes, to maintain proper ordering
3568 of the data, and possibly extra padding. */
3569 if (push_operand (x, mode))
3570 return emit_move_complex_push (mode, x, y);
3572 /* See if we can coerce the target into moving both values at once, except
3573 for floating point where we favor moving as parts if this is easy. */
3574 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3575 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3576 && !(REG_P (x)
3577 && HARD_REGISTER_P (x)
3578 && REG_NREGS (x) == 1)
3579 && !(REG_P (y)
3580 && HARD_REGISTER_P (y)
3581 && REG_NREGS (y) == 1))
3582 try_int = false;
3583 /* Not possible if the values are inherently not adjacent. */
3584 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3585 try_int = false;
3586 /* Is possible if both are registers (or subregs of registers). */
3587 else if (register_operand (x, mode) && register_operand (y, mode))
3588 try_int = true;
3589 /* If one of the operands is a memory, and alignment constraints
3590 are friendly enough, we may be able to do combined memory operations.
3591 We do not attempt this if Y is a constant because that combination is
3592 usually better with the by-parts thing below. */
3593 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3594 && (!STRICT_ALIGNMENT
3595 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3596 try_int = true;
3597 else
3598 try_int = false;
3600 if (try_int)
3602 rtx_insn *ret;
3604 /* For memory to memory moves, optimal behavior can be had with the
3605 existing block move logic. But use normal expansion if optimizing
3606 for size. */
3607 if (MEM_P (x) && MEM_P (y))
3609 emit_block_move (x, y, gen_int_mode (GET_MODE_SIZE (mode), Pmode),
3610 (optimize_insn_for_speed_p()
3611 ? BLOCK_OP_NO_LIBCALL : BLOCK_OP_NORMAL));
3612 return get_last_insn ();
3615 ret = emit_move_via_integer (mode, x, y, true);
3616 if (ret)
3617 return ret;
3620 return emit_move_complex_parts (x, y);
3623 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3624 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3626 static rtx_insn *
3627 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3629 rtx_insn *ret;
3631 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3632 if (mode != CCmode)
3634 enum insn_code code = optab_handler (mov_optab, CCmode);
3635 if (code != CODE_FOR_nothing)
3637 x = emit_move_change_mode (CCmode, mode, x, true);
3638 y = emit_move_change_mode (CCmode, mode, y, true);
3639 return emit_insn (GEN_FCN (code) (x, y));
3643 /* Otherwise, find the MODE_INT mode of the same width. */
3644 ret = emit_move_via_integer (mode, x, y, false);
3645 gcc_assert (ret != NULL);
3646 return ret;
3649 /* Return true if word I of OP lies entirely in the
3650 undefined bits of a paradoxical subreg. */
3652 static bool
3653 undefined_operand_subword_p (const_rtx op, int i)
3655 if (GET_CODE (op) != SUBREG)
3656 return false;
3657 machine_mode innermostmode = GET_MODE (SUBREG_REG (op));
3658 poly_int64 offset = i * UNITS_PER_WORD + subreg_memory_offset (op);
3659 return (known_ge (offset, GET_MODE_SIZE (innermostmode))
3660 || known_le (offset, -UNITS_PER_WORD));
3663 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3664 MODE is any multi-word or full-word mode that lacks a move_insn
3665 pattern. Note that you will get better code if you define such
3666 patterns, even if they must turn into multiple assembler instructions. */
3668 static rtx_insn *
3669 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3671 rtx_insn *last_insn = 0;
3672 rtx_insn *seq;
3673 rtx inner;
3674 bool need_clobber;
3675 int i, mode_size;
3677 /* This function can only handle cases where the number of words is
3678 known at compile time. */
3679 mode_size = GET_MODE_SIZE (mode).to_constant ();
3680 gcc_assert (mode_size >= UNITS_PER_WORD);
3682 /* If X is a push on the stack, do the push now and replace
3683 X with a reference to the stack pointer. */
3684 if (push_operand (x, mode))
3685 x = emit_move_resolve_push (mode, x);
3687 /* If we are in reload, see if either operand is a MEM whose address
3688 is scheduled for replacement. */
3689 if (reload_in_progress && MEM_P (x)
3690 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3691 x = replace_equiv_address_nv (x, inner);
3692 if (reload_in_progress && MEM_P (y)
3693 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3694 y = replace_equiv_address_nv (y, inner);
3696 start_sequence ();
3698 need_clobber = false;
3699 for (i = 0; i < CEIL (mode_size, UNITS_PER_WORD); i++)
3701 /* Do not generate code for a move if it would go entirely
3702 to the non-existing bits of a paradoxical subreg. */
3703 if (undefined_operand_subword_p (x, i))
3704 continue;
3706 rtx xpart = operand_subword (x, i, 1, mode);
3707 rtx ypart;
3709 /* Do not generate code for a move if it would come entirely
3710 from the undefined bits of a paradoxical subreg. */
3711 if (undefined_operand_subword_p (y, i))
3712 continue;
3714 ypart = operand_subword (y, i, 1, mode);
3716 /* If we can't get a part of Y, put Y into memory if it is a
3717 constant. Otherwise, force it into a register. Then we must
3718 be able to get a part of Y. */
3719 if (ypart == 0 && CONSTANT_P (y))
3721 y = use_anchored_address (force_const_mem (mode, y));
3722 ypart = operand_subword (y, i, 1, mode);
3724 else if (ypart == 0)
3725 ypart = operand_subword_force (y, i, mode);
3727 gcc_assert (xpart && ypart);
3729 need_clobber |= (GET_CODE (xpart) == SUBREG);
3731 last_insn = emit_move_insn (xpart, ypart);
3734 seq = get_insns ();
3735 end_sequence ();
3737 /* Show the output dies here. This is necessary for SUBREGs
3738 of pseudos since we cannot track their lifetimes correctly;
3739 hard regs shouldn't appear here except as return values.
3740 We never want to emit such a clobber after reload. */
3741 if (x != y
3742 && ! (reload_in_progress || reload_completed)
3743 && need_clobber != 0)
3744 emit_clobber (x);
3746 emit_insn (seq);
3748 return last_insn;
3751 /* Low level part of emit_move_insn.
3752 Called just like emit_move_insn, but assumes X and Y
3753 are basically valid. */
3755 rtx_insn *
3756 emit_move_insn_1 (rtx x, rtx y)
3758 machine_mode mode = GET_MODE (x);
3759 enum insn_code code;
3761 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3763 code = optab_handler (mov_optab, mode);
3764 if (code != CODE_FOR_nothing)
3765 return emit_insn (GEN_FCN (code) (x, y));
3767 /* Expand complex moves by moving real part and imag part. */
3768 if (COMPLEX_MODE_P (mode))
3769 return emit_move_complex (mode, x, y);
3771 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3772 || ALL_FIXED_POINT_MODE_P (mode))
3774 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3776 /* If we can't find an integer mode, use multi words. */
3777 if (result)
3778 return result;
3779 else
3780 return emit_move_multi_word (mode, x, y);
3783 if (GET_MODE_CLASS (mode) == MODE_CC)
3784 return emit_move_ccmode (mode, x, y);
3786 /* Try using a move pattern for the corresponding integer mode. This is
3787 only safe when simplify_subreg can convert MODE constants into integer
3788 constants. At present, it can only do this reliably if the value
3789 fits within a HOST_WIDE_INT. */
3790 if (!CONSTANT_P (y)
3791 || known_le (GET_MODE_BITSIZE (mode), HOST_BITS_PER_WIDE_INT))
3793 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3795 if (ret)
3797 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3798 return ret;
3802 return emit_move_multi_word (mode, x, y);
3805 /* Generate code to copy Y into X.
3806 Both Y and X must have the same mode, except that
3807 Y can be a constant with VOIDmode.
3808 This mode cannot be BLKmode; use emit_block_move for that.
3810 Return the last instruction emitted. */
3812 rtx_insn *
3813 emit_move_insn (rtx x, rtx y)
3815 machine_mode mode = GET_MODE (x);
3816 rtx y_cst = NULL_RTX;
3817 rtx_insn *last_insn;
3818 rtx set;
3820 gcc_assert (mode != BLKmode
3821 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3823 /* If we have a copy that looks like one of the following patterns:
3824 (set (subreg:M1 (reg:M2 ...)) (subreg:M1 (reg:M2 ...)))
3825 (set (subreg:M1 (reg:M2 ...)) (mem:M1 ADDR))
3826 (set (mem:M1 ADDR) (subreg:M1 (reg:M2 ...)))
3827 (set (subreg:M1 (reg:M2 ...)) (constant C))
3828 where mode M1 is equal in size to M2, try to detect whether the
3829 mode change involves an implicit round trip through memory.
3830 If so, see if we can avoid that by removing the subregs and
3831 doing the move in mode M2 instead. */
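/* For instance, on a target where changing a DImode pseudo to V2SImode is
   disallowed (targetm.can_change_mode_class returns false), a copy written as
   (set (subreg:V2SI (reg:DI a) 0) (subreg:V2SI (reg:DI b) 0)) is better
   emitted as a plain DImode register-to-register move.  */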
3833 rtx x_inner = NULL_RTX;
3834 rtx y_inner = NULL_RTX;
3836 auto candidate_subreg_p = [&](rtx subreg) {
3837 return (REG_P (SUBREG_REG (subreg))
3838 && known_eq (GET_MODE_SIZE (GET_MODE (SUBREG_REG (subreg))),
3839 GET_MODE_SIZE (GET_MODE (subreg)))
3840 && optab_handler (mov_optab, GET_MODE (SUBREG_REG (subreg)))
3841 != CODE_FOR_nothing);
3844 auto candidate_mem_p = [&](machine_mode innermode, rtx mem) {
3845 return (!targetm.can_change_mode_class (innermode, GET_MODE (mem), ALL_REGS)
3846 && !push_operand (mem, GET_MODE (mem))
3847 /* Not a candidate if innermode requires too much alignment. */
3848 && (MEM_ALIGN (mem) >= GET_MODE_ALIGNMENT (innermode)
3849 || targetm.slow_unaligned_access (GET_MODE (mem),
3850 MEM_ALIGN (mem))
3851 || !targetm.slow_unaligned_access (innermode,
3852 MEM_ALIGN (mem))));
3855 if (SUBREG_P (x) && candidate_subreg_p (x))
3856 x_inner = SUBREG_REG (x);
3858 if (SUBREG_P (y) && candidate_subreg_p (y))
3859 y_inner = SUBREG_REG (y);
3861 if (x_inner != NULL_RTX
3862 && y_inner != NULL_RTX
3863 && GET_MODE (x_inner) == GET_MODE (y_inner)
3864 && !targetm.can_change_mode_class (GET_MODE (x_inner), mode, ALL_REGS))
3866 x = x_inner;
3867 y = y_inner;
3868 mode = GET_MODE (x_inner);
3870 else if (x_inner != NULL_RTX
3871 && MEM_P (y)
3872 && candidate_mem_p (GET_MODE (x_inner), y))
3874 x = x_inner;
3875 y = adjust_address (y, GET_MODE (x_inner), 0);
3876 mode = GET_MODE (x_inner);
3878 else if (y_inner != NULL_RTX
3879 && MEM_P (x)
3880 && candidate_mem_p (GET_MODE (y_inner), x))
3882 x = adjust_address (x, GET_MODE (y_inner), 0);
3883 y = y_inner;
3884 mode = GET_MODE (y_inner);
3886 else if (x_inner != NULL_RTX
3887 && CONSTANT_P (y)
3888 && !targetm.can_change_mode_class (GET_MODE (x_inner),
3889 mode, ALL_REGS)
3890 && (y_inner = simplify_subreg (GET_MODE (x_inner), y, mode, 0)))
3892 x = x_inner;
3893 y = y_inner;
3894 mode = GET_MODE (x_inner);
3897 if (CONSTANT_P (y))
3899 if (optimize
3900 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3901 && (last_insn = compress_float_constant (x, y)))
3902 return last_insn;
3904 y_cst = y;
3906 if (!targetm.legitimate_constant_p (mode, y))
3908 y = force_const_mem (mode, y);
3910 /* If the target's cannot_force_const_mem prevented the spill,
3911 assume that the target's move expanders will also take care
3912 of the non-legitimate constant. */
3913 if (!y)
3914 y = y_cst;
3915 else
3916 y = use_anchored_address (y);
3920 /* If X or Y are memory references, verify that their addresses are valid
3921 for the machine. */
3922 if (MEM_P (x)
3923 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3924 MEM_ADDR_SPACE (x))
3925 && ! push_operand (x, GET_MODE (x))))
3926 x = validize_mem (x);
3928 if (MEM_P (y)
3929 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3930 MEM_ADDR_SPACE (y)))
3931 y = validize_mem (y);
3933 gcc_assert (mode != BLKmode);
3935 last_insn = emit_move_insn_1 (x, y);
3937 if (y_cst && REG_P (x)
3938 && (set = single_set (last_insn)) != NULL_RTX
3939 && SET_DEST (set) == x
3940 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3941 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3943 return last_insn;
3946 /* Generate the body of an instruction to copy Y into X.
3947 It may be a list of insns, if one insn isn't enough. */
3949 rtx_insn *
3950 gen_move_insn (rtx x, rtx y)
3952 rtx_insn *seq;
3954 start_sequence ();
3955 emit_move_insn_1 (x, y);
3956 seq = get_insns ();
3957 end_sequence ();
3958 return seq;
3961 /* If Y is representable exactly in a narrower mode, and the target can
3962 perform the extension directly from constant or memory, then emit the
3963 move as an extension. */
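/* For example, a DFmode constant such as 1.5 is exactly representable in
   SFmode, so on a target providing extendsfdf2 the move can be emitted as a
   FLOAT_EXTEND of the SFmode constant when that is no more expensive than
   loading the DFmode constant itself.  */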
3965 static rtx_insn *
3966 compress_float_constant (rtx x, rtx y)
3968 machine_mode dstmode = GET_MODE (x);
3969 machine_mode orig_srcmode = GET_MODE (y);
3970 machine_mode srcmode;
3971 const REAL_VALUE_TYPE *r;
3972 int oldcost, newcost;
3973 bool speed = optimize_insn_for_speed_p ();
3975 r = CONST_DOUBLE_REAL_VALUE (y);
3977 if (targetm.legitimate_constant_p (dstmode, y))
3978 oldcost = set_src_cost (y, orig_srcmode, speed);
3979 else
3980 oldcost = set_src_cost (force_const_mem (dstmode, y), dstmode, speed);
3982 FOR_EACH_MODE_UNTIL (srcmode, orig_srcmode)
3984 enum insn_code ic;
3985 rtx trunc_y;
3986 rtx_insn *last_insn;
3988 /* Skip if the target can't extend this way. */
3989 ic = can_extend_p (dstmode, srcmode, 0);
3990 if (ic == CODE_FOR_nothing)
3991 continue;
3993 /* Skip if the narrowed value isn't exact. */
3994 if (! exact_real_truncate (srcmode, r))
3995 continue;
3997 trunc_y = const_double_from_real_value (*r, srcmode);
3999 if (targetm.legitimate_constant_p (srcmode, trunc_y))
4001 /* Skip if the target needs extra instructions to perform
4002 the extension. */
4003 if (!insn_operand_matches (ic, 1, trunc_y))
4004 continue;
4005 /* This is valid, but may not be cheaper than the original. */
4006 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
4007 dstmode, speed);
4008 if (oldcost < newcost)
4009 continue;
4011 else if (float_extend_from_mem[dstmode][srcmode])
4013 trunc_y = force_const_mem (srcmode, trunc_y);
4014 /* This is valid, but may not be cheaper than the original. */
4015 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
4016 dstmode, speed);
4017 if (oldcost < newcost)
4018 continue;
4019 trunc_y = validize_mem (trunc_y);
4021 else
4022 continue;
4024 /* For CSE's benefit, force the compressed constant pool entry
4025 into a new pseudo. This constant may be used in different modes,
4026 and if not, combine will put things back together for us. */
4027 trunc_y = force_reg (srcmode, trunc_y);
4029 /* If x is a hard register, perform the extension into a pseudo,
4030 so that e.g. stack realignment code is aware of it. */
4031 rtx target = x;
4032 if (REG_P (x) && HARD_REGISTER_P (x))
4033 target = gen_reg_rtx (dstmode);
4035 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
4036 last_insn = get_last_insn ();
4038 if (REG_P (target))
4039 set_unique_reg_note (last_insn, REG_EQUAL, y);
4041 if (target != x)
4042 return emit_move_insn (x, target);
4043 return last_insn;
4046 return NULL;
4049 /* Pushing data onto the stack. */
4051 /* Push a block of length SIZE (perhaps variable)
4052 and return an rtx to address the beginning of the block.
4053 The value may be virtual_outgoing_args_rtx.
4055 EXTRA is the number of bytes of padding to push in addition to SIZE.
4056 BELOW nonzero means this padding comes at low addresses;
4057 otherwise, the padding comes at high addresses. */
4059 rtx
4060 push_block (rtx size, poly_int64 extra, int below)
4062 rtx temp;
4064 size = convert_modes (Pmode, ptr_mode, size, 1);
4065 if (CONSTANT_P (size))
4066 anti_adjust_stack (plus_constant (Pmode, size, extra));
4067 else if (REG_P (size) && known_eq (extra, 0))
4068 anti_adjust_stack (size);
4069 else
4071 temp = copy_to_mode_reg (Pmode, size);
4072 if (maybe_ne (extra, 0))
4073 temp = expand_binop (Pmode, add_optab, temp,
4074 gen_int_mode (extra, Pmode),
4075 temp, 0, OPTAB_LIB_WIDEN);
4076 anti_adjust_stack (temp);
4079 if (STACK_GROWS_DOWNWARD)
4081 temp = virtual_outgoing_args_rtx;
4082 if (maybe_ne (extra, 0) && below)
4083 temp = plus_constant (Pmode, temp, extra);
4085 else
4087 poly_int64 csize;
4088 if (poly_int_rtx_p (size, &csize))
4089 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
4090 -csize - (below ? 0 : extra));
4091 else if (maybe_ne (extra, 0) && !below)
4092 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
4093 negate_rtx (Pmode, plus_constant (Pmode, size,
4094 extra)));
4095 else
4096 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
4097 negate_rtx (Pmode, size));
4100 return memory_address (NARROWEST_INT_MODE, temp);
4103 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
4105 static rtx
4106 mem_autoinc_base (rtx mem)
4108 if (MEM_P (mem))
4110 rtx addr = XEXP (mem, 0);
4111 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
4112 return XEXP (addr, 0);
4114 return NULL;
4117 /* A utility routine used here, in reload, and in try_split. The insns
4118 after PREV up to and including LAST are known to adjust the stack,
4119 with a final value of END_ARGS_SIZE. Iterate backward from LAST
4120 placing notes as appropriate. PREV may be NULL, indicating the
4121 entire insn sequence prior to LAST should be scanned.
4123 The set of allowed stack pointer modifications is small:
4124 (1) One or more auto-inc style memory references (aka pushes),
4125 (2) One or more addition/subtraction with the SP as destination,
4126 (3) A single move insn with the SP as destination,
4127 (4) A call_pop insn,
4128 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
4130 Insns in the sequence that do not modify the SP are ignored,
4131 except for noreturn calls.
4133 The return value is the amount of adjustment that can be trivially
4134 verified, via immediate operand or auto-inc. If the adjustment
4135 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
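/* For instance, a push such as (set (mem:SI (pre_dec (reg sp))) ...) is a
   trivially verified adjustment of -4 bytes (case 1 above), and
   (set (reg sp) (plus (reg sp) (const_int -16))) is an adjustment of -16
   (case 2).  */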
4137 poly_int64
4138 find_args_size_adjust (rtx_insn *insn)
4140 rtx dest, set, pat;
4141 int i;
4143 pat = PATTERN (insn);
4144 set = NULL;
4146 /* Look for a call_pop pattern. */
4147 if (CALL_P (insn))
4149 /* We have to allow non-call_pop patterns for the case
4150 of emit_single_push_insn of a TLS address. */
4151 if (GET_CODE (pat) != PARALLEL)
4152 return 0;
4154 /* All call_pop have a stack pointer adjust in the parallel.
4155 The call itself is always first, and the stack adjust is
4156 usually last, so search from the end. */
4157 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
4159 set = XVECEXP (pat, 0, i);
4160 if (GET_CODE (set) != SET)
4161 continue;
4162 dest = SET_DEST (set);
4163 if (dest == stack_pointer_rtx)
4164 break;
4166 /* We'd better have found the stack pointer adjust. */
4167 if (i == 0)
4168 return 0;
4169 /* Fall through to process the extracted SET and DEST
4170 as if it was a standalone insn. */
4172 else if (GET_CODE (pat) == SET)
4173 set = pat;
4174 else if ((set = single_set (insn)) != NULL)
4175 ;
4176 else if (GET_CODE (pat) == PARALLEL)
4178 /* ??? Some older ports use a parallel with a stack adjust
4179 and a store for a PUSH_ROUNDING pattern, rather than a
4180 PRE/POST_MODIFY rtx. Don't force them to update yet... */
4181 /* ??? See h8300 and m68k, pushqi1. */
4182 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
4184 set = XVECEXP (pat, 0, i);
4185 if (GET_CODE (set) != SET)
4186 continue;
4187 dest = SET_DEST (set);
4188 if (dest == stack_pointer_rtx)
4189 break;
4191 /* We do not expect an auto-inc of the sp in the parallel. */
4192 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
4193 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
4194 != stack_pointer_rtx);
4196 if (i < 0)
4197 return 0;
4199 else
4200 return 0;
4202 dest = SET_DEST (set);
4204 /* Look for direct modifications of the stack pointer. */
4205 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
4207 /* Look for a trivial adjustment, otherwise assume nothing. */
4208 /* Note that the SPU restore_stack_block pattern refers to
4209 the stack pointer in V4SImode. Consider that non-trivial. */
4210 poly_int64 offset;
4211 if (SCALAR_INT_MODE_P (GET_MODE (dest))
4212 && strip_offset (SET_SRC (set), &offset) == stack_pointer_rtx)
4213 return offset;
4214 /* ??? Reload can generate no-op moves, which will be cleaned
4215 up later. Recognize it and continue searching. */
4216 else if (rtx_equal_p (dest, SET_SRC (set)))
4217 return 0;
4218 else
4219 return HOST_WIDE_INT_MIN;
4221 else
4223 rtx mem, addr;
4225 /* Otherwise only think about autoinc patterns. */
4226 if (mem_autoinc_base (dest) == stack_pointer_rtx)
4228 mem = dest;
4229 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
4230 != stack_pointer_rtx);
4232 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
4233 mem = SET_SRC (set);
4234 else
4235 return 0;
4237 addr = XEXP (mem, 0);
4238 switch (GET_CODE (addr))
4240 case PRE_INC:
4241 case POST_INC:
4242 return GET_MODE_SIZE (GET_MODE (mem));
4243 case PRE_DEC:
4244 case POST_DEC:
4245 return -GET_MODE_SIZE (GET_MODE (mem));
4246 case PRE_MODIFY:
4247 case POST_MODIFY:
4248 addr = XEXP (addr, 1);
4249 gcc_assert (GET_CODE (addr) == PLUS);
4250 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
4251 return rtx_to_poly_int64 (XEXP (addr, 1));
4252 default:
4253 gcc_unreachable ();
4258 poly_int64
4259 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last,
4260 poly_int64 end_args_size)
4262 poly_int64 args_size = end_args_size;
4263 bool saw_unknown = false;
4264 rtx_insn *insn;
4266 for (insn = last; insn != prev; insn = PREV_INSN (insn))
4268 if (!NONDEBUG_INSN_P (insn))
4269 continue;
4271 /* We might have existing REG_ARGS_SIZE notes, e.g. when pushing
4272 a call argument containing a TLS address that itself requires
4273 a call to __tls_get_addr. The handling of stack_pointer_delta
4274 in emit_single_push_insn is supposed to ensure that any such
4275 notes are already correct. */
4276 rtx note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
4277 gcc_assert (!note || known_eq (args_size, get_args_size (note)));
4279 poly_int64 this_delta = find_args_size_adjust (insn);
4280 if (known_eq (this_delta, 0))
4282 if (!CALL_P (insn)
4283 || ACCUMULATE_OUTGOING_ARGS
4284 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
4285 continue;
4288 gcc_assert (!saw_unknown);
4289 if (known_eq (this_delta, HOST_WIDE_INT_MIN))
4290 saw_unknown = true;
4292 if (!note)
4293 add_args_size_note (insn, args_size);
4294 if (STACK_GROWS_DOWNWARD)
4295 this_delta = -poly_uint64 (this_delta);
4297 if (saw_unknown)
4298 args_size = HOST_WIDE_INT_MIN;
4299 else
4300 args_size -= this_delta;
4303 return args_size;
4306 #ifdef PUSH_ROUNDING
4307 /* Emit single push insn. */
4309 static void
4310 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
4312 rtx dest_addr;
4313 poly_int64 rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
4314 rtx dest;
4315 enum insn_code icode;
4317 /* If there is a push pattern, use it. Otherwise try the old way of
4318 throwing a MEM representing the push operation at the move expander.
4319 icode = optab_handler (push_optab, mode);
4320 if (icode != CODE_FOR_nothing)
4322 class expand_operand ops[1];
4324 create_input_operand (&ops[0], x, mode);
4325 if (maybe_expand_insn (icode, 1, ops))
4326 return;
4328 if (known_eq (GET_MODE_SIZE (mode), rounded_size))
4329 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4330 /* If we are to pad downward, adjust the stack pointer first and
4331 then store X into the stack location using an offset. This is
4332 because emit_move_insn does not know how to pad; it does not have
4333 access to type. */
4334 else if (targetm.calls.function_arg_padding (mode, type) == PAD_DOWNWARD)
4336 emit_move_insn (stack_pointer_rtx,
4337 expand_binop (Pmode,
4338 STACK_GROWS_DOWNWARD ? sub_optab
4339 : add_optab,
4340 stack_pointer_rtx,
4341 gen_int_mode (rounded_size, Pmode),
4342 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4344 poly_int64 offset = rounded_size - GET_MODE_SIZE (mode);
4345 if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
4346 /* We have already decremented the stack pointer, so get the
4347 previous value. */
4348 offset += rounded_size;
4350 if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
4351 /* We have already incremented the stack pointer, so get the
4352 previous value. */
4353 offset -= rounded_size;
4355 dest_addr = plus_constant (Pmode, stack_pointer_rtx, offset);
4357 else
4359 if (STACK_GROWS_DOWNWARD)
4360 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4361 dest_addr = plus_constant (Pmode, stack_pointer_rtx, -rounded_size);
4362 else
4363 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4364 dest_addr = plus_constant (Pmode, stack_pointer_rtx, rounded_size);
4366 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4369 dest = gen_rtx_MEM (mode, dest_addr);
4371 if (type != 0)
4373 set_mem_attributes (dest, type, 1);
4375 if (cfun->tail_call_marked)
4376 /* Function incoming arguments may overlap with sibling call
4377 outgoing arguments and we cannot allow reordering of reads
4378 from function arguments with stores to outgoing arguments
4379 of sibling calls. */
4380 set_mem_alias_set (dest, 0);
4382 emit_move_insn (dest, x);
4385 /* Emit and annotate a single push insn. */
4387 static void
4388 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4390 poly_int64 delta, old_delta = stack_pointer_delta;
4391 rtx_insn *prev = get_last_insn ();
4392 rtx_insn *last;
4394 emit_single_push_insn_1 (mode, x, type);
4396 /* Adjust stack_pointer_delta to describe the situation after the push
4397 we just performed. Note that we must do this after the push rather
4398 than before the push in case calculating X needs pushes and pops of
4399 its own (e.g. if calling __tls_get_addr). The REG_ARGS_SIZE notes
4400 for such pushes and pops must not include the effect of the future
4401 push of X. */
4402 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4404 last = get_last_insn ();
4406 /* Notice the common case where we emitted exactly one insn. */
4407 if (PREV_INSN (last) == prev)
4409 add_args_size_note (last, stack_pointer_delta);
4410 return;
4413 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4414 gcc_assert (known_eq (delta, HOST_WIDE_INT_MIN)
4415 || known_eq (delta, old_delta));
4417 #endif
4419 /* If reading SIZE bytes from X will end up reading from
4420 Y, return the number of bytes that overlap. Return -1
4421 if there is no overlap, or -2 if the overlap cannot be determined
4422 (for example when X and Y have different base registers). */
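/* For instance, if X is (reg sp), Y is (plus (reg sp) (const_int 8)) and
   SIZE is 16, the 16-byte read starting at X covers the first 8 bytes at Y,
   so 8 is returned.  */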
4424 static int
4425 memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
4427 rtx tmp = plus_constant (Pmode, x, size);
4428 rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);
4430 if (!CONST_INT_P (sub))
4431 return -2;
4433 HOST_WIDE_INT val = INTVAL (sub);
4435 return IN_RANGE (val, 1, size) ? val : -1;
4438 /* Generate code to push X onto the stack, assuming it has mode MODE and
4439 type TYPE.
4440 MODE is redundant except when X is a CONST_INT (since they don't
4441 carry mode info).
4442 SIZE is an rtx for the size of data to be copied (in bytes),
4443 needed only if X is BLKmode.
4444 Return true if successful. May return false if asked to push a
4445 partial argument during a sibcall optimization (as specified by
4446 SIBCALL_P) and the incoming and outgoing pointers cannot be shown
4447 to not overlap.
4449 ALIGN (in bits) is maximum alignment we can assume.
4451 If PARTIAL and REG are both nonzero, then copy that many of the first
4452 bytes of X into registers starting with REG, and push the rest of X.
4453 The amount of space pushed is decreased by PARTIAL bytes.
4454 REG must be a hard register in this case.
4455 If REG is zero but PARTIAL is not, take all other actions for an
4456 argument partially in registers, but do not actually load any
4457 registers.
4459 EXTRA is the amount in bytes of extra space to leave next to this arg.
4460 This is ignored if an argument block has already been allocated.
4462 On a machine that lacks real push insns, ARGS_ADDR is the address of
4463 the bottom of the argument block for this call. We use indexing off there
4464 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4465 argument block has not been preallocated.
4467 ARGS_SO_FAR is the size of args previously pushed for this call.
4469 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4470 for arguments passed in registers. If nonzero, it will be the number
4471 of bytes required. */
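/* For instance, if the first 8 bytes of a BLKmode argument are passed in
   registers on a 32-bit target, PARTIAL is 8: two words are typically loaded
   into REG and REG+1 while the remainder of X is pushed on the stack.  */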
4473 bool
4474 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4475 unsigned int align, int partial, rtx reg, poly_int64 extra,
4476 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4477 rtx alignment_pad, bool sibcall_p)
4479 rtx xinner;
4480 pad_direction stack_direction
4481 = STACK_GROWS_DOWNWARD ? PAD_DOWNWARD : PAD_UPWARD;
4483 /* Decide where to pad the argument: PAD_DOWNWARD for below,
4484 PAD_UPWARD for above, or PAD_NONE for don't pad it.
4485 Default is below for small data on big-endian machines; else above. */
4486 pad_direction where_pad = targetm.calls.function_arg_padding (mode, type);
4488 /* Invert direction if stack is post-decrement.
4489 FIXME: why? */
4490 if (STACK_PUSH_CODE == POST_DEC)
4491 if (where_pad != PAD_NONE)
4492 where_pad = (where_pad == PAD_DOWNWARD ? PAD_UPWARD : PAD_DOWNWARD);
4494 xinner = x;
4496 int nregs = partial / UNITS_PER_WORD;
4497 rtx *tmp_regs = NULL;
4498 int overlapping = 0;
4500 if (mode == BLKmode
4501 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4503 /* Copy a block into the stack, entirely or partially. */
4505 rtx temp;
4506 int used;
4507 int offset;
4508 int skip;
4510 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4511 used = partial - offset;
4513 if (mode != BLKmode)
4515 /* A value is to be stored in an insufficiently aligned
4516 stack slot; copy via a suitably aligned slot if
4517 necessary. */
4518 size = gen_int_mode (GET_MODE_SIZE (mode), Pmode);
4519 if (!MEM_P (xinner))
4521 temp = assign_temp (type, 1, 1);
4522 emit_move_insn (temp, xinner);
4523 xinner = temp;
4527 gcc_assert (size);
4529 /* USED is now the # of bytes we need not copy to the stack
4530 because registers will take care of them. */
4532 if (partial != 0)
4533 xinner = adjust_address (xinner, BLKmode, used);
4535 /* If the partial register-part of the arg counts in its stack size,
4536 skip the part of stack space corresponding to the registers.
4537 Otherwise, start copying to the beginning of the stack space,
4538 by setting SKIP to 0. */
4539 skip = (reg_parm_stack_space == 0) ? 0 : used;
4541 #ifdef PUSH_ROUNDING
4542 /* Do it with several push insns if that doesn't take lots of insns
4543 and if there is no difficulty with push insns that skip bytes
4544 on the stack for alignment purposes. */
4545 if (args_addr == 0
4546 && PUSH_ARGS
4547 && CONST_INT_P (size)
4548 && skip == 0
4549 && MEM_ALIGN (xinner) >= align
4550 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4551 /* Here we avoid the case of a structure whose weak alignment
4552 forces many pushes of a small amount of data,
4553 and such small pushes do rounding that causes trouble. */
4554 && ((!targetm.slow_unaligned_access (word_mode, align))
4555 || align >= BIGGEST_ALIGNMENT
4556 || known_eq (PUSH_ROUNDING (align / BITS_PER_UNIT),
4557 align / BITS_PER_UNIT))
4558 && known_eq (PUSH_ROUNDING (INTVAL (size)), INTVAL (size)))
4560 /* Push padding now if padding above and stack grows down,
4561 or if padding below and stack grows up.
4562 But if space already allocated, this has already been done. */
4563 if (maybe_ne (extra, 0)
4564 && args_addr == 0
4565 && where_pad != PAD_NONE
4566 && where_pad != stack_direction)
4567 anti_adjust_stack (gen_int_mode (extra, Pmode));
4569 move_by_pieces (NULL, xinner, INTVAL (size) - used, align,
4570 RETURN_BEGIN);
4572 else
4573 #endif /* PUSH_ROUNDING */
4575 rtx target;
4577 /* Otherwise make space on the stack and copy the data
4578 to the address of that space. */
4580 /* Deduct words put into registers from the size we must copy. */
4581 if (partial != 0)
4583 if (CONST_INT_P (size))
4584 size = GEN_INT (INTVAL (size) - used);
4585 else
4586 size = expand_binop (GET_MODE (size), sub_optab, size,
4587 gen_int_mode (used, GET_MODE (size)),
4588 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4591 /* Get the address of the stack space.
4592 In this case, we do not deal with EXTRA separately.
4593 A single stack adjust will do. */
4594 poly_int64 const_args_so_far;
4595 if (! args_addr)
4597 temp = push_block (size, extra, where_pad == PAD_DOWNWARD);
4598 extra = 0;
4600 else if (poly_int_rtx_p (args_so_far, &const_args_so_far))
4601 temp = memory_address (BLKmode,
4602 plus_constant (Pmode, args_addr,
4603 skip + const_args_so_far));
4604 else
4605 temp = memory_address (BLKmode,
4606 plus_constant (Pmode,
4607 gen_rtx_PLUS (Pmode,
4608 args_addr,
4609 args_so_far),
4610 skip));
4612 if (!ACCUMULATE_OUTGOING_ARGS)
4614 /* If the source is referenced relative to the stack pointer,
4615 copy it to another register to stabilize it. We do not need
4616 to do this if we know that we won't be changing sp. */
4618 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4619 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4620 temp = copy_to_reg (temp);
4623 target = gen_rtx_MEM (BLKmode, temp);
4625 /* We do *not* set_mem_attributes here, because incoming arguments
4626 may overlap with sibling call outgoing arguments and we cannot
4627 allow reordering of reads from function arguments with stores
4628 to outgoing arguments of sibling calls. We do, however, want
4629 to record the alignment of the stack slot. */
4630 /* ALIGN may well be better aligned than TYPE, e.g. due to
4631 PARM_BOUNDARY. Assume the caller isn't lying. */
4632 set_mem_align (target, align);
4634 /* If part should go in registers and pushing to that part would
4635 overwrite some of the values that need to go into regs, load the
4636 overlapping values into temporary pseudos to be moved into the hard
4637 regs at the end after the stack pushing has completed.
4638 We cannot load them directly into the hard regs here because
4639 they can be clobbered by the block move expansions.
4640 See PR 65358. */
4642 if (partial > 0 && reg != 0 && mode == BLKmode
4643 && GET_CODE (reg) != PARALLEL)
4645 overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
4646 if (overlapping > 0)
4648 gcc_assert (overlapping % UNITS_PER_WORD == 0);
4649 overlapping /= UNITS_PER_WORD;
4651 tmp_regs = XALLOCAVEC (rtx, overlapping);
4653 for (int i = 0; i < overlapping; i++)
4654 tmp_regs[i] = gen_reg_rtx (word_mode);
4656 for (int i = 0; i < overlapping; i++)
4657 emit_move_insn (tmp_regs[i],
4658 operand_subword_force (target, i, mode));
4660 else if (overlapping == -1)
4661 overlapping = 0;
4662 /* Could not determine whether there is overlap.
4663 Fail the sibcall. */
4664 else
4666 overlapping = 0;
4667 if (sibcall_p)
4668 return false;
4671 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4674 else if (partial > 0)
4676 /* Scalar partly in registers. This case is only supported
4677 for fixed-width modes. */
4678 int num_words = GET_MODE_SIZE (mode).to_constant ();
4679 num_words /= UNITS_PER_WORD;
4680 int i;
4681 int not_stack;
4682 /* # bytes of start of argument
4683 that we must make space for but need not store. */
4684 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4685 int args_offset = INTVAL (args_so_far);
4686 int skip;
4688 /* Push padding now if padding above and stack grows down,
4689 or if padding below and stack grows up.
4690 But if space already allocated, this has already been done. */
4691 if (maybe_ne (extra, 0)
4692 && args_addr == 0
4693 && where_pad != PAD_NONE
4694 && where_pad != stack_direction)
4695 anti_adjust_stack (gen_int_mode (extra, Pmode));
4697 /* If we make space by pushing it, we might as well push
4698 the real data. Otherwise, we can leave OFFSET nonzero
4699 and leave the space uninitialized. */
4700 if (args_addr == 0)
4701 offset = 0;
4703 /* Now NOT_STACK gets the number of words that we don't need to
4704 allocate on the stack. Convert OFFSET to words too. */
4705 not_stack = (partial - offset) / UNITS_PER_WORD;
4706 offset /= UNITS_PER_WORD;
4708 /* If the partial register-part of the arg counts in its stack size,
4709 skip the part of stack space corresponding to the registers.
4710 Otherwise, start copying to the beginning of the stack space,
4711 by setting SKIP to 0. */
4712 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4714 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4715 x = validize_mem (force_const_mem (mode, x));
4717 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4718 SUBREGs of such registers are not allowed. */
4719 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4720 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4721 x = copy_to_reg (x);
4723 /* Loop over all the words allocated on the stack for this arg. */
4724 /* We can do it by words, because any scalar bigger than a word
4725 has a size a multiple of a word. */
4726 for (i = num_words - 1; i >= not_stack; i--)
4727 if (i >= not_stack + offset)
4728 if (!emit_push_insn (operand_subword_force (x, i, mode),
4729 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4730 0, args_addr,
4731 GEN_INT (args_offset + ((i - not_stack + skip)
4732 * UNITS_PER_WORD)),
4733 reg_parm_stack_space, alignment_pad, sibcall_p))
4734 return false;
4736 else
4738 rtx addr;
4739 rtx dest;
4741 /* Push padding now if padding above and stack grows down,
4742 or if padding below and stack grows up.
4743 But if space already allocated, this has already been done. */
4744 if (maybe_ne (extra, 0)
4745 && args_addr == 0
4746 && where_pad != PAD_NONE
4747 && where_pad != stack_direction)
4748 anti_adjust_stack (gen_int_mode (extra, Pmode));
4750 #ifdef PUSH_ROUNDING
4751 if (args_addr == 0 && PUSH_ARGS)
4752 emit_single_push_insn (mode, x, type);
4753 else
4754 #endif
4756 addr = simplify_gen_binary (PLUS, Pmode, args_addr, args_so_far);
4757 dest = gen_rtx_MEM (mode, memory_address (mode, addr));
4759 /* We do *not* set_mem_attributes here, because incoming arguments
4760 may overlap with sibling call outgoing arguments and we cannot
4761 allow reordering of reads from function arguments with stores
4762 to outgoing arguments of sibling calls. We do, however, want
4763 to record the alignment of the stack slot. */
4764 /* ALIGN may well be better aligned than TYPE, e.g. due to
4765 PARM_BOUNDARY. Assume the caller isn't lying. */
4766 set_mem_align (dest, align);
4768 emit_move_insn (dest, x);
4772 /* Move the partial arguments into the registers and any overlapping
4773 values that we moved into the pseudos in tmp_regs. */
4774 if (partial > 0 && reg != 0)
4776 /* Handle calls that pass values in multiple non-contiguous locations.
4777 The Irix 6 ABI has examples of this. */
4778 if (GET_CODE (reg) == PARALLEL)
4779 emit_group_load (reg, x, type, -1);
4780 else
4782 gcc_assert (partial % UNITS_PER_WORD == 0);
4783 move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);
4785 for (int i = 0; i < overlapping; i++)
4786 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
4787 + nregs - overlapping + i),
4788 tmp_regs[i]);
4793 if (maybe_ne (extra, 0) && args_addr == 0 && where_pad == stack_direction)
4794 anti_adjust_stack (gen_int_mode (extra, Pmode));
4796 if (alignment_pad && args_addr == 0)
4797 anti_adjust_stack (alignment_pad);
4799 return true;
4802 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4803 operations. */
4805 static rtx
4806 get_subtarget (rtx x)
4808 return (optimize
4809 || x == 0
4810 /* Only registers can be subtargets. */
4811 || !REG_P (x)
4812 /* Don't use hard regs to avoid extending their life. */
4813 || REGNO (x) < FIRST_PSEUDO_REGISTER
4814 ? 0 : x);
4817 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4818 FIELD is a bitfield. Returns true if the optimization was successful,
4819 and there's nothing else to do. */
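/* A typical case is toggling a single-bit flag in place, e.g. s.f ^= 1 with
   struct { unsigned f : 1; ... } s, which can be done with one XOR on the
   word (or smaller unit) containing the field instead of an
   extract/modify/insert sequence.  */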
4821 static bool
4822 optimize_bitfield_assignment_op (poly_uint64 pbitsize,
4823 poly_uint64 pbitpos,
4824 poly_uint64 pbitregion_start,
4825 poly_uint64 pbitregion_end,
4826 machine_mode mode1, rtx str_rtx,
4827 tree to, tree src, bool reverse)
4829 /* str_mode is not guaranteed to be a scalar mode. */
4830 machine_mode str_mode = GET_MODE (str_rtx);
4831 unsigned int str_bitsize;
4832 tree op0, op1;
4833 rtx value, result;
4834 optab binop;
4835 gimple *srcstmt;
4836 enum tree_code code;
4838 unsigned HOST_WIDE_INT bitsize, bitpos, bitregion_start, bitregion_end;
4839 if (mode1 != VOIDmode
4840 || !pbitsize.is_constant (&bitsize)
4841 || !pbitpos.is_constant (&bitpos)
4842 || !pbitregion_start.is_constant (&bitregion_start)
4843 || !pbitregion_end.is_constant (&bitregion_end)
4844 || bitsize >= BITS_PER_WORD
4845 || !GET_MODE_BITSIZE (str_mode).is_constant (&str_bitsize)
4846 || str_bitsize > BITS_PER_WORD
4847 || TREE_SIDE_EFFECTS (to)
4848 || TREE_THIS_VOLATILE (to))
4849 return false;
4851 STRIP_NOPS (src);
4852 if (TREE_CODE (src) != SSA_NAME)
4853 return false;
4854 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4855 return false;
4857 srcstmt = get_gimple_for_ssa_name (src);
4858 if (!srcstmt
4859 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4860 return false;
4862 code = gimple_assign_rhs_code (srcstmt);
4864 op0 = gimple_assign_rhs1 (srcstmt);
4866 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4867 to find its initialization. Hopefully the initialization will
4868 be from a bitfield load. */
4869 if (TREE_CODE (op0) == SSA_NAME)
4871 gimple *op0stmt = get_gimple_for_ssa_name (op0);
4873 /* We want to eventually have OP0 be the same as TO, which
4874 should be a bitfield. */
4875 if (!op0stmt
4876 || !is_gimple_assign (op0stmt)
4877 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4878 return false;
4879 op0 = gimple_assign_rhs1 (op0stmt);
4882 op1 = gimple_assign_rhs2 (srcstmt);
4884 if (!operand_equal_p (to, op0, 0))
4885 return false;
4887 if (MEM_P (str_rtx))
4889 unsigned HOST_WIDE_INT offset1;
4891 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4892 str_bitsize = BITS_PER_WORD;
4894 scalar_int_mode best_mode;
4895 if (!get_best_mode (bitsize, bitpos, bitregion_start, bitregion_end,
4896 MEM_ALIGN (str_rtx), str_bitsize, false, &best_mode))
4897 return false;
4898 str_mode = best_mode;
4899 str_bitsize = GET_MODE_BITSIZE (best_mode);
4901 offset1 = bitpos;
4902 bitpos %= str_bitsize;
4903 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4904 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4906 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4907 return false;
4909 /* If the bit field covers the whole REG/MEM, store_field
4910 will likely generate better code. */
4911 if (bitsize >= str_bitsize)
4912 return false;
4914 /* We can't handle fields split across multiple entities. */
4915 if (bitpos + bitsize > str_bitsize)
4916 return false;
4918 if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4919 bitpos = str_bitsize - bitpos - bitsize;
4921 switch (code)
4923 case PLUS_EXPR:
4924 case MINUS_EXPR:
4925 /* For now, just optimize the case of the topmost bitfield
4926 where we don't need to do any masking and also
4927 1 bit bitfields where xor can be used.
4928 We might win by one instruction for the other bitfields
4929 too if insv/extv instructions aren't used, so that
4930 can be added later. */
4931 if ((reverse || bitpos + bitsize != str_bitsize)
4932 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4933 break;
4935 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4936 value = convert_modes (str_mode,
4937 TYPE_MODE (TREE_TYPE (op1)), value,
4938 TYPE_UNSIGNED (TREE_TYPE (op1)));
4940 /* We may be accessing data outside the field, which means
4941 we can alias adjacent data. */
4942 if (MEM_P (str_rtx))
4944 str_rtx = shallow_copy_rtx (str_rtx);
4945 set_mem_alias_set (str_rtx, 0);
4946 set_mem_expr (str_rtx, 0);
4949 if (bitsize == 1 && (reverse || bitpos + bitsize != str_bitsize))
4951 value = expand_and (str_mode, value, const1_rtx, NULL);
4952 binop = xor_optab;
4954 else
4955 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4957 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4958 if (reverse)
4959 value = flip_storage_order (str_mode, value);
4960 result = expand_binop (str_mode, binop, str_rtx,
4961 value, str_rtx, 1, OPTAB_WIDEN);
4962 if (result != str_rtx)
4963 emit_move_insn (str_rtx, result);
4964 return true;
4966 case BIT_IOR_EXPR:
4967 case BIT_XOR_EXPR:
4968 if (TREE_CODE (op1) != INTEGER_CST)
4969 break;
4970 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4971 value = convert_modes (str_mode,
4972 TYPE_MODE (TREE_TYPE (op1)), value,
4973 TYPE_UNSIGNED (TREE_TYPE (op1)));
4975 /* We may be accessing data outside the field, which means
4976 we can alias adjacent data. */
4977 if (MEM_P (str_rtx))
4979 str_rtx = shallow_copy_rtx (str_rtx);
4980 set_mem_alias_set (str_rtx, 0);
4981 set_mem_expr (str_rtx, 0);
4984 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4985 if (bitpos + bitsize != str_bitsize)
4987 rtx mask = gen_int_mode ((HOST_WIDE_INT_1U << bitsize) - 1,
4988 str_mode);
4989 value = expand_and (str_mode, value, mask, NULL_RTX);
4991 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4992 if (reverse)
4993 value = flip_storage_order (str_mode, value);
4994 result = expand_binop (str_mode, binop, str_rtx,
4995 value, str_rtx, 1, OPTAB_WIDEN);
4996 if (result != str_rtx)
4997 emit_move_insn (str_rtx, result);
4998 return true;
5000 default:
5001 break;
5004 return false;
5007 /* In the C++ memory model, consecutive bit fields in a structure are
5008 considered one memory location.
5010 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
5011 returns the bit range of consecutive bits in which this COMPONENT_REF
5012 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
5013 and *OFFSET may be adjusted in the process.
5015 If the access does not need to be restricted, 0 is returned in both
5016 *BITSTART and *BITEND. */
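/* For example, with struct { int a : 3; int b : 5; char c; } the two bit
   fields typically share one byte-sized representative, so a store to b is
   limited to that byte and must not touch c.  */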
5018 void
5019 get_bit_range (poly_uint64_pod *bitstart, poly_uint64_pod *bitend, tree exp,
5020 poly_int64_pod *bitpos, tree *offset)
5022 poly_int64 bitoffset;
5023 tree field, repr;
5025 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
5027 field = TREE_OPERAND (exp, 1);
5028 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
5029 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
5030 need to limit the range we can access. */
5031 if (!repr)
5033 *bitstart = *bitend = 0;
5034 return;
5037 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
5038 part of a larger bit field, then the representative does not serve any
5039 useful purpose. This can occur in Ada. */
5040 if (handled_component_p (TREE_OPERAND (exp, 0)))
5042 machine_mode rmode;
5043 poly_int64 rbitsize, rbitpos;
5044 tree roffset;
5045 int unsignedp, reversep, volatilep = 0;
5046 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
5047 &roffset, &rmode, &unsignedp, &reversep,
5048 &volatilep);
5049 if (!multiple_p (rbitpos, BITS_PER_UNIT))
5051 *bitstart = *bitend = 0;
5052 return;
5056 /* Compute the adjustment to bitpos from the offset of the field
5057 relative to the representative. DECL_FIELD_OFFSET of field and
5058 repr are the same by construction if they are not constants,
5059 see finish_bitfield_layout. */
5060 poly_uint64 field_offset, repr_offset;
5061 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
5062 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
5063 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
5064 else
5065 bitoffset = 0;
5066 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
5067 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
5069 /* If the adjustment is larger than bitpos, we would have a negative bit
5070 position for the lower bound and this may wreak havoc later. Adjust
5071 offset and bitpos to make the lower bound non-negative in that case. */
5072 if (maybe_gt (bitoffset, *bitpos))
5074 poly_int64 adjust_bits = upper_bound (bitoffset, *bitpos) - *bitpos;
5075 poly_int64 adjust_bytes = exact_div (adjust_bits, BITS_PER_UNIT);
5077 *bitpos += adjust_bits;
5078 if (*offset == NULL_TREE)
5079 *offset = size_int (-adjust_bytes);
5080 else
5081 *offset = size_binop (MINUS_EXPR, *offset, size_int (adjust_bytes));
5082 *bitstart = 0;
5084 else
5085 *bitstart = *bitpos - bitoffset;
5087 *bitend = *bitstart + tree_to_poly_uint64 (DECL_SIZE (repr)) - 1;
5090 /* Returns true if BASE is a DECL that does not reside in memory and
5091 has non-BLKmode. DECL_RTL must not be a MEM; if
5092 DECL_RTL was not set yet, return false. */
5094 static inline bool
5095 non_mem_decl_p (tree base)
5097 if (!DECL_P (base)
5098 || TREE_ADDRESSABLE (base)
5099 || DECL_MODE (base) == BLKmode)
5100 return false;
5102 if (!DECL_RTL_SET_P (base))
5103 return false;
5105 return (!MEM_P (DECL_RTL (base)));
5108 /* Returns true if REF refers to an object that does not
5109 reside in memory and has non-BLKmode. */
5111 static inline bool
5112 mem_ref_refers_to_non_mem_p (tree ref)
5114 tree base;
5116 if (TREE_CODE (ref) == MEM_REF
5117 || TREE_CODE (ref) == TARGET_MEM_REF)
5119 tree addr = TREE_OPERAND (ref, 0);
5121 if (TREE_CODE (addr) != ADDR_EXPR)
5122 return false;
5124 base = TREE_OPERAND (addr, 0);
5126 else
5127 base = ref;
5129 return non_mem_decl_p (base);
5132 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
5133 is true, try generating a nontemporal store. */
5135 void
5136 expand_assignment (tree to, tree from, bool nontemporal)
5138 rtx to_rtx = 0;
5139 rtx result;
5140 machine_mode mode;
5141 unsigned int align;
5142 enum insn_code icode;
5144 /* Don't crash if the lhs of the assignment was erroneous. */
5145 if (TREE_CODE (to) == ERROR_MARK)
5147 expand_normal (from);
5148 return;
5151 /* Optimize away no-op moves without side-effects. */
5152 if (operand_equal_p (to, from, 0))
5153 return;
5155 /* Handle misaligned stores. */
5156 mode = TYPE_MODE (TREE_TYPE (to));
5157 if ((TREE_CODE (to) == MEM_REF
5158 || TREE_CODE (to) == TARGET_MEM_REF
5159 || DECL_P (to))
5160 && mode != BLKmode
5161 && !mem_ref_refers_to_non_mem_p (to)
5162 && ((align = get_object_alignment (to))
5163 < GET_MODE_ALIGNMENT (mode))
5164 && (((icode = optab_handler (movmisalign_optab, mode))
5165 != CODE_FOR_nothing)
5166 || targetm.slow_unaligned_access (mode, align)))
5168 rtx reg, mem;
5170 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
5171 reg = force_not_mem (reg);
5172 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5173 if (TREE_CODE (to) == MEM_REF && REF_REVERSE_STORAGE_ORDER (to))
5174 reg = flip_storage_order (mode, reg);
5176 if (icode != CODE_FOR_nothing)
5178 class expand_operand ops[2];
5180 create_fixed_operand (&ops[0], mem);
5181 create_input_operand (&ops[1], reg, mode);
5182 /* The movmisalign<mode> pattern cannot fail, else the assignment
5183 would silently be omitted. */
5184 expand_insn (icode, 2, ops);
5186 else
5187 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg,
5188 false);
5189 return;
5192 /* Assignment of a structure component needs special treatment
5193 if the structure component's rtx is not simply a MEM.
5194 Assignment of an array element at a constant index, and assignment of
5195 an array element in an unaligned packed structure field, has the same
5196 problem. Same for (partially) storing into a non-memory object. */
5197 if (handled_component_p (to)
5198 || (TREE_CODE (to) == MEM_REF
5199 && (REF_REVERSE_STORAGE_ORDER (to)
5200 || mem_ref_refers_to_non_mem_p (to)))
5201 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
5203 machine_mode mode1;
5204 poly_int64 bitsize, bitpos;
5205 poly_uint64 bitregion_start = 0;
5206 poly_uint64 bitregion_end = 0;
5207 tree offset;
5208 int unsignedp, reversep, volatilep = 0;
5209 tree tem;
5211 push_temp_slots ();
5212 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
5213 &unsignedp, &reversep, &volatilep);
5215 /* Make sure bitpos is not negative; it can wreak havoc later. */
5216 if (maybe_lt (bitpos, 0))
5218 gcc_assert (offset == NULL_TREE);
5219 offset = size_int (bits_to_bytes_round_down (bitpos));
5220 bitpos = num_trailing_bits (bitpos);
5223 if (TREE_CODE (to) == COMPONENT_REF
5224 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
5225 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
5226 /* The C++ memory model naturally applies to byte-aligned fields.
5227 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
5228 BITSIZE are not byte-aligned, there is no need to limit the range
5229 we can access. This can occur with packed structures in Ada. */
5230 else if (maybe_gt (bitsize, 0)
5231 && multiple_p (bitsize, BITS_PER_UNIT)
5232 && multiple_p (bitpos, BITS_PER_UNIT))
5234 bitregion_start = bitpos;
5235 bitregion_end = bitpos + bitsize - 1;
5238 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
5240 /* If the field has a mode, we want to access it in the
5241 field's mode, not the computed mode.
5242 If a MEM has VOIDmode (external with incomplete type),
5243 use BLKmode for it instead. */
5244 if (MEM_P (to_rtx))
5246 if (mode1 != VOIDmode)
5247 to_rtx = adjust_address (to_rtx, mode1, 0);
5248 else if (GET_MODE (to_rtx) == VOIDmode)
5249 to_rtx = adjust_address (to_rtx, BLKmode, 0);
5252 if (offset != 0)
5254 machine_mode address_mode;
5255 rtx offset_rtx;
5257 if (!MEM_P (to_rtx))
5259 /* We can get constant negative offsets into arrays with broken
5260 user code. Translate this to a trap instead of ICEing. */
5261 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
5262 expand_builtin_trap ();
5263 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
5266 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
5267 address_mode = get_address_mode (to_rtx);
5268 if (GET_MODE (offset_rtx) != address_mode)
5270 /* We cannot be sure that the RTL in offset_rtx is valid outside
5271 of a memory address context, so force it into a register
5272 before attempting to convert it to the desired mode. */
5273 offset_rtx = force_operand (offset_rtx, NULL_RTX);
5274 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5277 /* If we have an expression in OFFSET_RTX and a non-zero
5278 byte offset in BITPOS, adding the byte offset before the
5279 OFFSET_RTX results in better intermediate code, which makes
5280 later rtl optimization passes perform better.
5282 We prefer intermediate code like this:
5284 r124:DI=r123:DI+0x18
5285 [r124:DI]=r121:DI
5287 ... instead of ...
5289 r124:DI=r123:DI+0x10
5290 [r124:DI+0x8]=r121:DI
5292 This is only done for aligned data values, as these can
5293 be expected to result in single move instructions. */
5294 poly_int64 bytepos;
5295 if (mode1 != VOIDmode
5296 && maybe_ne (bitpos, 0)
5297 && maybe_gt (bitsize, 0)
5298 && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
5299 && multiple_p (bitpos, bitsize)
5300 && multiple_p (bitsize, GET_MODE_ALIGNMENT (mode1))
5301 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
5303 to_rtx = adjust_address (to_rtx, mode1, bytepos);
5304 bitregion_start = 0;
5305 if (known_ge (bitregion_end, poly_uint64 (bitpos)))
5306 bitregion_end -= bitpos;
5307 bitpos = 0;
5310 to_rtx = offset_address (to_rtx, offset_rtx,
5311 highest_pow2_factor_for_target (to,
5312 offset));
5315 /* No action is needed if the target is not a memory and the field
5316 lies completely outside that target. This can occur if the source
5317 code contains an out-of-bounds access to a small array. */
5318 if (!MEM_P (to_rtx)
5319 && GET_MODE (to_rtx) != BLKmode
5320 && known_ge (bitpos, GET_MODE_PRECISION (GET_MODE (to_rtx))))
5322 expand_normal (from);
5323 result = NULL;
5325 /* Handle expand_expr of a complex value returning a CONCAT. */
5326 else if (GET_CODE (to_rtx) == CONCAT)
5328 machine_mode to_mode = GET_MODE (to_rtx);
5329 gcc_checking_assert (COMPLEX_MODE_P (to_mode));
5330 poly_int64 mode_bitsize = GET_MODE_BITSIZE (to_mode);
5331 unsigned short inner_bitsize = GET_MODE_UNIT_BITSIZE (to_mode);
5332 if (TYPE_MODE (TREE_TYPE (from)) == to_mode
5333 && known_eq (bitpos, 0)
5334 && known_eq (bitsize, mode_bitsize))
5335 result = store_expr (from, to_rtx, false, nontemporal, reversep);
5336 else if (TYPE_MODE (TREE_TYPE (from)) == GET_MODE_INNER (to_mode)
5337 && known_eq (bitsize, inner_bitsize)
5338 && (known_eq (bitpos, 0)
5339 || known_eq (bitpos, inner_bitsize)))
5340 result = store_expr (from, XEXP (to_rtx, maybe_ne (bitpos, 0)),
5341 false, nontemporal, reversep);
5342 else if (known_le (bitpos + bitsize, inner_bitsize))
5343 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
5344 bitregion_start, bitregion_end,
5345 mode1, from, get_alias_set (to),
5346 nontemporal, reversep);
5347 else if (known_ge (bitpos, inner_bitsize))
5348 result = store_field (XEXP (to_rtx, 1), bitsize,
5349 bitpos - inner_bitsize,
5350 bitregion_start, bitregion_end,
5351 mode1, from, get_alias_set (to),
5352 nontemporal, reversep);
5353 else if (known_eq (bitpos, 0) && known_eq (bitsize, mode_bitsize))
5355 result = expand_normal (from);
5356 if (GET_CODE (result) == CONCAT)
5358 to_mode = GET_MODE_INNER (to_mode);
5359 machine_mode from_mode = GET_MODE_INNER (GET_MODE (result));
5360 rtx from_real
5361 = simplify_gen_subreg (to_mode, XEXP (result, 0),
5362 from_mode, 0);
5363 rtx from_imag
5364 = simplify_gen_subreg (to_mode, XEXP (result, 1),
5365 from_mode, 0);
5366 if (!from_real || !from_imag)
5367 goto concat_store_slow;
5368 emit_move_insn (XEXP (to_rtx, 0), from_real);
5369 emit_move_insn (XEXP (to_rtx, 1), from_imag);
5371 else
5373 machine_mode from_mode
5374 = GET_MODE (result) == VOIDmode
5375 ? TYPE_MODE (TREE_TYPE (from))
5376 : GET_MODE (result);
5377 rtx from_rtx;
5378 if (MEM_P (result))
5379 from_rtx = change_address (result, to_mode, NULL_RTX);
5380 else
5381 from_rtx
5382 = simplify_gen_subreg (to_mode, result, from_mode, 0);
5383 if (from_rtx)
5385 emit_move_insn (XEXP (to_rtx, 0),
5386 read_complex_part (from_rtx, false));
5387 emit_move_insn (XEXP (to_rtx, 1),
5388 read_complex_part (from_rtx, true));
5390 else
5392 to_mode = GET_MODE_INNER (to_mode);
5393 rtx from_real
5394 = simplify_gen_subreg (to_mode, result, from_mode, 0);
5395 rtx from_imag
5396 = simplify_gen_subreg (to_mode, result, from_mode,
5397 GET_MODE_SIZE (to_mode));
5398 if (!from_real || !from_imag)
5399 goto concat_store_slow;
5400 emit_move_insn (XEXP (to_rtx, 0), from_real);
5401 emit_move_insn (XEXP (to_rtx, 1), from_imag);
5405 else
5407 concat_store_slow:;
5408 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
5409 GET_MODE_SIZE (GET_MODE (to_rtx)));
5410 write_complex_part (temp, XEXP (to_rtx, 0), false);
5411 write_complex_part (temp, XEXP (to_rtx, 1), true);
5412 result = store_field (temp, bitsize, bitpos,
5413 bitregion_start, bitregion_end,
5414 mode1, from, get_alias_set (to),
5415 nontemporal, reversep);
5416 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
5417 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
5420 /* For calls to functions returning variable length structures, if TO_RTX
5421 is not a MEM, go through a MEM because we must not create temporaries
5422 of the VLA type. */
5423 else if (!MEM_P (to_rtx)
5424 && TREE_CODE (from) == CALL_EXPR
5425 && COMPLETE_TYPE_P (TREE_TYPE (from))
5426 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) != INTEGER_CST)
5428 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
5429 GET_MODE_SIZE (GET_MODE (to_rtx)));
5430 result = store_field (temp, bitsize, bitpos, bitregion_start,
5431 bitregion_end, mode1, from, get_alias_set (to),
5432 nontemporal, reversep);
5433 emit_move_insn (to_rtx, temp);
5435 else
5437 if (MEM_P (to_rtx))
5439 /* If the field is at offset zero, we could have been given the
5440 DECL_RTX of the parent struct. Don't munge it. */
5441 to_rtx = shallow_copy_rtx (to_rtx);
5442 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
5443 if (volatilep)
5444 MEM_VOLATILE_P (to_rtx) = 1;
5447 gcc_checking_assert (known_ge (bitpos, 0));
5448 if (optimize_bitfield_assignment_op (bitsize, bitpos,
5449 bitregion_start, bitregion_end,
5450 mode1, to_rtx, to, from,
5451 reversep))
5452 result = NULL;
5453 else
5454 result = store_field (to_rtx, bitsize, bitpos,
5455 bitregion_start, bitregion_end,
5456 mode1, from, get_alias_set (to),
5457 nontemporal, reversep);
5460 if (result)
5461 preserve_temp_slots (result);
5462 pop_temp_slots ();
5463 return;
5466 /* If the rhs is a function call and its value is not an aggregate,
5467 call the function before we start to compute the lhs.
5468 This is needed for correct code for cases such as
5469 val = setjmp (buf) on machines where reference to val
5470 requires loading up part of an address in a separate insn.
5472 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5473 since it might be a promoted variable where the zero- or sign- extension
5474 needs to be done. Handling this in the normal way is safe because no
5475 computation is done before the call. The same is true for SSA names. */
5476 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5477 && COMPLETE_TYPE_P (TREE_TYPE (from))
5478 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5479 && ! (((VAR_P (to)
5480 || TREE_CODE (to) == PARM_DECL
5481 || TREE_CODE (to) == RESULT_DECL)
5482 && REG_P (DECL_RTL (to)))
5483 || TREE_CODE (to) == SSA_NAME))
5485 rtx value;
5487 push_temp_slots ();
5488 value = expand_normal (from);
5490 if (to_rtx == 0)
5491 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5493 /* Handle calls that return values in multiple non-contiguous locations.
5494 The Irix 6 ABI has examples of this. */
5495 if (GET_CODE (to_rtx) == PARALLEL)
5497 if (GET_CODE (value) == PARALLEL)
5498 emit_group_move (to_rtx, value);
5499 else
5500 emit_group_load (to_rtx, value, TREE_TYPE (from),
5501 int_size_in_bytes (TREE_TYPE (from)));
5503 else if (GET_CODE (value) == PARALLEL)
5504 emit_group_store (to_rtx, value, TREE_TYPE (from),
5505 int_size_in_bytes (TREE_TYPE (from)));
5506 else if (GET_MODE (to_rtx) == BLKmode)
5508 /* Handle calls that return BLKmode values in registers. */
5509 if (REG_P (value))
5510 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5511 else
5512 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5514 else
5516 if (POINTER_TYPE_P (TREE_TYPE (to)))
5517 value = convert_memory_address_addr_space
5518 (as_a <scalar_int_mode> (GET_MODE (to_rtx)), value,
5519 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5521 emit_move_insn (to_rtx, value);
5524 preserve_temp_slots (to_rtx);
5525 pop_temp_slots ();
5526 return;
5529 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5530 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5532 /* Don't move directly into a return register. */
5533 if (TREE_CODE (to) == RESULT_DECL
5534 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5536 rtx temp;
5538 push_temp_slots ();
5540 /* If the source is itself a return value, it still is in a pseudo at
5541 this point so we can move it back to the return register directly. */
5542 if (REG_P (to_rtx)
5543 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5544 && TREE_CODE (from) != CALL_EXPR)
5545 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5546 else
5547 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5549 /* Handle calls that return values in multiple non-contiguous locations.
5550 The Irix 6 ABI has examples of this. */
5551 if (GET_CODE (to_rtx) == PARALLEL)
5553 if (GET_CODE (temp) == PARALLEL)
5554 emit_group_move (to_rtx, temp);
5555 else
5556 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5557 int_size_in_bytes (TREE_TYPE (from)));
5559 else if (temp)
5560 emit_move_insn (to_rtx, temp);
5562 preserve_temp_slots (to_rtx);
5563 pop_temp_slots ();
5564 return;
5567 /* In case we are returning the contents of an object which overlaps
5568 the place the value is being stored, use a safe function when copying
5569 a value through a pointer into a structure value return block. */
5570 if (TREE_CODE (to) == RESULT_DECL
5571 && TREE_CODE (from) == INDIRECT_REF
5572 && ADDR_SPACE_GENERIC_P
5573 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5574 && refs_may_alias_p (to, from)
5575 && cfun->returns_struct
5576 && !cfun->returns_pcc_struct)
5578 rtx from_rtx, size;
5580 push_temp_slots ();
5581 size = expr_size (from);
5582 from_rtx = expand_normal (from);
5584 emit_block_move_via_libcall (XEXP (to_rtx, 0), XEXP (from_rtx, 0), size);
5586 preserve_temp_slots (to_rtx);
5587 pop_temp_slots ();
5588 return;
5591 /* Compute FROM and store the value in the rtx we got. */
5593 push_temp_slots ();
5594 result = store_expr (from, to_rtx, 0, nontemporal, false);
5595 preserve_temp_slots (result);
5596 pop_temp_slots ();
5597 return;
5600 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5601 succeeded, false otherwise. */
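/* Illustration of the mechanism below: the store can only succeed when the
   target provides a storent pattern for the mode of TO, i.e. when

       optab_handler (storent_optab, GET_MODE (to)) != CODE_FOR_nothing;

   callers such as store_expr rely on a false return value here to fall back
   to an ordinary move.  */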
5603 bool
5604 emit_storent_insn (rtx to, rtx from)
5606 class expand_operand ops[2];
5607 machine_mode mode = GET_MODE (to);
5608 enum insn_code code = optab_handler (storent_optab, mode);
5610 if (code == CODE_FOR_nothing)
5611 return false;
5613 create_fixed_operand (&ops[0], to);
5614 create_input_operand (&ops[1], from, mode);
5615 return maybe_expand_insn (code, 2, ops);
5618 /* Helper function for store_expr when storing a STRING_CST. */
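/* For example, with an 8-byte STRING_CST (TREE_STRING_LENGTH == 8) and a
   4-byte MODE, a read at OFFSET 6 copies the last two bytes of the string,
   zero-fills the remaining two bytes and hands the buffer to c_readstr,
   while a read at OFFSET 8 or beyond simply yields const0_rtx.  */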
5620 static rtx
5621 string_cst_read_str (void *data, HOST_WIDE_INT offset, scalar_int_mode mode)
5623 tree str = (tree) data;
5625 gcc_assert (offset >= 0);
5626 if (offset >= TREE_STRING_LENGTH (str))
5627 return const0_rtx;
5629 if ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
5630 > (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (str))
5632 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
5633 size_t l = TREE_STRING_LENGTH (str) - offset;
5634 memcpy (p, TREE_STRING_POINTER (str) + offset, l);
5635 memset (p + l, '\0', GET_MODE_SIZE (mode) - l);
5636 return c_readstr (p, mode, false);
5639 return c_readstr (TREE_STRING_POINTER (str) + offset, mode, false);
5642 /* Generate code for computing expression EXP,
5643 and storing the value into TARGET.
5645 If the mode is BLKmode then we may return TARGET itself.
5646 It turns out that in BLKmode it doesn't cause a problem,
5647 because C has no operators that could combine two different
5648 assignments into the same BLKmode object with different values
5649 with no sequence point. Will other languages need this to
5650 be more thorough?
5652 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5653 stack, and block moves may need to be treated specially.
5655 If NONTEMPORAL is true, try using a nontemporal store instruction.
5657 If REVERSE is true, the store is to be done in reverse order. */
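/* The common case, as used by the plain-assignment path of
   expand_assignment above, is

       store_expr (from, to_rtx, 0, nontemporal, false);

   i.e. no call-parameter handling and no storage-order reversal.  */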
5659 rtx
5660 store_expr (tree exp, rtx target, int call_param_p,
5661 bool nontemporal, bool reverse)
5663 rtx temp;
5664 rtx alt_rtl = NULL_RTX;
5665 location_t loc = curr_insn_location ();
5666 bool shortened_string_cst = false;
5668 if (VOID_TYPE_P (TREE_TYPE (exp)))
5670 /* C++ can generate ?: expressions with a throw expression in one
5671 branch and an rvalue in the other. Here, we resolve attempts to
5672 store the throw expression's nonexistent result. */
5673 gcc_assert (!call_param_p);
5674 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5675 return NULL_RTX;
5677 if (TREE_CODE (exp) == COMPOUND_EXPR)
5679 /* Perform first part of compound expression, then assign from second
5680 part. */
5681 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5682 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5683 return store_expr (TREE_OPERAND (exp, 1), target,
5684 call_param_p, nontemporal, reverse);
5686 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5688 /* For conditional expression, get safe form of the target. Then
5689 test the condition, doing the appropriate assignment on either
5690 side. This avoids the creation of unnecessary temporaries.
5691 For non-BLKmode, it is more efficient not to do this. */
5693 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5695 do_pending_stack_adjust ();
5696 NO_DEFER_POP;
5697 jumpifnot (TREE_OPERAND (exp, 0), lab1,
5698 profile_probability::uninitialized ());
5699 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5700 nontemporal, reverse);
5701 emit_jump_insn (targetm.gen_jump (lab2));
5702 emit_barrier ();
5703 emit_label (lab1);
5704 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5705 nontemporal, reverse);
5706 emit_label (lab2);
5707 OK_DEFER_POP;
5709 return NULL_RTX;
5711 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5712 /* If this is a scalar in a register that is stored in a wider mode
5713 than the declared mode, compute the result into its declared mode
5714 and then convert to the wider mode. Our value is the computed
5715 expression. */
5717 rtx inner_target = 0;
5718 scalar_int_mode outer_mode = subreg_unpromoted_mode (target);
5719 scalar_int_mode inner_mode = subreg_promoted_mode (target);
5721 /* We can do the conversion inside EXP, which will often result
5722 in some optimizations. Do the conversion in two steps: first
5723 change the signedness, if needed, then the extension. But don't
5724 do this if the type of EXP is a subtype of something else
5725 since then the conversion might involve more than just
5726 converting modes. */
5727 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5728 && TREE_TYPE (TREE_TYPE (exp)) == 0
5729 && GET_MODE_PRECISION (outer_mode)
5730 == TYPE_PRECISION (TREE_TYPE (exp)))
5732 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5733 TYPE_UNSIGNED (TREE_TYPE (exp))))
5735 /* Some types, e.g. Fortran's logical*4, won't have a signed
5736 version, so use the mode instead. */
5737 tree ntype
5738 = (signed_or_unsigned_type_for
5739 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5740 if (ntype == NULL)
5741 ntype = lang_hooks.types.type_for_mode
5742 (TYPE_MODE (TREE_TYPE (exp)),
5743 SUBREG_PROMOTED_SIGN (target));
5745 exp = fold_convert_loc (loc, ntype, exp);
5748 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5749 (inner_mode, SUBREG_PROMOTED_SIGN (target)),
5750 exp);
5752 inner_target = SUBREG_REG (target);
5755 temp = expand_expr (exp, inner_target, VOIDmode,
5756 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5759 /* If TEMP is a VOIDmode constant, use convert_modes to make
5760 sure that we properly convert it. */
5761 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5763 temp = convert_modes (outer_mode, TYPE_MODE (TREE_TYPE (exp)),
5764 temp, SUBREG_PROMOTED_SIGN (target));
5765 temp = convert_modes (inner_mode, outer_mode, temp,
5766 SUBREG_PROMOTED_SIGN (target));
5769 convert_move (SUBREG_REG (target), temp,
5770 SUBREG_PROMOTED_SIGN (target));
5772 return NULL_RTX;
5774 else if ((TREE_CODE (exp) == STRING_CST
5775 || (TREE_CODE (exp) == MEM_REF
5776 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5777 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5778 == STRING_CST
5779 && integer_zerop (TREE_OPERAND (exp, 1))))
5780 && !nontemporal && !call_param_p
5781 && MEM_P (target))
5783 /* Optimize initialization of an array with a STRING_CST. */
5784 HOST_WIDE_INT exp_len, str_copy_len;
5785 rtx dest_mem;
5786 tree str = TREE_CODE (exp) == STRING_CST
5787 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5789 exp_len = int_expr_size (exp);
5790 if (exp_len <= 0)
5791 goto normal_expr;
5793 if (TREE_STRING_LENGTH (str) <= 0)
5794 goto normal_expr;
5796 if (can_store_by_pieces (exp_len, string_cst_read_str, (void *) str,
5797 MEM_ALIGN (target), false))
5799 store_by_pieces (target, exp_len, string_cst_read_str, (void *) str,
5800 MEM_ALIGN (target), false, RETURN_BEGIN);
5801 return NULL_RTX;
5804 str_copy_len = TREE_STRING_LENGTH (str);
5805 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
5807 str_copy_len += STORE_MAX_PIECES - 1;
5808 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5810 if (str_copy_len >= exp_len)
5811 goto normal_expr;
5813 if (!can_store_by_pieces (str_copy_len, string_cst_read_str,
5814 (void *) str, MEM_ALIGN (target), false))
5815 goto normal_expr;
5817 dest_mem = store_by_pieces (target, str_copy_len, string_cst_read_str,
5818 (void *) str, MEM_ALIGN (target), false,
5819 RETURN_END);
5820 clear_storage (adjust_address_1 (dest_mem, BLKmode, 0, 1, 1, 0,
5821 exp_len - str_copy_len),
5822 GEN_INT (exp_len - str_copy_len), BLOCK_OP_NORMAL);
5823 return NULL_RTX;
5825 else
5827 rtx tmp_target;
5829 normal_expr:
5830 /* If we want to use a nontemporal or a reverse order store, force the
5831 value into a register first. */
5832 tmp_target = nontemporal || reverse ? NULL_RTX : target;
5833 tree rexp = exp;
5834 if (TREE_CODE (exp) == STRING_CST
5835 && tmp_target == target
5836 && GET_MODE (target) == BLKmode
5837 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
5839 rtx size = expr_size (exp);
5840 if (CONST_INT_P (size)
5841 && size != const0_rtx
5842 && (UINTVAL (size)
5843 > ((unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (exp) + 32)))
5845 /* If the STRING_CST has a much larger array type than
5846 TREE_STRING_LENGTH, only emit the TREE_STRING_LENGTH part of
5847 it into the rodata section as the code later on will use
5848 memset zero for the remainder anyway. See PR95052. */
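/* e.g. for a local

       char buf[1 << 20] = "x";

   only the two bytes of the STRING_CST (the 'x' and its terminating NUL)
   need to be emitted into the rodata section; the remainder of buf is
   cleared later on, as noted above.  */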
5849 tmp_target = NULL_RTX;
5850 rexp = copy_node (exp);
5851 tree index
5852 = build_index_type (size_int (TREE_STRING_LENGTH (exp) - 1));
5853 TREE_TYPE (rexp) = build_array_type (TREE_TYPE (TREE_TYPE (exp)),
5854 index);
5855 shortened_string_cst = true;
5858 temp = expand_expr_real (rexp, tmp_target, GET_MODE (target),
5859 (call_param_p
5860 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5861 &alt_rtl, false);
5862 if (shortened_string_cst)
5864 gcc_assert (MEM_P (temp));
5865 temp = change_address (temp, BLKmode, NULL_RTX);
5869 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5870 the same as that of TARGET, adjust the constant. This is needed, for
5871 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5872 only a word-sized value. */
5873 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5874 && TREE_CODE (exp) != ERROR_MARK
5875 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5877 gcc_assert (!shortened_string_cst);
5878 if (GET_MODE_CLASS (GET_MODE (target))
5879 != GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp)))
5880 && known_eq (GET_MODE_BITSIZE (GET_MODE (target)),
5881 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)))))
5883 rtx t = simplify_gen_subreg (GET_MODE (target), temp,
5884 TYPE_MODE (TREE_TYPE (exp)), 0);
5885 if (t)
5886 temp = t;
5888 if (GET_MODE (temp) == VOIDmode)
5889 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5890 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5893 /* If value was not generated in the target, store it there.
5894 Convert the value to TARGET's type first if necessary and emit the
5895 pending incrementations that have been queued when expanding EXP.
5896 Note that we cannot emit the whole queue blindly because this will
5897 effectively disable the POST_INC optimization later.
5899 If TEMP and TARGET compare equal according to rtx_equal_p, but
5900 one or both of them are volatile memory refs, we have to distinguish
5901 two cases:
5902 - expand_expr has used TARGET. In this case, we must not generate
5903 another copy. This can be detected by TARGET being equal according
5904 to == .
5905 - expand_expr has not used TARGET - that means that the source just
5906 happens to have the same RTX form. Since temp will have been created
5907 by expand_expr, it will compare unequal according to == .
5908 We must generate a copy in this case, to reach the correct number
5909 of volatile memory references. */
5911 if ((! rtx_equal_p (temp, target)
5912 || (temp != target && (side_effects_p (temp)
5913 || side_effects_p (target))))
5914 && TREE_CODE (exp) != ERROR_MARK
5915 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5916 but TARGET is not a valid memory reference, TEMP will differ
5917 from TARGET although it is really the same location. */
5918 && !(alt_rtl
5919 && rtx_equal_p (alt_rtl, target)
5920 && !side_effects_p (alt_rtl)
5921 && !side_effects_p (target))
5922 /* If there's nothing to copy, don't bother. Don't call
5923 expr_size unless necessary, because the expr_size hook of some
5924 front ends (C++) must not be given objects that are not
5925 supposed to be bit-copied or bit-initialized. */
5926 && expr_size (exp) != const0_rtx)
5928 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5930 gcc_assert (!shortened_string_cst);
5931 if (GET_MODE (target) == BLKmode)
5933 /* Handle calls that return BLKmode values in registers. */
5934 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5935 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5936 else
5937 store_bit_field (target,
5938 rtx_to_poly_int64 (expr_size (exp))
5939 * BITS_PER_UNIT,
5940 0, 0, 0, GET_MODE (temp), temp, reverse);
5942 else
5943 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5946 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5948 /* Handle copying a string constant into an array. The string
5949 constant may be shorter than the array. So copy just the string's
5950 actual length, and clear the rest. First get the size of the data
5951 type of the string, which is actually the size of the target. */
5952 rtx size = expr_size (exp);
5954 if (CONST_INT_P (size)
5955 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5956 emit_block_move (target, temp, size,
5957 (call_param_p
5958 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5959 else
5961 machine_mode pointer_mode
5962 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5963 machine_mode address_mode = get_address_mode (target);
5965 /* Compute the size of the data to copy from the string. */
5966 tree copy_size
5967 = size_binop_loc (loc, MIN_EXPR,
5968 make_tree (sizetype, size),
5969 size_int (TREE_STRING_LENGTH (exp)));
5970 rtx copy_size_rtx
5971 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5972 (call_param_p
5973 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5974 rtx_code_label *label = 0;
5976 /* Copy that much. */
5977 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5978 TYPE_UNSIGNED (sizetype));
5979 emit_block_move (target, temp, copy_size_rtx,
5980 (call_param_p
5981 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5983 /* Figure out how much is left in TARGET that we have to clear.
5984 Do all calculations in pointer_mode. */
5985 poly_int64 const_copy_size;
5986 if (poly_int_rtx_p (copy_size_rtx, &const_copy_size))
5988 size = plus_constant (address_mode, size, -const_copy_size);
5989 target = adjust_address (target, BLKmode, const_copy_size);
5991 else
5993 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5994 copy_size_rtx, NULL_RTX, 0,
5995 OPTAB_LIB_WIDEN);
5997 if (GET_MODE (copy_size_rtx) != address_mode)
5998 copy_size_rtx = convert_to_mode (address_mode,
5999 copy_size_rtx,
6000 TYPE_UNSIGNED (sizetype));
6002 target = offset_address (target, copy_size_rtx,
6003 highest_pow2_factor (copy_size));
6004 label = gen_label_rtx ();
6005 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
6006 GET_MODE (size), 0, label);
6009 if (size != const0_rtx)
6010 clear_storage (target, size, BLOCK_OP_NORMAL);
6012 if (label)
6013 emit_label (label);
6016 else if (shortened_string_cst)
6017 gcc_unreachable ();
6018 /* Handle calls that return values in multiple non-contiguous locations.
6019 The Irix 6 ABI has examples of this. */
6020 else if (GET_CODE (target) == PARALLEL)
6022 if (GET_CODE (temp) == PARALLEL)
6023 emit_group_move (target, temp);
6024 else
6025 emit_group_load (target, temp, TREE_TYPE (exp),
6026 int_size_in_bytes (TREE_TYPE (exp)));
6028 else if (GET_CODE (temp) == PARALLEL)
6029 emit_group_store (target, temp, TREE_TYPE (exp),
6030 int_size_in_bytes (TREE_TYPE (exp)));
6031 else if (GET_MODE (temp) == BLKmode)
6032 emit_block_move (target, temp, expr_size (exp),
6033 (call_param_p
6034 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
6035 /* If we emit a nontemporal store, there is nothing else to do. */
6036 else if (nontemporal && emit_storent_insn (target, temp))
6038 else
6040 if (reverse)
6041 temp = flip_storage_order (GET_MODE (target), temp);
6042 temp = force_operand (temp, target);
6043 if (temp != target)
6044 emit_move_insn (target, temp);
6047 else
6048 gcc_assert (!shortened_string_cst);
6050 return NULL_RTX;
6053 /* Return true if field F of structure TYPE is a flexible array. */
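/* i.e. the last field of the structure, declared with an incomplete array
   type as in

       struct S { int n; char data[]; };

   whose domain has a zero minimum and no maximum value.  */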
6055 static bool
6056 flexible_array_member_p (const_tree f, const_tree type)
6058 const_tree tf;
6060 tf = TREE_TYPE (f);
6061 return (DECL_CHAIN (f) == NULL
6062 && TREE_CODE (tf) == ARRAY_TYPE
6063 && TYPE_DOMAIN (tf)
6064 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
6065 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
6066 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
6067 && int_size_in_bytes (type) >= 0);
6070 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
6071 must have in order for it to completely initialize a value of type TYPE.
6072 Return -1 if the number isn't known.
6074 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
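/* For example, for

       struct S { int a; int b[3]; };

   count_type_elements (S, true) returns 2, the number of top-level elements
   a complete constructor must provide, while count_type_elements (S, false)
   returns 4, the total number of scalars (a plus the three elements of b).  */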
6076 static HOST_WIDE_INT
6077 count_type_elements (const_tree type, bool for_ctor_p)
6079 switch (TREE_CODE (type))
6081 case ARRAY_TYPE:
6083 tree nelts;
6085 nelts = array_type_nelts (type);
6086 if (nelts && tree_fits_uhwi_p (nelts))
6088 unsigned HOST_WIDE_INT n;
6090 n = tree_to_uhwi (nelts) + 1;
6091 if (n == 0 || for_ctor_p)
6092 return n;
6093 else
6094 return n * count_type_elements (TREE_TYPE (type), false);
6096 return for_ctor_p ? -1 : 1;
6099 case RECORD_TYPE:
6101 unsigned HOST_WIDE_INT n;
6102 tree f;
6104 n = 0;
6105 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
6106 if (TREE_CODE (f) == FIELD_DECL)
6108 if (!for_ctor_p)
6109 n += count_type_elements (TREE_TYPE (f), false);
6110 else if (!flexible_array_member_p (f, type))
6111 /* Don't count flexible arrays, which are not supposed
6112 to be initialized. */
6113 n += 1;
6116 return n;
6119 case UNION_TYPE:
6120 case QUAL_UNION_TYPE:
6122 tree f;
6123 HOST_WIDE_INT n, m;
6125 gcc_assert (!for_ctor_p);
6126 /* Estimate the number of scalars in each field and pick the
6127 maximum. Other estimates would do instead; the idea is simply
6128 to make sure that the estimate is not sensitive to the ordering
6129 of the fields. */
6130 n = 1;
6131 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
6132 if (TREE_CODE (f) == FIELD_DECL)
6134 m = count_type_elements (TREE_TYPE (f), false);
6135 /* If the field doesn't span the whole union, add an extra
6136 scalar for the rest. */
6137 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
6138 TYPE_SIZE (type)) != 1)
6139 m++;
6140 if (n < m)
6141 n = m;
6143 return n;
6146 case COMPLEX_TYPE:
6147 return 2;
6149 case VECTOR_TYPE:
6151 unsigned HOST_WIDE_INT nelts;
6152 if (TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
6153 return nelts;
6154 else
6155 return -1;
6158 case INTEGER_TYPE:
6159 case REAL_TYPE:
6160 case FIXED_POINT_TYPE:
6161 case ENUMERAL_TYPE:
6162 case BOOLEAN_TYPE:
6163 case POINTER_TYPE:
6164 case OFFSET_TYPE:
6165 case REFERENCE_TYPE:
6166 case NULLPTR_TYPE:
6167 return 1;
6169 case ERROR_MARK:
6170 return 0;
6172 case VOID_TYPE:
6173 case METHOD_TYPE:
6174 case FUNCTION_TYPE:
6175 case LANG_TYPE:
6176 default:
6177 gcc_unreachable ();
6181 /* Helper for categorize_ctor_elements. Identical interface. */
6183 static bool
6184 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
6185 HOST_WIDE_INT *p_unique_nz_elts,
6186 HOST_WIDE_INT *p_init_elts, bool *p_complete)
6188 unsigned HOST_WIDE_INT idx;
6189 HOST_WIDE_INT nz_elts, unique_nz_elts, init_elts, num_fields;
6190 tree value, purpose, elt_type;
6192 /* Whether CTOR is a valid constant initializer, in accordance with what
6193 initializer_constant_valid_p does. If inferred from the constructor
6194 elements, true until proven otherwise. */
6195 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
6196 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
6198 nz_elts = 0;
6199 unique_nz_elts = 0;
6200 init_elts = 0;
6201 num_fields = 0;
6202 elt_type = NULL_TREE;
6204 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
6206 HOST_WIDE_INT mult = 1;
6208 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
6210 tree lo_index = TREE_OPERAND (purpose, 0);
6211 tree hi_index = TREE_OPERAND (purpose, 1);
6213 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
6214 mult = (tree_to_uhwi (hi_index)
6215 - tree_to_uhwi (lo_index) + 1);
6217 num_fields += mult;
6218 elt_type = TREE_TYPE (value);
6220 switch (TREE_CODE (value))
6222 case CONSTRUCTOR:
6224 HOST_WIDE_INT nz = 0, unz = 0, ic = 0;
6226 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &unz,
6227 &ic, p_complete);
6229 nz_elts += mult * nz;
6230 unique_nz_elts += unz;
6231 init_elts += mult * ic;
6233 if (const_from_elts_p && const_p)
6234 const_p = const_elt_p;
6236 break;
6238 case INTEGER_CST:
6239 case REAL_CST:
6240 case FIXED_CST:
6241 if (!initializer_zerop (value))
6243 nz_elts += mult;
6244 unique_nz_elts++;
6246 init_elts += mult;
6247 break;
6249 case STRING_CST:
6250 nz_elts += mult * TREE_STRING_LENGTH (value);
6251 unique_nz_elts += TREE_STRING_LENGTH (value);
6252 init_elts += mult * TREE_STRING_LENGTH (value);
6253 break;
6255 case COMPLEX_CST:
6256 if (!initializer_zerop (TREE_REALPART (value)))
6258 nz_elts += mult;
6259 unique_nz_elts++;
6261 if (!initializer_zerop (TREE_IMAGPART (value)))
6263 nz_elts += mult;
6264 unique_nz_elts++;
6266 init_elts += 2 * mult;
6267 break;
6269 case VECTOR_CST:
6271 /* We can only construct constant-length vectors using
6272 CONSTRUCTOR. */
6273 unsigned int nunits = VECTOR_CST_NELTS (value).to_constant ();
6274 for (unsigned int i = 0; i < nunits; ++i)
6276 tree v = VECTOR_CST_ELT (value, i);
6277 if (!initializer_zerop (v))
6279 nz_elts += mult;
6280 unique_nz_elts++;
6282 init_elts += mult;
6285 break;
6287 default:
6289 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
6290 nz_elts += mult * tc;
6291 unique_nz_elts += tc;
6292 init_elts += mult * tc;
6294 if (const_from_elts_p && const_p)
6295 const_p
6296 = initializer_constant_valid_p (value,
6297 elt_type,
6298 TYPE_REVERSE_STORAGE_ORDER
6299 (TREE_TYPE (ctor)))
6300 != NULL_TREE;
6302 break;
6306 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
6307 num_fields, elt_type))
6308 *p_complete = false;
6310 *p_nz_elts += nz_elts;
6311 *p_unique_nz_elts += unique_nz_elts;
6312 *p_init_elts += init_elts;
6314 return const_p;
6317 /* Examine CTOR to discover:
6318 * how many scalar fields are set to nonzero values,
6319 and place it in *P_NZ_ELTS;
6320 * the same, but counting RANGE_EXPRs with a multiplier of 1 instead of
6321 high - low + 1 (this can be useful for callers to determine ctors
6322 that could be cheaply initialized with - perhaps nested - loops
6323 rather than copied from huge read-only data),
6324 and place it in *P_UNIQUE_NZ_ELTS;
6325 * how many scalar fields in total are in CTOR,
6326 and place it in *P_ELT_COUNT.
6327 * whether the constructor is complete -- in the sense that every
6328 meaningful byte is explicitly given a value --
6329 and place it in *P_COMPLETE.
6331 Return whether or not CTOR is a valid static constant initializer, the same
6332 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
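/* As a concrete example, the constructor for

       int a[8] = { 1, 0, 2 };

   yields *P_NZ_ELTS == 2 and *P_UNIQUE_NZ_ELTS == 2 (two nonzero scalars),
   *P_INIT_ELTS == 3 (three elements given explicitly), *P_COMPLETE == false
   (five elements are left implicit), and a true return value, since every
   element is a valid static initializer.  */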
6334 bool
6335 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
6336 HOST_WIDE_INT *p_unique_nz_elts,
6337 HOST_WIDE_INT *p_init_elts, bool *p_complete)
6339 *p_nz_elts = 0;
6340 *p_unique_nz_elts = 0;
6341 *p_init_elts = 0;
6342 *p_complete = true;
6344 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_unique_nz_elts,
6345 p_init_elts, p_complete);
6348 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
6349 of which had type LAST_TYPE. Each element was itself a complete
6350 initializer, in the sense that every meaningful byte was explicitly
6351 given a value. Return true if the same is true for the constructor
6352 as a whole. */
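/* For instance, on a target where int is the largest member,

       union U { char c; int i; } u = { .i = 0 };

   is complete (NUM_ELTS is 1 and the initialized member covers the whole
   union), whereas initializing only the char member leaves the tail of the
   union unspecified and the function returns false.  */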
6354 bool
6355 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
6356 const_tree last_type)
6358 if (TREE_CODE (type) == UNION_TYPE
6359 || TREE_CODE (type) == QUAL_UNION_TYPE)
6361 if (num_elts == 0)
6362 return false;
6364 gcc_assert (num_elts == 1 && last_type);
6366 /* ??? We could look at each element of the union, and find the
6367 largest element. Which would avoid comparing the size of the
6368 initialized element against any tail padding in the union.
6369 Doesn't seem worth the effort... */
6370 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
6373 return count_type_elements (type, true) == num_elts;
6376 /* Return 1 if EXP contains mostly (3/4) zeros. */
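/* e.g. the constructor { 0, 0, 0, 5 } is not considered mostly zero (one
   nonzero element out of four initialized ones is not strictly below the
   1/4 cutoff), whereas { 0, 0, 0, 0, 0, 0, 0, 5 } is.  */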
6378 static int
6379 mostly_zeros_p (const_tree exp)
6381 if (TREE_CODE (exp) == CONSTRUCTOR)
6383 HOST_WIDE_INT nz_elts, unz_elts, init_elts;
6384 bool complete_p;
6386 categorize_ctor_elements (exp, &nz_elts, &unz_elts, &init_elts,
6387 &complete_p);
6388 return !complete_p || nz_elts < init_elts / 4;
6391 return initializer_zerop (exp);
6394 /* Return 1 if EXP contains all zeros. */
6396 static int
6397 all_zeros_p (const_tree exp)
6399 if (TREE_CODE (exp) == CONSTRUCTOR)
6401 HOST_WIDE_INT nz_elts, unz_elts, init_elts;
6402 bool complete_p;
6404 categorize_ctor_elements (exp, &nz_elts, &unz_elts, &init_elts,
6405 &complete_p);
6406 return nz_elts == 0;
6409 return initializer_zerop (exp);
6412 /* Helper function for store_constructor.
6413 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
6414 CLEARED is as for store_constructor.
6415 ALIAS_SET is the alias set to use for any stores.
6416 If REVERSE is true, the store is to be done in reverse order.
6418 This provides a recursive shortcut back to store_constructor when it isn't
6419 necessary to go through store_field. This is so that we can pass through
6420 the cleared field to let store_constructor know that we may not have to
6421 clear a substructure if the outer structure has already been cleared. */
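/* The shortcut is only taken when both BITSIZE and BITPOS are multiples of
   BITS_PER_UNIT, e.g. a nested CONSTRUCTOR for a struct member at byte
   offset 4 and of size 8 bytes recurses into store_constructor directly,
   while a bit-field member still goes through store_field.  */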
6423 static void
6424 store_constructor_field (rtx target, poly_uint64 bitsize, poly_int64 bitpos,
6425 poly_uint64 bitregion_start,
6426 poly_uint64 bitregion_end,
6427 machine_mode mode,
6428 tree exp, int cleared,
6429 alias_set_type alias_set, bool reverse)
6431 poly_int64 bytepos;
6432 poly_uint64 bytesize;
6433 if (TREE_CODE (exp) == CONSTRUCTOR
6434 /* We can only call store_constructor recursively if the size and
6435 bit position are on a byte boundary. */
6436 && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
6437 && maybe_ne (bitsize, 0U)
6438 && multiple_p (bitsize, BITS_PER_UNIT, &bytesize)
6439 /* If we have a nonzero bitpos for a register target, then we just
6440 let store_field do the bitfield handling. This is unlikely to
6441 generate unnecessary clear instructions anyway. */
6442 && (known_eq (bitpos, 0) || MEM_P (target)))
6444 if (MEM_P (target))
6446 machine_mode target_mode = GET_MODE (target);
6447 if (target_mode != BLKmode
6448 && !multiple_p (bitpos, GET_MODE_ALIGNMENT (target_mode)))
6449 target_mode = BLKmode;
6450 target = adjust_address (target, target_mode, bytepos);
6454 /* Update the alias set, if required. */
6455 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
6456 && MEM_ALIAS_SET (target) != 0)
6458 target = copy_rtx (target);
6459 set_mem_alias_set (target, alias_set);
6462 store_constructor (exp, target, cleared, bytesize, reverse);
6464 else
6465 store_field (target, bitsize, bitpos, bitregion_start, bitregion_end, mode,
6466 exp, alias_set, false, reverse);
6470 /* Returns the number of FIELD_DECLs in TYPE. */
6472 static int
6473 fields_length (const_tree type)
6475 tree t = TYPE_FIELDS (type);
6476 int count = 0;
6478 for (; t; t = DECL_CHAIN (t))
6479 if (TREE_CODE (t) == FIELD_DECL)
6480 ++count;
6482 return count;
6486 /* Store the value of constructor EXP into the rtx TARGET.
6487 TARGET is either a REG or a MEM; we know it cannot conflict, since
6488 safe_from_p has been called.
6489 CLEARED is true if TARGET is known to have been zeroed.
6490 SIZE is the number of bytes of TARGET we are allowed to modify: this
6491 may not be the same as the size of EXP if we are assigning to a field
6492 which has been packed to exclude padding bits.
6493 If REVERSE is true, the store is to be done in reverse order. */
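/* For instance, for

       struct S { int a, b, c, d; } s = { .d = 7 };

   the constructor supplies fewer elements than the struct has fields, so
   the RECORD_TYPE code below clears the whole object first and then stores
   only the explicitly initialized field.  */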
6495 static void
6496 store_constructor (tree exp, rtx target, int cleared, poly_int64 size,
6497 bool reverse)
6499 tree type = TREE_TYPE (exp);
6500 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
6501 poly_int64 bitregion_end = known_gt (size, 0) ? size * BITS_PER_UNIT - 1 : 0;
6503 switch (TREE_CODE (type))
6505 case RECORD_TYPE:
6506 case UNION_TYPE:
6507 case QUAL_UNION_TYPE:
6509 unsigned HOST_WIDE_INT idx;
6510 tree field, value;
6512 /* The storage order is specified for every aggregate type. */
6513 reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6515 /* If size is zero or the target is already cleared, do nothing. */
6516 if (known_eq (size, 0) || cleared)
6517 cleared = 1;
6518 /* We either clear the aggregate or indicate the value is dead. */
6519 else if ((TREE_CODE (type) == UNION_TYPE
6520 || TREE_CODE (type) == QUAL_UNION_TYPE)
6521 && ! CONSTRUCTOR_ELTS (exp))
6522 /* If the constructor is empty, clear the union. */
6524 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6525 cleared = 1;
6528 /* If we are building a static constructor into a register,
6529 set the initial value as zero so we can fold the value into
6530 a constant. But if more than one register is involved,
6531 this probably loses. */
6532 else if (REG_P (target) && TREE_STATIC (exp)
6533 && known_le (GET_MODE_SIZE (GET_MODE (target)),
6534 REGMODE_NATURAL_SIZE (GET_MODE (target))))
6536 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6537 cleared = 1;
6540 /* If the constructor has fewer fields than the structure or
6541 if we are initializing the structure to mostly zeros, clear
6542 the whole structure first. Don't do this if TARGET is a
6543 register whose mode size isn't equal to SIZE since
6544 clear_storage can't handle this case. */
6545 else if (known_size_p (size)
6546 && (((int) CONSTRUCTOR_NELTS (exp) != fields_length (type))
6547 || mostly_zeros_p (exp))
6548 && (!REG_P (target)
6549 || known_eq (GET_MODE_SIZE (GET_MODE (target)), size)))
6551 clear_storage (target, gen_int_mode (size, Pmode),
6552 BLOCK_OP_NORMAL);
6553 cleared = 1;
6556 if (REG_P (target) && !cleared)
6557 emit_clobber (target);
6559 /* Store each element of the constructor into the
6560 corresponding field of TARGET. */
6561 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6563 machine_mode mode;
6564 HOST_WIDE_INT bitsize;
6565 HOST_WIDE_INT bitpos = 0;
6566 tree offset;
6567 rtx to_rtx = target;
6569 /* Just ignore missing fields. We cleared the whole
6570 structure, above, if any fields are missing. */
6571 if (field == 0)
6572 continue;
6574 if (cleared && initializer_zerop (value))
6575 continue;
6577 if (tree_fits_uhwi_p (DECL_SIZE (field)))
6578 bitsize = tree_to_uhwi (DECL_SIZE (field));
6579 else
6580 gcc_unreachable ();
6582 mode = DECL_MODE (field);
6583 if (DECL_BIT_FIELD (field))
6584 mode = VOIDmode;
6586 offset = DECL_FIELD_OFFSET (field);
6587 if (tree_fits_shwi_p (offset)
6588 && tree_fits_shwi_p (bit_position (field)))
6590 bitpos = int_bit_position (field);
6591 offset = NULL_TREE;
6593 else
6594 gcc_unreachable ();
6596 /* If this initializes a field that is smaller than a
6597 word, at the start of a word, try to widen it to a full
6598 word. This special case allows us to output C++ member
6599 function initializations in a form that the optimizers
6600 can understand. */
6601 if (WORD_REGISTER_OPERATIONS
6602 && REG_P (target)
6603 && bitsize < BITS_PER_WORD
6604 && bitpos % BITS_PER_WORD == 0
6605 && GET_MODE_CLASS (mode) == MODE_INT
6606 && TREE_CODE (value) == INTEGER_CST
6607 && exp_size >= 0
6608 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6610 type = TREE_TYPE (value);
6612 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6614 type = lang_hooks.types.type_for_mode
6615 (word_mode, TYPE_UNSIGNED (type));
6616 value = fold_convert (type, value);
6617 /* Make sure the bits beyond the original bitsize are zero
6618 so that we can correctly avoid extra zeroing stores in
6619 later constructor elements. */
6620 tree bitsize_mask
6621 = wide_int_to_tree (type, wi::mask (bitsize, false,
6622 BITS_PER_WORD));
6623 value = fold_build2 (BIT_AND_EXPR, type, value, bitsize_mask);
6626 if (BYTES_BIG_ENDIAN)
6627 value
6628 = fold_build2 (LSHIFT_EXPR, type, value,
6629 build_int_cst (type,
6630 BITS_PER_WORD - bitsize));
6631 bitsize = BITS_PER_WORD;
6632 mode = word_mode;
6635 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6636 && DECL_NONADDRESSABLE_P (field))
6638 to_rtx = copy_rtx (to_rtx);
6639 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6642 store_constructor_field (to_rtx, bitsize, bitpos,
6643 0, bitregion_end, mode,
6644 value, cleared,
6645 get_alias_set (TREE_TYPE (field)),
6646 reverse);
6648 break;
6650 case ARRAY_TYPE:
6652 tree value, index;
6653 unsigned HOST_WIDE_INT i;
6654 int need_to_clear;
6655 tree domain;
6656 tree elttype = TREE_TYPE (type);
6657 int const_bounds_p;
6658 HOST_WIDE_INT minelt = 0;
6659 HOST_WIDE_INT maxelt = 0;
6661 /* The storage order is specified for every aggregate type. */
6662 reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6664 domain = TYPE_DOMAIN (type);
6665 const_bounds_p = (TYPE_MIN_VALUE (domain)
6666 && TYPE_MAX_VALUE (domain)
6667 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6668 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6670 /* If we have constant bounds for the range of the type, get them. */
6671 if (const_bounds_p)
6673 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6674 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6677 /* If the constructor has fewer elements than the array, clear
6678 the whole array first. Similarly if this is a static
6679 constructor of a non-BLKmode object. */
6680 if (cleared)
6681 need_to_clear = 0;
6682 else if (REG_P (target) && TREE_STATIC (exp))
6683 need_to_clear = 1;
6684 else
6686 unsigned HOST_WIDE_INT idx;
6687 HOST_WIDE_INT count = 0, zero_count = 0;
6688 need_to_clear = ! const_bounds_p;
6690 /* This loop is a more accurate version of the loop in
6691 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6692 is also needed to check for missing elements. */
6693 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6695 HOST_WIDE_INT this_node_count;
6697 if (need_to_clear)
6698 break;
6700 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6702 tree lo_index = TREE_OPERAND (index, 0);
6703 tree hi_index = TREE_OPERAND (index, 1);
6705 if (! tree_fits_uhwi_p (lo_index)
6706 || ! tree_fits_uhwi_p (hi_index))
6708 need_to_clear = 1;
6709 break;
6712 this_node_count = (tree_to_uhwi (hi_index)
6713 - tree_to_uhwi (lo_index) + 1);
6715 else
6716 this_node_count = 1;
6718 count += this_node_count;
6719 if (mostly_zeros_p (value))
6720 zero_count += this_node_count;
6723 /* Clear the entire array first if there are any missing
6724 elements, or if the incidence of zero elements is >=
6725 75%. */
6726 if (! need_to_clear
6727 && (count < maxelt - minelt + 1
6728 || 4 * zero_count >= 3 * count))
6729 need_to_clear = 1;
6732 if (need_to_clear && maybe_gt (size, 0))
6734 if (REG_P (target))
6735 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6736 else
6737 clear_storage (target, gen_int_mode (size, Pmode),
6738 BLOCK_OP_NORMAL);
6739 cleared = 1;
6742 if (!cleared && REG_P (target))
6743 /* Inform later passes that the old value is dead. */
6744 emit_clobber (target);
6746 /* Store each element of the constructor into the
6747 corresponding element of TARGET, determined by counting the
6748 elements. */
6749 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6751 machine_mode mode;
6752 poly_int64 bitsize;
6753 HOST_WIDE_INT bitpos;
6754 rtx xtarget = target;
6756 if (cleared && initializer_zerop (value))
6757 continue;
6759 mode = TYPE_MODE (elttype);
6760 if (mode != BLKmode)
6761 bitsize = GET_MODE_BITSIZE (mode);
6762 else if (!poly_int_tree_p (TYPE_SIZE (elttype), &bitsize))
6763 bitsize = -1;
6765 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6767 tree lo_index = TREE_OPERAND (index, 0);
6768 tree hi_index = TREE_OPERAND (index, 1);
6769 rtx index_r, pos_rtx;
6770 HOST_WIDE_INT lo, hi, count;
6771 tree position;
6773 /* If the range is constant and "small", unroll the loop. */
6774 if (const_bounds_p
6775 && tree_fits_shwi_p (lo_index)
6776 && tree_fits_shwi_p (hi_index)
6777 && (lo = tree_to_shwi (lo_index),
6778 hi = tree_to_shwi (hi_index),
6779 count = hi - lo + 1,
6780 (!MEM_P (target)
6781 || count <= 2
6782 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6783 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6784 <= 40 * 8)))))
6786 lo -= minelt; hi -= minelt;
6787 for (; lo <= hi; lo++)
6789 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6791 if (MEM_P (target)
6792 && !MEM_KEEP_ALIAS_SET_P (target)
6793 && TREE_CODE (type) == ARRAY_TYPE
6794 && TYPE_NONALIASED_COMPONENT (type))
6796 target = copy_rtx (target);
6797 MEM_KEEP_ALIAS_SET_P (target) = 1;
6800 store_constructor_field
6801 (target, bitsize, bitpos, 0, bitregion_end,
6802 mode, value, cleared,
6803 get_alias_set (elttype), reverse);
6806 else
6808 rtx_code_label *loop_start = gen_label_rtx ();
6809 rtx_code_label *loop_end = gen_label_rtx ();
6810 tree exit_cond;
6812 expand_normal (hi_index);
6814 index = build_decl (EXPR_LOCATION (exp),
6815 VAR_DECL, NULL_TREE, domain);
6816 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6817 SET_DECL_RTL (index, index_r);
6818 store_expr (lo_index, index_r, 0, false, reverse);
6820 /* Build the head of the loop. */
6821 do_pending_stack_adjust ();
6822 emit_label (loop_start);
6824 /* Assign value to element index. */
6825 position =
6826 fold_convert (ssizetype,
6827 fold_build2 (MINUS_EXPR,
6828 TREE_TYPE (index),
6829 index,
6830 TYPE_MIN_VALUE (domain)));
6832 position =
6833 size_binop (MULT_EXPR, position,
6834 fold_convert (ssizetype,
6835 TYPE_SIZE_UNIT (elttype)));
6837 pos_rtx = expand_normal (position);
6838 xtarget = offset_address (target, pos_rtx,
6839 highest_pow2_factor (position));
6840 xtarget = adjust_address (xtarget, mode, 0);
6841 if (TREE_CODE (value) == CONSTRUCTOR)
6842 store_constructor (value, xtarget, cleared,
6843 exact_div (bitsize, BITS_PER_UNIT),
6844 reverse);
6845 else
6846 store_expr (value, xtarget, 0, false, reverse);
6848 /* Generate a conditional jump to exit the loop. */
6849 exit_cond = build2 (GE_EXPR, integer_type_node,
6850 index, hi_index);
6851 jumpif (exit_cond, loop_end,
6852 profile_probability::uninitialized ());
6854 /* Update the loop counter, and jump to the head of
6855 the loop. */
6856 expand_assignment (index,
6857 build2 (PLUS_EXPR, TREE_TYPE (index),
6858 index, integer_one_node),
6859 false);
6861 emit_jump (loop_start);
6863 /* Build the end of the loop. */
6864 emit_label (loop_end);
6867 else if ((index != 0 && ! tree_fits_shwi_p (index))
6868 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6870 tree position;
6872 if (index == 0)
6873 index = ssize_int (1);
6875 if (minelt)
6876 index = fold_convert (ssizetype,
6877 fold_build2 (MINUS_EXPR,
6878 TREE_TYPE (index),
6879 index,
6880 TYPE_MIN_VALUE (domain)));
6882 position =
6883 size_binop (MULT_EXPR, index,
6884 fold_convert (ssizetype,
6885 TYPE_SIZE_UNIT (elttype)));
6886 xtarget = offset_address (target,
6887 expand_normal (position),
6888 highest_pow2_factor (position));
6889 xtarget = adjust_address (xtarget, mode, 0);
6890 store_expr (value, xtarget, 0, false, reverse);
6892 else
6894 if (index != 0)
6895 bitpos = ((tree_to_shwi (index) - minelt)
6896 * tree_to_uhwi (TYPE_SIZE (elttype)));
6897 else
6898 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6900 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6901 && TREE_CODE (type) == ARRAY_TYPE
6902 && TYPE_NONALIASED_COMPONENT (type))
6904 target = copy_rtx (target);
6905 MEM_KEEP_ALIAS_SET_P (target) = 1;
6907 store_constructor_field (target, bitsize, bitpos, 0,
6908 bitregion_end, mode, value,
6909 cleared, get_alias_set (elttype),
6910 reverse);
6913 break;
6916 case VECTOR_TYPE:
6918 unsigned HOST_WIDE_INT idx;
6919 constructor_elt *ce;
6920 int i;
6921 int need_to_clear;
6922 insn_code icode = CODE_FOR_nothing;
6923 tree elt;
6924 tree elttype = TREE_TYPE (type);
6925 int elt_size = vector_element_bits (type);
6926 machine_mode eltmode = TYPE_MODE (elttype);
6927 HOST_WIDE_INT bitsize;
6928 HOST_WIDE_INT bitpos;
6929 rtvec vector = NULL;
6930 poly_uint64 n_elts;
6931 unsigned HOST_WIDE_INT const_n_elts;
6932 alias_set_type alias;
6933 bool vec_vec_init_p = false;
6934 machine_mode mode = GET_MODE (target);
6936 gcc_assert (eltmode != BLKmode);
6938 /* Try using vec_duplicate_optab for uniform vectors. */
6939 if (!TREE_SIDE_EFFECTS (exp)
6940 && VECTOR_MODE_P (mode)
6941 && eltmode == GET_MODE_INNER (mode)
6942 && ((icode = optab_handler (vec_duplicate_optab, mode))
6943 != CODE_FOR_nothing)
6944 && (elt = uniform_vector_p (exp)))
6946 class expand_operand ops[2];
6947 create_output_operand (&ops[0], target, mode);
6948 create_input_operand (&ops[1], expand_normal (elt), eltmode);
6949 expand_insn (icode, 2, ops);
6950 if (!rtx_equal_p (target, ops[0].value))
6951 emit_move_insn (target, ops[0].value);
6952 break;
6955 n_elts = TYPE_VECTOR_SUBPARTS (type);
6956 if (REG_P (target)
6957 && VECTOR_MODE_P (mode)
6958 && n_elts.is_constant (&const_n_elts))
6960 machine_mode emode = eltmode;
6961 bool vector_typed_elts_p = false;
6963 if (CONSTRUCTOR_NELTS (exp)
6964 && (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value))
6965 == VECTOR_TYPE))
6967 tree etype = TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value);
6968 gcc_assert (known_eq (CONSTRUCTOR_NELTS (exp)
6969 * TYPE_VECTOR_SUBPARTS (etype),
6970 n_elts));
6971 emode = TYPE_MODE (etype);
6972 vector_typed_elts_p = true;
6974 icode = convert_optab_handler (vec_init_optab, mode, emode);
6975 if (icode != CODE_FOR_nothing)
6977 unsigned int n = const_n_elts;
6979 if (vector_typed_elts_p)
6981 n = CONSTRUCTOR_NELTS (exp);
6982 vec_vec_init_p = true;
6984 vector = rtvec_alloc (n);
6985 for (unsigned int k = 0; k < n; k++)
6986 RTVEC_ELT (vector, k) = CONST0_RTX (emode);
6990 /* Compute the size of the elements in the CTOR. It differs
6991 from the size of the vector type elements only when the
6992 CTOR elements are vectors themselves. */
6993 tree val_type = TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value);
6994 if (VECTOR_TYPE_P (val_type))
6995 bitsize = tree_to_uhwi (TYPE_SIZE (val_type));
6996 else
6997 bitsize = elt_size;
6999 /* If the constructor has fewer elements than the vector,
7000 clear the whole vector first. Similarly if this is a static
7001 constructor of a non-BLKmode object. */
7002 if (cleared)
7003 need_to_clear = 0;
7004 else if (REG_P (target) && TREE_STATIC (exp))
7005 need_to_clear = 1;
7006 else
7008 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
7009 tree value;
7011 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7013 int n_elts_here = bitsize / elt_size;
7014 count += n_elts_here;
7015 if (mostly_zeros_p (value))
7016 zero_count += n_elts_here;
7019 /* Clear the entire vector first if there are any missing elements,
7020 or if the incidence of zero elements is >= 75%. */
7021 need_to_clear = (maybe_lt (count, n_elts)
7022 || 4 * zero_count >= 3 * count);
7025 if (need_to_clear && maybe_gt (size, 0) && !vector)
7027 if (REG_P (target))
7028 emit_move_insn (target, CONST0_RTX (mode));
7029 else
7030 clear_storage (target, gen_int_mode (size, Pmode),
7031 BLOCK_OP_NORMAL);
7032 cleared = 1;
7035 /* Inform later passes that the old value is dead. */
7036 if (!cleared && !vector && REG_P (target))
7037 emit_move_insn (target, CONST0_RTX (mode));
7039 if (MEM_P (target))
7040 alias = MEM_ALIAS_SET (target);
7041 else
7042 alias = get_alias_set (elttype);
7044 /* Store each element of the constructor into the corresponding
7045 element of TARGET, determined by counting the elements. */
7046 for (idx = 0, i = 0;
7047 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
7048 idx++, i += bitsize / elt_size)
7050 HOST_WIDE_INT eltpos;
7051 tree value = ce->value;
7053 if (cleared && initializer_zerop (value))
7054 continue;
7056 if (ce->index)
7057 eltpos = tree_to_uhwi (ce->index);
7058 else
7059 eltpos = i;
7061 if (vector)
7063 if (vec_vec_init_p)
7065 gcc_assert (ce->index == NULL_TREE);
7066 gcc_assert (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE);
7067 eltpos = idx;
7069 else
7070 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
7071 RTVEC_ELT (vector, eltpos) = expand_normal (value);
7073 else
7075 machine_mode value_mode
7076 = (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
7077 ? TYPE_MODE (TREE_TYPE (value)) : eltmode);
7078 bitpos = eltpos * elt_size;
7079 store_constructor_field (target, bitsize, bitpos, 0,
7080 bitregion_end, value_mode,
7081 value, cleared, alias, reverse);
7085 if (vector)
7086 emit_insn (GEN_FCN (icode) (target,
7087 gen_rtx_PARALLEL (mode, vector)));
7088 break;
7091 default:
7092 gcc_unreachable ();
7096 /* Store the value of EXP (an expression tree)
7097 into a subfield of TARGET which has mode MODE and occupies
7098 BITSIZE bits, starting BITPOS bits from the start of TARGET.
7099 If MODE is VOIDmode, it means that we are storing into a bit-field.
7101 BITREGION_START is the bit position of the first bitfield in this
7102 region. BITREGION_END is the bit position of the last bitfield in
7103 this region. These two fields are 0 if the C++ memory model does not
7104 apply, or if we are not interested in keeping track of bitfield regions.
7106 Always return const0_rtx unless we have something particular to
7107 return.
7109 ALIAS_SET is the alias set for the destination. This value will
7110 (in general) be different from that for TARGET, since TARGET is a
7111 reference to the containing structure.
7113 If NONTEMPORAL is true, try generating a nontemporal store.
7115 If REVERSE is true, the store is to be done in reverse order. */
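/* A typical use is a store into a bit-field member, e.g.

       struct S { unsigned int f : 3; } *p;
       p->f = 5;

   for which store_field is entered with MODE == VOIDmode and BITSIZE == 3,
   and the value is written with store_bit_field below.  */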
7117 static rtx
7118 store_field (rtx target, poly_int64 bitsize, poly_int64 bitpos,
7119 poly_uint64 bitregion_start, poly_uint64 bitregion_end,
7120 machine_mode mode, tree exp,
7121 alias_set_type alias_set, bool nontemporal, bool reverse)
7123 if (TREE_CODE (exp) == ERROR_MARK)
7124 return const0_rtx;
7126 /* If we have nothing to store, do nothing unless the expression has
7127 side-effects. Don't do that for a zero-sized addressable lhs of
7128 calls. */
7129 if (known_eq (bitsize, 0)
7130 && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
7131 || TREE_CODE (exp) != CALL_EXPR))
7132 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
7134 if (GET_CODE (target) == CONCAT)
7136 /* We're storing into a struct containing a single __complex. */
7138 gcc_assert (known_eq (bitpos, 0));
7139 return store_expr (exp, target, 0, nontemporal, reverse);
7142 /* If the structure is in a register or if the component
7143 is a bit field, we cannot use addressing to access it.
7144 Use bit-field techniques or SUBREG to store in it. */
7146 poly_int64 decl_bitsize;
7147 if (mode == VOIDmode
7148 || (mode != BLKmode && ! direct_store[(int) mode]
7149 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7150 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
7151 || REG_P (target)
7152 || GET_CODE (target) == SUBREG
7153 /* If the field isn't aligned enough to store as an ordinary memref,
7154 store it as a bit field. */
7155 || (mode != BLKmode
7156 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
7157 || !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode)))
7158 && targetm.slow_unaligned_access (mode, MEM_ALIGN (target)))
7159 || !multiple_p (bitpos, BITS_PER_UNIT)))
7160 || (known_size_p (bitsize)
7161 && mode != BLKmode
7162 && maybe_gt (GET_MODE_BITSIZE (mode), bitsize))
7163 /* If the RHS and field are a constant size and the size of the
7164 RHS isn't the same size as the bitfield, we must use bitfield
7165 operations. */
7166 || (known_size_p (bitsize)
7167 && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
7168 && maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp))),
7169 bitsize)
7170 /* Except for initialization of full bytes from a CONSTRUCTOR, which
7171 we will handle specially below. */
7172 && !(TREE_CODE (exp) == CONSTRUCTOR
7173 && multiple_p (bitsize, BITS_PER_UNIT))
7174 /* And except for bitwise copying of TREE_ADDRESSABLE types,
7175 where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp)
7176 includes some extra padding. store_expr / expand_expr will in
7177 that case call get_inner_reference that will have the bitsize
7178 we check here and thus the block move will not clobber the
7179 padding that shouldn't be clobbered. In the future we could
7180 replace the TREE_ADDRESSABLE check with a check that
7181 get_base_address needs to live in memory. */
7182 && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
7183 || TREE_CODE (exp) != COMPONENT_REF
7184 || !multiple_p (bitsize, BITS_PER_UNIT)
7185 || !multiple_p (bitpos, BITS_PER_UNIT)
7186 || !poly_int_tree_p (DECL_SIZE (TREE_OPERAND (exp, 1)),
7187 &decl_bitsize)
7188 || maybe_ne (decl_bitsize, bitsize)))
7189 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
7190 decl we must use bitfield operations. */
7191 || (known_size_p (bitsize)
7192 && TREE_CODE (exp) == MEM_REF
7193 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7194 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7195 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7196 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
7198 rtx temp;
7199 gimple *nop_def;
7201 /* If EXP is a NOP_EXPR of precision less than its mode, then that
7202 implies a mask operation. If the precision is the same size as
7203 the field we're storing into, that mask is redundant. This is
7204 particularly common with bit field assignments generated by the
7205 C front end. */
7206 nop_def = get_def_for_expr (exp, NOP_EXPR);
7207 if (nop_def)
7209 tree type = TREE_TYPE (exp);
7210 if (INTEGRAL_TYPE_P (type)
7211 && maybe_ne (TYPE_PRECISION (type),
7212 GET_MODE_BITSIZE (TYPE_MODE (type)))
7213 && known_eq (bitsize, TYPE_PRECISION (type)))
7215 tree op = gimple_assign_rhs1 (nop_def);
7216 type = TREE_TYPE (op);
7217 if (INTEGRAL_TYPE_P (type)
7218 && known_ge (TYPE_PRECISION (type), bitsize))
7219 exp = op;
7223 temp = expand_normal (exp);
7225 /* We don't support variable-sized BLKmode bitfields, since our
7226 handling of BLKmode is bound up with the ability to break
7227 things into words. */
7228 gcc_assert (mode != BLKmode || bitsize.is_constant ());
7230 /* Handle calls that return values in multiple non-contiguous locations.
7231 The Irix 6 ABI has examples of this. */
7232 if (GET_CODE (temp) == PARALLEL)
7234 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
7235 machine_mode temp_mode = GET_MODE (temp);
7236 if (temp_mode == BLKmode || temp_mode == VOIDmode)
7237 temp_mode = smallest_int_mode_for_size (size * BITS_PER_UNIT);
7238 rtx temp_target = gen_reg_rtx (temp_mode);
7239 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
7240 temp = temp_target;
7243 /* Handle calls that return BLKmode values in registers. */
7244 else if (mode == BLKmode && REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
7246 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
7247 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
7248 temp = temp_target;
7251 /* If the value has aggregate type and an integral mode then, if BITSIZE
7252 is narrower than this mode and this is for big-endian data, we first
7253 need to put the value into the low-order bits for store_bit_field,
7254 except when MODE is BLKmode and BITSIZE larger than the word size
7255 (see the handling of fields larger than a word in store_bit_field).
7256 Moreover, the field may not be aligned on a byte boundary; in this
7257 case, if it has reverse storage order, it needs to be accessed as a
7258 scalar field with reverse storage order and we must first put the
7259 value into target order. */
7260 scalar_int_mode temp_mode;
7261 if (AGGREGATE_TYPE_P (TREE_TYPE (exp))
7262 && is_int_mode (GET_MODE (temp), &temp_mode))
7264 HOST_WIDE_INT size = GET_MODE_BITSIZE (temp_mode);
7266 reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp));
7268 if (reverse)
7269 temp = flip_storage_order (temp_mode, temp);
7271 gcc_checking_assert (known_le (bitsize, size));
7272 if (maybe_lt (bitsize, size)
7273 && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN
7274 /* Use of to_constant for BLKmode was checked above. */
7275 && !(mode == BLKmode && bitsize.to_constant () > BITS_PER_WORD))
7276 temp = expand_shift (RSHIFT_EXPR, temp_mode, temp,
7277 size - bitsize, NULL_RTX, 1);
7280 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
7281 if (mode != VOIDmode && mode != BLKmode
7282 && mode != TYPE_MODE (TREE_TYPE (exp)))
7283 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
7285 /* If the mode of TEMP and TARGET is BLKmode, both must be in memory
7286 and BITPOS must be aligned on a byte boundary. If so, we simply do
7287 a block copy. Likewise for a BLKmode-like TARGET. */
7288 if (GET_MODE (temp) == BLKmode
7289 && (GET_MODE (target) == BLKmode
7290 || (MEM_P (target)
7291 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
7292 && multiple_p (bitpos, BITS_PER_UNIT)
7293 && multiple_p (bitsize, BITS_PER_UNIT))))
7295 gcc_assert (MEM_P (target) && MEM_P (temp));
7296 poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT);
7297 poly_int64 bytesize = bits_to_bytes_round_up (bitsize);
7299 target = adjust_address (target, VOIDmode, bytepos);
7300 emit_block_move (target, temp,
7301 gen_int_mode (bytesize, Pmode),
7302 BLOCK_OP_NORMAL);
7304 return const0_rtx;
7307 /* If the mode of TEMP is still BLKmode and BITSIZE is not larger than the
7308 word size, we need to load the value (see again store_bit_field). */
7309 if (GET_MODE (temp) == BLKmode && known_le (bitsize, BITS_PER_WORD))
7311 temp_mode = smallest_int_mode_for_size (bitsize);
7312 temp = extract_bit_field (temp, bitsize, 0, 1, NULL_RTX, temp_mode,
7313 temp_mode, false, NULL);
7316 /* Store the value in the bitfield. */
7317 gcc_checking_assert (known_ge (bitpos, 0));
7318 store_bit_field (target, bitsize, bitpos,
7319 bitregion_start, bitregion_end,
7320 mode, temp, reverse);
7322 return const0_rtx;
7324 else
7326 /* Now build a reference to just the desired component. */
7327 rtx to_rtx = adjust_address (target, mode,
7328 exact_div (bitpos, BITS_PER_UNIT));
7330 if (to_rtx == target)
7331 to_rtx = copy_rtx (to_rtx);
7333 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
7334 set_mem_alias_set (to_rtx, alias_set);
7336 /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
7337 into a target smaller than its type; handle that case now. */
7338 if (TREE_CODE (exp) == CONSTRUCTOR && known_size_p (bitsize))
7340 poly_int64 bytesize = exact_div (bitsize, BITS_PER_UNIT);
7341 store_constructor (exp, to_rtx, 0, bytesize, reverse);
7342 return to_rtx;
7345 return store_expr (exp, to_rtx, 0, nontemporal, reverse);
7349 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
7350 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
7351 codes and find the ultimate containing object, which we return.
7353 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
7354 bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
7355 storage order of the field.
7356 If the position of the field is variable, we store a tree
7357 giving the variable offset (in units) in *POFFSET.
7358 This offset is in addition to the bit position.
7359 If the position is not variable, we store 0 in *POFFSET.
7361 If any of the extraction expressions is volatile,
7362 we store 1 in *PVOLATILEP. Otherwise we don't change that.
7364 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
7365 Otherwise, it is a mode that can be used to access the field.
7367 If the field describes a variable-sized object, *PMODE is set to
7368 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
7369 this case, but the address of the object can be found. */
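/* Illustrative example (assuming a 32-bit int and the usual C layout):
given struct S { int i; unsigned f : 3; } s;, a reference to s.f comes
back as the VAR_DECL for s with *PBITSIZE = 3, *PBITPOS = 32,
*POFFSET = 0 and *PMODE = VOIDmode, since the field is a non-BLKmode
bit-field. For a[i].f with a variable index i, the variable part of
the position is returned in *POFFSET (in units) and *PBITPOS holds
only the constant part. */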
7371 tree
7372 get_inner_reference (tree exp, poly_int64_pod *pbitsize,
7373 poly_int64_pod *pbitpos, tree *poffset,
7374 machine_mode *pmode, int *punsignedp,
7375 int *preversep, int *pvolatilep)
7377 tree size_tree = 0;
7378 machine_mode mode = VOIDmode;
7379 bool blkmode_bitfield = false;
7380 tree offset = size_zero_node;
7381 poly_offset_int bit_offset = 0;
7383 /* First get the mode, signedness, storage order and size. We do this from
7384 just the outermost expression. */
7385 *pbitsize = -1;
7386 if (TREE_CODE (exp) == COMPONENT_REF)
7388 tree field = TREE_OPERAND (exp, 1);
7389 size_tree = DECL_SIZE (field);
7390 if (flag_strict_volatile_bitfields > 0
7391 && TREE_THIS_VOLATILE (exp)
7392 && DECL_BIT_FIELD_TYPE (field)
7393 && DECL_MODE (field) != BLKmode)
7394 /* Volatile bitfields should be accessed in the mode of the
7395 field's type, not the mode computed based on the bit
7396 size. */
7397 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
7398 else if (!DECL_BIT_FIELD (field))
7400 mode = DECL_MODE (field);
7401 /* For vector fields re-check the target flags, as DECL_MODE
7402 could have been set with different target flags than
7403 the current function has. */
7404 if (mode == BLKmode
7405 && VECTOR_TYPE_P (TREE_TYPE (field))
7406 && VECTOR_MODE_P (TYPE_MODE_RAW (TREE_TYPE (field))))
7407 mode = TYPE_MODE (TREE_TYPE (field));
7409 else if (DECL_MODE (field) == BLKmode)
7410 blkmode_bitfield = true;
7412 *punsignedp = DECL_UNSIGNED (field);
7414 else if (TREE_CODE (exp) == BIT_FIELD_REF)
7416 size_tree = TREE_OPERAND (exp, 1);
7417 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
7418 || TYPE_UNSIGNED (TREE_TYPE (exp)));
7420 /* For vector element types with the correct size of access or for
7421 vector typed accesses use the mode of the access type. */
7422 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
7423 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
7424 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
7425 || VECTOR_TYPE_P (TREE_TYPE (exp)))
7426 mode = TYPE_MODE (TREE_TYPE (exp));
7428 else
7430 mode = TYPE_MODE (TREE_TYPE (exp));
7431 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
7433 if (mode == BLKmode)
7434 size_tree = TYPE_SIZE (TREE_TYPE (exp));
7435 else
7436 *pbitsize = GET_MODE_BITSIZE (mode);
7439 if (size_tree != 0)
7441 if (! tree_fits_uhwi_p (size_tree))
7442 mode = BLKmode, *pbitsize = -1;
7443 else
7444 *pbitsize = tree_to_uhwi (size_tree);
7447 *preversep = reverse_storage_order_for_component_p (exp);
7449 /* Compute cumulative bit-offset for nested component-refs and array-refs,
7450 and find the ultimate containing object. */
7451 while (1)
7453 switch (TREE_CODE (exp))
7455 case BIT_FIELD_REF:
7456 bit_offset += wi::to_poly_offset (TREE_OPERAND (exp, 2));
7457 break;
7459 case COMPONENT_REF:
7461 tree field = TREE_OPERAND (exp, 1);
7462 tree this_offset = component_ref_field_offset (exp);
7464 /* If this field hasn't been filled in yet, don't go past it.
7465 This should only happen when folding expressions made during
7466 type construction. */
7467 if (this_offset == 0)
7468 break;
7470 offset = size_binop (PLUS_EXPR, offset, this_offset);
7471 bit_offset += wi::to_poly_offset (DECL_FIELD_BIT_OFFSET (field));
7473 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
7475 break;
7477 case ARRAY_REF:
7478 case ARRAY_RANGE_REF:
7480 tree index = TREE_OPERAND (exp, 1);
7481 tree low_bound = array_ref_low_bound (exp);
7482 tree unit_size = array_ref_element_size (exp);
7484 /* We assume all arrays have sizes that are a multiple of a byte.
7485 First subtract the lower bound, if any, in the type of the
7486 index, then convert to sizetype and multiply by the size of
7487 the array element. */
7488 if (! integer_zerop (low_bound))
7489 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
7490 index, low_bound);
7492 offset = size_binop (PLUS_EXPR, offset,
7493 size_binop (MULT_EXPR,
7494 fold_convert (sizetype, index),
7495 unit_size));
7497 break;
7499 case REALPART_EXPR:
7500 break;
7502 case IMAGPART_EXPR:
7503 bit_offset += *pbitsize;
7504 break;
7506 case VIEW_CONVERT_EXPR:
7507 break;
7509 case MEM_REF:
7510 /* Hand back the decl for MEM[&decl, off]. */
7511 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
7513 tree off = TREE_OPERAND (exp, 1);
7514 if (!integer_zerop (off))
7516 poly_offset_int boff = mem_ref_offset (exp);
7517 boff <<= LOG2_BITS_PER_UNIT;
7518 bit_offset += boff;
7520 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7522 goto done;
7524 default:
7525 goto done;
7528 /* If any reference in the chain is volatile, the effect is volatile. */
7529 if (TREE_THIS_VOLATILE (exp))
7530 *pvolatilep = 1;
7532 exp = TREE_OPERAND (exp, 0);
7534 done:
7536 /* If OFFSET is constant, see if we can return the whole thing as a
7537 constant bit position. Make sure to handle overflow during
7538 this conversion. */
7539 if (poly_int_tree_p (offset))
7541 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset),
7542 TYPE_PRECISION (sizetype));
7543 tem <<= LOG2_BITS_PER_UNIT;
7544 tem += bit_offset;
7545 if (tem.to_shwi (pbitpos))
7546 *poffset = offset = NULL_TREE;
7549 /* Otherwise, split it up. */
7550 if (offset)
7552 /* Avoid returning a negative bitpos as this may wreak havoc later. */
7553 if (!bit_offset.to_shwi (pbitpos) || maybe_lt (*pbitpos, 0))
7555 *pbitpos = num_trailing_bits (bit_offset.force_shwi ());
7556 poly_offset_int bytes = bits_to_bytes_round_down (bit_offset);
7557 offset = size_binop (PLUS_EXPR, offset,
7558 build_int_cst (sizetype, bytes.force_shwi ()));
7561 *poffset = offset;
7564 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
7565 if (mode == VOIDmode
7566 && blkmode_bitfield
7567 && multiple_p (*pbitpos, BITS_PER_UNIT)
7568 && multiple_p (*pbitsize, BITS_PER_UNIT))
7569 *pmode = BLKmode;
7570 else
7571 *pmode = mode;
7573 return exp;
7576 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7578 static unsigned HOST_WIDE_INT
7579 target_align (const_tree target)
7581 /* We might have a chain of nested references with intermediate misaligning
7582 bit-field components, so we need to recurse to find out. */
7584 unsigned HOST_WIDE_INT this_align, outer_align;
7586 switch (TREE_CODE (target))
7588 case BIT_FIELD_REF:
7589 return 1;
7591 case COMPONENT_REF:
7592 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7593 outer_align = target_align (TREE_OPERAND (target, 0));
7594 return MIN (this_align, outer_align);
7596 case ARRAY_REF:
7597 case ARRAY_RANGE_REF:
7598 this_align = TYPE_ALIGN (TREE_TYPE (target));
7599 outer_align = target_align (TREE_OPERAND (target, 0));
7600 return MIN (this_align, outer_align);
7602 CASE_CONVERT:
7603 case NON_LVALUE_EXPR:
7604 case VIEW_CONVERT_EXPR:
7605 this_align = TYPE_ALIGN (TREE_TYPE (target));
7606 outer_align = target_align (TREE_OPERAND (target, 0));
7607 return MAX (this_align, outer_align);
7609 default:
7610 return TYPE_ALIGN (TREE_TYPE (target));
7615 /* Given an rtx VALUE that may contain additions and multiplications, return
7616 an equivalent value that just refers to a register, memory, or constant.
7617 This is done by generating instructions to perform the arithmetic and
7618 returning a pseudo-register containing the value.
7620 The returned value may be a REG, SUBREG, MEM or constant. */
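/* For instance, handed (plus:SI (reg:SI 100) (const_int 4)) with a null
TARGET, this typically emits an add insn and returns the pseudo holding
the sum, so the caller can use the value where embedded arithmetic is
not acceptable, e.g. inside a memory address on a target without
reg+offset addressing. (Illustrative only; the exact rtx returned
depends on the code and the target.) */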
7623 force_operand (rtx value, rtx target)
7625 rtx op1, op2;
7626 /* Use subtarget as the target for operand 0 of a binary operation. */
7627 rtx subtarget = get_subtarget (target);
7628 enum rtx_code code = GET_CODE (value);
7630 /* Check for subreg applied to an expression produced by the loop optimizer. */
7631 if (code == SUBREG
7632 && !REG_P (SUBREG_REG (value))
7633 && !MEM_P (SUBREG_REG (value)))
7635 value
7636 = simplify_gen_subreg (GET_MODE (value),
7637 force_reg (GET_MODE (SUBREG_REG (value)),
7638 force_operand (SUBREG_REG (value),
7639 NULL_RTX)),
7640 GET_MODE (SUBREG_REG (value)),
7641 SUBREG_BYTE (value));
7642 code = GET_CODE (value);
7645 /* Check for a PIC address load. */
7646 if ((code == PLUS || code == MINUS)
7647 && XEXP (value, 0) == pic_offset_table_rtx
7648 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7649 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7650 || GET_CODE (XEXP (value, 1)) == CONST))
7652 if (!subtarget)
7653 subtarget = gen_reg_rtx (GET_MODE (value));
7654 emit_move_insn (subtarget, value);
7655 return subtarget;
7658 if (ARITHMETIC_P (value))
7660 op2 = XEXP (value, 1);
7661 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7662 subtarget = 0;
7663 if (code == MINUS && CONST_INT_P (op2))
7665 code = PLUS;
7666 op2 = negate_rtx (GET_MODE (value), op2);
7669 /* Check for an addition with OP2 a constant integer and our first
7670 operand a PLUS of a virtual register and something else. In that
7671 case, we want to emit the sum of the virtual register and the
7672 constant first and then add the other value. This allows virtual
7673 register instantiation to simply modify the constant rather than
7674 creating another one around this addition. */
7675 if (code == PLUS && CONST_INT_P (op2)
7676 && GET_CODE (XEXP (value, 0)) == PLUS
7677 && REG_P (XEXP (XEXP (value, 0), 0))
7678 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7679 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7681 rtx temp = expand_simple_binop (GET_MODE (value), code,
7682 XEXP (XEXP (value, 0), 0), op2,
7683 subtarget, 0, OPTAB_LIB_WIDEN);
7684 return expand_simple_binop (GET_MODE (value), code, temp,
7685 force_operand (XEXP (XEXP (value,
7686 0), 1), 0),
7687 target, 0, OPTAB_LIB_WIDEN);
7690 op1 = force_operand (XEXP (value, 0), subtarget);
7691 op2 = force_operand (op2, NULL_RTX);
7692 switch (code)
7694 case MULT:
7695 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7696 case DIV:
7697 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7698 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7699 target, 1, OPTAB_LIB_WIDEN);
7700 else
7701 return expand_divmod (0,
7702 FLOAT_MODE_P (GET_MODE (value))
7703 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7704 GET_MODE (value), op1, op2, target, 0);
7705 case MOD:
7706 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7707 target, 0);
7708 case UDIV:
7709 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7710 target, 1);
7711 case UMOD:
7712 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7713 target, 1);
7714 case ASHIFTRT:
7715 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7716 target, 0, OPTAB_LIB_WIDEN);
7717 default:
7718 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7719 target, 1, OPTAB_LIB_WIDEN);
7722 if (UNARY_P (value))
7724 if (!target)
7725 target = gen_reg_rtx (GET_MODE (value));
7726 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7727 switch (code)
7729 case ZERO_EXTEND:
7730 case SIGN_EXTEND:
7731 case TRUNCATE:
7732 case FLOAT_EXTEND:
7733 case FLOAT_TRUNCATE:
7734 convert_move (target, op1, code == ZERO_EXTEND);
7735 return target;
7737 case FIX:
7738 case UNSIGNED_FIX:
7739 expand_fix (target, op1, code == UNSIGNED_FIX);
7740 return target;
7742 case FLOAT:
7743 case UNSIGNED_FLOAT:
7744 expand_float (target, op1, code == UNSIGNED_FLOAT);
7745 return target;
7747 default:
7748 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7752 #ifdef INSN_SCHEDULING
7753 /* On machines that have insn scheduling, we want all memory references to be
7754 explicit, so we need to deal with such paradoxical SUBREGs. */
7755 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7756 value
7757 = simplify_gen_subreg (GET_MODE (value),
7758 force_reg (GET_MODE (SUBREG_REG (value)),
7759 force_operand (SUBREG_REG (value),
7760 NULL_RTX)),
7761 GET_MODE (SUBREG_REG (value)),
7762 SUBREG_BYTE (value));
7763 #endif
7765 return value;
7768 /* Subroutine of expand_expr: return nonzero iff there is no way that
7769 EXP can reference X, which is being modified. TOP_P is nonzero if this
7770 call is going to be used to determine whether we need a temporary
7771 for EXP, as opposed to a recursive call to this function.
7773 It is always safe for this routine to return zero since it merely
7774 searches for optimization opportunities. */
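/* For example, when expanding A = B + A with the MEM for A as the
target, safe_from_p (that MEM, B, 1) tells us whether B can be expanded
directly into A's location or whether A is still referenced by B and a
temporary must be used first; see expand_operands below for a typical
caller. */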
7777 safe_from_p (const_rtx x, tree exp, int top_p)
7779 rtx exp_rtl = 0;
7780 int i, nops;
7782 if (x == 0
7783 /* If EXP has varying size, we MUST use a target since we currently
7784 have no way of allocating temporaries of variable size
7785 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7786 So we assume here that something at a higher level has prevented a
7787 clash. This is somewhat bogus, but the best we can do. Only
7788 do this when X is BLKmode and when we are at the top level. */
7789 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7790 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7791 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7792 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7793 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7794 != INTEGER_CST)
7795 && GET_MODE (x) == BLKmode)
7796 /* If X is in the outgoing argument area, it is always safe. */
7797 || (MEM_P (x)
7798 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7799 || (GET_CODE (XEXP (x, 0)) == PLUS
7800 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7801 return 1;
7803 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7804 find the underlying pseudo. */
7805 if (GET_CODE (x) == SUBREG)
7807 x = SUBREG_REG (x);
7808 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7809 return 0;
7812 /* Now look at our tree code and possibly recurse. */
7813 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7815 case tcc_declaration:
7816 exp_rtl = DECL_RTL_IF_SET (exp);
7817 break;
7819 case tcc_constant:
7820 return 1;
7822 case tcc_exceptional:
7823 if (TREE_CODE (exp) == TREE_LIST)
7825 while (1)
7827 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7828 return 0;
7829 exp = TREE_CHAIN (exp);
7830 if (!exp)
7831 return 1;
7832 if (TREE_CODE (exp) != TREE_LIST)
7833 return safe_from_p (x, exp, 0);
7836 else if (TREE_CODE (exp) == CONSTRUCTOR)
7838 constructor_elt *ce;
7839 unsigned HOST_WIDE_INT idx;
7841 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7842 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7843 || !safe_from_p (x, ce->value, 0))
7844 return 0;
7845 return 1;
7847 else if (TREE_CODE (exp) == ERROR_MARK)
7848 return 1; /* An already-visited SAVE_EXPR? */
7849 else
7850 return 0;
7852 case tcc_statement:
7853 /* The only case we look at here is the DECL_INITIAL inside a
7854 DECL_EXPR. */
7855 return (TREE_CODE (exp) != DECL_EXPR
7856 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7857 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7858 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7860 case tcc_binary:
7861 case tcc_comparison:
7862 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7863 return 0;
7864 /* Fall through. */
7866 case tcc_unary:
7867 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7869 case tcc_expression:
7870 case tcc_reference:
7871 case tcc_vl_exp:
7872 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7873 the expression. If it is set, we conflict iff we are that rtx or
7874 both are in memory. Otherwise, we check all operands of the
7875 expression recursively. */
7877 switch (TREE_CODE (exp))
7879 case ADDR_EXPR:
7880 /* If the operand is static or we are static, we can't conflict.
7881 Likewise if we don't conflict with the operand at all. */
7882 if (staticp (TREE_OPERAND (exp, 0))
7883 || TREE_STATIC (exp)
7884 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7885 return 1;
7887 /* Otherwise, the only way this can conflict is if we are taking
7888 the address of a DECL whose address is part of X, which is
7889 very rare. */
7890 exp = TREE_OPERAND (exp, 0);
7891 if (DECL_P (exp))
7893 if (!DECL_RTL_SET_P (exp)
7894 || !MEM_P (DECL_RTL (exp)))
7895 return 0;
7896 else
7897 exp_rtl = XEXP (DECL_RTL (exp), 0);
7899 break;
7901 case MEM_REF:
7902 if (MEM_P (x)
7903 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7904 get_alias_set (exp)))
7905 return 0;
7906 break;
7908 case CALL_EXPR:
7909 /* Assume that the call will clobber all hard registers and
7910 all of memory. */
7911 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7912 || MEM_P (x))
7913 return 0;
7914 break;
7916 case WITH_CLEANUP_EXPR:
7917 case CLEANUP_POINT_EXPR:
7918 /* Lowered by gimplify.c. */
7919 gcc_unreachable ();
7921 case SAVE_EXPR:
7922 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7924 default:
7925 break;
7928 /* If we have an rtx, we do not need to scan our operands. */
7929 if (exp_rtl)
7930 break;
7932 nops = TREE_OPERAND_LENGTH (exp);
7933 for (i = 0; i < nops; i++)
7934 if (TREE_OPERAND (exp, i) != 0
7935 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7936 return 0;
7938 break;
7940 case tcc_type:
7941 /* Should never get a type here. */
7942 gcc_unreachable ();
7945 /* If we have an rtl, find any enclosed object. Then see if we conflict
7946 with it. */
7947 if (exp_rtl)
7949 if (GET_CODE (exp_rtl) == SUBREG)
7951 exp_rtl = SUBREG_REG (exp_rtl);
7952 if (REG_P (exp_rtl)
7953 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7954 return 0;
7957 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7958 are memory and they conflict. */
7959 return ! (rtx_equal_p (x, exp_rtl)
7960 || (MEM_P (x) && MEM_P (exp_rtl)
7961 && true_dependence (exp_rtl, VOIDmode, x)));
7964 /* If we reach here, it is safe. */
7965 return 1;
7969 /* Return the highest power of two that EXP is known to be a multiple of.
7970 This is used in updating alignment of MEMs in array references. */
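/* For example, for the constant 24 tree_ctz reports 3 trailing zero
bits and the result is 8; for an expression such as i * 16, with
nothing known about i, the result is 16. Anything with at least
HOST_BITS_PER_WIDE_INT trailing zero bits, or whose factor would exceed
BIGGEST_ALIGNMENT, is clamped to BIGGEST_ALIGNMENT. */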
7972 unsigned HOST_WIDE_INT
7973 highest_pow2_factor (const_tree exp)
7975 unsigned HOST_WIDE_INT ret;
7976 int trailing_zeros = tree_ctz (exp);
7977 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7978 return BIGGEST_ALIGNMENT;
7979 ret = HOST_WIDE_INT_1U << trailing_zeros;
7980 if (ret > BIGGEST_ALIGNMENT)
7981 return BIGGEST_ALIGNMENT;
7982 return ret;
7985 /* Similar, except that the alignment requirements of TARGET are
7986 taken into account. Assume it is at least as aligned as its
7987 type, unless it is a COMPONENT_REF in which case the layout of
7988 the structure gives the alignment. */
7990 static unsigned HOST_WIDE_INT
7991 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7993 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7994 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7996 return MAX (factor, talign);
7999 /* Convert the tree comparison code TCODE to the rtl one where the
8000 signedness is UNSIGNEDP. */
8002 static enum rtx_code
8003 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
8005 enum rtx_code code;
8006 switch (tcode)
8008 case EQ_EXPR:
8009 code = EQ;
8010 break;
8011 case NE_EXPR:
8012 code = NE;
8013 break;
8014 case LT_EXPR:
8015 code = unsignedp ? LTU : LT;
8016 break;
8017 case LE_EXPR:
8018 code = unsignedp ? LEU : LE;
8019 break;
8020 case GT_EXPR:
8021 code = unsignedp ? GTU : GT;
8022 break;
8023 case GE_EXPR:
8024 code = unsignedp ? GEU : GE;
8025 break;
8026 case UNORDERED_EXPR:
8027 code = UNORDERED;
8028 break;
8029 case ORDERED_EXPR:
8030 code = ORDERED;
8031 break;
8032 case UNLT_EXPR:
8033 code = UNLT;
8034 break;
8035 case UNLE_EXPR:
8036 code = UNLE;
8037 break;
8038 case UNGT_EXPR:
8039 code = UNGT;
8040 break;
8041 case UNGE_EXPR:
8042 code = UNGE;
8043 break;
8044 case UNEQ_EXPR:
8045 code = UNEQ;
8046 break;
8047 case LTGT_EXPR:
8048 code = LTGT;
8049 break;
8051 default:
8052 gcc_unreachable ();
8054 return code;
8057 /* Subroutine of expand_expr. Expand the two operands of a binary
8058 expression EXP0 and EXP1 placing the results in OP0 and OP1.
8059 The value may be stored in TARGET if TARGET is nonzero. The
8060 MODIFIER argument is as documented by expand_expr. */
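/* Note the operand_equal_p shortcut below: for an expression such as
A + A the second operand is just a copy_rtx of the first, so the tree
is only expanded once. */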
8062 void
8063 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
8064 enum expand_modifier modifier)
8066 if (! safe_from_p (target, exp1, 1))
8067 target = 0;
8068 if (operand_equal_p (exp0, exp1, 0))
8070 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
8071 *op1 = copy_rtx (*op0);
8073 else
8075 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
8076 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
8081 /* Return a MEM that contains constant EXP. DEFER is as for
8082 output_constant_def and MODIFIER is as for expand_expr. */
8084 static rtx
8085 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
8087 rtx mem;
8089 mem = output_constant_def (exp, defer);
8090 if (modifier != EXPAND_INITIALIZER)
8091 mem = use_anchored_address (mem);
8092 return mem;
8095 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
8096 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
8098 static rtx
8099 expand_expr_addr_expr_1 (tree exp, rtx target, scalar_int_mode tmode,
8100 enum expand_modifier modifier, addr_space_t as)
8102 rtx result, subtarget;
8103 tree inner, offset;
8104 poly_int64 bitsize, bitpos;
8105 int unsignedp, reversep, volatilep = 0;
8106 machine_mode mode1;
8108 /* If we are taking the address of a constant and are at the top level,
8109 we have to use output_constant_def since we can't call force_const_mem
8110 at top level. */
8111 /* ??? This should be considered a front-end bug. We should not be
8112 generating ADDR_EXPR of something that isn't an LVALUE. The only
8113 exception here is STRING_CST. */
8114 if (CONSTANT_CLASS_P (exp))
8116 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
8117 if (modifier < EXPAND_SUM)
8118 result = force_operand (result, target);
8119 return result;
8122 /* Everything must be something allowed by is_gimple_addressable. */
8123 switch (TREE_CODE (exp))
8125 case INDIRECT_REF:
8126 /* This case will happen via recursion for &a->b. */
8127 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
8129 case MEM_REF:
8131 tree tem = TREE_OPERAND (exp, 0);
8132 if (!integer_zerop (TREE_OPERAND (exp, 1)))
8133 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
8134 return expand_expr (tem, target, tmode, modifier);
8137 case TARGET_MEM_REF:
8138 return addr_for_mem_ref (exp, as, true);
8140 case CONST_DECL:
8141 /* Expand the initializer like constants above. */
8142 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
8143 0, modifier), 0);
8144 if (modifier < EXPAND_SUM)
8145 result = force_operand (result, target);
8146 return result;
8148 case REALPART_EXPR:
8149 /* The real part of the complex number is always first, therefore
8150 the address is the same as the address of the parent object. */
8151 offset = 0;
8152 bitpos = 0;
8153 inner = TREE_OPERAND (exp, 0);
8154 break;
8156 case IMAGPART_EXPR:
8157 /* The imaginary part of the complex number is always second.
8158 The expression is therefore always offset by the size of the
8159 scalar type. */
8160 offset = 0;
8161 bitpos = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (exp)));
8162 inner = TREE_OPERAND (exp, 0);
8163 break;
8165 case COMPOUND_LITERAL_EXPR:
8166 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
8167 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
8168 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
8169 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
8170 the initializers aren't gimplified. */
8171 if (COMPOUND_LITERAL_EXPR_DECL (exp)
8172 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
8173 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
8174 target, tmode, modifier, as);
8175 /* FALLTHRU */
8176 default:
8177 /* If the object is a DECL, then expand it for its rtl. Don't bypass
8178 expand_expr, as that can have various side effects; LABEL_DECLs for
8179 example, may not have their DECL_RTL set yet. Expand the rtl of
8180 CONSTRUCTORs too, which should yield a memory reference for the
8181 constructor's contents. Assume language specific tree nodes can
8182 be expanded in some interesting way. */
8183 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
8184 if (DECL_P (exp)
8185 || TREE_CODE (exp) == CONSTRUCTOR
8186 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
8188 result = expand_expr (exp, target, tmode,
8189 modifier == EXPAND_INITIALIZER
8190 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
8192 /* If the DECL isn't in memory, then the DECL wasn't properly
8193 marked TREE_ADDRESSABLE, which will be either a front-end
8194 or a tree optimizer bug. */
8196 gcc_assert (MEM_P (result));
8197 result = XEXP (result, 0);
8199 /* ??? Is this needed anymore? */
8200 if (DECL_P (exp))
8201 TREE_USED (exp) = 1;
8203 if (modifier != EXPAND_INITIALIZER
8204 && modifier != EXPAND_CONST_ADDRESS
8205 && modifier != EXPAND_SUM)
8206 result = force_operand (result, target);
8207 return result;
8210 /* Pass FALSE as the last argument to get_inner_reference although
8211 we are expanding to RTL. The rationale is that we know how to
8212 handle "aligning nodes" here: we can just bypass them because
8213 they won't change the final object whose address will be returned
8214 (they actually exist only for that purpose). */
8215 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
8216 &unsignedp, &reversep, &volatilep);
8217 break;
8220 /* We must have made progress. */
8221 gcc_assert (inner != exp);
8223 subtarget = offset || maybe_ne (bitpos, 0) ? NULL_RTX : target;
8224 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
8225 inner alignment, force the inner to be sufficiently aligned. */
8226 if (CONSTANT_CLASS_P (inner)
8227 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
8229 inner = copy_node (inner);
8230 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
8231 SET_TYPE_ALIGN (TREE_TYPE (inner), TYPE_ALIGN (TREE_TYPE (exp)));
8232 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
8234 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
8236 if (offset)
8238 rtx tmp;
8240 if (modifier != EXPAND_NORMAL)
8241 result = force_operand (result, NULL);
8242 tmp = expand_expr (offset, NULL_RTX, tmode,
8243 modifier == EXPAND_INITIALIZER
8244 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
8246 /* expand_expr is allowed to return an object in a mode other
8247 than TMODE. If it did, we need to convert. */
8248 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
8249 tmp = convert_modes (tmode, GET_MODE (tmp),
8250 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
8251 result = convert_memory_address_addr_space (tmode, result, as);
8252 tmp = convert_memory_address_addr_space (tmode, tmp, as);
8254 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8255 result = simplify_gen_binary (PLUS, tmode, result, tmp);
8256 else
8258 subtarget = maybe_ne (bitpos, 0) ? NULL_RTX : target;
8259 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
8260 1, OPTAB_LIB_WIDEN);
8264 if (maybe_ne (bitpos, 0))
8266 /* Someone beforehand should have rejected taking the address
8267 of an object that isn't byte-aligned. */
8268 poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT);
8269 result = convert_memory_address_addr_space (tmode, result, as);
8270 result = plus_constant (tmode, result, bytepos);
8271 if (modifier < EXPAND_SUM)
8272 result = force_operand (result, target);
8275 return result;
8278 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
8279 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
8281 static rtx
8282 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
8283 enum expand_modifier modifier)
8285 addr_space_t as = ADDR_SPACE_GENERIC;
8286 scalar_int_mode address_mode = Pmode;
8287 scalar_int_mode pointer_mode = ptr_mode;
8288 machine_mode rmode;
8289 rtx result;
8291 /* Target mode of VOIDmode says "whatever's natural". */
8292 if (tmode == VOIDmode)
8293 tmode = TYPE_MODE (TREE_TYPE (exp));
8295 if (POINTER_TYPE_P (TREE_TYPE (exp)))
8297 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
8298 address_mode = targetm.addr_space.address_mode (as);
8299 pointer_mode = targetm.addr_space.pointer_mode (as);
8302 /* We can get called with some Weird Things if the user does silliness
8303 like "(short) &a". In that case, convert_memory_address won't do
8304 the right thing, so ignore the given target mode. */
8305 scalar_int_mode new_tmode = (tmode == pointer_mode
8306 ? pointer_mode
8307 : address_mode);
8309 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
8310 new_tmode, modifier, as);
8312 /* Despite expand_expr's claims about ignoring TMODE when it is not
8313 strictly convenient, stuff breaks if we don't honor it. Note
8314 that combined with the above, we only do this for pointer modes. */
8315 rmode = GET_MODE (result);
8316 if (rmode == VOIDmode)
8317 rmode = new_tmode;
8318 if (rmode != new_tmode)
8319 result = convert_memory_address_addr_space (new_tmode, result, as);
8321 return result;
8324 /* Generate code for computing CONSTRUCTOR EXP.
8325 An rtx for the computed value is returned. If AVOID_TEMP_MEM
8326 is TRUE, instead of creating a temporary variable in memory
8327 NULL is returned and the caller needs to handle it differently. */
8329 static rtx
8330 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
8331 bool avoid_temp_mem)
8333 tree type = TREE_TYPE (exp);
8334 machine_mode mode = TYPE_MODE (type);
8336 /* Try to avoid creating a temporary at all. This is possible
8337 if all of the initializer is zero.
8338 FIXME: try to handle all [0..255] initializers we can handle
8339 with memset. */
8340 if (TREE_STATIC (exp)
8341 && !TREE_ADDRESSABLE (exp)
8342 && target != 0 && mode == BLKmode
8343 && all_zeros_p (exp))
8345 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
8346 return target;
8349 /* All elts simple constants => refer to a constant in memory. But
8350 if this is a non-BLKmode mode, let it store a field at a time
8351 since that should make a CONST_INT, CONST_WIDE_INT or
8352 CONST_DOUBLE when we fold. Likewise, if we have a target we can
8353 use, it is best to store directly into the target unless the type
8354 is large enough that memcpy will be used. If we are making an
8355 initializer and all operands are constant, put it in memory as
8356 well.
8358 FIXME: Avoid trying to fill vector constructors piece-meal.
8359 Output them with output_constant_def below unless we're sure
8360 they're zeros. This should go away when vector initializers
8361 are treated like VECTOR_CST instead of arrays. */
8362 if ((TREE_STATIC (exp)
8363 && ((mode == BLKmode
8364 && ! (target != 0 && safe_from_p (target, exp, 1)))
8365 || TREE_ADDRESSABLE (exp)
8366 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
8367 && (! can_move_by_pieces
8368 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
8369 TYPE_ALIGN (type)))
8370 && ! mostly_zeros_p (exp))))
8371 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
8372 && TREE_CONSTANT (exp)))
8374 rtx constructor;
8376 if (avoid_temp_mem)
8377 return NULL_RTX;
8379 constructor = expand_expr_constant (exp, 1, modifier);
8381 if (modifier != EXPAND_CONST_ADDRESS
8382 && modifier != EXPAND_INITIALIZER
8383 && modifier != EXPAND_SUM)
8384 constructor = validize_mem (constructor);
8386 return constructor;
8389 /* Handle calls that pass values in multiple non-contiguous
8390 locations. The Irix 6 ABI has examples of this. */
8391 if (target == 0 || ! safe_from_p (target, exp, 1)
8392 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM
8393 /* Also make a temporary if the store is to volatile memory, to
8394 avoid individual accesses to aggregate members. */
8395 || (GET_CODE (target) == MEM
8396 && MEM_VOLATILE_P (target)
8397 && !TREE_ADDRESSABLE (TREE_TYPE (exp))))
8399 if (avoid_temp_mem)
8400 return NULL_RTX;
8402 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
8405 store_constructor (exp, target, 0, int_expr_size (exp), false);
8406 return target;
8410 /* expand_expr: generate code for computing expression EXP.
8411 An rtx for the computed value is returned. The value is never null.
8412 In the case of a void EXP, const0_rtx is returned.
8414 The value may be stored in TARGET if TARGET is nonzero.
8415 TARGET is just a suggestion; callers must assume that
8416 the rtx returned may not be the same as TARGET.
8418 If TARGET is CONST0_RTX, it means that the value will be ignored.
8420 If TMODE is not VOIDmode, it suggests generating the
8421 result in mode TMODE. But this is done only when convenient.
8422 Otherwise, TMODE is ignored and the value is generated in its natural mode.
8423 TMODE is just a suggestion; callers must assume that
8424 the rtx returned may not have mode TMODE.
8426 Note that TARGET may have neither TMODE nor MODE. In that case, it
8427 probably will not be used.
8429 If MODIFIER is EXPAND_SUM then when EXP is an addition
8430 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
8431 or a nest of (PLUS ...) and (MINUS ...) where the terms are
8432 products as above, or REG or MEM, or constant.
8433 Ordinarily in such cases we would output mul or add instructions
8434 and then return a pseudo reg containing the sum.
8436 EXPAND_INITIALIZER is much like EXPAND_SUM except that
8437 it also marks a label as absolutely required (it can't be dead).
8438 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
8439 This is used for outputting expressions used in initializers.
8441 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
8442 with a constant address even if that address is not normally legitimate.
8443 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
8445 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
8446 a call parameter. Such targets require special care as we haven't yet
8447 marked TARGET so that it's safe from being trashed by libcalls. We
8448 don't want to use TARGET for anything but the final result;
8449 Intermediate values must go elsewhere. Additionally, calls to
8450 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
8452 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
8453 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
8454 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
8455 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
8456 recursively.
8457 If the result can be stored at TARGET, and ALT_RTL is non-NULL,
8458 then *ALT_RTL is set to TARGET (before legitimization).
8460 If INNER_REFERENCE_P is true, we are expanding an inner reference.
8461 In this case, we don't adjust a returned MEM rtx that wouldn't be
8462 sufficiently aligned for its mode; instead, it's up to the caller
8463 to deal with it afterwards. This is used to make sure that unaligned
8464 base objects for which out-of-bounds accesses are supported, for
8465 example record types with trailing arrays, aren't realigned behind
8466 the back of the caller.
8467 The normal operating mode is to pass FALSE for this parameter. */
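/* As an illustration of the modifiers: expanding the address
computation P + 4 with EXPAND_SUM may simply return
(plus (reg) (const_int 4)) without emitting an add, leaving the caller
free to fold the addition into a memory address, whereas EXPAND_NORMAL
would generally return a pseudo already holding the sum. */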
8470 expand_expr_real (tree exp, rtx target, machine_mode tmode,
8471 enum expand_modifier modifier, rtx *alt_rtl,
8472 bool inner_reference_p)
8474 rtx ret;
8476 /* Handle ERROR_MARK before anybody tries to access its type. */
8477 if (TREE_CODE (exp) == ERROR_MARK
8478 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
8480 ret = CONST0_RTX (tmode);
8481 return ret ? ret : const0_rtx;
8484 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
8485 inner_reference_p);
8486 return ret;
8489 /* Try to expand the conditional expression which is represented by
8490 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
8491 return the rtl reg which represents the result. Otherwise return
8492 NULL_RTX. */
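/* For example, for x = a < b ? c : d on a target with conditional
moves, this emits a compare of a and b followed by a conditional move
selecting c or d, avoiding the branch that the generic COND_EXPR
expansion would create. */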
8494 static rtx
8495 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
8496 tree treeop1 ATTRIBUTE_UNUSED,
8497 tree treeop2 ATTRIBUTE_UNUSED)
8499 rtx insn;
8500 rtx op00, op01, op1, op2;
8501 enum rtx_code comparison_code;
8502 machine_mode comparison_mode;
8503 gimple *srcstmt;
8504 rtx temp;
8505 tree type = TREE_TYPE (treeop1);
8506 int unsignedp = TYPE_UNSIGNED (type);
8507 machine_mode mode = TYPE_MODE (type);
8508 machine_mode orig_mode = mode;
8509 static bool expanding_cond_expr_using_cmove = false;
8511 /* Conditional move expansion can end up TERing two operands which,
8512 when recursively hitting conditional expressions, can result in
8513 exponential behavior if the cmove expansion ultimately fails.
8514 It's hardly profitable to TER a cmove into a cmove so avoid doing
8515 that by failing early if we end up recursing. */
8516 if (expanding_cond_expr_using_cmove)
8517 return NULL_RTX;
8519 /* If we cannot do a conditional move on the mode, try doing it
8520 with the promoted mode. */
8521 if (!can_conditionally_move_p (mode))
8523 mode = promote_mode (type, mode, &unsignedp);
8524 if (!can_conditionally_move_p (mode))
8525 return NULL_RTX;
8526 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
8528 else
8529 temp = assign_temp (type, 0, 1);
8531 expanding_cond_expr_using_cmove = true;
8532 start_sequence ();
8533 expand_operands (treeop1, treeop2,
8534 temp, &op1, &op2, EXPAND_NORMAL);
8536 if (TREE_CODE (treeop0) == SSA_NAME
8537 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
8539 type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
8540 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
8541 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
8542 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
8543 comparison_mode = TYPE_MODE (type);
8544 unsignedp = TYPE_UNSIGNED (type);
8545 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8547 else if (COMPARISON_CLASS_P (treeop0))
8549 type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8550 enum tree_code cmpcode = TREE_CODE (treeop0);
8551 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8552 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8553 unsignedp = TYPE_UNSIGNED (type);
8554 comparison_mode = TYPE_MODE (type);
8555 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8557 else
8559 op00 = expand_normal (treeop0);
8560 op01 = const0_rtx;
8561 comparison_code = NE;
8562 comparison_mode = GET_MODE (op00);
8563 if (comparison_mode == VOIDmode)
8564 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8566 expanding_cond_expr_using_cmove = false;
8568 if (GET_MODE (op1) != mode)
8569 op1 = gen_lowpart (mode, op1);
8571 if (GET_MODE (op2) != mode)
8572 op2 = gen_lowpart (mode, op2);
8574 /* Try to emit the conditional move. */
8575 insn = emit_conditional_move (temp, comparison_code,
8576 op00, op01, comparison_mode,
8577 op1, op2, mode,
8578 unsignedp);
8580 /* If we could do the conditional move, emit the sequence,
8581 and return. */
8582 if (insn)
8584 rtx_insn *seq = get_insns ();
8585 end_sequence ();
8586 emit_insn (seq);
8587 return convert_modes (orig_mode, mode, temp, 0);
8590 /* Otherwise discard the sequence and fall back to code with
8591 branches. */
8592 end_sequence ();
8593 return NULL_RTX;
8596 /* A helper function for expand_expr_real_2 to be used with a
8597 misaligned mem_ref TEMP. Assume an unsigned type if UNSIGNEDP
8598 is nonzero, with alignment ALIGN in bits.
8599 Store the value at TARGET if possible (if TARGET is nonzero).
8600 Regardless of TARGET, we return the rtx for where the value is placed.
8601 If the result can be stored at TARGET, and ALT_RTL is non-NULL,
8602 then *ALT_RTL is set to TARGET (before legitimization). */
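/* A typical case is reading an int member of a packed structure on a
strict-alignment target: if the target provides a movmisalign pattern
we go through that, otherwise we fall back to extract_bit_field, and on
targets where unaligned accesses are cheap TEMP is returned unchanged. */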
8604 static rtx
8605 expand_misaligned_mem_ref (rtx temp, machine_mode mode, int unsignedp,
8606 unsigned int align, rtx target, rtx *alt_rtl)
8608 enum insn_code icode;
8610 if ((icode = optab_handler (movmisalign_optab, mode))
8611 != CODE_FOR_nothing)
8613 class expand_operand ops[2];
8615 /* We've already validated the memory, and we're creating a
8616 new pseudo destination. The predicates really can't fail,
8617 nor can the generator. */
8618 create_output_operand (&ops[0], NULL_RTX, mode);
8619 create_fixed_operand (&ops[1], temp);
8620 expand_insn (icode, 2, ops);
8621 temp = ops[0].value;
8623 else if (targetm.slow_unaligned_access (mode, align))
8624 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
8625 0, unsignedp, target,
8626 mode, mode, false, alt_rtl);
8627 return temp;
8631 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8632 enum expand_modifier modifier)
8634 rtx op0, op1, op2, temp;
8635 rtx_code_label *lab;
8636 tree type;
8637 int unsignedp;
8638 machine_mode mode;
8639 scalar_int_mode int_mode;
8640 enum tree_code code = ops->code;
8641 optab this_optab;
8642 rtx subtarget, original_target;
8643 int ignore;
8644 bool reduce_bit_field;
8645 location_t loc = ops->location;
8646 tree treeop0, treeop1, treeop2;
8647 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8648 ? reduce_to_bit_field_precision ((expr), \
8649 target, \
8650 type) \
8651 : (expr))
8653 type = ops->type;
8654 mode = TYPE_MODE (type);
8655 unsignedp = TYPE_UNSIGNED (type);
8657 treeop0 = ops->op0;
8658 treeop1 = ops->op1;
8659 treeop2 = ops->op2;
8661 /* We should be called only on simple (binary or unary) expressions,
8662 exactly those that are valid in gimple expressions that aren't
8663 GIMPLE_SINGLE_RHS (or invalid). */
8664 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8665 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8666 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8668 ignore = (target == const0_rtx
8669 || ((CONVERT_EXPR_CODE_P (code)
8670 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8671 && TREE_CODE (type) == VOID_TYPE));
8673 /* We should be called only if we need the result. */
8674 gcc_assert (!ignore);
8676 /* An operation in what may be a bit-field type needs the
8677 result to be reduced to the precision of the bit-field type,
8678 which is narrower than that of the type's mode. */
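/* For instance, an addition in a bit-field type of precision 3 is
carried out in the full width of the type's mode (say QImode) and the
result must then be masked or sign-extended back down to 3 bits. */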
8679 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8680 && !type_has_mode_precision_p (type));
8682 if (reduce_bit_field
8683 && (modifier == EXPAND_STACK_PARM
8684 || (target && GET_MODE (target) != mode)))
8685 target = 0;
8687 /* Use subtarget as the target for operand 0 of a binary operation. */
8688 subtarget = get_subtarget (target);
8689 original_target = target;
8691 switch (code)
8693 case NON_LVALUE_EXPR:
8694 case PAREN_EXPR:
8695 CASE_CONVERT:
8696 if (treeop0 == error_mark_node)
8697 return const0_rtx;
8699 if (TREE_CODE (type) == UNION_TYPE)
8701 tree valtype = TREE_TYPE (treeop0);
8703 /* If both input and output are BLKmode, this conversion isn't doing
8704 anything except possibly changing memory attribute. */
8705 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8707 rtx result = expand_expr (treeop0, target, tmode,
8708 modifier);
8710 result = copy_rtx (result);
8711 set_mem_attributes (result, type, 0);
8712 return result;
8715 if (target == 0)
8717 if (TYPE_MODE (type) != BLKmode)
8718 target = gen_reg_rtx (TYPE_MODE (type));
8719 else
8720 target = assign_temp (type, 1, 1);
8723 if (MEM_P (target))
8724 /* Store data into beginning of memory target. */
8725 store_expr (treeop0,
8726 adjust_address (target, TYPE_MODE (valtype), 0),
8727 modifier == EXPAND_STACK_PARM,
8728 false, TYPE_REVERSE_STORAGE_ORDER (type));
8730 else
8732 gcc_assert (REG_P (target)
8733 && !TYPE_REVERSE_STORAGE_ORDER (type));
8735 /* Store this field into a union of the proper type. */
8736 poly_uint64 op0_size
8737 = tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (treeop0)));
8738 poly_uint64 union_size = GET_MODE_BITSIZE (mode);
8739 store_field (target,
8740 /* The conversion must be constructed so that
8741 we know at compile time how many bits
8742 to preserve. */
8743 ordered_min (op0_size, union_size),
8744 0, 0, 0, TYPE_MODE (valtype), treeop0, 0,
8745 false, false);
8748 /* Return the entire union. */
8749 return target;
8752 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8754 op0 = expand_expr (treeop0, target, VOIDmode,
8755 modifier);
8757 /* If the signedness of the conversion differs and OP0 is
8758 a promoted SUBREG, clear that indication since we now
8759 have to do the proper extension. */
8760 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8761 && GET_CODE (op0) == SUBREG)
8762 SUBREG_PROMOTED_VAR_P (op0) = 0;
8764 return REDUCE_BIT_FIELD (op0);
8767 op0 = expand_expr (treeop0, NULL_RTX, mode,
8768 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8769 if (GET_MODE (op0) == mode)
8772 /* If OP0 is a constant, just convert it into the proper mode. */
8773 else if (CONSTANT_P (op0))
8775 tree inner_type = TREE_TYPE (treeop0);
8776 machine_mode inner_mode = GET_MODE (op0);
8778 if (inner_mode == VOIDmode)
8779 inner_mode = TYPE_MODE (inner_type);
8781 if (modifier == EXPAND_INITIALIZER)
8782 op0 = lowpart_subreg (mode, op0, inner_mode);
8783 else
8784 op0 = convert_modes (mode, inner_mode, op0,
8785 TYPE_UNSIGNED (inner_type));
8788 else if (modifier == EXPAND_INITIALIZER)
8789 op0 = gen_rtx_fmt_e (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8790 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8792 else if (target == 0)
8793 op0 = convert_to_mode (mode, op0,
8794 TYPE_UNSIGNED (TREE_TYPE
8795 (treeop0)));
8796 else
8798 convert_move (target, op0,
8799 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8800 op0 = target;
8803 return REDUCE_BIT_FIELD (op0);
8805 case ADDR_SPACE_CONVERT_EXPR:
8807 tree treeop0_type = TREE_TYPE (treeop0);
8809 gcc_assert (POINTER_TYPE_P (type));
8810 gcc_assert (POINTER_TYPE_P (treeop0_type));
8812 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8813 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8815 /* Conversions between pointers to the same address space should
8816 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8817 gcc_assert (as_to != as_from);
8819 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8821 /* Ask target code to handle conversion between pointers
8822 to overlapping address spaces. */
8823 if (targetm.addr_space.subset_p (as_to, as_from)
8824 || targetm.addr_space.subset_p (as_from, as_to))
8826 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8828 else
8830 /* For disjoint address spaces, converting anything but a null
8831 pointer invokes undefined behavior. We truncate or extend the
8832 value as if we'd converted via integers, which handles 0 as
8833 required, and all others as the programmer likely expects. */
8834 #ifndef POINTERS_EXTEND_UNSIGNED
8835 const int POINTERS_EXTEND_UNSIGNED = 1;
8836 #endif
8837 op0 = convert_modes (mode, TYPE_MODE (treeop0_type),
8838 op0, POINTERS_EXTEND_UNSIGNED);
8840 gcc_assert (op0);
8841 return op0;
8844 case POINTER_PLUS_EXPR:
8845 /* Even though the sizetype mode and the pointer's mode can be different
8846 expand is able to handle this correctly and get the correct result out
8847 of the PLUS_EXPR code. */
8848 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8849 if sizetype precision is smaller than pointer precision. */
8850 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8851 treeop1 = fold_convert_loc (loc, type,
8852 fold_convert_loc (loc, ssizetype,
8853 treeop1));
8854 /* If sizetype precision is larger than pointer precision, truncate the
8855 offset to have matching modes. */
8856 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8857 treeop1 = fold_convert_loc (loc, type, treeop1);
8858 /* FALLTHRU */
8860 case PLUS_EXPR:
8861 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8862 something else, make sure we add the register to the constant and
8863 then to the other thing. This case can occur during strength
8864 reduction and doing it this way will produce better code if the
8865 frame pointer or argument pointer is eliminated.
8867 fold-const.c will ensure that the constant is always in the inner
8868 PLUS_EXPR, so the only case we need to do anything about is if
8869 sp, ap, or fp is our second argument, in which case we must swap
8870 the innermost first argument and our second argument. */
8872 if (TREE_CODE (treeop0) == PLUS_EXPR
8873 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8874 && VAR_P (treeop1)
8875 && (DECL_RTL (treeop1) == frame_pointer_rtx
8876 || DECL_RTL (treeop1) == stack_pointer_rtx
8877 || DECL_RTL (treeop1) == arg_pointer_rtx))
8879 gcc_unreachable ();
8882 /* If the result is to be ptr_mode and we are adding an integer to
8883 something, we might be forming a constant. So try to use
8884 plus_constant. If it produces a sum and we can't accept it,
8885 use force_operand. This allows P = &ARR[const] to generate
8886 efficient code on machines where a SYMBOL_REF is not a valid
8887 address.
8889 If this is an EXPAND_SUM call, always return the sum. */
8890 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8891 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8893 if (modifier == EXPAND_STACK_PARM)
8894 target = 0;
8895 if (TREE_CODE (treeop0) == INTEGER_CST
8896 && HWI_COMPUTABLE_MODE_P (mode)
8897 && TREE_CONSTANT (treeop1))
8899 rtx constant_part;
8900 HOST_WIDE_INT wc;
8901 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8903 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8904 EXPAND_SUM);
8905 /* Use wi::shwi to ensure that the constant is
8906 truncated according to the mode of OP1, then sign extended
8907 to a HOST_WIDE_INT. Using the constant directly can result
8908 in non-canonical RTL in a 64x32 cross compile. */
8909 wc = TREE_INT_CST_LOW (treeop0);
8910 constant_part =
8911 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8912 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8913 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8914 op1 = force_operand (op1, target);
8915 return REDUCE_BIT_FIELD (op1);
8918 else if (TREE_CODE (treeop1) == INTEGER_CST
8919 && HWI_COMPUTABLE_MODE_P (mode)
8920 && TREE_CONSTANT (treeop0))
8922 rtx constant_part;
8923 HOST_WIDE_INT wc;
8924 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8926 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8927 (modifier == EXPAND_INITIALIZER
8928 ? EXPAND_INITIALIZER : EXPAND_SUM));
8929 if (! CONSTANT_P (op0))
8931 op1 = expand_expr (treeop1, NULL_RTX,
8932 VOIDmode, modifier);
8933 /* Return a PLUS if modifier says it's OK. */
8934 if (modifier == EXPAND_SUM
8935 || modifier == EXPAND_INITIALIZER)
8936 return simplify_gen_binary (PLUS, mode, op0, op1);
8937 goto binop2;
8939 /* Use wi::shwi to ensure that the constant is
8940 truncated according to the mode of OP1, then sign extended
8941 to a HOST_WIDE_INT. Using the constant directly can result
8942 in non-canonical RTL in a 64x32 cross compile. */
8943 wc = TREE_INT_CST_LOW (treeop1);
8944 constant_part
8945 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8946 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8947 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8948 op0 = force_operand (op0, target);
8949 return REDUCE_BIT_FIELD (op0);
8953 /* Use TER to expand pointer addition of a negated value
8954 as pointer subtraction. */
8955 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8956 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8957 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8958 && TREE_CODE (treeop1) == SSA_NAME
8959 && TYPE_MODE (TREE_TYPE (treeop0))
8960 == TYPE_MODE (TREE_TYPE (treeop1)))
8962 gimple *def = get_def_for_expr (treeop1, NEGATE_EXPR);
8963 if (def)
8965 treeop1 = gimple_assign_rhs1 (def);
8966 code = MINUS_EXPR;
8967 goto do_minus;
8971 /* No sense saving up arithmetic to be done
8972 if it's all in the wrong mode to form part of an address.
8973 And force_operand won't know whether to sign-extend or
8974 zero-extend. */
8975 if (modifier != EXPAND_INITIALIZER
8976 && (modifier != EXPAND_SUM || mode != ptr_mode))
8978 expand_operands (treeop0, treeop1,
8979 subtarget, &op0, &op1, modifier);
8980 if (op0 == const0_rtx)
8981 return op1;
8982 if (op1 == const0_rtx)
8983 return op0;
8984 goto binop2;
8987 expand_operands (treeop0, treeop1,
8988 subtarget, &op0, &op1, modifier);
8989 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8991 case MINUS_EXPR:
8992 case POINTER_DIFF_EXPR:
8993 do_minus:
8994 /* For initializers, we are allowed to return a MINUS of two
8995 symbolic constants. Here we handle all cases when both operands
8996 are constant. */
8997 /* Handle difference of two symbolic constants,
8998 for the sake of an initializer. */
8999 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9000 && really_constant_p (treeop0)
9001 && really_constant_p (treeop1))
9003 expand_operands (treeop0, treeop1,
9004 NULL_RTX, &op0, &op1, modifier);
9005 return simplify_gen_binary (MINUS, mode, op0, op1);
9008 /* No sense saving up arithmetic to be done
9009 if it's all in the wrong mode to form part of an address.
9010 And force_operand won't know whether to sign-extend or
9011 zero-extend. */
9012 if (modifier != EXPAND_INITIALIZER
9013 && (modifier != EXPAND_SUM || mode != ptr_mode))
9014 goto binop;
9016 expand_operands (treeop0, treeop1,
9017 subtarget, &op0, &op1, modifier);
9019 /* Convert A - const to A + (-const). */
9020 if (CONST_INT_P (op1))
9022 op1 = negate_rtx (mode, op1);
9023 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
9026 goto binop2;
9028 case WIDEN_MULT_PLUS_EXPR:
9029 case WIDEN_MULT_MINUS_EXPR:
9030 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9031 op2 = expand_normal (treeop2);
9032 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9033 target, unsignedp);
9034 return target;
9036 case WIDEN_MULT_EXPR:
9037 /* If first operand is constant, swap them.
9038 Thus the following special case checks need only
9039 check the second operand. */
9040 if (TREE_CODE (treeop0) == INTEGER_CST)
9041 std::swap (treeop0, treeop1);
9043 /* First, check if we have a multiplication of one signed and one
9044 unsigned operand. */
9045 if (TREE_CODE (treeop1) != INTEGER_CST
9046 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
9047 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
9049 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
9050 this_optab = usmul_widen_optab;
9051 if (find_widening_optab_handler (this_optab, mode, innermode)
9052 != CODE_FOR_nothing)
9054 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
9055 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
9056 EXPAND_NORMAL);
9057 else
9058 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
9059 EXPAND_NORMAL);
9060 /* op0 and op1 might still be constant, despite the above
9061 != INTEGER_CST check. Handle it. */
9062 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
9064 op0 = convert_modes (mode, innermode, op0, true);
9065 op1 = convert_modes (mode, innermode, op1, false);
9066 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
9067 target, unsignedp));
9069 goto binop3;
9072 /* Check for a multiplication with matching signedness. */
9073 else if ((TREE_CODE (treeop1) == INTEGER_CST
9074 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
9075 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
9076 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
9078 tree op0type = TREE_TYPE (treeop0);
9079 machine_mode innermode = TYPE_MODE (op0type);
9080 bool zextend_p = TYPE_UNSIGNED (op0type);
9081 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
9082 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
9084 if (TREE_CODE (treeop0) != INTEGER_CST)
9086 if (find_widening_optab_handler (this_optab, mode, innermode)
9087 != CODE_FOR_nothing)
9089 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
9090 EXPAND_NORMAL);
9091 /* op0 and op1 might still be constant, despite the above
9092 != INTEGER_CST check. Handle it. */
9093 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
9095 widen_mult_const:
9096 op0 = convert_modes (mode, innermode, op0, zextend_p);
9097 op1
9098 = convert_modes (mode, innermode, op1,
9099 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
9100 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
9101 target,
9102 unsignedp));
9104 temp = expand_widening_mult (mode, op0, op1, target,
9105 unsignedp, this_optab);
9106 return REDUCE_BIT_FIELD (temp);
9108 if (find_widening_optab_handler (other_optab, mode, innermode)
9109 != CODE_FOR_nothing
9110 && innermode == word_mode)
9112 rtx htem, hipart;
9113 op0 = expand_normal (treeop0);
9114 op1 = expand_normal (treeop1);
9115 /* op0 and op1 might be constants, despite the above
9116 != INTEGER_CST check. Handle it. */
9117 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
9118 goto widen_mult_const;
9119 temp = expand_binop (mode, other_optab, op0, op1, target,
9120 unsignedp, OPTAB_LIB_WIDEN);
9121 hipart = gen_highpart (word_mode, temp);
9122 htem = expand_mult_highpart_adjust (word_mode, hipart,
9123 op0, op1, hipart,
9124 zextend_p);
9125 if (htem != hipart)
9126 emit_move_insn (hipart, htem);
9127 return REDUCE_BIT_FIELD (temp);
9131 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
9132 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
9133 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
9134 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
9136 case MULT_EXPR:
9137 /* If this is a fixed-point operation, then we cannot use the code
9138 below because "expand_mult" doesn't support sat/no-sat fixed-point
9139 multiplications. */
9140 if (ALL_FIXED_POINT_MODE_P (mode))
9141 goto binop;
9143 /* If first operand is constant, swap them.
9144 Thus the following special case checks need only
9145 check the second operand. */
9146 if (TREE_CODE (treeop0) == INTEGER_CST)
9147 std::swap (treeop0, treeop1);
9149 /* Attempt to return something suitable for generating an
9150 indexed address, for machines that support that. */
9152 if (modifier == EXPAND_SUM && mode == ptr_mode
9153 && tree_fits_shwi_p (treeop1))
9155 tree exp1 = treeop1;
9157 op0 = expand_expr (treeop0, subtarget, VOIDmode,
9158 EXPAND_SUM);
9160 if (!REG_P (op0))
9161 op0 = force_operand (op0, NULL_RTX);
9162 if (!REG_P (op0))
9163 op0 = copy_to_mode_reg (mode, op0);
9165 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
9166 gen_int_mode (tree_to_shwi (exp1),
9167 TYPE_MODE (TREE_TYPE (exp1)))));
9170 if (modifier == EXPAND_STACK_PARM)
9171 target = 0;
9173 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
9174 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
9176 case TRUNC_MOD_EXPR:
9177 case FLOOR_MOD_EXPR:
9178 case CEIL_MOD_EXPR:
9179 case ROUND_MOD_EXPR:
9181 case TRUNC_DIV_EXPR:
9182 case FLOOR_DIV_EXPR:
9183 case CEIL_DIV_EXPR:
9184 case ROUND_DIV_EXPR:
9185 case EXACT_DIV_EXPR:
9187 /* If this is a fixed-point operation, then we cannot use the code
9188 below because "expand_divmod" doesn't support sat/no-sat fixed-point
9189 divisions. */
9190 if (ALL_FIXED_POINT_MODE_P (mode))
9191 goto binop;
9193 if (modifier == EXPAND_STACK_PARM)
9194 target = 0;
9195 /* Possible optimization: compute the dividend with EXPAND_SUM;
9196 then, if the divisor is constant, we can optimize the case
9197 where some terms of the dividend have coefficients divisible by it. */
9198 expand_operands (treeop0, treeop1,
9199 subtarget, &op0, &op1, EXPAND_NORMAL);
9200 bool mod_p = code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR
9201 || code == CEIL_MOD_EXPR || code == ROUND_MOD_EXPR;
9202 if (SCALAR_INT_MODE_P (mode)
9203 && optimize >= 2
9204 && get_range_pos_neg (treeop0) == 1
9205 && get_range_pos_neg (treeop1) == 1)
9207 /* If both arguments are known to be positive when interpreted
9208 as signed, we can expand it as both signed and unsigned
9209 division or modulo. Choose the cheaper sequence in that case. */
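/* Illustrative case: if x_1 and y_2 are signed but their ranges are
   known to be non-negative (e.g. from earlier tests), x_1 / y_2 can
   be emitted either as a signed or as an unsigned division; on many
   targets the unsigned form avoids the extra sign-handling insns, and
   the seq_cost comparison below picks whichever is cheaper.  */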
9210 bool speed_p = optimize_insn_for_speed_p ();
9211 do_pending_stack_adjust ();
9212 start_sequence ();
9213 rtx uns_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 1);
9214 rtx_insn *uns_insns = get_insns ();
9215 end_sequence ();
9216 start_sequence ();
9217 rtx sgn_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 0);
9218 rtx_insn *sgn_insns = get_insns ();
9219 end_sequence ();
9220 unsigned uns_cost = seq_cost (uns_insns, speed_p);
9221 unsigned sgn_cost = seq_cost (sgn_insns, speed_p);
9223 /* If the costs are the same, use the other factor
9224 as a tie breaker. */
9225 if (uns_cost == sgn_cost)
9227 uns_cost = seq_cost (uns_insns, !speed_p);
9228 sgn_cost = seq_cost (sgn_insns, !speed_p);
9231 if (uns_cost < sgn_cost || (uns_cost == sgn_cost && unsignedp))
9233 emit_insn (uns_insns);
9234 return uns_ret;
9236 emit_insn (sgn_insns);
9237 return sgn_ret;
9239 return expand_divmod (mod_p, code, mode, op0, op1, target, unsignedp);
9241 case RDIV_EXPR:
9242 goto binop;
9244 case MULT_HIGHPART_EXPR:
9245 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
9246 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
9247 gcc_assert (temp);
9248 return temp;
9250 case FIXED_CONVERT_EXPR:
9251 op0 = expand_normal (treeop0);
9252 if (target == 0 || modifier == EXPAND_STACK_PARM)
9253 target = gen_reg_rtx (mode);
9255 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
9256 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
9257 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
9258 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
9259 else
9260 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
9261 return target;
9263 case FIX_TRUNC_EXPR:
9264 op0 = expand_normal (treeop0);
9265 if (target == 0 || modifier == EXPAND_STACK_PARM)
9266 target = gen_reg_rtx (mode);
9267 expand_fix (target, op0, unsignedp);
9268 return target;
9270 case FLOAT_EXPR:
9271 op0 = expand_normal (treeop0);
9272 if (target == 0 || modifier == EXPAND_STACK_PARM)
9273 target = gen_reg_rtx (mode);
9274 /* expand_float can't figure out what to do if FROM has VOIDmode.
9275 So give it the correct mode. With -O, cse will optimize this. */
9276 if (GET_MODE (op0) == VOIDmode)
9277 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
9278 op0);
9279 expand_float (target, op0,
9280 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9281 return target;
9283 case NEGATE_EXPR:
9284 op0 = expand_expr (treeop0, subtarget,
9285 VOIDmode, EXPAND_NORMAL);
9286 if (modifier == EXPAND_STACK_PARM)
9287 target = 0;
9288 temp = expand_unop (mode,
9289 optab_for_tree_code (NEGATE_EXPR, type,
9290 optab_default),
9291 op0, target, 0);
9292 gcc_assert (temp);
9293 return REDUCE_BIT_FIELD (temp);
9295 case ABS_EXPR:
9296 case ABSU_EXPR:
9297 op0 = expand_expr (treeop0, subtarget,
9298 VOIDmode, EXPAND_NORMAL);
9299 if (modifier == EXPAND_STACK_PARM)
9300 target = 0;
9302 /* ABS_EXPR is not valid for complex arguments. */
9303 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9304 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
9306 /* Unsigned abs is simply the operand. Testing here means we don't
9307 risk generating incorrect code below. */
9308 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
9309 return op0;
9311 return expand_abs (mode, op0, target, unsignedp,
9312 safe_from_p (target, treeop0, 1));
9314 case MAX_EXPR:
9315 case MIN_EXPR:
9316 target = original_target;
9317 if (target == 0
9318 || modifier == EXPAND_STACK_PARM
9319 || (MEM_P (target) && MEM_VOLATILE_P (target))
9320 || GET_MODE (target) != mode
9321 || (REG_P (target)
9322 && REGNO (target) < FIRST_PSEUDO_REGISTER))
9323 target = gen_reg_rtx (mode);
9324 expand_operands (treeop0, treeop1,
9325 target, &op0, &op1, EXPAND_NORMAL);
9327 /* First try to do it with a special MIN or MAX instruction.
9328 If that does not win, use a conditional jump to select the proper
9329 value. */
9330 this_optab = optab_for_tree_code (code, type, optab_default);
9331 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9332 OPTAB_WIDEN);
9333 if (temp != 0)
9334 return temp;
9336 if (VECTOR_TYPE_P (type))
9337 gcc_unreachable ();
9339 /* At this point, a MEM target is no longer useful; we will get better
9340 code without it. */
9342 if (! REG_P (target))
9343 target = gen_reg_rtx (mode);
9345 /* If op1 was placed in target, swap op0 and op1. */
9346 if (target != op0 && target == op1)
9347 std::swap (op0, op1);
9349 /* We generate better code and avoid problems with op1 mentioning
9350 target by forcing op1 into a pseudo if it isn't a constant. */
9351 if (! CONSTANT_P (op1))
9352 op1 = force_reg (mode, op1);
9355 enum rtx_code comparison_code;
9356 rtx cmpop1 = op1;
9358 if (code == MAX_EXPR)
9359 comparison_code = unsignedp ? GEU : GE;
9360 else
9361 comparison_code = unsignedp ? LEU : LE;
9363 /* Canonicalize to comparisons against 0. */
9364 if (op1 == const1_rtx)
9366 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
9367 or (a != 0 ? a : 1) for unsigned.
9368 For MIN we are safe converting (a <= 1 ? a : 1)
9369 into (a <= 0 ? a : 1) */
9370 cmpop1 = const0_rtx;
9371 if (code == MAX_EXPR)
9372 comparison_code = unsignedp ? NE : GT;
9374 if (op1 == constm1_rtx && !unsignedp)
9376 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
9377 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
9378 cmpop1 = const0_rtx;
9379 if (code == MIN_EXPR)
9380 comparison_code = LT;
9383 /* Use a conditional move if possible. */
9384 if (can_conditionally_move_p (mode))
9386 rtx insn;
9388 start_sequence ();
9390 /* Try to emit the conditional move. */
9391 insn = emit_conditional_move (target, comparison_code,
9392 op0, cmpop1, mode,
9393 op0, op1, mode,
9394 unsignedp);
9396 /* If we could do the conditional move, emit the sequence,
9397 and return. */
9398 if (insn)
9400 rtx_insn *seq = get_insns ();
9401 end_sequence ();
9402 emit_insn (seq);
9403 return target;
9406 /* Otherwise discard the sequence and fall back to code with
9407 branches. */
9408 end_sequence ();
9411 if (target != op0)
9412 emit_move_insn (target, op0);
9414 lab = gen_label_rtx ();
9415 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
9416 unsignedp, mode, NULL_RTX, NULL, lab,
9417 profile_probability::uninitialized ());
9419 emit_move_insn (target, op1);
9420 emit_label (lab);
9421 return target;
9423 case BIT_NOT_EXPR:
9424 op0 = expand_expr (treeop0, subtarget,
9425 VOIDmode, EXPAND_NORMAL);
9426 if (modifier == EXPAND_STACK_PARM)
9427 target = 0;
9428 /* In case we have to reduce the result to bitfield precision
9429 for unsigned bitfield expand this as XOR with a proper constant
9430 instead. */
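/* For example, for a 3-bit unsigned bit-field type, ~x becomes x ^ 7:
   the XOR both complements the value and keeps it reduced to the
   3-bit precision, so no separate truncation step is needed.  */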
9431 if (reduce_bit_field && TYPE_UNSIGNED (type))
9433 int_mode = SCALAR_INT_TYPE_MODE (type);
9434 wide_int mask = wi::mask (TYPE_PRECISION (type),
9435 false, GET_MODE_PRECISION (int_mode));
9437 temp = expand_binop (int_mode, xor_optab, op0,
9438 immed_wide_int_const (mask, int_mode),
9439 target, 1, OPTAB_LIB_WIDEN);
9441 else
9442 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
9443 gcc_assert (temp);
9444 return temp;
9446 /* ??? Can optimize bitwise operations with one arg constant.
9447 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
9448 and (a bitwise1 b) bitwise2 b (etc)
9449 but that is probably not worth while. */
9451 case BIT_AND_EXPR:
9452 case BIT_IOR_EXPR:
9453 case BIT_XOR_EXPR:
9454 goto binop;
9456 case LROTATE_EXPR:
9457 case RROTATE_EXPR:
9458 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
9459 || type_has_mode_precision_p (type));
9460 /* fall through */
9462 case LSHIFT_EXPR:
9463 case RSHIFT_EXPR:
9465 /* If this is a fixed-point operation, then we cannot use the code
9466 below because "expand_shift" doesn't support sat/no-sat fixed-point
9467 shifts. */
9468 if (ALL_FIXED_POINT_MODE_P (mode))
9469 goto binop;
9471 if (! safe_from_p (subtarget, treeop1, 1))
9472 subtarget = 0;
9473 if (modifier == EXPAND_STACK_PARM)
9474 target = 0;
9475 op0 = expand_expr (treeop0, subtarget,
9476 VOIDmode, EXPAND_NORMAL);
9478 /* Left shift optimization when shifting across the word_size boundary.
9480 If mode == GET_MODE_WIDER_MODE (word_mode), then normally
9481 there isn't a native instruction to support this wide-mode
9482 left shift. Consider the scenario below:
9484 Type A = (Type) B << C
9486 |< T >|
9487 | dest_high | dest_low |
9489 | word_size |
9491 If the shift amount C causes us to shift B across the word
9492 size boundary, i.e. part of B is shifted into the high half of
9493 the destination register and part of B remains in the low
9494 half, then GCC will use the following left shift expansion
9495 logic:
9497 1. Initialize dest_low to B.
9498 2. Initialize every bit of dest_high to the sign bit of B.
9499 3. Logically left shift dest_low by C bits to finalize dest_low.
9500 The value of dest_low before this shift is kept in a temp D.
9501 4. Logically left shift dest_high by C.
9502 5. Logically right shift D by (word_size - C).
9503 6. Or the results of 4 and 5 to finalize dest_high.
9505 However, by checking the gimple statements, if operand B
9506 comes from a sign extension, then we can simplify the above
9507 expansion logic into:
9509 1. dest_high = src_low >> (word_size - C).
9510 2. dest_low = src_low << C.
9512 A single arithmetic right shift covers everything that steps
9513 2, 4, 5 and 6 did, so the number of steps needed drops
9514 from 6 to 2.
9516 The case is similar for zero extension, except that we
9517 initialize dest_high to zero rather than copies of the sign
9518 bit from B. Furthermore, we need to use a logical right shift
9519 in this case.
9521 The choice of sign-extension versus zero-extension is
9522 determined entirely by whether or not B is signed and is
9523 independent of the current setting of unsignedp. */
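/* Worked example (values chosen purely for illustration): with 64-bit
   words and int_mode == TImode, let B be a DImode value sign-extended
   to TImode and C == 10.  The test below holds (10 + 64 >= 64), so
   instead of the six-step expansion we emit
     dest_high = src_low >> 54;   (arithmetic shift, 54 == 64 - 10)
     dest_low  = src_low << 10;
   which together produce the full 128-bit result.  */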
9525 temp = NULL_RTX;
9526 if (code == LSHIFT_EXPR
9527 && target
9528 && REG_P (target)
9529 && GET_MODE_2XWIDER_MODE (word_mode).exists (&int_mode)
9530 && mode == int_mode
9531 && TREE_CONSTANT (treeop1)
9532 && TREE_CODE (treeop0) == SSA_NAME)
9534 gimple *def = SSA_NAME_DEF_STMT (treeop0);
9535 if (is_gimple_assign (def)
9536 && gimple_assign_rhs_code (def) == NOP_EXPR)
9538 scalar_int_mode rmode = SCALAR_INT_TYPE_MODE
9539 (TREE_TYPE (gimple_assign_rhs1 (def)));
9541 if (GET_MODE_SIZE (rmode) < GET_MODE_SIZE (int_mode)
9542 && TREE_INT_CST_LOW (treeop1) < GET_MODE_BITSIZE (word_mode)
9543 && ((TREE_INT_CST_LOW (treeop1) + GET_MODE_BITSIZE (rmode))
9544 >= GET_MODE_BITSIZE (word_mode)))
9546 rtx_insn *seq, *seq_old;
9547 poly_uint64 high_off = subreg_highpart_offset (word_mode,
9548 int_mode);
9549 bool extend_unsigned
9550 = TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (def)));
9551 rtx low = lowpart_subreg (word_mode, op0, int_mode);
9552 rtx dest_low = lowpart_subreg (word_mode, target, int_mode);
9553 rtx dest_high = simplify_gen_subreg (word_mode, target,
9554 int_mode, high_off);
9555 HOST_WIDE_INT ramount = (BITS_PER_WORD
9556 - TREE_INT_CST_LOW (treeop1));
9557 tree rshift = build_int_cst (TREE_TYPE (treeop1), ramount);
9559 start_sequence ();
9560 /* dest_high = src_low >> (word_size - C). */
9561 temp = expand_variable_shift (RSHIFT_EXPR, word_mode, low,
9562 rshift, dest_high,
9563 extend_unsigned);
9564 if (temp != dest_high)
9565 emit_move_insn (dest_high, temp);
9567 /* dest_low = src_low << C. */
9568 temp = expand_variable_shift (LSHIFT_EXPR, word_mode, low,
9569 treeop1, dest_low, unsignedp);
9570 if (temp != dest_low)
9571 emit_move_insn (dest_low, temp);
9573 seq = get_insns ();
9574 end_sequence ();
9575 temp = target;
9577 if (have_insn_for (ASHIFT, int_mode))
9579 bool speed_p = optimize_insn_for_speed_p ();
9580 start_sequence ();
9581 rtx ret_old = expand_variable_shift (code, int_mode,
9582 op0, treeop1,
9583 target,
9584 unsignedp);
9586 seq_old = get_insns ();
9587 end_sequence ();
9588 if (seq_cost (seq, speed_p)
9589 >= seq_cost (seq_old, speed_p))
9591 seq = seq_old;
9592 temp = ret_old;
9595 emit_insn (seq);
9600 if (temp == NULL_RTX)
9601 temp = expand_variable_shift (code, mode, op0, treeop1, target,
9602 unsignedp);
9603 if (code == LSHIFT_EXPR)
9604 temp = REDUCE_BIT_FIELD (temp);
9605 return temp;
9608 /* Could determine the answer when only additive constants differ. Also,
9609 the addition of one can be handled by changing the condition. */
9610 case LT_EXPR:
9611 case LE_EXPR:
9612 case GT_EXPR:
9613 case GE_EXPR:
9614 case EQ_EXPR:
9615 case NE_EXPR:
9616 case UNORDERED_EXPR:
9617 case ORDERED_EXPR:
9618 case UNLT_EXPR:
9619 case UNLE_EXPR:
9620 case UNGT_EXPR:
9621 case UNGE_EXPR:
9622 case UNEQ_EXPR:
9623 case LTGT_EXPR:
9625 temp = do_store_flag (ops,
9626 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9627 tmode != VOIDmode ? tmode : mode);
9628 if (temp)
9629 return temp;
9631 /* Use a compare and a jump for BLKmode comparisons, or for function
9632 type comparisons if have_canonicalize_funcptr_for_compare. */
9634 if ((target == 0
9635 || modifier == EXPAND_STACK_PARM
9636 || ! safe_from_p (target, treeop0, 1)
9637 || ! safe_from_p (target, treeop1, 1)
9638 /* Make sure we don't have a hard reg (such as function's return
9639 value) live across basic blocks, if not optimizing. */
9640 || (!optimize && REG_P (target)
9641 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9642 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9644 emit_move_insn (target, const0_rtx);
9646 rtx_code_label *lab1 = gen_label_rtx ();
9647 jumpifnot_1 (code, treeop0, treeop1, lab1,
9648 profile_probability::uninitialized ());
9650 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
9651 emit_move_insn (target, constm1_rtx);
9652 else
9653 emit_move_insn (target, const1_rtx);
9655 emit_label (lab1);
9656 return target;
9658 case COMPLEX_EXPR:
9659 /* Get the rtx code of the operands. */
9660 op0 = expand_normal (treeop0);
9661 op1 = expand_normal (treeop1);
9663 if (!target)
9664 target = gen_reg_rtx (TYPE_MODE (type));
9665 else
9666 /* If target overlaps with op1, then either we need to force
9667 op1 into a pseudo (if target also overlaps with op0),
9668 or write the complex parts in reverse order. */
9669 switch (GET_CODE (target))
9671 case CONCAT:
9672 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
9674 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
9676 complex_expr_force_op1:
9677 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
9678 emit_move_insn (temp, op1);
9679 op1 = temp;
9680 break;
9682 complex_expr_swap_order:
9683 /* Move the imaginary (op1) and real (op0) parts to their
9684 location. */
9685 write_complex_part (target, op1, true);
9686 write_complex_part (target, op0, false);
9688 return target;
9690 break;
9691 case MEM:
9692 temp = adjust_address_nv (target,
9693 GET_MODE_INNER (GET_MODE (target)), 0);
9694 if (reg_overlap_mentioned_p (temp, op1))
9696 scalar_mode imode = GET_MODE_INNER (GET_MODE (target));
9697 temp = adjust_address_nv (target, imode,
9698 GET_MODE_SIZE (imode));
9699 if (reg_overlap_mentioned_p (temp, op0))
9700 goto complex_expr_force_op1;
9701 goto complex_expr_swap_order;
9703 break;
9704 default:
9705 if (reg_overlap_mentioned_p (target, op1))
9707 if (reg_overlap_mentioned_p (target, op0))
9708 goto complex_expr_force_op1;
9709 goto complex_expr_swap_order;
9711 break;
9714 /* Move the real (op0) and imaginary (op1) parts to their location. */
9715 write_complex_part (target, op0, false);
9716 write_complex_part (target, op1, true);
9718 return target;
9720 case WIDEN_SUM_EXPR:
9722 tree oprnd0 = treeop0;
9723 tree oprnd1 = treeop1;
9725 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9726 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9727 target, unsignedp);
9728 return target;
9731 case VEC_UNPACK_HI_EXPR:
9732 case VEC_UNPACK_LO_EXPR:
9733 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
9734 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
9736 op0 = expand_normal (treeop0);
9737 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9738 target, unsignedp);
9739 gcc_assert (temp);
9740 return temp;
9743 case VEC_UNPACK_FLOAT_HI_EXPR:
9744 case VEC_UNPACK_FLOAT_LO_EXPR:
9746 op0 = expand_normal (treeop0);
9747 /* The signedness is determined from input operand. */
9748 temp = expand_widen_pattern_expr
9749 (ops, op0, NULL_RTX, NULL_RTX,
9750 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9752 gcc_assert (temp);
9753 return temp;
9756 case VEC_WIDEN_MULT_HI_EXPR:
9757 case VEC_WIDEN_MULT_LO_EXPR:
9758 case VEC_WIDEN_MULT_EVEN_EXPR:
9759 case VEC_WIDEN_MULT_ODD_EXPR:
9760 case VEC_WIDEN_LSHIFT_HI_EXPR:
9761 case VEC_WIDEN_LSHIFT_LO_EXPR:
9762 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9763 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9764 target, unsignedp);
9765 gcc_assert (target);
9766 return target;
9768 case VEC_PACK_SAT_EXPR:
9769 case VEC_PACK_FIX_TRUNC_EXPR:
9770 mode = TYPE_MODE (TREE_TYPE (treeop0));
9771 goto binop;
9773 case VEC_PACK_TRUNC_EXPR:
9774 if (VECTOR_BOOLEAN_TYPE_P (type)
9775 && VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (treeop0))
9776 && mode == TYPE_MODE (TREE_TYPE (treeop0))
9777 && SCALAR_INT_MODE_P (mode))
9779 class expand_operand eops[4];
9780 machine_mode imode = TYPE_MODE (TREE_TYPE (treeop0));
9781 expand_operands (treeop0, treeop1,
9782 subtarget, &op0, &op1, EXPAND_NORMAL);
9783 this_optab = vec_pack_sbool_trunc_optab;
9784 enum insn_code icode = optab_handler (this_optab, imode);
9785 create_output_operand (&eops[0], target, mode);
9786 create_convert_operand_from (&eops[1], op0, imode, false);
9787 create_convert_operand_from (&eops[2], op1, imode, false);
9788 temp = GEN_INT (TYPE_VECTOR_SUBPARTS (type).to_constant ());
9789 create_input_operand (&eops[3], temp, imode);
9790 expand_insn (icode, 4, eops);
9791 return eops[0].value;
9793 mode = TYPE_MODE (TREE_TYPE (treeop0));
9794 goto binop;
9796 case VEC_PACK_FLOAT_EXPR:
9797 mode = TYPE_MODE (TREE_TYPE (treeop0));
9798 expand_operands (treeop0, treeop1,
9799 subtarget, &op0, &op1, EXPAND_NORMAL);
9800 this_optab = optab_for_tree_code (code, TREE_TYPE (treeop0),
9801 optab_default);
9802 target = expand_binop (mode, this_optab, op0, op1, target,
9803 TYPE_UNSIGNED (TREE_TYPE (treeop0)),
9804 OPTAB_LIB_WIDEN);
9805 gcc_assert (target);
9806 return target;
9808 case VEC_PERM_EXPR:
9810 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9811 vec_perm_builder sel;
9812 if (TREE_CODE (treeop2) == VECTOR_CST
9813 && tree_to_vec_perm_builder (&sel, treeop2))
9815 machine_mode sel_mode = TYPE_MODE (TREE_TYPE (treeop2));
9816 temp = expand_vec_perm_const (mode, op0, op1, sel,
9817 sel_mode, target);
9819 else
9821 op2 = expand_normal (treeop2);
9822 temp = expand_vec_perm_var (mode, op0, op1, op2, target);
9824 gcc_assert (temp);
9825 return temp;
9828 case DOT_PROD_EXPR:
9830 tree oprnd0 = treeop0;
9831 tree oprnd1 = treeop1;
9832 tree oprnd2 = treeop2;
9834 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9835 op2 = expand_normal (oprnd2);
9836 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9837 target, unsignedp);
9838 return target;
9841 case SAD_EXPR:
9843 tree oprnd0 = treeop0;
9844 tree oprnd1 = treeop1;
9845 tree oprnd2 = treeop2;
9847 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9848 op2 = expand_normal (oprnd2);
9849 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9850 target, unsignedp);
9851 return target;
9854 case REALIGN_LOAD_EXPR:
9856 tree oprnd0 = treeop0;
9857 tree oprnd1 = treeop1;
9858 tree oprnd2 = treeop2;
9860 this_optab = optab_for_tree_code (code, type, optab_default);
9861 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9862 op2 = expand_normal (oprnd2);
9863 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9864 target, unsignedp);
9865 gcc_assert (temp);
9866 return temp;
9869 case COND_EXPR:
9871 /* A COND_EXPR with its type being VOID_TYPE represents a
9872 conditional jump and is handled in
9873 expand_gimple_cond_expr. */
9874 gcc_assert (!VOID_TYPE_P (type));
9876 /* Note that COND_EXPRs whose type is a structure or union
9877 are required to be constructed to contain assignments of
9878 a temporary variable, so that we can evaluate them here
9879 for side effect only. If type is void, we must do likewise. */
9881 gcc_assert (!TREE_ADDRESSABLE (type)
9882 && !ignore
9883 && TREE_TYPE (treeop1) != void_type_node
9884 && TREE_TYPE (treeop2) != void_type_node);
9886 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9887 if (temp)
9888 return temp;
9890 /* If we are not to produce a result, we have no target. Otherwise,
9891 if a target was specified use it; it will not be used as an
9892 intermediate target unless it is safe. If no target, use a
9893 temporary. */
9895 if (modifier != EXPAND_STACK_PARM
9896 && original_target
9897 && safe_from_p (original_target, treeop0, 1)
9898 && GET_MODE (original_target) == mode
9899 && !MEM_P (original_target))
9900 temp = original_target;
9901 else
9902 temp = assign_temp (type, 0, 1);
9904 do_pending_stack_adjust ();
9905 NO_DEFER_POP;
9906 rtx_code_label *lab0 = gen_label_rtx ();
9907 rtx_code_label *lab1 = gen_label_rtx ();
9908 jumpifnot (treeop0, lab0,
9909 profile_probability::uninitialized ());
9910 store_expr (treeop1, temp,
9911 modifier == EXPAND_STACK_PARM,
9912 false, false);
9914 emit_jump_insn (targetm.gen_jump (lab1));
9915 emit_barrier ();
9916 emit_label (lab0);
9917 store_expr (treeop2, temp,
9918 modifier == EXPAND_STACK_PARM,
9919 false, false);
9921 emit_label (lab1);
9922 OK_DEFER_POP;
9923 return temp;
9926 case VEC_DUPLICATE_EXPR:
9927 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
9928 target = expand_vector_broadcast (mode, op0);
9929 gcc_assert (target);
9930 return target;
9932 case VEC_SERIES_EXPR:
9933 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, modifier);
9934 return expand_vec_series_expr (mode, op0, op1, target);
9936 case BIT_INSERT_EXPR:
9938 unsigned bitpos = tree_to_uhwi (treeop2);
9939 unsigned bitsize;
9940 if (INTEGRAL_TYPE_P (TREE_TYPE (treeop1)))
9941 bitsize = TYPE_PRECISION (TREE_TYPE (treeop1));
9942 else
9943 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (treeop1)));
9944 op0 = expand_normal (treeop0);
9945 op1 = expand_normal (treeop1);
9946 rtx dst = gen_reg_rtx (mode);
9947 emit_move_insn (dst, op0);
9948 store_bit_field (dst, bitsize, bitpos, 0, 0,
9949 TYPE_MODE (TREE_TYPE (treeop1)), op1, false);
9950 return dst;
9953 default:
9954 gcc_unreachable ();
9957 /* Here to do an ordinary binary operator. */
9958 binop:
9959 expand_operands (treeop0, treeop1,
9960 subtarget, &op0, &op1, EXPAND_NORMAL);
9961 binop2:
9962 this_optab = optab_for_tree_code (code, type, optab_default);
9963 binop3:
9964 if (modifier == EXPAND_STACK_PARM)
9965 target = 0;
9966 temp = expand_binop (mode, this_optab, op0, op1, target,
9967 unsignedp, OPTAB_LIB_WIDEN);
9968 gcc_assert (temp);
9969 /* Bitwise operations do not need bitfield reduction as we expect their
9970 operands being properly truncated. */
9971 if (code == BIT_XOR_EXPR
9972 || code == BIT_AND_EXPR
9973 || code == BIT_IOR_EXPR)
9974 return temp;
9975 return REDUCE_BIT_FIELD (temp);
9977 #undef REDUCE_BIT_FIELD
9980 /* Return TRUE if expression STMT is suitable for replacement.
9981 Never consider memory loads as replaceable, because those don't ever lead
9982 into constant expressions. */
9984 static bool
9985 stmt_is_replaceable_p (gimple *stmt)
9987 if (ssa_is_replaceable_p (stmt))
9989 /* Don't move around loads. */
9990 if (!gimple_assign_single_p (stmt)
9991 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9992 return true;
9994 return false;
9998 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9999 enum expand_modifier modifier, rtx *alt_rtl,
10000 bool inner_reference_p)
10002 rtx op0, op1, temp, decl_rtl;
10003 tree type;
10004 int unsignedp;
10005 machine_mode mode, dmode;
10006 enum tree_code code = TREE_CODE (exp);
10007 rtx subtarget, original_target;
10008 int ignore;
10009 tree context;
10010 bool reduce_bit_field;
10011 location_t loc = EXPR_LOCATION (exp);
10012 struct separate_ops ops;
10013 tree treeop0, treeop1, treeop2;
10014 tree ssa_name = NULL_TREE;
10015 gimple *g;
10017 type = TREE_TYPE (exp);
10018 mode = TYPE_MODE (type);
10019 unsignedp = TYPE_UNSIGNED (type);
10021 treeop0 = treeop1 = treeop2 = NULL_TREE;
10022 if (!VL_EXP_CLASS_P (exp))
10023 switch (TREE_CODE_LENGTH (code))
10025 default:
10026 case 3: treeop2 = TREE_OPERAND (exp, 2); /* FALLTHRU */
10027 case 2: treeop1 = TREE_OPERAND (exp, 1); /* FALLTHRU */
10028 case 1: treeop0 = TREE_OPERAND (exp, 0); /* FALLTHRU */
10029 case 0: break;
10031 ops.code = code;
10032 ops.type = type;
10033 ops.op0 = treeop0;
10034 ops.op1 = treeop1;
10035 ops.op2 = treeop2;
10036 ops.location = loc;
10038 ignore = (target == const0_rtx
10039 || ((CONVERT_EXPR_CODE_P (code)
10040 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
10041 && TREE_CODE (type) == VOID_TYPE));
10043 /* An operation in what may be a bit-field type needs the
10044 result to be reduced to the precision of the bit-field type,
10045 which is narrower than that of the type's mode. */
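/* For example, an operation on an integral type with 24-bit precision
   whose mode is SImode is computed in the full 32-bit mode, so the
   result must afterwards be truncated (or sign-extended) back to
   24 bits.  */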
10046 reduce_bit_field = (!ignore
10047 && INTEGRAL_TYPE_P (type)
10048 && !type_has_mode_precision_p (type));
10050 /* If we are going to ignore this result, we need only do something
10051 if there is a side-effect somewhere in the expression. If there
10052 is, short-circuit the most common cases here. Note that we must
10053 not call expand_expr with anything but const0_rtx in case this
10054 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
10056 if (ignore)
10058 if (! TREE_SIDE_EFFECTS (exp))
10059 return const0_rtx;
10061 /* Ensure we reference a volatile object even if value is ignored, but
10062 don't do this if all we are doing is taking its address. */
10063 if (TREE_THIS_VOLATILE (exp)
10064 && TREE_CODE (exp) != FUNCTION_DECL
10065 && mode != VOIDmode && mode != BLKmode
10066 && modifier != EXPAND_CONST_ADDRESS)
10068 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
10069 if (MEM_P (temp))
10070 copy_to_reg (temp);
10071 return const0_rtx;
10074 if (TREE_CODE_CLASS (code) == tcc_unary
10075 || code == BIT_FIELD_REF
10076 || code == COMPONENT_REF
10077 || code == INDIRECT_REF)
10078 return expand_expr (treeop0, const0_rtx, VOIDmode,
10079 modifier);
10081 else if (TREE_CODE_CLASS (code) == tcc_binary
10082 || TREE_CODE_CLASS (code) == tcc_comparison
10083 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
10085 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
10086 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
10087 return const0_rtx;
10090 target = 0;
10093 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
10094 target = 0;
10096 /* Use subtarget as the target for operand 0 of a binary operation. */
10097 subtarget = get_subtarget (target);
10098 original_target = target;
10100 switch (code)
10102 case LABEL_DECL:
10104 tree function = decl_function_context (exp);
10106 temp = label_rtx (exp);
10107 temp = gen_rtx_LABEL_REF (Pmode, temp);
10109 if (function != current_function_decl
10110 && function != 0)
10111 LABEL_REF_NONLOCAL_P (temp) = 1;
10113 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
10114 return temp;
10117 case SSA_NAME:
10118 /* ??? ivopts calls the expander without any preparation from
10119 out-of-ssa. So fake instructions as if this were an access to the
10120 base variable. This unnecessarily allocates a pseudo; see how we can
10121 reuse it if partition base vars have it set already. */
10122 if (!currently_expanding_to_rtl)
10124 tree var = SSA_NAME_VAR (exp);
10125 if (var && DECL_RTL_SET_P (var))
10126 return DECL_RTL (var);
10127 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
10128 LAST_VIRTUAL_REGISTER + 1);
10131 g = get_gimple_for_ssa_name (exp);
10132 /* For EXPAND_INITIALIZER try harder to get something simpler. */
10133 if (g == NULL
10134 && modifier == EXPAND_INITIALIZER
10135 && !SSA_NAME_IS_DEFAULT_DEF (exp)
10136 && (optimize || !SSA_NAME_VAR (exp)
10137 || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
10138 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
10139 g = SSA_NAME_DEF_STMT (exp);
10140 if (g)
10142 rtx r;
10143 location_t saved_loc = curr_insn_location ();
10144 loc = gimple_location (g);
10145 if (loc != UNKNOWN_LOCATION)
10146 set_curr_insn_location (loc);
10147 ops.code = gimple_assign_rhs_code (g);
10148 switch (get_gimple_rhs_class (ops.code))
10150 case GIMPLE_TERNARY_RHS:
10151 ops.op2 = gimple_assign_rhs3 (g);
10152 /* Fallthru */
10153 case GIMPLE_BINARY_RHS:
10154 ops.op1 = gimple_assign_rhs2 (g);
10156 /* Try to expand conditional compare. */
10157 if (targetm.gen_ccmp_first)
10159 gcc_checking_assert (targetm.gen_ccmp_next != NULL);
10160 r = expand_ccmp_expr (g, mode);
10161 if (r)
10162 break;
10164 /* Fallthru */
10165 case GIMPLE_UNARY_RHS:
10166 ops.op0 = gimple_assign_rhs1 (g);
10167 ops.type = TREE_TYPE (gimple_assign_lhs (g));
10168 ops.location = loc;
10169 r = expand_expr_real_2 (&ops, target, tmode, modifier);
10170 break;
10171 case GIMPLE_SINGLE_RHS:
10173 r = expand_expr_real (gimple_assign_rhs1 (g), target,
10174 tmode, modifier, alt_rtl,
10175 inner_reference_p);
10176 break;
10178 default:
10179 gcc_unreachable ();
10181 set_curr_insn_location (saved_loc);
10182 if (REG_P (r) && !REG_EXPR (r))
10183 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
10184 return r;
10187 ssa_name = exp;
10188 decl_rtl = get_rtx_for_ssa_name (ssa_name);
10189 exp = SSA_NAME_VAR (ssa_name);
10190 goto expand_decl_rtl;
10192 case PARM_DECL:
10193 case VAR_DECL:
10194 /* If a static var's type was incomplete when the decl was written,
10195 but the type is complete now, lay out the decl now. */
10196 if (DECL_SIZE (exp) == 0
10197 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
10198 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
10199 layout_decl (exp, 0);
10201 /* fall through */
10203 case FUNCTION_DECL:
10204 case RESULT_DECL:
10205 decl_rtl = DECL_RTL (exp);
10206 expand_decl_rtl:
10207 gcc_assert (decl_rtl);
10209 /* DECL_MODE might change when TYPE_MODE depends on attribute target
10210 settings for VECTOR_TYPE_P that might switch for the function. */
10211 if (currently_expanding_to_rtl
10212 && code == VAR_DECL && MEM_P (decl_rtl)
10213 && VECTOR_TYPE_P (type) && exp && DECL_MODE (exp) != mode)
10214 decl_rtl = change_address (decl_rtl, TYPE_MODE (type), 0);
10215 else
10216 decl_rtl = copy_rtx (decl_rtl);
10218 /* Record writes to register variables. */
10219 if (modifier == EXPAND_WRITE
10220 && REG_P (decl_rtl)
10221 && HARD_REGISTER_P (decl_rtl))
10222 add_to_hard_reg_set (&crtl->asm_clobbers,
10223 GET_MODE (decl_rtl), REGNO (decl_rtl));
10225 /* Ensure the variable is marked as used even if it doesn't go through
10226 a parser. If it hasn't been used yet, write out an external
10227 definition. */
10228 if (exp)
10229 TREE_USED (exp) = 1;
10231 /* Show we haven't gotten RTL for this yet. */
10232 temp = 0;
10234 /* Variables inherited from containing functions should have
10235 been lowered by this point. */
10236 if (exp)
10237 context = decl_function_context (exp);
10238 gcc_assert (!exp
10239 || SCOPE_FILE_SCOPE_P (context)
10240 || context == current_function_decl
10241 || TREE_STATIC (exp)
10242 || DECL_EXTERNAL (exp)
10243 /* ??? C++ creates functions that are not TREE_STATIC. */
10244 || TREE_CODE (exp) == FUNCTION_DECL);
10246 /* This is the case of an array whose size is to be determined
10247 from its initializer, while the initializer is still being parsed.
10248 ??? We aren't parsing while expanding anymore. */
10250 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
10251 temp = validize_mem (decl_rtl);
10253 /* If DECL_RTL is memory, we are in the normal case and the
10254 address is not valid, get the address into a register. */
10256 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
10258 if (alt_rtl)
10259 *alt_rtl = decl_rtl;
10260 decl_rtl = use_anchored_address (decl_rtl);
10261 if (modifier != EXPAND_CONST_ADDRESS
10262 && modifier != EXPAND_SUM
10263 && !memory_address_addr_space_p (exp ? DECL_MODE (exp)
10264 : GET_MODE (decl_rtl),
10265 XEXP (decl_rtl, 0),
10266 MEM_ADDR_SPACE (decl_rtl)))
10267 temp = replace_equiv_address (decl_rtl,
10268 copy_rtx (XEXP (decl_rtl, 0)));
10271 /* If we got something, return it. But first, set the alignment
10272 if the address is a register. */
10273 if (temp != 0)
10275 if (exp && MEM_P (temp) && REG_P (XEXP (temp, 0)))
10276 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
10278 else if (MEM_P (decl_rtl))
10279 temp = decl_rtl;
10281 if (temp != 0)
10283 if (MEM_P (temp)
10284 && modifier != EXPAND_WRITE
10285 && modifier != EXPAND_MEMORY
10286 && modifier != EXPAND_INITIALIZER
10287 && modifier != EXPAND_CONST_ADDRESS
10288 && modifier != EXPAND_SUM
10289 && !inner_reference_p
10290 && mode != BLKmode
10291 && MEM_ALIGN (temp) < GET_MODE_ALIGNMENT (mode))
10292 temp = expand_misaligned_mem_ref (temp, mode, unsignedp,
10293 MEM_ALIGN (temp), NULL_RTX, NULL);
10295 return temp;
10298 if (exp)
10299 dmode = DECL_MODE (exp);
10300 else
10301 dmode = TYPE_MODE (TREE_TYPE (ssa_name));
10303 /* If the mode of DECL_RTL does not match that of the decl,
10304 there are two cases: we are dealing with a BLKmode value
10305 that is returned in a register, or we are dealing with
10306 a promoted value. In the latter case, return a SUBREG
10307 of the wanted mode, but mark it so that we know that it
10308 was already extended. */
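/* Illustrative case: on a target whose PROMOTE_MODE widens QImode
   variables to SImode registers, a QImode decl can have an SImode
   DECL_RTL; we then return (subreg:QI (reg:SI ...)) with the
   promoted-variable flags set so later code knows the value is
   already extended.  */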
10309 if (REG_P (decl_rtl)
10310 && dmode != BLKmode
10311 && GET_MODE (decl_rtl) != dmode)
10313 machine_mode pmode;
10315 /* Get the signedness to be used for this variable. Ensure we get
10316 the same mode we got when the variable was declared. */
10317 if (code != SSA_NAME)
10318 pmode = promote_decl_mode (exp, &unsignedp);
10319 else if ((g = SSA_NAME_DEF_STMT (ssa_name))
10320 && gimple_code (g) == GIMPLE_CALL
10321 && !gimple_call_internal_p (g))
10322 pmode = promote_function_mode (type, mode, &unsignedp,
10323 gimple_call_fntype (g),
10324 2);
10325 else
10326 pmode = promote_ssa_mode (ssa_name, &unsignedp);
10327 gcc_assert (GET_MODE (decl_rtl) == pmode);
10329 temp = gen_lowpart_SUBREG (mode, decl_rtl);
10330 SUBREG_PROMOTED_VAR_P (temp) = 1;
10331 SUBREG_PROMOTED_SET (temp, unsignedp);
10332 return temp;
10335 return decl_rtl;
10337 case INTEGER_CST:
10339 /* Given that TYPE_PRECISION (type) is not always equal to
10340 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
10341 the former to the latter according to the signedness of the
10342 type. */
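/* E.g. for a signed type with 3-bit precision and QImode mode, the
   constant -1 is stored with precision 3 and is sign-extended here to
   the full 8-bit QImode value; for an unsigned type it would be
   zero-extended instead.  */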
10343 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (type);
10344 temp = immed_wide_int_const
10345 (wi::to_wide (exp, GET_MODE_PRECISION (int_mode)), int_mode);
10346 return temp;
10349 case VECTOR_CST:
10351 tree tmp = NULL_TREE;
10352 if (VECTOR_MODE_P (mode))
10353 return const_vector_from_tree (exp);
10354 scalar_int_mode int_mode;
10355 if (is_int_mode (mode, &int_mode))
10357 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
10358 return const_scalar_mask_from_tree (int_mode, exp);
10359 else
10361 tree type_for_mode
10362 = lang_hooks.types.type_for_mode (int_mode, 1);
10363 if (type_for_mode)
10364 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR,
10365 type_for_mode, exp);
10368 if (!tmp)
10370 vec<constructor_elt, va_gc> *v;
10371 /* Constructors need to be fixed-length. FIXME. */
10372 unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
10373 vec_alloc (v, nunits);
10374 for (unsigned int i = 0; i < nunits; ++i)
10375 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
10376 tmp = build_constructor (type, v);
10378 return expand_expr (tmp, ignore ? const0_rtx : target,
10379 tmode, modifier);
10382 case CONST_DECL:
10383 if (modifier == EXPAND_WRITE)
10385 /* Writing into CONST_DECL is always invalid, but handle it
10386 gracefully. */
10387 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
10388 scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
10389 op0 = expand_expr_addr_expr_1 (exp, NULL_RTX, address_mode,
10390 EXPAND_NORMAL, as);
10391 op0 = memory_address_addr_space (mode, op0, as);
10392 temp = gen_rtx_MEM (mode, op0);
10393 set_mem_addr_space (temp, as);
10394 return temp;
10396 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
10398 case REAL_CST:
10399 /* If optimized, generate immediate CONST_DOUBLE
10400 which will be turned into memory by reload if necessary.
10402 We used to force a register so that loop.c could see it. But
10403 this does not allow gen_* patterns to perform optimizations with
10404 the constants. It also produces two insns in cases like "x = 1.0;".
10405 On most machines, floating-point constants are not permitted in
10406 many insns, so we'd end up copying it to a register in any case.
10408 Now, we do the copying in expand_binop, if appropriate. */
10409 return const_double_from_real_value (TREE_REAL_CST (exp),
10410 TYPE_MODE (TREE_TYPE (exp)));
10412 case FIXED_CST:
10413 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
10414 TYPE_MODE (TREE_TYPE (exp)));
10416 case COMPLEX_CST:
10417 /* Handle evaluating a complex constant in a CONCAT target. */
10418 if (original_target && GET_CODE (original_target) == CONCAT)
10420 rtx rtarg, itarg;
10422 mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
10423 rtarg = XEXP (original_target, 0);
10424 itarg = XEXP (original_target, 1);
10426 /* Move the real and imaginary parts separately. */
10427 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
10428 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
10430 if (op0 != rtarg)
10431 emit_move_insn (rtarg, op0);
10432 if (op1 != itarg)
10433 emit_move_insn (itarg, op1);
10435 return original_target;
10438 /* fall through */
10440 case STRING_CST:
10441 temp = expand_expr_constant (exp, 1, modifier);
10443 /* temp contains a constant address.
10444 On RISC machines where a constant address isn't valid,
10445 make some insns to get that address into a register. */
10446 if (modifier != EXPAND_CONST_ADDRESS
10447 && modifier != EXPAND_INITIALIZER
10448 && modifier != EXPAND_SUM
10449 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
10450 MEM_ADDR_SPACE (temp)))
10451 return replace_equiv_address (temp,
10452 copy_rtx (XEXP (temp, 0)));
10453 return temp;
10455 case POLY_INT_CST:
10456 return immed_wide_int_const (poly_int_cst_value (exp), mode);
10458 case SAVE_EXPR:
10460 tree val = treeop0;
10461 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
10462 inner_reference_p);
10464 if (!SAVE_EXPR_RESOLVED_P (exp))
10466 /* We can indeed still hit this case, typically via builtin
10467 expanders calling save_expr immediately before expanding
10468 something. Assume this means that we only have to deal
10469 with non-BLKmode values. */
10470 gcc_assert (GET_MODE (ret) != BLKmode);
10472 val = build_decl (curr_insn_location (),
10473 VAR_DECL, NULL, TREE_TYPE (exp));
10474 DECL_ARTIFICIAL (val) = 1;
10475 DECL_IGNORED_P (val) = 1;
10476 treeop0 = val;
10477 TREE_OPERAND (exp, 0) = treeop0;
10478 SAVE_EXPR_RESOLVED_P (exp) = 1;
10480 if (!CONSTANT_P (ret))
10481 ret = copy_to_reg (ret);
10482 SET_DECL_RTL (val, ret);
10485 return ret;
10489 case CONSTRUCTOR:
10490 /* If we don't need the result, just ensure we evaluate any
10491 subexpressions. */
10492 if (ignore)
10494 unsigned HOST_WIDE_INT idx;
10495 tree value;
10497 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
10498 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
10500 return const0_rtx;
10503 return expand_constructor (exp, target, modifier, false);
10505 case TARGET_MEM_REF:
10507 addr_space_t as
10508 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
10509 unsigned int align;
10511 op0 = addr_for_mem_ref (exp, as, true);
10512 op0 = memory_address_addr_space (mode, op0, as);
10513 temp = gen_rtx_MEM (mode, op0);
10514 set_mem_attributes (temp, exp, 0);
10515 set_mem_addr_space (temp, as);
10516 align = get_object_alignment (exp);
10517 if (modifier != EXPAND_WRITE
10518 && modifier != EXPAND_MEMORY
10519 && mode != BLKmode
10520 && align < GET_MODE_ALIGNMENT (mode))
10521 temp = expand_misaligned_mem_ref (temp, mode, unsignedp,
10522 align, NULL_RTX, NULL);
10523 return temp;
10526 case MEM_REF:
10528 const bool reverse = REF_REVERSE_STORAGE_ORDER (exp);
10529 addr_space_t as
10530 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
10531 machine_mode address_mode;
10532 tree base = TREE_OPERAND (exp, 0);
10533 gimple *def_stmt;
10534 unsigned align;
10535 /* Handle expansion of non-aliased memory with non-BLKmode. That
10536 might end up in a register. */
10537 if (mem_ref_refers_to_non_mem_p (exp))
10539 poly_int64 offset = mem_ref_offset (exp).force_shwi ();
10540 base = TREE_OPERAND (base, 0);
10541 poly_uint64 type_size;
10542 if (known_eq (offset, 0)
10543 && !reverse
10544 && poly_int_tree_p (TYPE_SIZE (type), &type_size)
10545 && known_eq (GET_MODE_BITSIZE (DECL_MODE (base)), type_size))
10546 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
10547 target, tmode, modifier);
10548 if (TYPE_MODE (type) == BLKmode)
10550 temp = assign_stack_temp (DECL_MODE (base),
10551 GET_MODE_SIZE (DECL_MODE (base)));
10552 store_expr (base, temp, 0, false, false);
10553 temp = adjust_address (temp, BLKmode, offset);
10554 set_mem_size (temp, int_size_in_bytes (type));
10555 return temp;
10557 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
10558 bitsize_int (offset * BITS_PER_UNIT));
10559 REF_REVERSE_STORAGE_ORDER (exp) = reverse;
10560 return expand_expr (exp, target, tmode, modifier);
10562 address_mode = targetm.addr_space.address_mode (as);
10563 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
10565 tree mask = gimple_assign_rhs2 (def_stmt);
10566 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
10567 gimple_assign_rhs1 (def_stmt), mask);
10568 TREE_OPERAND (exp, 0) = base;
10570 align = get_object_alignment (exp);
10571 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
10572 op0 = memory_address_addr_space (mode, op0, as);
10573 if (!integer_zerop (TREE_OPERAND (exp, 1)))
10575 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
10576 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
10577 op0 = memory_address_addr_space (mode, op0, as);
10579 temp = gen_rtx_MEM (mode, op0);
10580 set_mem_attributes (temp, exp, 0);
10581 set_mem_addr_space (temp, as);
10582 if (TREE_THIS_VOLATILE (exp))
10583 MEM_VOLATILE_P (temp) = 1;
10584 if (modifier != EXPAND_WRITE
10585 && modifier != EXPAND_MEMORY
10586 && !inner_reference_p
10587 && mode != BLKmode
10588 && align < GET_MODE_ALIGNMENT (mode))
10589 temp = expand_misaligned_mem_ref (temp, mode, unsignedp, align,
10590 modifier == EXPAND_STACK_PARM
10591 ? NULL_RTX : target, alt_rtl);
10592 if (reverse
10593 && modifier != EXPAND_MEMORY
10594 && modifier != EXPAND_WRITE)
10595 temp = flip_storage_order (mode, temp);
10596 return temp;
10599 case ARRAY_REF:
10602 tree array = treeop0;
10603 tree index = treeop1;
10604 tree init;
10606 /* Fold an expression like: "foo"[2].
10607 This is not done in fold so it won't happen inside &.
10608 Don't fold if this is for wide characters since it's too
10609 difficult to do correctly and this is a very rare case. */
10611 if (modifier != EXPAND_CONST_ADDRESS
10612 && modifier != EXPAND_INITIALIZER
10613 && modifier != EXPAND_MEMORY)
10615 tree t = fold_read_from_constant_string (exp);
10617 if (t)
10618 return expand_expr (t, target, tmode, modifier);
10621 /* If this is a constant index into a constant array,
10622 just get the value from the array. Handle both the cases when
10623 we have an explicit constructor and when our operand is a variable
10624 that was declared const. */
10626 if (modifier != EXPAND_CONST_ADDRESS
10627 && modifier != EXPAND_INITIALIZER
10628 && modifier != EXPAND_MEMORY
10629 && TREE_CODE (array) == CONSTRUCTOR
10630 && ! TREE_SIDE_EFFECTS (array)
10631 && TREE_CODE (index) == INTEGER_CST)
10633 unsigned HOST_WIDE_INT ix;
10634 tree field, value;
10636 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
10637 field, value)
10638 if (tree_int_cst_equal (field, index))
10640 if (!TREE_SIDE_EFFECTS (value))
10641 return expand_expr (fold (value), target, tmode, modifier);
10642 break;
10646 else if (optimize >= 1
10647 && modifier != EXPAND_CONST_ADDRESS
10648 && modifier != EXPAND_INITIALIZER
10649 && modifier != EXPAND_MEMORY
10650 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
10651 && TREE_CODE (index) == INTEGER_CST
10652 && (VAR_P (array) || TREE_CODE (array) == CONST_DECL)
10653 && (init = ctor_for_folding (array)) != error_mark_node)
10655 if (init == NULL_TREE)
10657 tree value = build_zero_cst (type);
10658 if (TREE_CODE (value) == CONSTRUCTOR)
10660 /* If VALUE is a CONSTRUCTOR, this optimization is only
10661 useful if this doesn't store the CONSTRUCTOR into
10662 memory. If it does, it is more efficient to just
10663 load the data from the array directly. */
10664 rtx ret = expand_constructor (value, target,
10665 modifier, true);
10666 if (ret == NULL_RTX)
10667 value = NULL_TREE;
10670 if (value)
10671 return expand_expr (value, target, tmode, modifier);
10673 else if (TREE_CODE (init) == CONSTRUCTOR)
10675 unsigned HOST_WIDE_INT ix;
10676 tree field, value;
10678 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
10679 field, value)
10680 if (tree_int_cst_equal (field, index))
10682 if (TREE_SIDE_EFFECTS (value))
10683 break;
10685 if (TREE_CODE (value) == CONSTRUCTOR)
10687 /* If VALUE is a CONSTRUCTOR, this
10688 optimization is only useful if
10689 this doesn't store the CONSTRUCTOR
10690 into memory. If it does, it is more
10691 efficient to just load the data from
10692 the array directly. */
10693 rtx ret = expand_constructor (value, target,
10694 modifier, true);
10695 if (ret == NULL_RTX)
10696 break;
10699 return
10700 expand_expr (fold (value), target, tmode, modifier);
10703 else if (TREE_CODE (init) == STRING_CST)
10705 tree low_bound = array_ref_low_bound (exp);
10706 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10708 /* Optimize the special case of a zero lower bound.
10710 We convert the lower bound to sizetype to avoid problems
10711 with constant folding. E.g. suppose the lower bound is
10712 1 and its mode is QI. Without the conversion
10713 (ARRAY + (INDEX - (unsigned char)1))
10714 becomes
10715 (ARRAY + (-(unsigned char)1) + INDEX)
10716 which becomes
10717 (ARRAY + 255 + INDEX). Oops! */
10718 if (!integer_zerop (low_bound))
10719 index1 = size_diffop_loc (loc, index1,
10720 fold_convert_loc (loc, sizetype,
10721 low_bound));
10723 if (tree_fits_uhwi_p (index1)
10724 && compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10726 tree char_type = TREE_TYPE (TREE_TYPE (init));
10727 scalar_int_mode char_mode;
10729 if (is_int_mode (TYPE_MODE (char_type), &char_mode)
10730 && GET_MODE_SIZE (char_mode) == 1)
10731 return gen_int_mode (TREE_STRING_POINTER (init)
10732 [TREE_INT_CST_LOW (index1)],
10733 char_mode);
10738 goto normal_inner_ref;
10740 case COMPONENT_REF:
10741 /* If the operand is a CONSTRUCTOR, we can just extract the
10742 appropriate field if it is present. */
10743 if (TREE_CODE (treeop0) == CONSTRUCTOR)
10745 unsigned HOST_WIDE_INT idx;
10746 tree field, value;
10747 scalar_int_mode field_mode;
10749 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10750 idx, field, value)
10751 if (field == treeop1
10752 /* We can normally use the value of the field in the
10753 CONSTRUCTOR. However, if this is a bitfield in
10754 an integral mode that we can fit in a HOST_WIDE_INT,
10755 we must mask only the number of bits in the bitfield,
10756 since this is done implicitly by the constructor. If
10757 the bitfield does not meet either of those conditions,
10758 we can't do this optimization. */
10759 && (! DECL_BIT_FIELD (field)
10760 || (is_int_mode (DECL_MODE (field), &field_mode)
10761 && (GET_MODE_PRECISION (field_mode)
10762 <= HOST_BITS_PER_WIDE_INT))))
10764 if (DECL_BIT_FIELD (field)
10765 && modifier == EXPAND_STACK_PARM)
10766 target = 0;
10767 op0 = expand_expr (value, target, tmode, modifier);
10768 if (DECL_BIT_FIELD (field))
10770 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10771 scalar_int_mode imode
10772 = SCALAR_INT_TYPE_MODE (TREE_TYPE (field));
10774 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10776 op1 = gen_int_mode ((HOST_WIDE_INT_1 << bitsize) - 1,
10777 imode);
10778 op0 = expand_and (imode, op0, op1, target);
10780 else
10782 int count = GET_MODE_PRECISION (imode) - bitsize;
10784 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10785 target, 0);
10786 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10787 target, 0);
10791 return op0;
10794 goto normal_inner_ref;
10796 case BIT_FIELD_REF:
10797 case ARRAY_RANGE_REF:
10798 normal_inner_ref:
10800 machine_mode mode1, mode2;
10801 poly_int64 bitsize, bitpos, bytepos;
10802 tree offset;
10803 int reversep, volatilep = 0, must_force_mem;
10804 tree tem
10805 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
10806 &unsignedp, &reversep, &volatilep);
10807 rtx orig_op0, memloc;
10808 bool clear_mem_expr = false;
10810 /* If we got back the original object, something is wrong. Perhaps
10811 we are evaluating an expression too early. In any event, don't
10812 infinitely recurse. */
10813 gcc_assert (tem != exp);
10815 /* If TEM's type is a union of variable size, pass TARGET to the inner
10816 computation, since it will need a temporary and TARGET is known
10817 to be usable as one. This occurs in unchecked conversion in Ada. */
10818 orig_op0 = op0
10819 = expand_expr_real (tem,
10820 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10821 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10822 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10823 != INTEGER_CST)
10824 && modifier != EXPAND_STACK_PARM
10825 ? target : NULL_RTX),
10826 VOIDmode,
10827 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10828 NULL, true);
10830 /* If the field has a mode, we want to access it in the
10831 field's mode, not the computed mode.
10832 If a MEM has VOIDmode (external with incomplete type),
10833 use BLKmode for it instead. */
10834 if (MEM_P (op0))
10836 if (mode1 != VOIDmode)
10837 op0 = adjust_address (op0, mode1, 0);
10838 else if (GET_MODE (op0) == VOIDmode)
10839 op0 = adjust_address (op0, BLKmode, 0);
10842 mode2
10843 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10845 /* Make sure bitpos is not negative, it can wreak havoc later. */
10846 if (maybe_lt (bitpos, 0))
10848 gcc_checking_assert (offset == NULL_TREE);
10849 offset = size_int (bits_to_bytes_round_down (bitpos));
10850 bitpos = num_trailing_bits (bitpos);
10853 /* If we have either an offset, a BLKmode result, or a reference
10854 outside the underlying object, we must force it to memory.
10855 Such a case can occur in Ada if we have unchecked conversion
10856 of an expression from a scalar type to an aggregate type or
10857 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10858 passed a partially uninitialized object or a view-conversion
10859 to a larger size. */
10860 must_force_mem = (offset
10861 || mode1 == BLKmode
10862 || (mode == BLKmode
10863 && !int_mode_for_size (bitsize, 1).exists ())
10864 || maybe_gt (bitpos + bitsize,
10865 GET_MODE_BITSIZE (mode2)));
10867 /* Handle CONCAT first. */
10868 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10870 if (known_eq (bitpos, 0)
10871 && known_eq (bitsize, GET_MODE_BITSIZE (GET_MODE (op0)))
10872 && COMPLEX_MODE_P (mode1)
10873 && COMPLEX_MODE_P (GET_MODE (op0))
10874 && (GET_MODE_PRECISION (GET_MODE_INNER (mode1))
10875 == GET_MODE_PRECISION (GET_MODE_INNER (GET_MODE (op0)))))
10877 if (reversep)
10878 op0 = flip_storage_order (GET_MODE (op0), op0);
10879 if (mode1 != GET_MODE (op0))
10881 rtx parts[2];
10882 for (int i = 0; i < 2; i++)
10884 rtx op = read_complex_part (op0, i != 0);
10885 if (GET_CODE (op) == SUBREG)
10886 op = force_reg (GET_MODE (op), op);
10887 temp = gen_lowpart_common (GET_MODE_INNER (mode1), op);
10888 if (temp)
10889 op = temp;
10890 else
10892 if (!REG_P (op) && !MEM_P (op))
10893 op = force_reg (GET_MODE (op), op);
10894 op = gen_lowpart (GET_MODE_INNER (mode1), op);
10896 parts[i] = op;
10898 op0 = gen_rtx_CONCAT (mode1, parts[0], parts[1]);
10900 return op0;
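/* Otherwise, if the reference covers exactly one half of the CONCAT,
   use that half directly: the first part when BITPOS is 0, or the
   second part when BITPOS equals the first part's width.  */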
10902 if (known_eq (bitpos, 0)
10903 && known_eq (bitsize,
10904 GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))))
10905 && maybe_ne (bitsize, 0))
10907 op0 = XEXP (op0, 0);
10908 mode2 = GET_MODE (op0);
10910 else if (known_eq (bitpos,
10911 GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))))
10912 && known_eq (bitsize,
10913 GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1))))
10914 && maybe_ne (bitpos, 0)
10915 && maybe_ne (bitsize, 0))
10917 op0 = XEXP (op0, 1);
10918 bitpos = 0;
10919 mode2 = GET_MODE (op0);
10921 else
10922 /* Otherwise force into memory. */
10923 must_force_mem = 1;
10926 /* If this is a constant, put it in a register if it is a legitimate
10927 constant and we don't need a memory reference. */
10928 if (CONSTANT_P (op0)
10929 && mode2 != BLKmode
10930 && targetm.legitimate_constant_p (mode2, op0)
10931 && !must_force_mem)
10932 op0 = force_reg (mode2, op0);
10934 /* Otherwise, if this is a constant, try to force it to the constant
10935 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10936 is a legitimate constant. */
10937 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10938 op0 = validize_mem (memloc);
10940 /* Otherwise, if this is a constant or the object is not in memory
10941 and need be, put it there. */
10942 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10944 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10945 emit_move_insn (memloc, op0);
10946 op0 = memloc;
10947 clear_mem_expr = true;
10950 if (offset)
10952 machine_mode address_mode;
10953 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10954 EXPAND_SUM);
10956 gcc_assert (MEM_P (op0));
10958 address_mode = get_address_mode (op0);
10959 if (GET_MODE (offset_rtx) != address_mode)
10961 /* We cannot be sure that the RTL in offset_rtx is valid outside
10962 of a memory address context, so force it into a register
10963 before attempting to convert it to the desired mode. */
10964 offset_rtx = force_operand (offset_rtx, NULL_RTX);
10965 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10968 /* See the comment in expand_assignment for the rationale. */
10969 if (mode1 != VOIDmode
10970 && maybe_ne (bitpos, 0)
10971 && maybe_gt (bitsize, 0)
10972 && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
10973 && multiple_p (bitpos, bitsize)
10974 && multiple_p (bitsize, GET_MODE_ALIGNMENT (mode1))
10975 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10977 op0 = adjust_address (op0, mode1, bytepos);
10978 bitpos = 0;
10981 op0 = offset_address (op0, offset_rtx,
10982 highest_pow2_factor (offset));
10985 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10986 record its alignment as BIGGEST_ALIGNMENT. */
10987 if (MEM_P (op0)
10988 && known_eq (bitpos, 0)
10989 && offset != 0
10990 && is_aligning_offset (offset, tem))
10991 set_mem_align (op0, BIGGEST_ALIGNMENT);
10993 /* Don't forget about volatility even if this is a bitfield. */
10994 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10996 if (op0 == orig_op0)
10997 op0 = copy_rtx (op0);
10999 MEM_VOLATILE_P (op0) = 1;
11002 if (MEM_P (op0) && TREE_CODE (tem) == FUNCTION_DECL)
11004 if (op0 == orig_op0)
11005 op0 = copy_rtx (op0);
11007 set_mem_align (op0, BITS_PER_UNIT);
11010 /* In cases where an aligned union has an unaligned object
11011 as a field, we might be extracting a BLKmode value from
11012 an integer-mode (e.g., SImode) object. Handle this case
11013 by doing the extract into an object as wide as the field
11014 (which we know to be the width of a basic mode), then
11015 storing into memory, and changing the mode to BLKmode. */
11016 if (mode1 == VOIDmode
11017 || REG_P (op0) || GET_CODE (op0) == SUBREG
11018 || (mode1 != BLKmode && ! direct_load[(int) mode1]
11019 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
11020 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
11021 && modifier != EXPAND_CONST_ADDRESS
11022 && modifier != EXPAND_INITIALIZER
11023 && modifier != EXPAND_MEMORY)
11024 /* If the bitfield is volatile and the bitsize
11025 is narrower than the access size of the bitfield,
11026 we need to extract bitfields from the access. */
11027 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
11028 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
11029 && mode1 != BLKmode
11030 && maybe_lt (bitsize, GET_MODE_SIZE (mode1) * BITS_PER_UNIT))
11031 /* If the field isn't aligned enough to fetch as a memref,
11032 fetch it as a bit field. */
11033 || (mode1 != BLKmode
11034 && (((MEM_P (op0)
11035 ? MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
11036 || !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode1))
11037 : TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
11038 || !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode)))
11039 && modifier != EXPAND_MEMORY
11040 && ((modifier == EXPAND_CONST_ADDRESS
11041 || modifier == EXPAND_INITIALIZER)
11042 ? STRICT_ALIGNMENT
11043 : targetm.slow_unaligned_access (mode1,
11044 MEM_ALIGN (op0))))
11045 || !multiple_p (bitpos, BITS_PER_UNIT)))
11046 /* If the type and the field are a constant size and the
11047 size of the type isn't the same size as the bitfield,
11048 we must use bitfield operations. */
11049 || (known_size_p (bitsize)
11050 && TYPE_SIZE (TREE_TYPE (exp))
11051 && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
11052 && maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp))),
11053 bitsize)))
11055 machine_mode ext_mode = mode;
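/* When the result mode is BLKmode, try to find an integer mode wide
   enough to hold the whole field, unless both OP0 and TARGET are MEMs
   and the field starts on a byte boundary, in which case the block
   move below handles it directly.  */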
11057 if (ext_mode == BLKmode
11058 && ! (target != 0 && MEM_P (op0)
11059 && MEM_P (target)
11060 && multiple_p (bitpos, BITS_PER_UNIT)))
11061 ext_mode = int_mode_for_size (bitsize, 1).else_blk ();
11063 if (ext_mode == BLKmode)
11065 if (target == 0)
11066 target = assign_temp (type, 1, 1);
11068 /* ??? Unlike the similar test a few lines below, this one is
11069 very likely obsolete. */
11070 if (known_eq (bitsize, 0))
11071 return target;
11073 /* In this case, BITPOS must start at a byte boundary and
11074 TARGET, if specified, must be a MEM. */
11075 gcc_assert (MEM_P (op0)
11076 && (!target || MEM_P (target)));
11078 bytepos = exact_div (bitpos, BITS_PER_UNIT);
11079 poly_int64 bytesize = bits_to_bytes_round_up (bitsize);
11080 emit_block_move (target,
11081 adjust_address (op0, VOIDmode, bytepos),
11082 gen_int_mode (bytesize, Pmode),
11083 (modifier == EXPAND_STACK_PARM
11084 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
11086 return target;
11089 /* If we have nothing to extract, the result will be 0 for targets
11090 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
11091 return 0 for the sake of consistency, as reading a zero-sized
11092 bitfield is valid in Ada and the value is fully specified. */
11093 if (known_eq (bitsize, 0))
11094 return const0_rtx;
11096 op0 = validize_mem (op0);
11098 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
11099 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
11101 /* If the result has aggregate type and the extraction is done in
11102 an integral mode, then the field may be not aligned on a byte
11103 boundary; in this case, if it has reverse storage order, it
11104 needs to be extracted as a scalar field with reverse storage
11105 order and put back into memory order afterwards. */
11106 if (AGGREGATE_TYPE_P (type)
11107 && GET_MODE_CLASS (ext_mode) == MODE_INT)
11108 reversep = TYPE_REVERSE_STORAGE_ORDER (type);
11110 gcc_checking_assert (known_ge (bitpos, 0));
11111 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
11112 (modifier == EXPAND_STACK_PARM
11113 ? NULL_RTX : target),
11114 ext_mode, ext_mode, reversep, alt_rtl);
11116 /* If the result has aggregate type and the mode of OP0 is an
11117 integral mode then, if BITSIZE is narrower than this mode
11118 and this is for big-endian data, we must put the field
11119 into the high-order bits. And we must also put it back
11120 into memory order if it has been previously reversed. */
11121 scalar_int_mode op0_mode;
11122 if (AGGREGATE_TYPE_P (type)
11123 && is_int_mode (GET_MODE (op0), &op0_mode))
11125 HOST_WIDE_INT size = GET_MODE_BITSIZE (op0_mode);
11127 gcc_checking_assert (known_le (bitsize, size));
11128 if (maybe_lt (bitsize, size)
11129 && reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
11130 op0 = expand_shift (LSHIFT_EXPR, op0_mode, op0,
11131 size - bitsize, op0, 1);
11133 if (reversep)
11134 op0 = flip_storage_order (op0_mode, op0);
11137 /* If the result type is BLKmode, store the data into a temporary
11138 of the appropriate type, but with the mode corresponding to the
11139 mode for the data we have (op0's mode). */
11140 if (mode == BLKmode)
11142 rtx new_rtx
11143 = assign_stack_temp_for_type (ext_mode,
11144 GET_MODE_BITSIZE (ext_mode),
11145 type);
11146 emit_move_insn (new_rtx, op0);
11147 op0 = copy_rtx (new_rtx);
11148 PUT_MODE (op0, BLKmode);
11151 return op0;
11154 /* If the result is BLKmode, use that to access the object
11155 now as well. */
11156 if (mode == BLKmode)
11157 mode1 = BLKmode;
11159 /* Get a reference to just this component. */
11160 bytepos = bits_to_bytes_round_down (bitpos);
11161 if (modifier == EXPAND_CONST_ADDRESS
11162 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
11163 op0 = adjust_address_nv (op0, mode1, bytepos);
11164 else
11165 op0 = adjust_address (op0, mode1, bytepos);
11167 if (op0 == orig_op0)
11168 op0 = copy_rtx (op0);
11170 /* Don't set memory attributes if the base expression is
11171 SSA_NAME that got expanded as a MEM or a CONSTANT. In that case,
11172 we should just honor its original memory attributes. */
11173 if (!(TREE_CODE (tem) == SSA_NAME
11174 && (MEM_P (orig_op0) || CONSTANT_P (orig_op0))))
11175 set_mem_attributes (op0, exp, 0);
11177 if (REG_P (XEXP (op0, 0)))
11178 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
11180 /* If op0 is a temporary because the original expression was forced
11181 to memory, clear MEM_EXPR so that the original expression cannot
11182 be marked as addressable through MEM_EXPR of the temporary. */
11183 if (clear_mem_expr)
11184 set_mem_expr (op0, NULL_TREE);
11186 MEM_VOLATILE_P (op0) |= volatilep;
11188 if (reversep
11189 && modifier != EXPAND_MEMORY
11190 && modifier != EXPAND_WRITE)
11191 op0 = flip_storage_order (mode1, op0);
11193 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
11194 || modifier == EXPAND_CONST_ADDRESS
11195 || modifier == EXPAND_INITIALIZER)
11196 return op0;
11198 if (target == 0)
11199 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
11201 convert_move (target, op0, unsignedp);
11202 return target;
11205 case OBJ_TYPE_REF:
11206 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
11208 case CALL_EXPR:
11209 /* All valid uses of __builtin_va_arg_pack () are removed during
11210 inlining. */
11211 if (CALL_EXPR_VA_ARG_PACK (exp))
11212 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
11214 tree fndecl = get_callee_fndecl (exp), attr;
11216 if (fndecl
11217 /* Don't diagnose the error attribute in thunks, those are
11218 artificially created. */
11219 && !CALL_FROM_THUNK_P (exp)
11220 && (attr = lookup_attribute ("error",
11221 DECL_ATTRIBUTES (fndecl))) != NULL)
11223 const char *ident = lang_hooks.decl_printable_name (fndecl, 1);
11224 error ("%Kcall to %qs declared with attribute error: %s", exp,
11225 identifier_to_locale (ident),
11226 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
11228 if (fndecl
11229 /* Don't diagnose the warning attribute in thunks, those are
11230 artificially created. */
11231 && !CALL_FROM_THUNK_P (exp)
11232 && (attr = lookup_attribute ("warning",
11233 DECL_ATTRIBUTES (fndecl))) != NULL)
11235 const char *ident = lang_hooks.decl_printable_name (fndecl, 1);
11236 warning_at (tree_nonartificial_location (exp),
11237 OPT_Wattribute_warning,
11238 "%Kcall to %qs declared with attribute warning: %s",
11239 exp, identifier_to_locale (ident),
11240 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
11243 /* Check for a built-in function. */
11244 if (fndecl && fndecl_built_in_p (fndecl))
11246 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
11247 return expand_builtin (exp, target, subtarget, tmode, ignore);
11250 return expand_call (exp, target, ignore);
11252 case VIEW_CONVERT_EXPR:
11253 op0 = NULL_RTX;
11255 /* If we are converting to BLKmode, try to avoid an intermediate
11256 temporary by fetching an inner memory reference. */
11257 if (mode == BLKmode
11258 && poly_int_tree_p (TYPE_SIZE (type))
11259 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
11260 && handled_component_p (treeop0))
11262 machine_mode mode1;
11263 poly_int64 bitsize, bitpos, bytepos;
11264 tree offset;
11265 int reversep, volatilep = 0;
11266 tree tem
11267 = get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1,
11268 &unsignedp, &reversep, &volatilep);
11270 /* ??? We should work harder and deal with non-zero offsets. */
11271 if (!offset
11272 && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
11273 && !reversep
11274 && known_size_p (bitsize)
11275 && known_eq (wi::to_poly_offset (TYPE_SIZE (type)), bitsize))
11277 /* See the normal_inner_ref case for the rationale. */
11278 rtx orig_op0
11279 = expand_expr_real (tem,
11280 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
11281 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
11282 != INTEGER_CST)
11283 && modifier != EXPAND_STACK_PARM
11284 ? target : NULL_RTX),
11285 VOIDmode,
11286 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
11287 NULL, true);
11289 if (MEM_P (orig_op0))
11291 op0 = orig_op0;
11293 /* Get a reference to just this component. */
11294 if (modifier == EXPAND_CONST_ADDRESS
11295 || modifier == EXPAND_SUM
11296 || modifier == EXPAND_INITIALIZER)
11297 op0 = adjust_address_nv (op0, mode, bytepos);
11298 else
11299 op0 = adjust_address (op0, mode, bytepos);
11301 if (op0 == orig_op0)
11302 op0 = copy_rtx (op0);
11304 set_mem_attributes (op0, treeop0, 0);
11305 if (REG_P (XEXP (op0, 0)))
11306 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
11308 MEM_VOLATILE_P (op0) |= volatilep;
11313 if (!op0)
11314 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
11315 NULL, inner_reference_p);
11317 /* If the input and output modes are both the same, we are done. */
11318 if (mode == GET_MODE (op0))
11320 /* If neither mode is BLKmode, and both modes are the same size
11321 then we can use gen_lowpart. */
11322 else if (mode != BLKmode
11323 && GET_MODE (op0) != BLKmode
11324 && known_eq (GET_MODE_PRECISION (mode),
11325 GET_MODE_PRECISION (GET_MODE (op0)))
11326 && !COMPLEX_MODE_P (GET_MODE (op0)))
11328 if (GET_CODE (op0) == SUBREG)
11329 op0 = force_reg (GET_MODE (op0), op0);
11330 temp = gen_lowpart_common (mode, op0);
11331 if (temp)
11332 op0 = temp;
11333 else
11335 if (!REG_P (op0) && !MEM_P (op0))
11336 op0 = force_reg (GET_MODE (op0), op0);
11337 op0 = gen_lowpart (mode, op0);
11340 /* If both types are integral, convert from one mode to the other. */
11341 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
11342 op0 = convert_modes (mode, GET_MODE (op0), op0,
11343 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
11344 /* If the output type is a bit-field type, do an extraction. */
11345 else if (reduce_bit_field)
11346 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
11347 TYPE_UNSIGNED (type), NULL_RTX,
11348 mode, mode, false, NULL);
11349 /* As a last resort, spill op0 to memory, and reload it in a
11350 different mode. */
11351 else if (!MEM_P (op0))
11353 /* If the operand is not a MEM, force it into memory. Since we
11354 are going to be changing the mode of the MEM, don't call
11355 force_const_mem for constants because we don't allow pool
11356 constants to change mode. */
11357 tree inner_type = TREE_TYPE (treeop0);
11359 gcc_assert (!TREE_ADDRESSABLE (exp));
11361 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
11362 target
11363 = assign_stack_temp_for_type
11364 (TYPE_MODE (inner_type),
11365 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
11367 emit_move_insn (target, op0);
11368 op0 = target;
11371 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
11372 output type is such that the operand is known to be aligned, indicate
11373 that it is. Otherwise, we need only be concerned about alignment for
11374 non-BLKmode results. */
11375 if (MEM_P (op0))
11377 enum insn_code icode;
11379 if (modifier != EXPAND_WRITE
11380 && modifier != EXPAND_MEMORY
11381 && !inner_reference_p
11382 && mode != BLKmode
11383 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
11385 /* If the target does have special handling for unaligned
11386 loads of mode then use them. */
11387 if ((icode = optab_handler (movmisalign_optab, mode))
11388 != CODE_FOR_nothing)
11390 rtx reg;
11392 op0 = adjust_address (op0, mode, 0);
11393 /* We've already validated the memory, and we're creating a
11394 new pseudo destination. The predicates really can't
11395 fail. */
11396 reg = gen_reg_rtx (mode);
11398 /* Nor can the insn generator. */
11399 rtx_insn *insn = GEN_FCN (icode) (reg, op0);
11400 emit_insn (insn);
11401 return reg;
11403 else if (STRICT_ALIGNMENT)
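/* No misaligned-move pattern is available, so copy OP0 into a
   naturally aligned stack temporary large enough for both modes and
   access the data from there in MODE instead.  */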
11405 poly_uint64 mode_size = GET_MODE_SIZE (mode);
11406 poly_uint64 temp_size = mode_size;
11407 if (GET_MODE (op0) != BLKmode)
11408 temp_size = upper_bound (temp_size,
11409 GET_MODE_SIZE (GET_MODE (op0)));
11410 rtx new_rtx
11411 = assign_stack_temp_for_type (mode, temp_size, type);
11412 rtx new_with_op0_mode
11413 = adjust_address (new_rtx, GET_MODE (op0), 0);
11415 gcc_assert (!TREE_ADDRESSABLE (exp));
11417 if (GET_MODE (op0) == BLKmode)
11419 rtx size_rtx = gen_int_mode (mode_size, Pmode);
11420 emit_block_move (new_with_op0_mode, op0, size_rtx,
11421 (modifier == EXPAND_STACK_PARM
11422 ? BLOCK_OP_CALL_PARM
11423 : BLOCK_OP_NORMAL));
11425 else
11426 emit_move_insn (new_with_op0_mode, op0);
11428 op0 = new_rtx;
11432 op0 = adjust_address (op0, mode, 0);
11435 return op0;
11437 case MODIFY_EXPR:
11439 tree lhs = treeop0;
11440 tree rhs = treeop1;
11441 gcc_assert (ignore);
11443 /* Check for |= or &= of a bitfield of size one into another bitfield
11444 of size 1. In this case, (unless we need the result of the
11445 assignment) we can do this more efficiently with a
11446 test followed by an assignment, if necessary.
11448 ??? At this point, we can't get a BIT_FIELD_REF here. But if
11449 things change so we do, this code should be enhanced to
11450 support it. */
11451 if (TREE_CODE (lhs) == COMPONENT_REF
11452 && (TREE_CODE (rhs) == BIT_IOR_EXPR
11453 || TREE_CODE (rhs) == BIT_AND_EXPR)
11454 && TREE_OPERAND (rhs, 0) == lhs
11455 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
11456 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
11457 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
11459 rtx_code_label *label = gen_label_rtx ();
11460 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
11461 profile_probability prob = profile_probability::uninitialized ();
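/* For |= the store can be skipped when the source bit is 0, and for
   &= when it is 1, since the destination bit already has its final
   value in those cases; otherwise store the constant VALUE.  */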
11462 if (value)
11463 jumpifnot (TREE_OPERAND (rhs, 1), label, prob);
11464 else
11465 jumpif (TREE_OPERAND (rhs, 1), label, prob);
11466 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
11467 false);
11468 do_pending_stack_adjust ();
11469 emit_label (label);
11470 return const0_rtx;
11473 expand_assignment (lhs, rhs, false);
11474 return const0_rtx;
11477 case ADDR_EXPR:
11478 return expand_expr_addr_expr (exp, target, tmode, modifier);
11480 case REALPART_EXPR:
11481 op0 = expand_normal (treeop0);
11482 return read_complex_part (op0, false);
11484 case IMAGPART_EXPR:
11485 op0 = expand_normal (treeop0);
11486 return read_complex_part (op0, true);
11488 case RETURN_EXPR:
11489 case LABEL_EXPR:
11490 case GOTO_EXPR:
11491 case SWITCH_EXPR:
11492 case ASM_EXPR:
11493 /* Expanded in cfgexpand.c. */
11494 gcc_unreachable ();
11496 case TRY_CATCH_EXPR:
11497 case CATCH_EXPR:
11498 case EH_FILTER_EXPR:
11499 case TRY_FINALLY_EXPR:
11500 case EH_ELSE_EXPR:
11501 /* Lowered by tree-eh.c. */
11502 gcc_unreachable ();
11504 case WITH_CLEANUP_EXPR:
11505 case CLEANUP_POINT_EXPR:
11506 case TARGET_EXPR:
11507 case CASE_LABEL_EXPR:
11508 case VA_ARG_EXPR:
11509 case BIND_EXPR:
11510 case INIT_EXPR:
11511 case CONJ_EXPR:
11512 case COMPOUND_EXPR:
11513 case PREINCREMENT_EXPR:
11514 case PREDECREMENT_EXPR:
11515 case POSTINCREMENT_EXPR:
11516 case POSTDECREMENT_EXPR:
11517 case LOOP_EXPR:
11518 case EXIT_EXPR:
11519 case COMPOUND_LITERAL_EXPR:
11520 /* Lowered by gimplify.c. */
11521 gcc_unreachable ();
11523 case FDESC_EXPR:
11524 /* Function descriptors are not valid except for as
11525 initialization constants, and should not be expanded. */
11526 gcc_unreachable ();
11528 case WITH_SIZE_EXPR:
11529 /* WITH_SIZE_EXPR expands to its first argument. The caller should
11530 have pulled out the size to use in whatever context it needed. */
11531 return expand_expr_real (treeop0, original_target, tmode,
11532 modifier, alt_rtl, inner_reference_p);
11534 default:
11535 return expand_expr_real_2 (&ops, target, tmode, modifier);
11539 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
11540 signedness of TYPE), possibly returning the result in TARGET.
11541 TYPE is known to be a partial integer type. */
11542 static rtx
11543 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
11545 scalar_int_mode mode = SCALAR_INT_TYPE_MODE (type);
11546 HOST_WIDE_INT prec = TYPE_PRECISION (type);
11547 gcc_assert ((GET_MODE (exp) == VOIDmode || GET_MODE (exp) == mode)
11548 && (!target || GET_MODE (target) == mode));
11550 /* For constant values, reduce using wide_int_to_tree. */
11551 if (poly_int_rtx_p (exp))
11553 auto value = wi::to_poly_wide (exp, mode);
11554 tree t = wide_int_to_tree (type, value);
11555 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
11557 else if (TYPE_UNSIGNED (type))
11559 rtx mask = immed_wide_int_const
11560 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
11561 return expand_and (mode, exp, mask, target);
11563 else
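/* Sign-extend from PREC bits: shift the value up so the field's sign
   bit becomes the mode's sign bit, then arithmetic-shift it back
   down.  */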
11565 int count = GET_MODE_PRECISION (mode) - prec;
11566 exp = expand_shift (LSHIFT_EXPR, mode, exp, count, target, 0);
11567 return expand_shift (RSHIFT_EXPR, mode, exp, count, target, 0);
11571 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
11572 when applied to the address of EXP produces an address known to be
11573 aligned more than BIGGEST_ALIGNMENT. */
11575 static int
11576 is_aligning_offset (const_tree offset, const_tree exp)
11578 /* Strip off any conversions. */
11579 while (CONVERT_EXPR_P (offset))
11580 offset = TREE_OPERAND (offset, 0);
11582 /* We must now have a BIT_AND_EXPR with a constant that is one less than
11583 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
11584 if (TREE_CODE (offset) != BIT_AND_EXPR
11585 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
11586 || compare_tree_int (TREE_OPERAND (offset, 1),
11587 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
11588 || !pow2p_hwi (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1))
11589 return 0;
11591 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
11592 It must be NEGATE_EXPR. Then strip any more conversions. */
11593 offset = TREE_OPERAND (offset, 0);
11594 while (CONVERT_EXPR_P (offset))
11595 offset = TREE_OPERAND (offset, 0);
11597 if (TREE_CODE (offset) != NEGATE_EXPR)
11598 return 0;
11600 offset = TREE_OPERAND (offset, 0);
11601 while (CONVERT_EXPR_P (offset))
11602 offset = TREE_OPERAND (offset, 0);
11604 /* This must now be the address of EXP. */
11605 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
11608 /* If EXPR is a constant initializer (either an expression or CONSTRUCTOR),
11609 attempt to obtain its native representation as an array of nonzero BYTES.
11610 Return true on success and false on failure (the latter without modifying
11611 BYTES). */
11613 static bool
11614 convert_to_bytes (tree type, tree expr, vec<unsigned char> *bytes)
11616 if (TREE_CODE (expr) == CONSTRUCTOR)
11618 /* Set to the size of the CONSTRUCTOR elements. */
11619 unsigned HOST_WIDE_INT ctor_size = bytes->length ();
11621 if (TREE_CODE (type) == ARRAY_TYPE)
11623 tree val, idx;
11624 tree eltype = TREE_TYPE (type);
11625 unsigned HOST_WIDE_INT elsize =
11626 tree_to_uhwi (TYPE_SIZE_UNIT (eltype));
11628 /* Jump through hoops to determine the lower bound for languages
11629 like Ada that can set it to an (almost) arbitrary value. */
11630 tree dom = TYPE_DOMAIN (type);
11631 if (!dom)
11632 return false;
11633 tree min = TYPE_MIN_VALUE (dom);
11634 if (!min || !tree_fits_uhwi_p (min))
11635 return false;
11636 unsigned HOST_WIDE_INT i, last_idx = tree_to_uhwi (min) - 1;
11637 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, idx, val)
11639 /* Append zeros for elements with no initializers. */
11640 if (!tree_fits_uhwi_p (idx))
11641 return false;
11642 unsigned HOST_WIDE_INT cur_idx = tree_to_uhwi (idx);
11643 if (unsigned HOST_WIDE_INT size = cur_idx - (last_idx + 1))
11645 size = size * elsize + bytes->length ();
11646 bytes->safe_grow_cleared (size, true);
11649 if (!convert_to_bytes (eltype, val, bytes))
11650 return false;
11652 last_idx = cur_idx;
11655 else if (TREE_CODE (type) == RECORD_TYPE)
11657 tree val, fld;
11658 unsigned HOST_WIDE_INT i;
11659 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, fld, val)
11661 /* Append zeros for members with no initializers and
11662 any padding. */
11663 unsigned HOST_WIDE_INT cur_off = int_byte_position (fld);
11664 if (bytes->length () < cur_off)
11665 bytes->safe_grow_cleared (cur_off, true);
11667 if (!convert_to_bytes (TREE_TYPE (val), val, bytes))
11668 return false;
11671 else
11672 return false;
11674 /* Compute the size of the CONSTRUCTOR elements. */
11675 ctor_size = bytes->length () - ctor_size;
11677 /* Append zeros to the byte vector to the full size of the type.
11678 The type size can be less than the size of the CONSTRUCTOR
11679 if the latter contains initializers for a flexible array
11680 member. */
11681 tree size = TYPE_SIZE_UNIT (type);
11682 unsigned HOST_WIDE_INT type_size = tree_to_uhwi (size);
11683 if (ctor_size < type_size)
11684 if (unsigned HOST_WIDE_INT size_grow = type_size - ctor_size)
11685 bytes->safe_grow_cleared (bytes->length () + size_grow, true);
11687 return true;
11690 /* Except for RECORD_TYPE which may have an initialized flexible array
11691 member, the size of a type is the same as the size of the initializer
11692 (including any implicitly zeroed out members and padding). Allocate
11693 just enough for that many bytes. */
11694 tree expr_size = TYPE_SIZE_UNIT (TREE_TYPE (expr));
11695 if (!expr_size || !tree_fits_uhwi_p (expr_size))
11696 return false;
11697 const unsigned HOST_WIDE_INT expr_bytes = tree_to_uhwi (expr_size);
11698 const unsigned bytes_sofar = bytes->length ();
11699 /* native_encode_expr can convert at most INT_MAX bytes. vec is limited
11700 to at most UINT_MAX. */
11701 if (bytes_sofar + expr_bytes > INT_MAX)
11702 return false;
11704 /* Unlike for RECORD_TYPE, there is no need to clear the memory since
11705 it's completely overwritten by native_encode_expr. */
11706 bytes->safe_grow (bytes_sofar + expr_bytes, true);
11707 unsigned char *pnext = bytes->begin () + bytes_sofar;
11708 int nbytes = native_encode_expr (expr, pnext, expr_bytes, 0);
11709 /* NBYTES is zero on failure. Otherwise it should equal EXPR_BYTES. */
11710 return (unsigned HOST_WIDE_INT) nbytes == expr_bytes;
11713 /* Return a STRING_CST corresponding to ARG's constant initializer either
11714 if it's a string constant, or, when VALREP is set, any other constant,
11715 or null otherwise.
11716 On success, set *PTR_OFFSET to the (possibly non-constant) byte offset
11717 within the byte string that ARG references. If nonnull, set *MEM_SIZE
11718 to the size of the byte string. If nonnull, set *DECL to the constant
11719 declaration ARG refers to. */
11721 static tree
11722 constant_byte_string (tree arg, tree *ptr_offset, tree *mem_size, tree *decl,
11723 bool valrep = false)
11725 tree dummy = NULL_TREE;
11726 if (!mem_size)
11727 mem_size = &dummy;
11729 /* Store the type of the original expression before conversions
11730 via NOP_EXPR or POINTER_PLUS_EXPR to other types have been
11731 removed. */
11732 tree argtype = TREE_TYPE (arg);
11734 tree array;
11735 STRIP_NOPS (arg);
11737 /* Non-constant index into the character array in an ARRAY_REF
11738 expression or null. */
11739 tree varidx = NULL_TREE;
11741 poly_int64 base_off = 0;
11743 if (TREE_CODE (arg) == ADDR_EXPR)
11745 arg = TREE_OPERAND (arg, 0);
11746 tree ref = arg;
11747 if (TREE_CODE (arg) == ARRAY_REF)
11749 tree idx = TREE_OPERAND (arg, 1);
11750 if (TREE_CODE (idx) != INTEGER_CST)
11752 /* From a pointer (but not array) argument extract the variable
11753 index to prevent get_addr_base_and_unit_offset() from failing
11754 due to it. Use it later to compute the non-constant offset
11755 into the string and return it to the caller. */
11756 varidx = idx;
11757 ref = TREE_OPERAND (arg, 0);
11759 if (TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE)
11760 return NULL_TREE;
11762 if (!integer_zerop (array_ref_low_bound (arg)))
11763 return NULL_TREE;
11765 if (!integer_onep (array_ref_element_size (arg)))
11766 return NULL_TREE;
11769 array = get_addr_base_and_unit_offset (ref, &base_off);
11770 if (!array
11771 || (TREE_CODE (array) != VAR_DECL
11772 && TREE_CODE (array) != CONST_DECL
11773 && TREE_CODE (array) != STRING_CST))
11774 return NULL_TREE;
11776 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
11778 tree arg0 = TREE_OPERAND (arg, 0);
11779 tree arg1 = TREE_OPERAND (arg, 1);
11781 tree offset;
11782 tree str = string_constant (arg0, &offset, mem_size, decl);
11783 if (!str)
11785 str = string_constant (arg1, &offset, mem_size, decl);
11786 arg1 = arg0;
11789 if (str)
11791 /* Avoid pointers to arrays (see bug 86622). */
11792 if (POINTER_TYPE_P (TREE_TYPE (arg))
11793 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == ARRAY_TYPE
11794 && !(decl && !*decl)
11795 && !(decl && tree_fits_uhwi_p (DECL_SIZE_UNIT (*decl))
11796 && tree_fits_uhwi_p (*mem_size)
11797 && tree_int_cst_equal (*mem_size, DECL_SIZE_UNIT (*decl))))
11798 return NULL_TREE;
11800 tree type = TREE_TYPE (offset);
11801 arg1 = fold_convert (type, arg1);
11802 *ptr_offset = fold_build2 (PLUS_EXPR, type, offset, arg1);
11803 return str;
11805 return NULL_TREE;
11807 else if (TREE_CODE (arg) == SSA_NAME)
11809 gimple *stmt = SSA_NAME_DEF_STMT (arg);
11810 if (!is_gimple_assign (stmt))
11811 return NULL_TREE;
11813 tree rhs1 = gimple_assign_rhs1 (stmt);
11814 tree_code code = gimple_assign_rhs_code (stmt);
11815 if (code == ADDR_EXPR)
11816 return string_constant (rhs1, ptr_offset, mem_size, decl);
11817 else if (code != POINTER_PLUS_EXPR)
11818 return NULL_TREE;
11820 tree offset;
11821 if (tree str = string_constant (rhs1, &offset, mem_size, decl))
11823 /* Avoid pointers to arrays (see bug 86622). */
11824 if (POINTER_TYPE_P (TREE_TYPE (rhs1))
11825 && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs1))) == ARRAY_TYPE
11826 && !(decl && !*decl)
11827 && !(decl && tree_fits_uhwi_p (DECL_SIZE_UNIT (*decl))
11828 && tree_fits_uhwi_p (*mem_size)
11829 && tree_int_cst_equal (*mem_size, DECL_SIZE_UNIT (*decl))))
11830 return NULL_TREE;
11832 tree rhs2 = gimple_assign_rhs2 (stmt);
11833 tree type = TREE_TYPE (offset);
11834 rhs2 = fold_convert (type, rhs2);
11835 *ptr_offset = fold_build2 (PLUS_EXPR, type, offset, rhs2);
11836 return str;
11838 return NULL_TREE;
11840 else if (DECL_P (arg))
11841 array = arg;
11842 else
11843 return NULL_TREE;
11845 tree offset = wide_int_to_tree (sizetype, base_off);
11846 if (varidx)
11848 if (TREE_CODE (TREE_TYPE (array)) != ARRAY_TYPE)
11849 return NULL_TREE;
11851 gcc_assert (TREE_CODE (arg) == ARRAY_REF);
11852 tree chartype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (arg, 0)));
11853 if (TREE_CODE (chartype) != INTEGER_TYPE)
11854 return NULL;
11856 offset = fold_convert (sizetype, varidx);
11859 if (TREE_CODE (array) == STRING_CST)
11861 *ptr_offset = fold_convert (sizetype, offset);
11862 *mem_size = TYPE_SIZE_UNIT (TREE_TYPE (array));
11863 if (decl)
11864 *decl = NULL_TREE;
11865 gcc_checking_assert (tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (array)))
11866 >= TREE_STRING_LENGTH (array));
11867 return array;
11870 tree init = ctor_for_folding (array);
11871 if (!init || init == error_mark_node)
11872 return NULL_TREE;
11874 if (valrep)
11876 HOST_WIDE_INT cstoff;
11877 if (!base_off.is_constant (&cstoff))
11878 return NULL_TREE;
11880 /* If value representation was requested convert the initializer
11881 for the whole array or object into a string of bytes forming
11882 its value representation and return it. */
11883 auto_vec<unsigned char> bytes;
11884 if (!convert_to_bytes (TREE_TYPE (init), init, &bytes))
11885 return NULL_TREE;
11887 unsigned n = bytes.length ();
11888 const char *p = reinterpret_cast<const char *>(bytes.address ());
11889 init = build_string_literal (n, p, char_type_node);
11890 init = TREE_OPERAND (init, 0);
11891 init = TREE_OPERAND (init, 0);
11893 *mem_size = size_int (TREE_STRING_LENGTH (init));
11894 *ptr_offset = wide_int_to_tree (ssizetype, base_off);
11896 if (decl)
11897 *decl = array;
11899 return init;
11902 if (TREE_CODE (init) == CONSTRUCTOR)
11904 /* Convert the 64-bit constant offset to a wider type to avoid
11905 overflow and use it to obtain the initializer for the subobject
11906 it points into. */
11907 offset_int wioff;
11908 if (!base_off.is_constant (&wioff))
11909 return NULL_TREE;
11911 wioff *= BITS_PER_UNIT;
11912 if (!wi::fits_uhwi_p (wioff))
11913 return NULL_TREE;
11915 base_off = wioff.to_uhwi ();
11916 unsigned HOST_WIDE_INT fieldoff = 0;
11917 init = fold_ctor_reference (TREE_TYPE (arg), init, base_off, 0, array,
11918 &fieldoff);
11919 if (!init || init == error_mark_node)
11920 return NULL_TREE;
11922 HOST_WIDE_INT cstoff;
11923 if (!base_off.is_constant (&cstoff))
11924 return NULL_TREE;
11926 cstoff = (cstoff - fieldoff) / BITS_PER_UNIT;
11927 tree off = build_int_cst (sizetype, cstoff);
11928 if (varidx)
11929 offset = fold_build2 (PLUS_EXPR, TREE_TYPE (offset), offset, off);
11930 else
11931 offset = off;
11934 *ptr_offset = offset;
11936 tree inittype = TREE_TYPE (init);
11938 if (TREE_CODE (init) == INTEGER_CST
11939 && (TREE_CODE (TREE_TYPE (array)) == INTEGER_TYPE
11940 || TYPE_MAIN_VARIANT (inittype) == char_type_node))
11942 /* For a reference to (address of) a single constant character,
11943 store the native representation of the character in CHARBUF.
11944 If the reference is to an element of an array or a member
11945 of a struct, only consider narrow characters until ctors
11946 for wide character arrays are transformed to STRING_CSTs
11947 like those for narrow arrays. */
11948 unsigned char charbuf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11949 int len = native_encode_expr (init, charbuf, sizeof charbuf, 0);
11950 if (len > 0)
11952 /* Construct a string literal with elements of INITTYPE and
11953 the representation above. Then strip
11954 the ADDR_EXPR (ARRAY_REF (...)) around the STRING_CST. */
11955 init = build_string_literal (len, (char *)charbuf, inittype);
11956 init = TREE_OPERAND (TREE_OPERAND (init, 0), 0);
11960 tree initsize = TYPE_SIZE_UNIT (inittype);
11962 if (TREE_CODE (init) == CONSTRUCTOR && initializer_zerop (init))
11964 /* Fold an empty/zero constructor for an implicitly initialized
11965 object or subobject into the empty string. */
11967 /* Determine the character type from that of the original
11968 expression. */
11969 tree chartype = argtype;
11970 if (POINTER_TYPE_P (chartype))
11971 chartype = TREE_TYPE (chartype);
11972 while (TREE_CODE (chartype) == ARRAY_TYPE)
11973 chartype = TREE_TYPE (chartype);
11975 if (INTEGRAL_TYPE_P (chartype)
11976 && TYPE_PRECISION (chartype) == TYPE_PRECISION (char_type_node))
11978 /* Convert a char array to an empty STRING_CST having an array
11979 of the expected type and size. */
11980 if (!initsize)
11981 initsize = integer_zero_node;
11983 unsigned HOST_WIDE_INT size = tree_to_uhwi (initsize);
11984 init = build_string_literal (size, NULL, chartype, size);
11985 init = TREE_OPERAND (init, 0);
11986 init = TREE_OPERAND (init, 0);
11988 *ptr_offset = integer_zero_node;
11992 if (decl)
11993 *decl = array;
11995 if (TREE_CODE (init) != STRING_CST)
11996 return NULL_TREE;
11998 *mem_size = initsize;
12000 gcc_checking_assert (tree_to_shwi (initsize) >= TREE_STRING_LENGTH (init));
12002 return init;
12005 /* Return STRING_CST if an ARG corresponds to a string constant or zero
12006 if it doesn't. If we return nonzero, set *PTR_OFFSET to the (possibly
12007 non-constant) offset in bytes within the string that ARG is accessing.
12008 If MEM_SIZE is non-zero the storage size of the memory is returned.
12009 If DECL is non-zero the constant declaration is returned if available. */
12011 tree
12012 string_constant (tree arg, tree *ptr_offset, tree *mem_size, tree *decl)
12014 return constant_byte_string (arg, ptr_offset, mem_size, decl, false);
12017 /* Similar to string_constant, return a STRING_CST corresponding
12018 to the value representation of the first argument if it's
12019 a constant. */
12021 tree
12022 byte_representation (tree arg, tree *ptr_offset, tree *mem_size, tree *decl)
12024 return constant_byte_string (arg, ptr_offset, mem_size, decl, true);
12027 /* Optimize x % C1 == C2 for signed modulo if C1 is a power of two and C2
12028 is non-zero. With C3 = (1 << (prec - 1)) | (C1 - 1), transform it
12029 for C2 > 0 to x & C3 == C2
12030 for C2 < 0 to x & C3 == (C2 & C3). */
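/* For instance, with a 32-bit int and C1 = 16, C3 is 0x8000000f, so
   x % 16 == 3 becomes x & 0x8000000f == 3 and x % 16 == -3 becomes
   x & 0x8000000f == 0x8000000d.  */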
12031 enum tree_code
12032 maybe_optimize_pow2p_mod_cmp (enum tree_code code, tree *arg0, tree *arg1)
12034 gimple *stmt = get_def_for_expr (*arg0, TRUNC_MOD_EXPR);
12035 tree treeop0 = gimple_assign_rhs1 (stmt);
12036 tree treeop1 = gimple_assign_rhs2 (stmt);
12037 tree type = TREE_TYPE (*arg0);
12038 scalar_int_mode mode;
12039 if (!is_a <scalar_int_mode> (TYPE_MODE (type), &mode))
12040 return code;
12041 if (GET_MODE_BITSIZE (mode) != TYPE_PRECISION (type)
12042 || TYPE_PRECISION (type) <= 1
12043 || TYPE_UNSIGNED (type)
12044 /* Signed x % c == 0 should have been optimized into unsigned modulo
12045 earlier. */
12046 || integer_zerop (*arg1)
12047 /* If c is known to be non-negative, modulo will be expanded as unsigned
12048 modulo. */
12049 || get_range_pos_neg (treeop0) == 1)
12050 return code;
12052 /* x % c == d where d < 0 && d <= -c should always be false. */
12053 if (tree_int_cst_sgn (*arg1) == -1
12054 && -wi::to_widest (treeop1) >= wi::to_widest (*arg1))
12055 return code;
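/* Build C3 = (1 << (prec - 1)) | (C1 - 1) and C4, the value to
   compare X & C3 against: C2 itself, or C2 & C3 when C2 is
   negative.  */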
12057 int prec = TYPE_PRECISION (type);
12058 wide_int w = wi::to_wide (treeop1) - 1;
12059 w |= wi::shifted_mask (0, prec - 1, true, prec);
12060 tree c3 = wide_int_to_tree (type, w);
12061 tree c4 = *arg1;
12062 if (tree_int_cst_sgn (*arg1) == -1)
12063 c4 = wide_int_to_tree (type, w & wi::to_wide (*arg1));
12065 rtx op0 = expand_normal (treeop0);
12066 treeop0 = make_tree (TREE_TYPE (treeop0), op0);
12068 bool speed_p = optimize_insn_for_speed_p ();
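/* Cost both alternatives: expanding the modulo as written (MOINSNS)
   versus the X & C3 form (MUINSNS); the cheaper sequence is emitted
   below.  */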
12070 do_pending_stack_adjust ();
12072 location_t loc = gimple_location (stmt);
12073 struct separate_ops ops;
12074 ops.code = TRUNC_MOD_EXPR;
12075 ops.location = loc;
12076 ops.type = TREE_TYPE (treeop0);
12077 ops.op0 = treeop0;
12078 ops.op1 = treeop1;
12079 ops.op2 = NULL_TREE;
12080 start_sequence ();
12081 rtx mor = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (ops.type),
12082 EXPAND_NORMAL);
12083 rtx_insn *moinsns = get_insns ();
12084 end_sequence ();
12086 unsigned mocost = seq_cost (moinsns, speed_p);
12087 mocost += rtx_cost (mor, mode, EQ, 0, speed_p);
12088 mocost += rtx_cost (expand_normal (*arg1), mode, EQ, 1, speed_p);
12090 ops.code = BIT_AND_EXPR;
12091 ops.location = loc;
12092 ops.type = TREE_TYPE (treeop0);
12093 ops.op0 = treeop0;
12094 ops.op1 = c3;
12095 ops.op2 = NULL_TREE;
12096 start_sequence ();
12097 rtx mur = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (ops.type),
12098 EXPAND_NORMAL);
12099 rtx_insn *muinsns = get_insns ();
12100 end_sequence ();
12102 unsigned mucost = seq_cost (muinsns, speed_p);
12103 mucost += rtx_cost (mur, mode, EQ, 0, speed_p);
12104 mucost += rtx_cost (expand_normal (c4), mode, EQ, 1, speed_p);
12106 if (mocost <= mucost)
12108 emit_insn (moinsns);
12109 *arg0 = make_tree (TREE_TYPE (*arg0), mor);
12110 return code;
12113 emit_insn (muinsns);
12114 *arg0 = make_tree (TREE_TYPE (*arg0), mur);
12115 *arg1 = c4;
12116 return code;
12119 /* Attempt to optimize unsigned (X % C1) == C2 (or (X % C1) != C2).
12120 If C1 is odd to:
12121 (X - C2) * C3 <= C4 (or >), where
12122 C3 is modular multiplicative inverse of C1 and 1<<prec and
12123 C4 is ((1<<prec) - 1) / C1 or ((1<<prec) - 1) / C1 - 1 (the latter
12124 if C2 > ((1<<prec) - 1) % C1).
12125 If C1 is even, S = ctz (C1) and C2 is 0, use
12126 ((X * C3) r>> S) <= C4, where C3 is modular multiplicative
12127 inverse of C1>>S and 1<<prec and C4 is (((1<<prec) - 1) / (C1>>S)) >> S.
12129 For signed (X % C1) == 0 if C1 is odd to (all operations in it
12130 unsigned):
12131 (X * C3) + C4 <= 2 * C4, where
12132 C3 is modular multiplicative inverse of (unsigned) C1 and 1<<prec and
12133 C4 is ((1<<(prec - 1) - 1) / C1).
12134 If C1 is even, S = ctz(C1), use
12135 ((X * C3) + C4) r>> S <= (C4 >> (S - 1))
12136 where C3 is modular multiplicative inverse of (unsigned)(C1>>S) and 1<<prec
12137 and C4 is ((1<<(prec - 1) - 1) / (C1>>S)) & (-1<<S).
12139 See the Hacker's Delight book, section 10-17. */
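/* As an illustration, for 32-bit unsigned x with C1 = 5 and C2 = 0,
   C3 is 0xcccccccd (the inverse of 5 modulo 1 << 32) and C4 is
   0x33333333, so x % 5U == 0 becomes x * 0xcccccccd <= 0x33333333.  */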
12140 enum tree_code
12141 maybe_optimize_mod_cmp (enum tree_code code, tree *arg0, tree *arg1)
12143 gcc_checking_assert (code == EQ_EXPR || code == NE_EXPR);
12144 gcc_checking_assert (TREE_CODE (*arg1) == INTEGER_CST);
12146 if (optimize < 2)
12147 return code;
12149 gimple *stmt = get_def_for_expr (*arg0, TRUNC_MOD_EXPR);
12150 if (stmt == NULL)
12151 return code;
12153 tree treeop0 = gimple_assign_rhs1 (stmt);
12154 tree treeop1 = gimple_assign_rhs2 (stmt);
12155 if (TREE_CODE (treeop0) != SSA_NAME
12156 || TREE_CODE (treeop1) != INTEGER_CST
12157 /* Don't optimize the undefined behavior case x % 0;
12158 x % 1 should have been optimized into zero, punt if
12159 it makes it here for whatever reason;
12160 x % -c should have been optimized into x % c. */
12161 || compare_tree_int (treeop1, 2) <= 0
12162 /* Likewise x % c == d where d >= c should always be false. */
12163 || tree_int_cst_le (treeop1, *arg1))
12164 return code;
12166 /* Unsigned x % pow2 is handled right already, for signed
12167 modulo handle it in maybe_optimize_pow2p_mod_cmp. */
12168 if (integer_pow2p (treeop1))
12169 return maybe_optimize_pow2p_mod_cmp (code, arg0, arg1);
12171 tree type = TREE_TYPE (*arg0);
12172 scalar_int_mode mode;
12173 if (!is_a <scalar_int_mode> (TYPE_MODE (type), &mode))
12174 return code;
12175 if (GET_MODE_BITSIZE (mode) != TYPE_PRECISION (type)
12176 || TYPE_PRECISION (type) <= 1)
12177 return code;
12179 signop sgn = UNSIGNED;
12180 /* If both operands are known to have the sign bit clear, handle
12181 even the signed modulo case as unsigned. treeop1 is always
12182 positive >= 2, checked above. */
12183 if (!TYPE_UNSIGNED (type) && get_range_pos_neg (treeop0) != 1)
12184 sgn = SIGNED;
12186 if (!TYPE_UNSIGNED (type))
12188 if (tree_int_cst_sgn (*arg1) == -1)
12189 return code;
12190 type = unsigned_type_for (type);
12191 if (!type || TYPE_MODE (type) != TYPE_MODE (TREE_TYPE (*arg0)))
12192 return code;
12195 int prec = TYPE_PRECISION (type);
12196 wide_int w = wi::to_wide (treeop1);
12197 int shift = wi::ctz (w);
12198 /* Unsigned (X % C1) == C2 is equivalent to (X - C2) % C1 == 0 if
12199 C2 <= -1U % C1, because for any Z >= 0U - C2 in that case (Z % C1) != 0.
12200 If C1 is odd, we can handle all cases by subtracting
12201 C4 below. We could handle even the even C1 and C2 > -1U % C1 cases
12202 e.g. by testing for overflow on the subtraction, punt on that for now
12203 though. */
12204 if ((sgn == SIGNED || shift) && !integer_zerop (*arg1))
12206 if (sgn == SIGNED)
12207 return code;
12208 wide_int x = wi::umod_trunc (wi::mask (prec, false, prec), w);
12209 if (wi::gtu_p (wi::to_wide (*arg1), x))
12210 return code;
12213 imm_use_iterator imm_iter;
12214 use_operand_p use_p;
12215 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, treeop0)
12217 gimple *use_stmt = USE_STMT (use_p);
12218 /* Punt if treeop0 is used in the same bb in a division
12219 or another modulo with the same divisor. We should expect
12220 the division and modulo combined together. */
12221 if (use_stmt == stmt
12222 || gimple_bb (use_stmt) != gimple_bb (stmt))
12223 continue;
12224 if (!is_gimple_assign (use_stmt)
12225 || (gimple_assign_rhs_code (use_stmt) != TRUNC_DIV_EXPR
12226 && gimple_assign_rhs_code (use_stmt) != TRUNC_MOD_EXPR))
12227 continue;
12228 if (gimple_assign_rhs1 (use_stmt) != treeop0
12229 || !operand_equal_p (gimple_assign_rhs2 (use_stmt), treeop1, 0))
12230 continue;
12231 return code;
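/* Compute C3, the multiplicative inverse of the odd part of C1
   modulo 1 << prec; A and B below are that odd part and 1 << prec
   widened to prec + 1 bits for wi::mod_inv.  */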
12234 w = wi::lrshift (w, shift);
12235 wide_int a = wide_int::from (w, prec + 1, UNSIGNED);
12236 wide_int b = wi::shifted_mask (prec, 1, false, prec + 1);
12237 wide_int m = wide_int::from (wi::mod_inv (a, b), prec, UNSIGNED);
12238 tree c3 = wide_int_to_tree (type, m);
12239 tree c5 = NULL_TREE;
12240 wide_int d, e;
12241 if (sgn == UNSIGNED)
12243 d = wi::divmod_trunc (wi::mask (prec, false, prec), w, UNSIGNED, &e);
12244 /* Use <= floor ((1<<prec) - 1) / C1 only if C2 <= ((1<<prec) - 1) % C1,
12245 otherwise use < or subtract one from C4. E.g. for
12246 x % 3U == 0 we transform this into x * 0xaaaaaaab <= 0x55555555, but
12247 x % 3U == 1 already needs to be
12248 (x - 1) * 0xaaaaaaabU <= 0x55555554. */
12249 if (!shift && wi::gtu_p (wi::to_wide (*arg1), e))
12250 d -= 1;
12251 if (shift)
12252 d = wi::lrshift (d, shift);
12254 else
12256 e = wi::udiv_trunc (wi::mask (prec - 1, false, prec), w);
12257 if (!shift)
12258 d = wi::lshift (e, 1);
12259 else
12261 e = wi::bit_and (e, wi::mask (shift, true, prec));
12262 d = wi::lrshift (e, shift - 1);
12264 c5 = wide_int_to_tree (type, e);
12266 tree c4 = wide_int_to_tree (type, d);
12268 rtx op0 = expand_normal (treeop0);
12269 treeop0 = make_tree (TREE_TYPE (treeop0), op0);
12271 bool speed_p = optimize_insn_for_speed_p ();
12273 do_pending_stack_adjust ();
12275 location_t loc = gimple_location (stmt);
12276 struct separate_ops ops;
12277 ops.code = TRUNC_MOD_EXPR;
12278 ops.location = loc;
12279 ops.type = TREE_TYPE (treeop0);
12280 ops.op0 = treeop0;
12281 ops.op1 = treeop1;
12282 ops.op2 = NULL_TREE;
12283 start_sequence ();
12284 rtx mor = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (ops.type),
12285 EXPAND_NORMAL);
12286 rtx_insn *moinsns = get_insns ();
12287 end_sequence ();
12289 unsigned mocost = seq_cost (moinsns, speed_p);
12290 mocost += rtx_cost (mor, mode, EQ, 0, speed_p);
12291 mocost += rtx_cost (expand_normal (*arg1), mode, EQ, 1, speed_p);
12293 tree t = fold_convert_loc (loc, type, treeop0);
12294 if (!integer_zerop (*arg1))
12295 t = fold_build2_loc (loc, MINUS_EXPR, type, t, fold_convert (type, *arg1));
12296 t = fold_build2_loc (loc, MULT_EXPR, type, t, c3);
12297 if (sgn == SIGNED)
12298 t = fold_build2_loc (loc, PLUS_EXPR, type, t, c5);
12299 if (shift)
12301 tree s = build_int_cst (NULL_TREE, shift);
12302 t = fold_build2_loc (loc, RROTATE_EXPR, type, t, s);
12305 start_sequence ();
12306 rtx mur = expand_normal (t);
12307 rtx_insn *muinsns = get_insns ();
12308 end_sequence ();
12310 unsigned mucost = seq_cost (muinsns, speed_p);
12311 mucost += rtx_cost (mur, mode, LE, 0, speed_p);
12312 mucost += rtx_cost (expand_normal (c4), mode, LE, 1, speed_p);
12314 if (mocost <= mucost)
12316 emit_insn (moinsns);
12317 *arg0 = make_tree (TREE_TYPE (*arg0), mor);
12318 return code;
12321 emit_insn (muinsns);
12322 *arg0 = make_tree (type, mur);
12323 *arg1 = c4;
12324 return code == EQ_EXPR ? LE_EXPR : GT_EXPR;
12327 /* Generate code to calculate OPS, an exploded expression
12328 using a store-flag instruction and return an rtx for the result.
12329 OPS reflects a comparison.
12331 If TARGET is nonzero, store the result there if convenient.
12333 Return zero if there is no suitable set-flag instruction
12334 available on this machine.
12336 Once expand_expr has been called on the arguments of the comparison,
12337 we are committed to doing the store flag, since it is not safe to
12338 re-evaluate the expression. We emit the store-flag insn by calling
12339 emit_store_flag, but only expand the arguments if we have a reason
12340 to believe that emit_store_flag will be successful. If we think that
12341 it will, but it isn't, we have to simulate the store-flag with a
12342 set/jump/set sequence. */
12344 static rtx
12345 do_store_flag (sepops ops, rtx target, machine_mode mode)
12347 enum rtx_code code;
12348 tree arg0, arg1, type;
12349 machine_mode operand_mode;
12350 int unsignedp;
12351 rtx op0, op1;
12352 rtx subtarget = target;
12353 location_t loc = ops->location;
12355 arg0 = ops->op0;
12356 arg1 = ops->op1;
12358 /* Don't crash if the comparison was erroneous. */
12359 if (arg0 == error_mark_node || arg1 == error_mark_node)
12360 return const0_rtx;
12362 type = TREE_TYPE (arg0);
12363 operand_mode = TYPE_MODE (type);
12364 unsignedp = TYPE_UNSIGNED (type);
12366 /* We won't bother with BLKmode store-flag operations because it would mean
12367 passing a lot of information to emit_store_flag. */
12368 if (operand_mode == BLKmode)
12369 return 0;
12371 /* We won't bother with store-flag operations involving function pointers
12372 when function pointers must be canonicalized before comparisons. */
12373 if (targetm.have_canonicalize_funcptr_for_compare ()
12374 && ((POINTER_TYPE_P (TREE_TYPE (arg0))
12375 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (arg0))))
12376 || (POINTER_TYPE_P (TREE_TYPE (arg1))
12377 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (arg1))))))
12378 return 0;
12380 STRIP_NOPS (arg0);
12381 STRIP_NOPS (arg1);
12383 /* For vector typed comparisons emit code to generate the desired
12384 all-ones or all-zeros mask. */
12385 if (TREE_CODE (ops->type) == VECTOR_TYPE)
12387 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
12388 if (VECTOR_BOOLEAN_TYPE_P (ops->type)
12389 && expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type, ops->code))
12390 return expand_vec_cmp_expr (ops->type, ifexp, target);
12391 else
12392 gcc_unreachable ();
12395 /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
12396 into (x - C2) * C3 < C4. */
12397 if ((ops->code == EQ_EXPR || ops->code == NE_EXPR)
12398 && TREE_CODE (arg0) == SSA_NAME
12399 && TREE_CODE (arg1) == INTEGER_CST)
12401 enum tree_code new_code = maybe_optimize_mod_cmp (ops->code,
12402 &arg0, &arg1);
12403 if (new_code != ops->code)
12405 struct separate_ops nops = *ops;
12406 nops.code = ops->code = new_code;
12407 nops.op0 = arg0;
12408 nops.op1 = arg1;
12409 nops.type = TREE_TYPE (arg0);
12410 return do_store_flag (&nops, target, mode);
12414 /* Get the rtx comparison code to use. We know that EXP is a comparison
12415 operation of some type. Some comparisons against 1 and -1 can be
12416 converted to comparisons with zero. Do so here so that the tests
12417 below will be aware that we have a comparison with zero. These
12418 tests will not catch constants in the first operand, but constants
12419 are rarely passed as the first operand. */
12421 switch (ops->code)
12423 case EQ_EXPR:
12424 code = EQ;
12425 break;
12426 case NE_EXPR:
12427 code = NE;
12428 break;
12429 case LT_EXPR:
12430 if (integer_onep (arg1))
12431 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
12432 else
12433 code = unsignedp ? LTU : LT;
12434 break;
12435 case LE_EXPR:
12436 if (! unsignedp && integer_all_onesp (arg1))
12437 arg1 = integer_zero_node, code = LT;
12438 else
12439 code = unsignedp ? LEU : LE;
12440 break;
12441 case GT_EXPR:
12442 if (! unsignedp && integer_all_onesp (arg1))
12443 arg1 = integer_zero_node, code = GE;
12444 else
12445 code = unsignedp ? GTU : GT;
12446 break;
12447 case GE_EXPR:
12448 if (integer_onep (arg1))
12449 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
12450 else
12451 code = unsignedp ? GEU : GE;
12452 break;
12454 case UNORDERED_EXPR:
12455 code = UNORDERED;
12456 break;
12457 case ORDERED_EXPR:
12458 code = ORDERED;
12459 break;
12460 case UNLT_EXPR:
12461 code = UNLT;
12462 break;
12463 case UNLE_EXPR:
12464 code = UNLE;
12465 break;
12466 case UNGT_EXPR:
12467 code = UNGT;
12468 break;
12469 case UNGE_EXPR:
12470 code = UNGE;
12471 break;
12472 case UNEQ_EXPR:
12473 code = UNEQ;
12474 break;
12475 case LTGT_EXPR:
12476 code = LTGT;
12477 break;
12479 default:
12480 gcc_unreachable ();
12483 /* Put a constant second. */
12484 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
12485 || TREE_CODE (arg0) == FIXED_CST)
12487 std::swap (arg0, arg1);
12488 code = swap_condition (code);
12491 /* If this is an equality or inequality test of a single bit, we can
12492 do this by shifting the bit being tested to the low-order bit and
12493 masking the result with the constant 1. If the condition was EQ,
12494 we xor it with 1. This does not require an scc insn and is faster
12495 than an scc insn even if we have it.
12497 The code to make this transformation was moved into fold_single_bit_test,
12498 so we just call into the folder and expand its result. */
12500 if ((code == NE || code == EQ)
12501 && integer_zerop (arg1)
12502 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
12504 gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
12505 if (srcstmt
12506 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
12508 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
12509 type = lang_hooks.types.type_for_mode (mode, unsignedp);
12510 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
12511 gimple_assign_rhs1 (srcstmt),
12512 gimple_assign_rhs2 (srcstmt));
12513 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
12514 if (temp)
12515 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
12519 if (! get_subtarget (target)
12520 || GET_MODE (subtarget) != operand_mode)
12521 subtarget = 0;
12523 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
12525 if (target == 0)
12526 target = gen_reg_rtx (mode);
12528 /* Try a cstore if possible. */
12529 return emit_store_flag_force (target, code, op0, op1,
12530 operand_mode, unsignedp,
12531 (TYPE_PRECISION (ops->type) == 1
12532 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
12535 /* Attempt to generate a casesi instruction. Returns 1 if successful,
12536 0 otherwise (i.e. if there is no casesi instruction).
12538 DEFAULT_PROBABILITY is the probability of jumping to the default
12539 label. */
12540 int
12541 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
12542 rtx table_label, rtx default_label, rtx fallback_label,
12543 profile_probability default_probability)
12545 class expand_operand ops[5];
12546 scalar_int_mode index_mode = SImode;
12547 rtx op1, op2, index;
12549 if (! targetm.have_casesi ())
12550 return 0;
12552 /* The index must be some form of integer. Convert it to SImode. */
12553 scalar_int_mode omode = SCALAR_INT_TYPE_MODE (index_type);
12554 if (GET_MODE_BITSIZE (omode) > GET_MODE_BITSIZE (index_mode))
12556 rtx rangertx = expand_normal (range);
12558 /* We must handle the endpoints in the original mode. */
12559 index_expr = build2 (MINUS_EXPR, index_type,
12560 index_expr, minval);
12561 minval = integer_zero_node;
12562 index = expand_normal (index_expr);
12563 if (default_label)
12564 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
12565 omode, 1, default_label,
12566 default_probability);
12567 /* Now we can safely truncate. */
12568 index = convert_to_mode (index_mode, index, 0);
12570 else
12572 if (omode != index_mode)
12574 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
12575 index_expr = fold_convert (index_type, index_expr);
12578 index = expand_normal (index_expr);
12581 do_pending_stack_adjust ();
12583 op1 = expand_normal (minval);
12584 op2 = expand_normal (range);
12586 create_input_operand (&ops[0], index, index_mode);
12587 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
12588 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
12589 create_fixed_operand (&ops[3], table_label);
12590 create_fixed_operand (&ops[4], (default_label
12591 ? default_label
12592 : fallback_label));
12593 expand_jump_insn (targetm.code_for_casesi, 5, ops);
12594 return 1;
12595 }
12597 /* Attempt to generate a tablejump instruction; same concept. */
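/* Unlike try_casesi above, which hands the bounds check and dispatch to a
   single target-provided casesi pattern, the tablejump path below emits the
   range check, the index scaling and the load from the dispatch table as
   explicit RTL and only relies on the target's tablejump pattern for the
   final indirect jump.  */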
12598 /* Subroutine of the next function.
12600 INDEX is the value being switched on, with the lowest value
12601 in the table already subtracted.
12602 MODE is its expected mode (needed if INDEX is constant).
12603 RANGE is the length of the jump table.
12604 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
12606 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
12607 index value is out of range.
12608 DEFAULT_PROBABILITY is the probability of jumping to
12609 the default label. */
12611 static void
12612 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
12613 rtx default_label, profile_probability default_probability)
12614 {
12615 rtx temp, vector;
12617 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
12618 cfun->cfg->max_jumptable_ents = INTVAL (range);
12620 /* Do an unsigned comparison (in the proper mode) between the index
12621 expression and the value which represents the length of the range.
12622 Since we just finished subtracting the lower bound of the range
12623 from the index expression, this comparison allows us to simultaneously
12624 check that the original index expression value is both greater than
12625 or equal to the minimum value of the range and less than or equal to
12626 the maximum value of the range. */
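/* For example, if the case labels run from 5 to 12, RANGE is 7 and the
   lower bound 5 has already been subtracted from INDEX.  An original index
   of 3 becomes (unsigned) -2, which compares greater than 7, so the single
   GTU test below catches values below the minimum as well as values above
   the maximum.  */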
12628 if (default_label)
12629 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
12630 default_label, default_probability);
12632 /* If index is in range, it must fit in Pmode.
12633 Convert to Pmode so we can index with it. */
12634 if (mode != Pmode)
12635 {
12636 unsigned int width;
12638 /* We know the value of INDEX is between 0 and RANGE. If we have a
12639 sign-extended subreg, and RANGE does not have the sign bit set, then
12640 we have a value that is valid for both sign and zero extension. In
12641 this case, we get better code if we sign extend. */
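/* Sign extending is preferable here because INDEX is already held
   sign-extended in the wider register (SUBREG_PROMOTED_SIGNED_P), so the
   conversion below can usually reuse that register without emitting a
   separate extension instruction.  */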
12642 if (GET_CODE (index) == SUBREG
12643 && SUBREG_PROMOTED_VAR_P (index)
12644 && SUBREG_PROMOTED_SIGNED_P (index)
12645 && ((width = GET_MODE_PRECISION (as_a <scalar_int_mode> (mode)))
12646 <= HOST_BITS_PER_WIDE_INT)
12647 && ! (UINTVAL (range) & (HOST_WIDE_INT_1U << (width - 1))))
12648 index = convert_to_mode (Pmode, index, 0);
12649 else
12650 index = convert_to_mode (Pmode, index, 1);
12651 }
12653 /* Don't let a MEM slip through, because then INDEX that comes
12654 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
12655 and break_out_memory_refs will go to work on it and mess it up. */
12656 #ifdef PIC_CASE_VECTOR_ADDRESS
12657 if (flag_pic && !REG_P (index))
12658 index = copy_to_mode_reg (Pmode, index);
12659 #endif
12661 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
12662 GET_MODE_SIZE, because this indicates how large insns are. The other
12663 uses should all be Pmode, because they are addresses. This code
12664 could fail if addresses and insns are not the same size. */
12665 index = simplify_gen_binary (MULT, Pmode, index,
12666 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
12667 Pmode));
12668 index = simplify_gen_binary (PLUS, Pmode, index,
12669 gen_rtx_LABEL_REF (Pmode, table_label));
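/* At this point INDEX holds the address of the selected dispatch-table
   entry: table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE).  */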
12671 #ifdef PIC_CASE_VECTOR_ADDRESS
12672 if (flag_pic)
12673 index = PIC_CASE_VECTOR_ADDRESS (index);
12674 else
12675 #endif
12676 index = memory_address (CASE_VECTOR_MODE, index);
12677 temp = gen_reg_rtx (CASE_VECTOR_MODE);
12678 vector = gen_const_mem (CASE_VECTOR_MODE, index);
12679 convert_move (temp, vector, 0);
12681 emit_jump_insn (targetm.gen_tablejump (temp, table_label));
12683 /* If we are generating PIC code or if the table is PC-relative, the
12684 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
12685 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
12686 emit_barrier ();
12687 }
12689 int
12690 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
12691 rtx table_label, rtx default_label,
12692 profile_probability default_probability)
12693 {
12694 rtx index;
12696 if (! targetm.have_tablejump ())
12697 return 0;
12699 index_expr = fold_build2 (MINUS_EXPR, index_type,
12700 fold_convert (index_type, index_expr),
12701 fold_convert (index_type, minval));
12702 index = expand_normal (index_expr);
12703 do_pending_stack_adjust ();
12705 do_tablejump (index, TYPE_MODE (index_type),
12706 convert_modes (TYPE_MODE (index_type),
12707 TYPE_MODE (TREE_TYPE (range)),
12708 expand_normal (range),
12709 TYPE_UNSIGNED (TREE_TYPE (range))),
12710 table_label, default_label, default_probability);
12711 return 1;
12712 }
12714 /* Return a CONST_VECTOR rtx representing vector mask for
12715 a VECTOR_CST of booleans. */
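/* Each true element (1 or -1 in the VECTOR_CST) is emitted as an all-ones
   constant of the inner mode and each false element as zero; e.g. the
   boolean vector {1, 0, 1, 1} becomes the mask {-1, 0, -1, -1}.  */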
12716 static rtx
12717 const_vector_mask_from_tree (tree exp)
12718 {
12719 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
12720 machine_mode inner = GET_MODE_INNER (mode);
12722 rtx_vector_builder builder (mode, VECTOR_CST_NPATTERNS (exp),
12723 VECTOR_CST_NELTS_PER_PATTERN (exp));
12724 unsigned int count = builder.encoded_nelts ();
12725 for (unsigned int i = 0; i < count; ++i)
12726 {
12727 tree elt = VECTOR_CST_ELT (exp, i);
12728 gcc_assert (TREE_CODE (elt) == INTEGER_CST);
12729 if (integer_zerop (elt))
12730 builder.quick_push (CONST0_RTX (inner));
12731 else if (integer_onep (elt)
12732 || integer_minus_onep (elt))
12733 builder.quick_push (CONSTM1_RTX (inner));
12734 else
12735 gcc_unreachable ();
12736 }
12737 return builder.build ();
12738 }
12740 /* EXP is a VECTOR_CST in which each element is either all-zeros or all-ones.
12741 Return a constant scalar rtx of mode MODE in which bit X is set if element
12742 X of EXP is nonzero. */
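/* E.g. for the four-element vector {-1, 0, -1, -1} the result is the
   scalar constant 0b1101: bit I of the result is set exactly when element
   I of EXP is all ones.  */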
12743 static rtx
12744 const_scalar_mask_from_tree (scalar_int_mode mode, tree exp)
12745 {
12746 wide_int res = wi::zero (GET_MODE_PRECISION (mode));
12747 tree elt;
12749 /* The result has a fixed number of bits so the input must too. */
12750 unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
12751 for (unsigned int i = 0; i < nunits; ++i)
12752 {
12753 elt = VECTOR_CST_ELT (exp, i);
12754 gcc_assert (TREE_CODE (elt) == INTEGER_CST);
12755 if (integer_all_onesp (elt))
12756 res = wi::set_bit (res, i);
12757 else
12758 gcc_assert (integer_zerop (elt));
12759 }
12761 return immed_wide_int_const (res, mode);
12762 }
12764 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
12765 static rtx
12766 const_vector_from_tree (tree exp)
12767 {
12768 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
12770 if (initializer_zerop (exp))
12771 return CONST0_RTX (mode);
12773 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
12774 return const_vector_mask_from_tree (exp);
12776 machine_mode inner = GET_MODE_INNER (mode);
12778 rtx_vector_builder builder (mode, VECTOR_CST_NPATTERNS (exp),
12779 VECTOR_CST_NELTS_PER_PATTERN (exp));
12780 unsigned int count = builder.encoded_nelts ();
12781 for (unsigned int i = 0; i < count; ++i)
12782 {
12783 tree elt = VECTOR_CST_ELT (exp, i);
12784 if (TREE_CODE (elt) == REAL_CST)
12785 builder.quick_push (const_double_from_real_value (TREE_REAL_CST (elt),
12786 inner));
12787 else if (TREE_CODE (elt) == FIXED_CST)
12788 builder.quick_push (CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
12789 inner));
12790 else
12791 builder.quick_push (immed_wide_int_const (wi::to_poly_wide (elt),
12792 inner));
12793 }
12794 return builder.build ();
12795 }
12797 /* Build a decl for a personality function given a language prefix. */
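/* For example, the C++ front end passes "gxx" as the language prefix,
   which with DWARF2 unwind info yields the familiar name
   __gxx_personality_v0 built below.  */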
12799 tree
12800 build_personality_function (const char *lang)
12801 {
12802 const char *unwind_and_version;
12803 tree decl, type;
12804 char *name;
12806 switch (targetm_common.except_unwind_info (&global_options))
12807 {
12808 case UI_NONE:
12809 return NULL;
12810 case UI_SJLJ:
12811 unwind_and_version = "_sj0";
12812 break;
12813 case UI_DWARF2:
12814 case UI_TARGET:
12815 unwind_and_version = "_v0";
12816 break;
12817 case UI_SEH:
12818 unwind_and_version = "_seh0";
12819 break;
12820 default:
12821 gcc_unreachable ();
12822 }
12824 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
12826 type = build_function_type_list (unsigned_type_node,
12827 integer_type_node, integer_type_node,
12828 long_long_unsigned_type_node,
12829 ptr_type_node, ptr_type_node, NULL_TREE);
12830 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
12831 get_identifier (name), type);
12832 DECL_ARTIFICIAL (decl) = 1;
12833 DECL_EXTERNAL (decl) = 1;
12834 TREE_PUBLIC (decl) = 1;
12836 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
12837 are the flags assigned by targetm.encode_section_info. */
12838 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
12840 return decl;
12841 }
12843 /* Extracts the personality function of DECL and returns the corresponding
12844 libfunc. */
12846 rtx
12847 get_personality_function (tree decl)
12848 {
12849 tree personality = DECL_FUNCTION_PERSONALITY (decl);
12850 enum eh_personality_kind pk;
12852 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
12853 if (pk == eh_personality_none)
12854 return NULL;
12856 if (!personality
12857 && pk == eh_personality_any)
12858 personality = lang_hooks.eh_personality ();
12860 if (pk == eh_personality_lang)
12861 gcc_assert (personality != NULL_TREE);
12863 return XEXP (DECL_RTL (personality), 0);
12864 }
12866 /* Returns a tree for the size of EXP in bytes. */
12868 static tree
12869 tree_expr_size (const_tree exp)
12870 {
12871 if (DECL_P (exp)
12872 && DECL_SIZE_UNIT (exp) != 0)
12873 return DECL_SIZE_UNIT (exp);
12874 else
12875 return size_in_bytes (TREE_TYPE (exp));
12876 }
12878 /* Return an rtx for the size in bytes of the value of EXP. */
12880 rtx
12881 expr_size (tree exp)
12882 {
12883 tree size;
12885 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
12886 size = TREE_OPERAND (exp, 1);
12887 else
12888 {
12889 size = tree_expr_size (exp);
12890 gcc_assert (size);
12891 gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
12892 }
12894 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
12895 }
12897 /* Return a wide integer for the size in bytes of the value of EXP, or -1
12898 if the size can vary or is larger than an integer. */
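/* For example, a fixed-size object such as a ten-element array of int
   yields 40 (assuming a 4-byte int), whereas a variable-length array or an
   object whose size does not fit in a signed HOST_WIDE_INT yields -1.  */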
12900 static HOST_WIDE_INT
12901 int_expr_size (tree exp)
12902 {
12903 tree size;
12905 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
12906 size = TREE_OPERAND (exp, 1);
12907 else
12908 {
12909 size = tree_expr_size (exp);
12910 gcc_assert (size);
12911 }
12913 if (size == 0 || !tree_fits_shwi_p (size))
12914 return -1;
12916 return tree_to_shwi (size);