/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "insn-attr.h"

/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs-tree.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "tree-ssa-address.h"
#include "gimple-fold.h"
#include "rtx-vector-builder.h"
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned,
                                        HOST_WIDE_INT, unsigned HOST_WIDE_INT,
                                        unsigned HOST_WIDE_INT,
                                        unsigned HOST_WIDE_INT);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static rtx_insn *compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor (tree, rtx, int, poly_int64, bool);
static rtx store_field (rtx, poly_int64, poly_int64, poly_uint64, poly_uint64,
                        machine_mode, tree, alias_set_type, bool, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree,
                                                              const_tree);

static int is_aligning_offset (const_tree, const_tree);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, machine_mode);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx,
                          profile_probability);
static rtx const_vector_from_tree (tree);
static rtx const_scalar_mask_from_tree (scalar_int_mode, tree);
static tree tree_expr_size (const_tree);
static HOST_WIDE_INT int_expr_size (tree);
static void convert_mode_scalar (rtx, rtx, int);
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx pat;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);

  rtx_insn *insn = as_a <rtx_insn *> (rtx_alloc (INSN));
  pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;
  for (machine_mode mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (!targetm.hard_regno_mode_ok (regno, mode))
              continue;

            set_mode_and_regno (reg, mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }
  mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));

  opt_scalar_float_mode mode_iter;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_FLOAT)
    {
      scalar_float_mode mode = mode_iter.require ();
      scalar_float_mode srcmode;
      FOR_EACH_MODE_UNTIL (srcmode, mode)
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if (insn_operand_matches (ic, 1, mem))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  machine_mode to_mode = GET_MODE (to);
  machine_mode from_mode = GET_MODE (from);

  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);
  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  scalar_int_mode to_int_mode;
  if (GET_CODE (from) == SUBREG
      && SUBREG_PROMOTED_VAR_P (from)
      && is_a <scalar_int_mode> (to_mode, &to_int_mode)
      && (GET_MODE_PRECISION (subreg_promoted_mode (from))
          >= GET_MODE_PRECISION (to_int_mode))
      && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
    {
      from = gen_lowpart (to_int_mode, SUBREG_REG (from));
      from_mode = to_int_mode;
    }
  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (known_eq (GET_MODE_BITSIZE (from_mode),
                            GET_MODE_BITSIZE (to_mode)));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }
  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  convert_mode_scalar (to, from, unsignedp);
}
/* Like convert_move, but deals only with scalar modes.  */

static void
convert_mode_scalar (rtx to, rtx from, int unsignedp)
{
  /* Both modes should be scalar types.  */
  scalar_mode from_mode = as_a <scalar_mode> (GET_MODE (from));
  scalar_mode to_mode = as_a <scalar_mode> (GET_MODE (to));
  bool to_real = SCALAR_FLOAT_MODE_P (to_mode);
  bool from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;
  gcc_assert (to_real == from_real);

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
  if (to_real)
    {
      rtx value;
      rtx_insn *insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
                   != GET_MODE_PRECISION (to_mode))
                  || (DECIMAL_FLOAT_MODE_P (from_mode)
                      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
        /* Conversion between decimal float and binary float, same size.  */
        tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */                    /* SPEE 900220.  */
  /* If the target has a converter from FROM_MODE to TO_MODE, use it.  */
  {
    convert_optab ctab;

    if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
      ctab = trunc_optab;
    else if (unsignedp)
      ctab = zext_optab;
    else
      ctab = sext_optab;

    if (convert_optab_handler (ctab, to_mode, from_mode)
        != CODE_FOR_nothing)
      {
        emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
                        to, from, UNKNOWN);
        return;
      }
  }
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      scalar_int_mode full_mode
        = smallest_int_mode_for_size (GET_MODE_BITSIZE (to_mode));

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
                      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      scalar_int_mode full_mode
        = smallest_int_mode_for_size (GET_MODE_BITSIZE (from_mode));
      convert_optab ctab = unsignedp ? zext_optab : sext_optab;
      enum insn_code icode;

      icode = convert_optab_handler (ctab, full_mode, from_mode);
      gcc_assert (icode != CODE_FOR_nothing);

      if (to_mode == full_mode)
        {
          emit_unop_insn (icode, to, from, UNKNOWN);
          return;
        }

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (icode, new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }
   /* Make sure both are fixed-point modes or both are not.  */
   gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
               ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
   if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
         we won't saturate the result.
         Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
          && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
        expand_fixed_convert (to, from, 0, 0);
      else
        expand_fixed_convert (to, from, 0, 1);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
      && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
    {
      rtx_insn *insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      scalar_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          rtx word_to = gen_reg_rtx (word_mode);
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_clobber (to);
            }
          convert_move (word_to, from, unsignedp);
          emit_unop_insn (code, to, word_to, equiv_code);
          return;
        }
      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         the source does not overlap the target so force it into an isolated
         register when maybe so.  Likewise for any MEM input, since the
         conversion sequence might require several references to it and we
         must ensure we're getting the same value every time.  */

      if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
                                            LT, lowfrom, const0_rtx,
                                            lowpart_mode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
      && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0),
                                            MEM_ADDR_SPACE (from)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0),
                                            MEM_ADDR_SPACE (from)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && !targetm.hard_regno_mode_ok (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          scalar_mode intermediate;
          rtx tmp;
          int shift_amount;

          /* Search for a mode to convert via.  */
          opt_scalar_mode intermediate_iter;
          FOR_EACH_MODE_FROM (intermediate_iter, from_mode)
            {
              scalar_mode intermediate = intermediate_iter.require ();
              if (((can_extend_p (to_mode, intermediate, unsignedp)
                    != CODE_FOR_nothing)
                   || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                       && TRULY_NOOP_TRUNCATION_MODES_P (to_mode,
                                                         intermediate)))
                  && (can_extend_p (intermediate, from_mode, unsignedp)
                      != CODE_FOR_nothing))
                {
                  convert_move (to, convert_to_mode (intermediate, from,
                                                     unsignedp), unsignedp);
                  return;
                }
            }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = (GET_MODE_PRECISION (to_mode)
                          - GET_MODE_PRECISION (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }
  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
                             from_mode) != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;
  scalar_int_mode int_mode;
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG
      && SUBREG_PROMOTED_VAR_P (x)
      && is_a <scalar_int_mode> (mode, &int_mode)
      && (GET_MODE_PRECISION (subreg_promoted_mode (x))
          >= GET_MODE_PRECISION (int_mode))
      && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
    x = gen_lowpart (int_mode, SUBREG_REG (x));

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;
  if (CONST_SCALAR_INT_P (x)
      && is_int_mode (mode, &int_mode))
    {
      /* If the caller did not tell us the old mode, then there is not
         much to do with respect to canonicalization.  We have to
         assume that all the bits are significant.  */
      if (GET_MODE_CLASS (oldmode) != MODE_INT)
        oldmode = MAX_MODE_INT;
      wide_int w = wide_int::from (rtx_mode_t (x, oldmode),
                                   GET_MODE_PRECISION (int_mode),
                                   unsignedp ? UNSIGNED : SIGNED);
      return immed_wide_int_const (w, int_mode);
    }
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  */
  scalar_int_mode int_oldmode;
  if (is_int_mode (mode, &int_mode)
      && is_int_mode (oldmode, &int_oldmode)
      && GET_MODE_PRECISION (int_mode) <= GET_MODE_PRECISION (int_oldmode)
      && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) int_mode])
          || CONST_POLY_INT_P (x)
          || (REG_P (x)
              && (!HARD_REGISTER_P (x)
                  || targetm.hard_regno_mode_ok (REGNO (x), int_mode))
              && TRULY_NOOP_TRUNCATION_MODES_P (int_mode, GET_MODE (x)))))
   return gen_lowpart (int_mode, x);
  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (known_eq (GET_MODE_BITSIZE (mode),
                            GET_MODE_BITSIZE (oldmode)));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */

static unsigned int
alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
  scalar_int_mode tmode
    = int_mode_for_size (max_pieces * BITS_PER_UNIT, 1).require ();

  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      scalar_int_mode xmode = NARROWEST_INT_MODE;
      opt_scalar_int_mode mode_iter;
      FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
        {
          tmode = mode_iter.require ();
          if (GET_MODE_SIZE (tmode) > max_pieces
              || targetm.slow_unaligned_access (tmode, align))
            break;
          xmode = tmode;
        }

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  return align;
}
/* Return the widest integer mode that is narrower than SIZE bytes.  */

static scalar_int_mode
widest_int_mode_for_size (unsigned int size)
{
  scalar_int_mode result = NARROWEST_INT_MODE;

  gcc_checking_assert (size > 1);

  opt_scalar_int_mode tmode;
  FOR_EACH_MODE_IN_CLASS (tmode, MODE_INT)
    if (GET_MODE_SIZE (tmode.require ()) < size)
      result = tmode.require ();

  return result;
}
/* Determine whether an operation OP on LEN bytes with alignment ALIGN can
   and should be performed piecewise.  */

static bool
can_do_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align,
                  enum by_pieces_operation op)
{
  return targetm.use_by_pieces_infrastructure_p (len, align, op,
                                                 optimize_insn_for_speed_p ());
}
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

bool
can_move_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align)
{
  return can_do_by_pieces (len, align, MOVE_BY_PIECES);
}
/* Return number of insns required to perform operation OP by pieces
   for L bytes.  ALIGN (in bits) is maximum alignment we can assume.  */

unsigned HOST_WIDE_INT
by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                  unsigned int max_size, by_pieces_operation op)
{
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1 && l > 0)
    {
      scalar_int_mode mode = widest_int_mode_for_size (max_size);
      enum insn_code icode;

      unsigned int modesize = GET_MODE_SIZE (mode);

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        {
          unsigned HOST_WIDE_INT n_pieces = l / modesize;
          l %= modesize;
          switch (op)
            {
            default:
              n_insns += n_pieces;
              break;

            case COMPARE_BY_PIECES:
              int batch = targetm.compare_by_pieces_branch_ratio (mode);
              int batch_ops = 4 * batch - 1;
              unsigned HOST_WIDE_INT full = n_pieces / batch;
              n_insns += full * batch_ops;
              if (n_pieces % batch != 0)
                n_insns += batch_ops;
              break;
            }
        }
      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
/* Used when performing piecewise block operations, holds information
   about one of the memory objects involved.  The member functions
   can be used to generate code for loading from the object and
   updating the address when iterating.  */

class pieces_addr
{
  /* The object being referenced, a MEM.  Can be NULL_RTX to indicate
     stack pushes.  */
  rtx m_obj;

  /* The address of the object.  Can differ from that seen in the
     MEM rtx if we copied the address to a register.  */
  rtx m_addr;

  /* Nonzero if the address on the object has an autoincrement already,
     signifies whether that was an increment or decrement.  */
  signed char m_addr_inc;

  /* Nonzero if we intend to use autoinc without the address already
     having autoinc form.  We will insert add insns around each memory
     reference, expecting later passes to form autoinc addressing modes.
     The only supported options are predecrement and postincrement.  */
  signed char m_explicit_inc;

  /* True if we have either of the two possible cases of using
     autoincrement.  */
  bool m_auto;

  /* True if this is an address to be used for load operations rather
     than stores.  */
  bool m_is_load;

  /* Optionally, a function to obtain constants for any given offset into
     the objects, and data associated with it.  */
  by_pieces_constfn m_constfn;
  void *m_cfndata;

public:
  pieces_addr (rtx, bool, by_pieces_constfn, void *);
  rtx adjust (scalar_int_mode, HOST_WIDE_INT);
  void increment_address (HOST_WIDE_INT);
  void maybe_predec (HOST_WIDE_INT);
  void maybe_postinc (HOST_WIDE_INT);
  void decide_autoinc (machine_mode, bool, HOST_WIDE_INT);
  int get_addr_inc ()
  {
    return m_addr_inc;
  }
};
/* Initialize a pieces_addr structure from an object OBJ.  IS_LOAD is
   true if the operation to be performed on this object is a load
   rather than a store.  For stores, OBJ can be NULL, in which case we
   assume the operation is a stack push.  For loads, the optional
   CONSTFN and its associated CFNDATA can be used in place of the
   memory load.  */

pieces_addr::pieces_addr (rtx obj, bool is_load, by_pieces_constfn constfn,
                          void *cfndata)
  : m_obj (obj), m_is_load (is_load), m_constfn (constfn), m_cfndata (cfndata)
{
  m_addr_inc = 0;
  m_auto = false;
  if (obj)
    {
      rtx addr = XEXP (obj, 0);
      rtx_code code = GET_CODE (addr);
      m_addr = addr;
      bool dec = code == PRE_DEC || code == POST_DEC;
      bool inc = code == PRE_INC || code == POST_INC;
      m_auto = inc || dec;
      if (m_auto)
        m_addr_inc = dec ? -1 : 1;

      /* While we have always looked for these codes here, the code
         implementing the memory operation has never handled them.
         Support could be added later if necessary or beneficial.  */
      gcc_assert (code != PRE_INC && code != POST_DEC);
    }
  else
    {
      m_addr = NULL_RTX;
      if (!is_load)
        {
          m_auto = true;
          if (STACK_GROWS_DOWNWARD)
            m_addr_inc = -1;
          else
            m_addr_inc = 1;
        }
      else
        gcc_assert (constfn != NULL);
    }
  m_explicit_inc = 0;
  if (constfn)
    gcc_assert (is_load);
}
/* Decide whether to use autoinc for an address involved in a memory op.
   MODE is the mode of the accesses, REVERSE is true if we've decided to
   perform the operation starting from the end, and LEN is the length of
   the operation.  Don't override an earlier decision to set m_auto.  */

void
pieces_addr::decide_autoinc (machine_mode ARG_UNUSED (mode), bool reverse,
                             HOST_WIDE_INT len)
{
  if (m_auto || m_obj == NULL_RTX)
    return;

  bool use_predec = (m_is_load
                     ? USE_LOAD_PRE_DECREMENT (mode)
                     : USE_STORE_PRE_DECREMENT (mode));
  bool use_postinc = (m_is_load
                      ? USE_LOAD_POST_INCREMENT (mode)
                      : USE_STORE_POST_INCREMENT (mode));
  machine_mode addr_mode = get_address_mode (m_obj);

  if (use_predec && reverse)
    {
      m_addr = copy_to_mode_reg (addr_mode,
                                 plus_constant (addr_mode,
                                                m_addr, len));
      m_auto = true;
      m_explicit_inc = -1;
    }
  else if (use_postinc && !reverse)
    {
      m_addr = copy_to_mode_reg (addr_mode, m_addr);
      m_auto = true;
      m_explicit_inc = 1;
    }
  else if (CONSTANT_P (m_addr))
    m_addr = copy_to_mode_reg (addr_mode, m_addr);
}
/* Adjust the address to refer to the data at OFFSET in MODE.  If we
   are using autoincrement for this address, we don't add the offset,
   but we still modify the MEM's properties.  */

rtx
pieces_addr::adjust (scalar_int_mode mode, HOST_WIDE_INT offset)
{
  if (m_constfn)
    return m_constfn (m_cfndata, offset, mode);
  if (m_obj == NULL_RTX)
    return NULL_RTX;
  if (m_auto)
    return adjust_automodify_address (m_obj, mode, m_addr, offset);
  else
    return adjust_address (m_obj, mode, offset);
}
/* Emit an add instruction to increment the address by SIZE.  */

void
pieces_addr::increment_address (HOST_WIDE_INT size)
{
  rtx amount = gen_int_mode (size, GET_MODE (m_addr));
  emit_insn (gen_add2_insn (m_addr, amount));
}
/* If we are supposed to decrement the address after each access, emit code
   to do so now.  Increment by SIZE (which should have the correct sign
   already).  */

void
pieces_addr::maybe_predec (HOST_WIDE_INT size)
{
  if (m_explicit_inc >= 0)
    return;
  gcc_assert (HAVE_PRE_DECREMENT);
  increment_address (size);
}
/* If we are supposed to increment the address after each access, emit code
   to do so now.  Increment by SIZE.  */

void
pieces_addr::maybe_postinc (HOST_WIDE_INT size)
{
  if (m_explicit_inc <= 0)
    return;
  gcc_assert (HAVE_POST_INCREMENT);
  increment_address (size);
}
/* This structure is used by do_op_by_pieces to describe the operation
   to be performed.  */

class op_by_pieces_d
{
 protected:
  pieces_addr m_to, m_from;
  unsigned HOST_WIDE_INT m_len;
  HOST_WIDE_INT m_offset;
  unsigned int m_align;
  unsigned int m_max_size;
  bool m_reverse;

  /* Virtual functions, overriden by derived classes for the specific
     operation.  */
  virtual void generate (rtx, rtx, machine_mode) = 0;
  virtual bool prepare_mode (machine_mode, unsigned int) = 0;
  virtual void finish_mode (machine_mode)
  {
  }

 public:
  op_by_pieces_d (rtx, bool, rtx, bool, by_pieces_constfn, void *,
                  unsigned HOST_WIDE_INT, unsigned int);
  void run ();
};
/* The constructor for an op_by_pieces_d structure.  We require two
   objects named TO and FROM, which are identified as loads or stores
   by TO_LOAD and FROM_LOAD.  If FROM is a load, the optional FROM_CFN
   and its associated FROM_CFN_DATA can be used to replace loads with
   constant values.  LEN describes the length of the operation.  */

op_by_pieces_d::op_by_pieces_d (rtx to, bool to_load,
                                rtx from, bool from_load,
                                by_pieces_constfn from_cfn,
                                void *from_cfn_data,
                                unsigned HOST_WIDE_INT len,
                                unsigned int align)
  : m_to (to, to_load, NULL, NULL),
    m_from (from, from_load, from_cfn, from_cfn_data),
    m_len (len), m_max_size (MOVE_MAX_PIECES + 1)
{
  int toi = m_to.get_addr_inc ();
  int fromi = m_from.get_addr_inc ();
  if (toi >= 0 && fromi >= 0)
    m_reverse = false;
  else if (toi <= 0 && fromi <= 0)
    m_reverse = true;
  else
    gcc_unreachable ();

  m_offset = m_reverse ? len : 0;
  align = MIN (to ? MEM_ALIGN (to) : align,
               from ? MEM_ALIGN (from) : align);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (by_pieces_ninsns (len, align, m_max_size, MOVE_BY_PIECES) > 2)
    {
      /* Find the mode of the largest comparison.  */
      scalar_int_mode mode = widest_int_mode_for_size (m_max_size);

      m_from.decide_autoinc (mode, m_reverse, len);
      m_to.decide_autoinc (mode, m_reverse, len);
    }

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
  m_align = align;
}
/* This function contains the main loop used for expanding a block
   operation.  First move what we can in the largest integer mode,
   then go to successively smaller modes.  For every access, call
   GENFUN with the two operands and the EXTRA_DATA.  */

void
op_by_pieces_d::run ()
{
  while (m_max_size > 1 && m_len > 0)
    {
      scalar_int_mode mode = widest_int_mode_for_size (m_max_size);

      if (prepare_mode (mode, m_align))
        {
          unsigned int size = GET_MODE_SIZE (mode);
          rtx to1 = NULL_RTX, from1;

          while (m_len >= size)
            {
              if (m_reverse)
                m_offset -= size;

              to1 = m_to.adjust (mode, m_offset);
              from1 = m_from.adjust (mode, m_offset);

              m_to.maybe_predec (-(HOST_WIDE_INT)size);
              m_from.maybe_predec (-(HOST_WIDE_INT)size);

              generate (to1, from1, mode);

              m_to.maybe_postinc (size);
              m_from.maybe_postinc (size);

              if (!m_reverse)
                m_offset += size;

              m_len -= size;
            }

          finish_mode (mode);
        }

      m_max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!m_len);
}
/* Derived class from op_by_pieces_d, providing support for block move
   operations.  */

class move_by_pieces_d : public op_by_pieces_d
{
  insn_gen_fn m_gen_fun;
  void generate (rtx, rtx, machine_mode);
  bool prepare_mode (machine_mode, unsigned int);

 public:
  move_by_pieces_d (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                    unsigned int align)
    : op_by_pieces_d (to, false, from, true, NULL, NULL, len, align)
  {
  }
  rtx finish_retmode (memop_ret);
};
/* Return true if MODE can be used for a set of copies, given an
   alignment ALIGN.  Prepare whatever data is necessary for later
   calls to generate.  */

bool
move_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
{
  insn_code icode = optab_handler (mov_optab, mode);
  m_gen_fun = GEN_FCN (icode);
  return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
}
/* A callback used when iterating for a move_by_pieces_operation.
   OP0 and OP1 are the values that have been loaded and should be
   moved in MODE.  If OP0 is NULL, this means we should generate a
   push; otherwise m_gen_fun is the insn gen function used to
   generate the move.  */

void
move_by_pieces_d::generate (rtx op0, rtx op1,
                            machine_mode mode ATTRIBUTE_UNUSED)
{
#ifdef PUSH_ROUNDING
  if (op0 == NULL_RTX)
    {
      emit_single_push_insn (mode, op1, NULL);
      return;
    }
#endif
  emit_insn (m_gen_fun (op0, op1));
}
/* Perform the final adjustment at the end of a string to obtain the
   correct return value for the block operation.
   Return value is based on RETMODE argument.  */

rtx
move_by_pieces_d::finish_retmode (memop_ret retmode)
{
  gcc_assert (!m_reverse);
  if (retmode == RETURN_END_MINUS_ONE)
    {
      m_to.maybe_postinc (-1);
      --m_offset;
    }
  return m_to.adjust (QImode, m_offset);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   Return value is based on RETMODE argument.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, memop_ret retmode)
{
#ifndef PUSH_ROUNDING
  if (to == NULL)
    gcc_unreachable ();
#endif

  move_by_pieces_d data (to, from, len, align);

  data.run ();

  if (retmode != RETURN_BEGIN)
    return data.finish_retmode (retmode);
  else
    return to;
}
/* Derived class from op_by_pieces_d, providing support for memory store
   operations.  */

class store_by_pieces_d : public op_by_pieces_d
{
  insn_gen_fn m_gen_fun;
  void generate (rtx, rtx, machine_mode);
  bool prepare_mode (machine_mode, unsigned int);

 public:
  store_by_pieces_d (rtx to, by_pieces_constfn cfn, void *cfn_data,
                     unsigned HOST_WIDE_INT len, unsigned int align)
    : op_by_pieces_d (to, false, NULL_RTX, true, cfn, cfn_data, len, align)
  {
  }
  rtx finish_retmode (memop_ret);
};
/* Return true if MODE can be used for a set of stores, given an
   alignment ALIGN.  Prepare whatever data is necessary for later
   calls to generate.  */

bool
store_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
{
  insn_code icode = optab_handler (mov_optab, mode);
  m_gen_fun = GEN_FCN (icode);
  return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
}
/* A callback used when iterating for a store_by_pieces_operation.
   OP0 is the destination and OP1 the value to store in MODE;
   m_gen_fun is the insn gen function used to generate the store.  */

void
store_by_pieces_d::generate (rtx op0, rtx op1, machine_mode)
{
  emit_insn (m_gen_fun (op0, op1));
}
/* Perform the final adjustment at the end of a string to obtain the
   correct return value for the block operation.
   Return value is based on RETMODE argument.  */

rtx
store_by_pieces_d::finish_retmode (memop_ret retmode)
{
  gcc_assert (!m_reverse);
  if (retmode == RETURN_END_MINUS_ONE)
    {
      m_to.maybe_postinc (-1);
      --m_offset;
    }
  return m_to.adjust (QImode, m_offset);
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   Return nonzero if a call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
                     rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
                     void *constfundata, unsigned int align, bool memsetp)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum insn_code icode;
  int reverse;
  /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
  rtx cst ATTRIBUTE_UNUSED;

  if (len == 0)
    return 1;

  if (!targetm.use_by_pieces_infrastructure_p (len, align,
                                               memsetp
                                                 ? SET_BY_PIECES
                                                 : STORE_BY_PIECES,
                                               optimize_insn_for_speed_p ()))
    return 0;

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1 && l > 0)
        {
          scalar_int_mode mode = widest_int_mode_for_size (max_size);

          icode = optab_handler (mov_optab, mode);
          if (icode != CODE_FOR_nothing
              && align >= GET_MODE_ALIGNMENT (mode))
            {
              unsigned int size = GET_MODE_SIZE (mode);

              while (l >= size)
                {
                  if (reverse)
                    offset -= size;

                  cst = (*constfun) (constfundata, offset, mode);
                  if (!targetm.legitimate_constant_p (mode, cst))
                    return 0;

                  if (!reverse)
                    offset += size;

                  l -= size;
                }
            }

          max_size = GET_MODE_SIZE (mode);
        }

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   Return value is based on RETMODE argument.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
                 rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
                 void *constfundata, unsigned int align, bool memsetp,
                 memop_ret retmode)
{
  if (len == 0)
    {
      gcc_assert (retmode != RETURN_END_MINUS_ONE);
      return to;
    }

  gcc_assert (targetm.use_by_pieces_infrastructure_p
                (len, align,
                 memsetp ? SET_BY_PIECES : STORE_BY_PIECES,
                 optimize_insn_for_speed_p ()));

  store_by_pieces_d data (to, constfun, constfundata, len, align);
  data.run ();

  if (retmode != RETURN_BEGIN)
    return data.finish_retmode (retmode);
  else
    return to;
}
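
/* Hypothetical use (not from this file): a constfn that returns the
   same value for every offset turns store_by_pieces into a memset-style
   expansion, much like clear_by_pieces_1 below:

     static rtx
     zeros_cfn (void *, HOST_WIDE_INT, scalar_int_mode mode)
     {
       return CONST0_RTX (mode);
     }

     if (can_store_by_pieces (n, zeros_cfn, NULL, align, true))
       store_by_pieces (mem, n, zeros_cfn, NULL, align, true, RETURN_BEGIN);

   where n, align and mem are placeholders for the caller's values.  */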
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *, HOST_WIDE_INT, scalar_int_mode)
{
  return const0_rtx;
}

/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  if (len == 0)
    return;

  store_by_pieces_d data (to, clear_by_pieces_1, NULL, len, align);
  data.run ();
}
/* Context used by compare_by_pieces_genfn.  It stores the fail label
   to jump to in case of miscomparison, and for branch ratios greater than 1,
   it stores an accumulator and the current and maximum counts before
   emitting another branch.  */

class compare_by_pieces_d : public op_by_pieces_d
{
  rtx_code_label *m_fail_label;
  rtx m_accumulator;
  int m_count, m_batch;

  void generate (rtx, rtx, machine_mode);
  bool prepare_mode (machine_mode, unsigned int);
  void finish_mode (machine_mode);
 public:
  compare_by_pieces_d (rtx op0, rtx op1, by_pieces_constfn op1_cfn,
                       void *op1_cfn_data, HOST_WIDE_INT len, int align,
                       rtx_code_label *fail_label)
    : op_by_pieces_d (op0, true, op1, true, op1_cfn, op1_cfn_data, len, align)
  {
    m_fail_label = fail_label;
  }
};
/* A callback used when iterating for a compare_by_pieces_operation.
   OP0 and OP1 are the values that have been loaded and should be
   compared in MODE.  DATA holds a pointer to the compare_by_pieces_data
   context structure.  */

void
compare_by_pieces_d::generate (rtx op0, rtx op1, machine_mode mode)
{
  if (m_batch > 1)
    {
      rtx temp = expand_binop (mode, sub_optab, op0, op1, NULL_RTX,
                               true, OPTAB_LIB_WIDEN);
      if (m_count != 0)
        temp = expand_binop (mode, ior_optab, m_accumulator, temp, temp,
                             true, OPTAB_LIB_WIDEN);
      m_accumulator = temp;

      if (++m_count < m_batch)
        return;

      m_count = 0;
      op0 = m_accumulator;
      op1 = const0_rtx;
      m_accumulator = NULL_RTX;
    }
  do_compare_rtx_and_jump (op0, op1, NE, true, mode, NULL_RTX, NULL,
                           m_fail_label, profile_probability::uninitialized ());
}
/* Return true if MODE can be used for a set of moves and comparisons,
   given an alignment ALIGN.  Prepare whatever data is necessary for
   later calls to generate.  */

bool
compare_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
{
  insn_code icode = optab_handler (mov_optab, mode);
  if (icode == CODE_FOR_nothing
      || align < GET_MODE_ALIGNMENT (mode)
      || !can_compare_p (EQ, mode, ccp_jump))
    return false;
  m_batch = targetm.compare_by_pieces_branch_ratio (mode);
  if (m_batch < 0)
    return false;
  m_accumulator = NULL_RTX;
  m_count = 0;
  return true;
}
/* Called after expanding a series of comparisons in MODE.  If we have
   accumulated results for which we haven't emitted a branch yet, do
   so now.  */

void
compare_by_pieces_d::finish_mode (machine_mode mode)
{
  if (m_accumulator != NULL_RTX)
    do_compare_rtx_and_jump (m_accumulator, const0_rtx, NE, true, mode,
                             NULL_RTX, NULL, m_fail_label,
                             profile_probability::uninitialized ());
}
/* Generate several move instructions to compare LEN bytes from blocks
   ARG0 and ARG1.  (These are MEM rtx's with BLKmode).

   ALIGN is maximum stack alignment we can assume.

   Optionally, the caller can pass a constfn and associated data in A1_CFN
   and A1_CFN_DATA, describing that the second operand being compared is a
   known constant and how to obtain its data.  */

static rtx
compare_by_pieces (rtx arg0, rtx arg1, unsigned HOST_WIDE_INT len,
                   rtx target, unsigned int align,
                   by_pieces_constfn a1_cfn, void *a1_cfn_data)
{
  rtx_code_label *fail_label = gen_label_rtx ();
  rtx_code_label *end_label = gen_label_rtx ();

  if (target == NULL_RTX
      || !REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (TYPE_MODE (integer_type_node));

  compare_by_pieces_d data (arg0, arg1, a1_cfn, a1_cfn_data, len, align,
                            fail_label);

  data.run ();

  emit_move_insn (target, const0_rtx);
  emit_jump (end_label);

  emit_label (fail_label);
  emit_move_insn (target, const1_rtx);
  emit_label (end_label);

  return target;
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.
   MIN_SIZE is the minimal size of block to move.
   MAX_SIZE is the maximal size of block to move; if it cannot be represented
   in unsigned HOST_WIDE_INT, then it is a mask of all ones.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
                       unsigned int expected_align, HOST_WIDE_INT expected_size,
                       unsigned HOST_WIDE_INT min_size,
                       unsigned HOST_WIDE_INT max_size,
                       unsigned HOST_WIDE_INT probable_max_size)
{
  int may_use_call;
  rtx retval = 0;
  unsigned int align;

  gcc_assert (size);
  if (CONST_INT_P (size) && INTVAL (size) == 0)
    return 0;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = 1;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = 0;
      break;

    case BLOCK_OP_NO_LIBCALL_RET:
      may_use_call = -1;
      break;

    default:
      gcc_unreachable ();
    }
  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  poly_int64 const_size;
  if (poly_int_rtx_p (size, &const_size))
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, const_size);
      set_mem_size (y, const_size);
    }
  if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, RETURN_BEGIN);
  else if (emit_block_move_via_movmem (x, y, size, align,
                                       expected_align, expected_size,
                                       min_size, max_size, probable_max_size))
    ;
  else if (may_use_call
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    {
      if (may_use_call < 0)
        return pc_rtx;

      /* Since x and y are passed to a libcall, mark the corresponding
         tree EXPR as addressable.  */
      tree y_expr = MEM_EXPR (y);
      tree x_expr = MEM_EXPR (x);
      if (y_expr)
        mark_addressable (y_expr);
      if (x_expr)
        mark_addressable (x_expr);

      retval = emit_block_copy_via_libcall (x, y, size,
                                            method == BLOCK_OP_TAILCALL);
    }
  else
    emit_block_move_via_loop (x, y, size, align);
  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return emit_block_move_hints (x, y, size, method, 0, -1,
                                min, max, max);
}
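
/* Hypothetical caller (not from this file): copy 32 bytes, letting the
   helper choose between by-pieces expansion, a movmem pattern, a memcpy
   libcall, or an explicit loop:

     emit_block_move (dst_mem, src_mem, GEN_INT (32), BLOCK_OP_NORMAL);

   where dst_mem and src_mem stand in for the caller's MEM rtxes.  */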
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far_v;
    cumulative_args_t args_so_far;
    tree fn, arg;

    fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
    INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
    args_so_far = pack_cumulative_args (&args_so_far_v);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node; arg = TREE_CHAIN (arg))
      {
        machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = targetm.calls.function_arg (args_so_far, mode,
                                              NULL_TREE, true);
        if (!tmp || !REG_P (tmp))
          return false;
        if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
          return false;
        targetm.calls.function_arg_advance (args_so_far, mode,
                                            NULL_TREE, true);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
                            unsigned int expected_align,
                            HOST_WIDE_INT expected_size,
                            unsigned HOST_WIDE_INT min_size,
                            unsigned HOST_WIDE_INT max_size,
                            unsigned HOST_WIDE_INT probable_max_size)
{
  int save_volatile_ok = volatile_ok;

  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
    {
      if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
        expected_size = probable_max_size;
      if ((unsigned HOST_WIDE_INT)expected_size < min_size)
        expected_size = min_size;
    }

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  opt_scalar_int_mode mode_iter;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    {
      scalar_int_mode mode = mode_iter.require ();
      enum insn_code code = direct_optab_handler (movmem_optab, mode);

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  Since SIZE is within the Pmode address
             space, we limit MODE to Pmode.  */
          && ((CONST_INT_P (size)
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || max_size <= (GET_MODE_MASK (mode) >> 1)
              || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
        {
          struct expand_operand ops[9];
          unsigned int nops;

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */
          nops = insn_data[(int) code].n_generator_args;
          gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

          create_fixed_operand (&ops[0], x);
          create_fixed_operand (&ops[1], y);
          /* The check above guarantees that this size conversion is valid.  */
          create_convert_operand_to (&ops[2], size, mode, true);
          create_integer_operand (&ops[3], align / BITS_PER_UNIT);
          if (nops >= 6)
            {
              create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
              create_integer_operand (&ops[5], expected_size);
            }
          if (nops >= 8)
            {
              create_integer_operand (&ops[6], min_size);
              /* If we cannot represent the maximal size,
                 make parameter NULL.  */
              if ((HOST_WIDE_INT) max_size != -1)
                create_integer_operand (&ops[7], max_size);
              else
                create_fixed_operand (&ops[7], NULL);
            }
          if (nops == 9)
            {
              /* If we cannot represent the maximal size,
                 make parameter NULL.  */
              if ((HOST_WIDE_INT) probable_max_size != -1)
                create_integer_operand (&ops[8], probable_max_size);
              else
                create_fixed_operand (&ops[8], NULL);
            }
          if (maybe_expand_insn (code, nops, ops))
            {
              volatile_ok = save_volatile_ok;
              return true;
            }
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx_code_label *cmp_label, *top_label;
  rtx iter, x_addr, y_addr, tmp;
  machine_mode x_addr_mode = get_address_mode (x);
  machine_mode y_addr_mode = get_address_mode (y);
  machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label,
                           profile_probability::guessed_always ()
                             .apply_scale (9, 10));
}
/* Expand a call to memcpy or memmove or memcmp, and return the result.
   TAILCALL is true if this is a tail call.  */

rtx
emit_block_op_via_libcall (enum built_in_function fncode, rtx dst, rtx src,
                           rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, dst_tree, src_tree, size_tree;
  machine_mode size_mode;

  dst_addr = copy_addr_to_reg (XEXP (dst, 0));
  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  dst_tree = make_tree (ptr_type_node, dst_addr);

  src_addr = copy_addr_to_reg (XEXP (src, 0));
  src_addr = convert_memory_address (ptr_mode, src_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);
  size_tree = make_tree (sizetype, size);

  /* It is incorrect to use the libcall calling conventions for calls to
     memcpy/memmove/memcmp because they can be provided by the user.  */
  tree fn = builtin_decl_implicit (fncode);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  return expand_call (call_expr, NULL_RTX, false);
}
/* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
   ARG3_TYPE is the type of ARG3_RTX.  Return the result rtx on success,
   otherwise return null.  */

static rtx
expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
                          rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
                          HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  struct expand_operand ops[5];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
                               TYPE_UNSIGNED (arg3_type));
  create_integer_operand (&ops[4], align);
  if (maybe_expand_insn (icode, 5, ops))
    return ops[0].value;
  return NULL_RTX;
}
/* Expand a block compare between X and Y with length LEN using the
   cmpmem optab, placing the result in TARGET.  LEN_TYPE is the type
   of the expression that was used to calculate the length.  ALIGN
   gives the known minimum common alignment.  */

static rtx
emit_block_cmp_via_cmpmem (rtx x, rtx y, rtx len, tree len_type, rtx target,
                           unsigned int align)
{
  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
  insn_code icode = direct_optab_handler (cmpmem_optab, SImode);

  if (icode == CODE_FOR_nothing)
    return NULL_RTX;

  return expand_cmpstrn_or_cmpmem (icode, target, x, y, len_type, len, align);
}
/* Emit code to compare a block Y to a block X.  This may be done with
   string-compare instructions, with multiple scalar instructions,
   or with a library call.

   Both X and Y must be MEM rtx's.  LEN is an rtx that says how long
   they are.  LEN_TYPE is the type of the expression that was used to
   calculate it.

   If EQUALITY_ONLY is true, it means we don't have to return the tri-state
   value of a normal memcmp call, instead we can just compare for equality.
   If FORCE_LIBCALL is true, we should emit a call to memcmp rather than
   returning NULL_RTX.

   Optionally, the caller can pass a constfn and associated data in Y_CFN
   and Y_CFN_DATA, describing that the second operand being compared is a
   known constant and how to obtain its data.
   Return the result of the comparison, or NULL_RTX if we failed to
   perform the operation.  */

rtx
emit_block_cmp_hints (rtx x, rtx y, rtx len, tree len_type, rtx target,
                      bool equality_only, by_pieces_constfn y_cfn,
                      void *y_cfn_data)
{
  rtx result = 0;

  if (CONST_INT_P (len) && INTVAL (len) == 0)
    return const0_rtx;

  gcc_assert (MEM_P (x) && MEM_P (y));
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  if (equality_only
      && CONST_INT_P (len)
      && can_do_by_pieces (INTVAL (len), align, COMPARE_BY_PIECES))
    result = compare_by_pieces (x, y, INTVAL (len), target, align,
                                y_cfn, y_cfn_data);
  else
    result = emit_block_cmp_via_cmpmem (x, y, len, len_type, target, align);

  return result;
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
{
  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
  if (targetm.have_load_multiple ())
    {
      rtx_insn *last = get_last_insn ();
      rtx first = gen_rtx_REG (word_mode, regno);
      if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
                                                     GEN_INT (nregs)))
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }

  for (int i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
  if (targetm.have_store_multiple ())
    {
      rtx_insn *last = get_last_insn ();
      rtx first = gen_rtx_REG (word_mode, regno);
      if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
                                                      GEN_INT (nregs)))
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }

  for (int i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i == 1)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}

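/* For reference (illustrative shape, hypothetical register numbers):
   a group PARALLEL as handled above looks like

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   i.e. each element pairs a register with its byte offset into the
   value; a NULL register in the first slot marks a parameter that is
   passed partly on the stack.  */
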
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type,
                   poly_int64 ssize)
{
  rtx src;
  int start, i;
  machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      scalar_int_mode imode;
      if (int_mode_for_mode (GET_MODE (orig_src)).exists (&imode))
        {
          src = gen_reg_rtx (imode);
          emit_move_insn (gen_lowpart (GET_MODE (orig_src), src), orig_src);
        }
      else
        {
          src = assign_stack_temp (GET_MODE (orig_src), ssize);
          emit_move_insn (src, orig_src);
        }
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      poly_int64 bytepos = rtx_to_poly_int64 (XEXP (XVECEXP (dst, 0, i), 1));
      poly_int64 bytelen = GET_MODE_SIZE (mode);
      poly_int64 shift = 0;

      /* Handle trailing fragments that run over the size of the struct.
         It's the target's responsibility to make sure that the fragment
         cannot be strictly smaller in some cases and strictly larger
         in others.  */
      gcc_checking_assert (ordered_p (bytepos + bytelen, ssize));
      if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (maybe_gt (bytelen, 0));
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! targetm.slow_unaligned_access (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && multiple_p (bytepos * BITS_PER_UNIT, GET_MODE_ALIGNMENT (mode))
          && known_eq (bytelen, GET_MODE_SIZE (mode)))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (COMPLEX_MODE_P (mode)
               && GET_MODE (src) == mode
               && known_eq (bytelen, GET_MODE_SIZE (mode)))
        /* Let emit_move_complex do the bulk of the work.  */
        tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
        {
          poly_int64 slen = GET_MODE_SIZE (GET_MODE (src));
          poly_int64 slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
          unsigned int elt;
          poly_int64 subpos;

          if (can_div_trunc_p (bytepos, slen0, &elt, &subpos)
              && known_le (subpos + bytelen, slen0))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, elt);
              if (maybe_ne (subpos, 0)
                  || maybe_ne (subpos + bytelen, slen0)
                  || (!CONSTANT_P (tmps[i])
                      && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode)))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             subpos * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode, false,
                                             NULL);
            }
          else
            {
              rtx mem;

              gcc_assert (known_eq (bytepos, 0));
              mem = assign_stack_temp (GET_MODE (src), slen);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode, false,
                                           NULL);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          poly_uint64 slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
        {
          if (known_eq (bytelen, ssize))
            tmps[i] = src;
          else
            {
              rtx first, second;

              /* TODO: const_wide_int can have sizes other than this...  */
              gcc_assert (known_eq (2 * bytelen, ssize));
              split_double (src, &first, &second);
              if (i)
                tmps[i] = second;
              else
                tmps[i] = first;
            }
        }
      else if (REG_P (src) && GET_MODE (src) == mode)
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode, false, NULL);

      if (maybe_ne (shift, 0))
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                shift, tmps[i], 0);
    }
}

/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, poly_int64 ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
        continue;
      emit_move_insn (d, tmps[i]);
    }
}

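/* Usage sketch (hypothetical): given a PARALLEL description DST of a
   return value and a 16-byte BLKmode source SRC,

     emit_group_load (dst, src, type, 16);

   loads each piece into the corresponding (probable) hard register.  */
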
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, poly_int64 ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        {
          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
        }
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}

/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}

/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}

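/* A typical pairing (illustrative, hypothetical caller): code that
   must delay the final register assignment first does

     rtx tmp = emit_group_move_into_temps (src);
     ...
     emit_group_move (dst, tmp);

   so the hard registers in SRC are only live across the final move.  */
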
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED,
                  poly_int64 ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      scalar_int_mode imode;
      if (int_mode_for_mode (GET_MODE (orig_dst)).exists (&imode))
        {
          dst = gen_reg_rtx (imode);
          emit_group_store (dst, src, type, ssize);
          dst = gen_lowpart (GET_MODE (orig_dst), dst);
        }
      else
        {
          dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
          emit_group_store (dst, src, type, ssize);
        }
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
        {
          tmps[i] = gen_reg_rtx (GET_MODE (reg));
          emit_move_insn (tmps[i], reg);
        }
      else
        tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */
      temp = assign_stack_temp (GET_MODE (dst), ssize);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      machine_mode outer = GET_MODE (dst);
      machine_mode inner;
      poly_int64 bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
        dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
         of the destination mode, use a paradoxical subreg to
         initialize the destination.  */
      if (start < finish)
        {
          inner = GET_MODE (tmps[start]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (known_eq (rtx_to_poly_int64 (XEXP (XVECEXP (src, 0, start), 1)),
                        bytepos))
            {
              temp = simplify_gen_subreg (outer, tmps[start], inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  start++;
                }
            }
        }

      /* If the first element wasn't the low part, try the last.  */
      if (!done
          && start < finish - 1)
        {
          inner = GET_MODE (tmps[finish - 1]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (known_eq (rtx_to_poly_int64 (XEXP (XVECEXP (src, 0,
                                                          finish - 1), 1)),
                        bytepos))
            {
              temp = simplify_gen_subreg (outer, tmps[finish - 1], inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  finish--;
                }
            }
        }

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
        emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      poly_int64 bytepos = rtx_to_poly_int64 (XEXP (XVECEXP (src, 0, i), 1));
      machine_mode mode = GET_MODE (tmps[i]);
      poly_int64 bytelen = GET_MODE_SIZE (mode);
      poly_uint64 adj_bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.
         It's the target's responsibility to make sure that the fragment
         cannot be strictly smaller in some cases and strictly larger
         in others.  */
      gcc_checking_assert (ordered_p (bytepos + bytelen, ssize));
      if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
        adj_bytelen = ssize - bytepos;
      else
        adj_bytelen = bytelen;

      if (GET_CODE (dst) == CONCAT)
        {
          if (known_le (bytepos + adj_bytelen,
                        GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))))
            dest = XEXP (dst, 0);
          else if (known_ge (bytepos, GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              machine_mode dest_mode = GET_MODE (dest);
              machine_mode tmp_mode = GET_MODE (tmps[i]);

              gcc_assert (known_eq (bytepos, 0) && XVECLEN (src, 0));

              if (GET_MODE_ALIGNMENT (dest_mode)
                  >= GET_MODE_ALIGNMENT (tmp_mode))
                {
                  dest = assign_stack_temp (dest_mode,
                                            GET_MODE_SIZE (dest_mode));
                  emit_move_insn (adjust_address (dest,
                                                  tmp_mode,
                                                  bytepos),
                                  tmps[i]);
                  dst = dest;
                }
              else
                {
                  dest = assign_stack_temp (tmp_mode,
                                            GET_MODE_SIZE (tmp_mode));
                  emit_move_insn (dest, tmps[i]);
                  dst = adjust_address (dest, dest_mode, bytepos);
                }
              break;
            }
        }

      /* Handle trailing fragments that run over the size of the struct.  */
      if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              poly_int64 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      shift, tmps[i], 0);
            }

          /* Make sure not to write past the end of the struct.  */
          store_bit_field (dest,
                           adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                           bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
                           VOIDmode, tmps[i], false);
        }

      /* Optimize the access just a bit.  */
      else if (MEM_P (dest)
               && (!targetm.slow_unaligned_access (mode, MEM_ALIGN (dest))
                   || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
               && multiple_p (bytepos * BITS_PER_UNIT,
                              GET_MODE_ALIGNMENT (mode))
               && known_eq (bytelen, GET_MODE_SIZE (mode)))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);

      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         0, 0, mode, tmps[i], false);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}

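/* Usage sketch (hypothetical): storing a PARALLEL return value SRC
   into a 12-byte BLKmode stack slot DST would be

     emit_group_store (dst, src, type, 12);

   with any trailing fragment narrowed via the store_bit_field path
   above so the store never writes past byte 12.  */
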
/* Return a form of X that does not use a PARALLEL.  TYPE is the type
   of the value stored in X.  */

rtx
maybe_emit_group_store (rtx x, tree type)
{
  machine_mode mode = TYPE_MODE (type);
  gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
  if (GET_CODE (x) == PARALLEL)
    {
      rtx result = gen_reg_rtx (mode);
      emit_group_store (result, x, type, int_size_in_bytes (type));
      return result;
    }
  return x;
}

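/* Illustrative note: maybe_emit_group_store is a no-op for
   non-PARALLEL values, so a caller can write

     x = maybe_emit_group_store (x, type);

   unconditionally before handing X to code that cannot cope with a
   PARALLEL.  */
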
/* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.

   This is used on targets that return BLKmode values in registers.  */

void
copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
  /* No current ABI uses variable-sized modes to pass a BLKmode type.  */
  fixed_size_mode mode = as_a <fixed_size_mode> (GET_MODE (srcreg));
  fixed_size_mode tmode = as_a <fixed_size_mode> (GET_MODE (target));
  fixed_size_mode copy_mode;

  /* BLKmode registers created in the back-end shouldn't have survived.  */
  gcc_assert (mode != BLKmode);

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least end of
     the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* We can use a single move if we have an exact mode for the size.  */
  else if (MEM_P (target)
           && (!targetm.slow_unaligned_access (mode, MEM_ALIGN (target))
               || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
           && bytes == GET_MODE_SIZE (mode))
    {
      emit_move_insn (adjust_address (target, mode, 0), srcreg);
      return;
    }

  /* And if we additionally have the same mode for a register.  */
  else if (REG_P (target)
           && GET_MODE (target) == mode
           && bytes == GET_MODE_SIZE (mode))
    {
      emit_move_insn (target, srcreg);
      return;
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */
  if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
    {
      srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
      mode = word_mode;
    }

  /* Copy the structure BITSIZE bits at a time.  If the target lives in
     memory, take care of not reading/writing past its end by selecting
     a copy mode suited to BITSIZE.  This should always be possible given
     how it is computed.

     If the target lives in register, make sure not to select a copy mode
     larger than the mode of the register.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */

  copy_mode = word_mode;
  if (MEM_P (target))
    {
      opt_scalar_int_mode mem_mode = int_mode_for_size (bitsize, 1);
      if (mem_mode.exists ())
        copy_mode = mem_mode.require ();
    }
  else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
    copy_mode = tmode;

  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
        dst = target;
      else if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, copy_mode, copy_mode,
                                          false, NULL),
                       false);
    }
}

/* Copy BLKmode value SRC into a register of mode MODE_IN.  Return the
   register if it contains any data, otherwise return null.

   This is used on targets that return BLKmode values in registers.  */

rtx
copy_blkmode_to_reg (machine_mode mode_in, tree src)
{
  int i, n_regs;
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
  unsigned int bitsize;
  rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
  /* No current ABI uses variable-sized modes to pass a BLKmode type.  */
  fixed_size_mode mode = as_a <fixed_size_mode> (mode_in);
  fixed_size_mode dst_mode;
  scalar_int_mode min_mode;

  gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);

  x = expand_normal (src);

  bytes = arg_int_size_in_bytes (TREE_TYPE (src));
  if (bytes == 0)
    return NULL_RTX;

  /* If the structure doesn't take up a whole number of words, see
     whether the register value should be padded on the left or on
     the right.  Set PADDING_CORRECTION to the number of padding
     bits needed on the left side.

     In most ABIs, the structure will be returned at the least end of
     the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (TREE_TYPE (src))
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
                                           * BITS_PER_UNIT));

  n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  dst_words = XALLOCAVEC (rtx, n_regs);
  bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
  min_mode = smallest_int_mode_for_size (bitsize);

  /* Copy the structure BITSIZE bits at a time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new destination pseudo each time xbitpos is
         on a word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        {
          /* Generate an appropriate register.  */
          dst_word = gen_reg_rtx (word_mode);
          dst_words[xbitpos / BITS_PER_WORD] = dst_word;

          /* Clear the destination before we move anything into it.  */
          emit_move_insn (dst_word, CONST0_RTX (word_mode));
        }

      /* Find the largest integer mode that can be used to copy all or as
         many bits as possible of the structure if the target supports larger
         copies.  There are too many corner cases here w.r.t to alignments on
         the read/writes.  So if there is any padding just use single byte
         operations.  */
      opt_scalar_int_mode mode_iter;
      if (padding_correction == 0 && !STRICT_ALIGNMENT)
        {
          FOR_EACH_MODE_FROM (mode_iter, min_mode)
            {
              unsigned int msize = GET_MODE_BITSIZE (mode_iter.require ());
              if (msize <= ((bytes * BITS_PER_UNIT) - bitpos)
                  && msize <= BITS_PER_WORD)
                bitsize = msize;
              else
                break;
            }
        }

      /* We need a new source operand each time bitpos is on a word
         boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);

      /* Use bitpos for the source extraction (left justified) and
         xbitpos for the destination store (right justified).  */
      store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
                       0, 0, word_mode,
                       extract_bit_field (src_word, bitsize,
                                          bitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,
                                          false, NULL),
                       false);
    }

  if (mode == BLKmode)
    {
      /* Find the smallest integer mode large enough to hold the
         entire structure.  */
      opt_scalar_int_mode mode_iter;
      FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
        if (GET_MODE_SIZE (mode_iter.require ()) >= bytes)
          break;

      /* A suitable mode should have been found.  */
      mode = mode_iter.require ();
    }

  if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
    dst_mode = word_mode;
  else
    dst_mode = mode;
  dst = gen_reg_rtx (dst_mode);

  for (i = 0; i < n_regs; i++)
    emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);

  if (mode != dst_mode)
    dst = gen_lowpart (mode, dst);

  return dst;
}

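/* Worked example (illustrative, not from the source): with 64-bit
   words, a 12-byte struct gives n_regs = 2 and, when the padding
   condition above holds, PADDING_CORRECTION
   = 64 - (12 % 8) * 8 = 32 bits skipped on the left of the register
   image.  */
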
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
  gcc_assert (REG_P (reg));

  if (!HARD_REGISTER_P (reg))
    return;

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add a CLOBBER expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
    }
}

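/* Illustrative: for a value passed in two non-contiguous registers
   described by a PARALLEL, the loop above emits one USE per register;
   callers attach the resulting list as the call's
   CALL_INSN_FUNCTION_USAGE, so the argument setup stays live.  */
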
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the code of the expression on the RHS is CODE.  Return
   NULL otherwise.  */

static gimple *
get_def_for_expr (tree name, enum tree_code code)
{
  gimple *def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || gimple_assign_rhs_code (def_stmt) != code)
    return NULL;

  return def_stmt;
}

/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the class of the expression on the RHS is CLASS.  Return
   NULL otherwise.  */

static gimple *
get_def_for_expr_class (tree name, enum tree_code_class tclass)
{
  gimple *def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
    return NULL;

  return def_stmt;
}

/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
                     unsigned int expected_align, HOST_WIDE_INT expected_size,
                     unsigned HOST_WIDE_INT min_size,
                     unsigned HOST_WIDE_INT max_size,
                     unsigned HOST_WIDE_INT probable_max_size)
{
  machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  poly_int64 size_val;
  if (mode != BLKmode
      && poly_int_rtx_p (size, &size_val)
      && known_eq (size_val, GET_MODE_SIZE (mode)))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
        {
          emit_move_insn (object, zero);
          return NULL;
        }

      if (COMPLEX_MODE_P (mode))
        {
          zero = CONST0_RTX (GET_MODE_INNER (mode));
          if (zero != NULL)
            {
              write_complex_part (object, zero, 0);
              write_complex_part (object, zero, 1);
              return NULL;
            }
        }
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (CONST_INT_P (size)
      && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
                                                 CLEAR_BY_PIECES,
                                                 optimize_insn_for_speed_p ()))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align,
                                   expected_align, expected_size,
                                   min_size, max_size, probable_max_size))
    ;
  else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
    return set_storage_via_libcall (object, size, const0_rtx,
                                    method == BLOCK_OP_TAILCALL);
  else
    gcc_unreachable ();

  return NULL;
}

rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return clear_storage_hints (object, size, method, 0, -1, min, max, max);
}

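/* Usage sketch (hypothetical): zeroing a 64-byte BLKmode MEM is

     clear_storage (mem, GEN_INT (64), BLOCK_OP_NORMAL);

   which tries clear_by_pieces, then a setmem pattern, then a memset
   library call, in that order.  */
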
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
  tree call_expr, fn, object_tree, size_tree, val_tree;
  machine_mode size_mode;

  object = copy_addr_to_reg (XEXP (object, 0));
  object_tree = make_tree (ptr_type_node, object);

  if (!CONST_INT_P (val))
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
  val_tree = make_tree (integer_type_node, val);

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);
  size_tree = make_tree (sizetype, size);

  /* It is incorrect to use the libcall calling conventions for calls to
     memset because it can be provided by the user.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMSET);
  call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  return expand_call (call_expr, NULL_RTX, false);
}

/* Expand a setmem pattern; return true if successful.  */

bool
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
                        unsigned int expected_align, HOST_WIDE_INT expected_size,
                        unsigned HOST_WIDE_INT min_size,
                        unsigned HOST_WIDE_INT max_size,
                        unsigned HOST_WIDE_INT probable_max_size)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
    {
      if ((unsigned HOST_WIDE_INT)expected_size > max_size)
        expected_size = max_size;
      if ((unsigned HOST_WIDE_INT)expected_size < min_size)
        expected_size = min_size;
    }

  opt_scalar_int_mode mode_iter;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    {
      scalar_int_mode mode = mode_iter.require ();
      enum insn_code code = direct_optab_handler (setmem_optab, mode);

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  Since SIZE is within the Pmode address
             space, we limit MODE to Pmode.  */
          && ((CONST_INT_P (size)
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || max_size <= (GET_MODE_MASK (mode) >> 1)
              || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
        {
          struct expand_operand ops[9];
          unsigned int nops;

          nops = insn_data[(int) code].n_generator_args;
          gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

          create_fixed_operand (&ops[0], object);
          /* The check above guarantees that this size conversion is valid.  */
          create_convert_operand_to (&ops[1], size, mode, true);
          create_convert_operand_from (&ops[2], val, byte_mode, true);
          create_integer_operand (&ops[3], align / BITS_PER_UNIT);
          if (nops >= 6)
            {
              create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
              create_integer_operand (&ops[5], expected_size);
            }
          if (nops >= 8)
            {
              create_integer_operand (&ops[6], min_size);
              /* If we cannot represent the maximal size,
                 make parameter NULL.  */
              if ((HOST_WIDE_INT) max_size != -1)
                create_integer_operand (&ops[7], max_size);
              else
                create_fixed_operand (&ops[7], NULL);
            }
          if (nops == 9)
            {
              /* If we cannot represent the maximal size,
                 make parameter NULL.  */
              if ((HOST_WIDE_INT) probable_max_size != -1)
                create_integer_operand (&ops[8], probable_max_size);
              else
                create_fixed_operand (&ops[8], NULL);
            }
          if (maybe_expand_insn (code, nops, ops))
            return true;
        }
    }

  return false;
}

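/* For reference (derived from the code above, not new behavior): a
   9-operand setmem pattern receives, in order, the destination MEM,
   the length, the fill value, the alignment in bytes, the expected
   alignment and size hints, and the min/max/probable-max size bounds,
   with unrepresentable maxima passed as NULL.  */
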
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  machine_mode cmode;
  scalar_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
                                         imag_p ? GET_MODE_SIZE (imode) : 0),
                      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && REG_NREGS (cplx) % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
                                      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
        {
          emit_move_insn (part, val);
          return;
        }
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val,
                   false);
}

/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

rtx
read_complex_part (rtx cplx, bool imag_p)
{
  machine_mode cmode;
  scalar_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
        {
          tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
          if (CONSTANT_CLASS_P (part))
            return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
        }
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
                              imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && REG_NREGS (cplx) % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
                                     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
        return ret;
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
                            true, NULL_RTX, imode, imode, false, NULL);
}

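/* Illustrative pairing (hypothetical caller, not from this file):
   swapping the parts of a complex value Y into X can be written with
   the two helpers above as

     write_complex_part (x, read_complex_part (y, true), false);
     write_complex_part (x, read_complex_part (y, false), true);

   assuming X and Y do not overlap.  */
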
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (machine_mode new_mode,
                       machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (push_operand (x, GET_MODE (x)))
    {
      ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
      MEM_COPY_ATTRIBUTES (ret, x);
    }
  else if (MEM_P (x))
    {
      /* We don't have to worry about changing the address since the
         size in bytes is supposed to be the same.  */
      if (reload_in_progress)
        {
          /* Copy the MEM to change the mode and move any
             substitutions from the old MEM to the new one.  */
          ret = adjust_address_nv (x, new_mode, 0);
          copy_replacements (x, ret);
        }
      else
        ret = adjust_address (x, new_mode, 0);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
         that the new mode is ok for a hard register.  If we were to use
         simplify_gen_subreg, we would create the subreg, but would
         probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
         we want.  Which is needed for CCmodes on some targets.  */
      if (force)
        ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
        ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}

/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx_insn *
emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
{
  scalar_int_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  if (!int_mode_for_mode (mode).exists (&imode))
    return NULL;

  /* The target must support moves in this mode.  */
  code = optab_handler (mov_optab, imode);
  if (code == CODE_FOR_nothing)
    return NULL;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL;
  return emit_insn (GEN_FCN (code) (x, y));
}

/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
   Return an equivalent MEM that does not use an auto-increment.  */

rtx
emit_move_resolve_push (machine_mode mode, rtx x)
{
  enum rtx_code code = GET_CODE (XEXP (x, 0));
  rtx temp;

  poly_int64 adjust = GET_MODE_SIZE (mode);
#ifdef PUSH_ROUNDING
  adjust = PUSH_ROUNDING (adjust);
#endif
  if (code == PRE_DEC || code == POST_DEC)
    adjust = -adjust;
  else if (code == PRE_MODIFY || code == POST_MODIFY)
    {
      rtx expr = XEXP (XEXP (x, 0), 1);

      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
      poly_int64 val = rtx_to_poly_int64 (XEXP (expr, 1));
      if (GET_CODE (expr) == MINUS)
        val = -val;
      gcc_assert (known_eq (adjust, val) || known_eq (adjust, -val));
      adjust = val;
    }

  /* Do not use anti_adjust_stack, since we don't want to update
     stack_pointer_delta.  */
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
                              gen_int_mode (adjust, Pmode), stack_pointer_rtx,
                              0, OPTAB_LIB_WIDEN);
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);

  switch (code)
    {
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      temp = stack_pointer_rtx;
      break;
    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
      temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
      break;
    default:
      gcc_unreachable ();
    }

  return replace_equiv_address (x, temp);
}

/* A subroutine of emit_move_complex.  Generate a move from Y into X.
   X is known to satisfy push_operand, and MODE is known to be complex.
   Returns the last instruction emitted.  */

rtx_insn *
emit_move_complex_push (machine_mode mode, rtx x, rtx y)
{
  scalar_mode submode = GET_MODE_INNER (mode);
  bool imag_first;

#ifdef PUSH_ROUNDING
  poly_int64 submodesize = GET_MODE_SIZE (submode);

  /* In case we output to the stack, but the size is smaller than the
     machine can push exactly, we need to use move instructions.  */
  if (maybe_ne (PUSH_ROUNDING (submodesize), submodesize))
    {
      x = emit_move_resolve_push (mode, x);
      return emit_move_insn (x, y);
    }
#endif

  /* Note that the real part always precedes the imag part in memory
     regardless of machine's endianness.  */
  switch (GET_CODE (XEXP (x, 0)))
    {
    case PRE_DEC:
      imag_first = true;
      break;
    case PRE_INC:
      imag_first = false;
      break;
    default:
      gcc_unreachable ();
    }

  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                  read_complex_part (y, imag_first));
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                         read_complex_part (y, !imag_first));
}

/* A subroutine of emit_move_complex.  Perform the move from Y to X
   via two moves of the parts.  Returns the last instruction emitted.  */

rtx_insn *
emit_move_complex_parts (rtx x, rtx y)
{
  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_clobber (x);

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);

  return get_last_insn ();
}

/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be complex.  Returns the last instruction emitted.  */

static rtx_insn *
emit_move_complex (machine_mode mode, rtx x, rtx y)
{
  bool try_int;

  /* Need to take special care for pushes, to maintain proper ordering
     of the data, and possibly extra padding.  */
  if (push_operand (x, mode))
    return emit_move_complex_push (mode, x, y);

  /* See if we can coerce the target into moving both values at once, except
     for floating point where we favor moving as parts if this is easy.  */
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
      && !(REG_P (x)
           && HARD_REGISTER_P (x)
           && REG_NREGS (x) == 1)
      && !(REG_P (y)
           && HARD_REGISTER_P (y)
           && REG_NREGS (y) == 1))
    try_int = false;
  /* Not possible if the values are inherently not adjacent.  */
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
    try_int = false;
  /* Is possible if both are registers (or subregs of registers).  */
  else if (register_operand (x, mode) && register_operand (y, mode))
    try_int = true;
  /* If one of the operands is a memory, and alignment constraints
     are friendly enough, we may be able to do combined memory operations.
     We do not attempt this if Y is a constant because that combination is
     usually better with the by-parts thing below.  */
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
           && (!STRICT_ALIGNMENT
               || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
    try_int = true;
  else
    try_int = false;

  if (try_int)
    {
      rtx_insn *ret;

      /* For memory to memory moves, optimal behavior can be had with the
         existing block move logic.  */
      if (MEM_P (x) && MEM_P (y))
        {
          emit_block_move (x, y, gen_int_mode (GET_MODE_SIZE (mode), Pmode),
                           BLOCK_OP_NO_LIBCALL);
          return get_last_insn ();
        }

      ret = emit_move_via_integer (mode, x, y, true);
      if (ret)
        return ret;
    }

  return emit_move_complex_parts (x, y);
}

/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx_insn *
emit_move_ccmode (machine_mode mode, rtx x, rtx y)
{
  rtx_insn *ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = optab_handler (mov_optab, CCmode);
      if (code != CODE_FOR_nothing)
        {
          x = emit_move_change_mode (CCmode, mode, x, true);
          y = emit_move_change_mode (CCmode, mode, y, true);
          return emit_insn (GEN_FCN (code) (x, y));
        }
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}

/* Return true if word I of OP lies entirely in the
   undefined bits of a paradoxical subreg.  */

static bool
undefined_operand_subword_p (const_rtx op, int i)
{
  if (GET_CODE (op) != SUBREG)
    return false;
  machine_mode innermostmode = GET_MODE (SUBREG_REG (op));
  poly_int64 offset = i * UNITS_PER_WORD + subreg_memory_offset (op);
  return (known_ge (offset, GET_MODE_SIZE (innermostmode))
          || known_le (offset, -UNITS_PER_WORD));
}

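/* Example (illustrative): for (subreg:TI (reg:DI 100) 0) on a 64-bit
   target, word 1 lies entirely in the undefined upper half of the
   paradoxical subreg, so the word-by-word loop below skips it.  */
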
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is any multi-word or full-word mode that lacks a move_insn
   pattern.  Note that you will get better code if you define such
   patterns, even if they must turn into multiple assembler instructions.  */

static rtx_insn *
emit_move_multi_word (machine_mode mode, rtx x, rtx y)
{
  rtx_insn *last_insn = 0;
  rtx_insn *seq;
  rtx inner;
  bool need_clobber;
  int i, mode_size;

  /* This function can only handle cases where the number of words is
     known at compile time.  */
  mode_size = GET_MODE_SIZE (mode).to_constant ();
  gcc_assert (mode_size >= UNITS_PER_WORD);

  /* If X is a push on the stack, do the push now and replace
     X with a reference to the stack pointer.  */
  if (push_operand (x, mode))
    x = emit_move_resolve_push (mode, x);

  /* If we are in reload, see if either operand is a MEM whose address
     is scheduled for replacement.  */
  if (reload_in_progress && MEM_P (x)
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
    x = replace_equiv_address_nv (x, inner);
  if (reload_in_progress && MEM_P (y)
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
    y = replace_equiv_address_nv (y, inner);

  start_sequence ();

  need_clobber = false;
  for (i = 0; i < CEIL (mode_size, UNITS_PER_WORD); i++)
    {
      rtx xpart = operand_subword (x, i, 1, mode);
      rtx ypart;

      /* Do not generate code for a move if it would come entirely
         from the undefined bits of a paradoxical subreg.  */
      if (undefined_operand_subword_p (y, i))
        continue;

      ypart = operand_subword (y, i, 1, mode);

      /* If we can't get a part of Y, put Y into memory if it is a
         constant.  Otherwise, force it into a register.  Then we must
         be able to get a part of Y.  */
      if (ypart == 0 && CONSTANT_P (y))
        {
          y = use_anchored_address (force_const_mem (mode, y));
          ypart = operand_subword (y, i, 1, mode);
        }
      else if (ypart == 0)
        ypart = operand_subword_force (y, i, mode);

      gcc_assert (xpart && ypart);

      need_clobber |= (GET_CODE (xpart) == SUBREG);

      last_insn = emit_move_insn (xpart, ypart);
    }

  seq = get_insns ();
  end_sequence ();

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.
     We never want to emit such a clobber after reload.  */
  if (x != y
      && ! (reload_in_progress || reload_completed)
      && need_clobber != 0)
    emit_clobber (x);

  emit_insn (seq);

  return last_insn;
}

/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx_insn *
emit_move_insn_1 (rtx x, rtx y)
{
  machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = optab_handler (mov_optab, mode);
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
      || ALL_FIXED_POINT_MODE_P (mode))
    {
      rtx_insn *result = emit_move_via_integer (mode, x, y, true);

      /* If we can't find an integer mode, use multi words.  */
      if (result)
        return result;
      else
        return emit_move_multi_word (mode, x, y);
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y)
      || known_le (GET_MODE_BITSIZE (mode), HOST_BITS_PER_WIDE_INT))
    {
      rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);

      if (ret)
        {
          if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
            return ret;
        }
    }

  return emit_move_multi_word (mode, x, y);
}

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx_insn *
emit_move_insn (rtx x, rtx y)
{
  machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx_insn *last_insn;
  rtx set;

  gcc_assert (mode != BLKmode
              && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      y_cst = y;

      if (!targetm.legitimate_constant_p (mode, y))
        {
          y = force_const_mem (mode, y);

          /* If the target's cannot_force_const_mem prevented the spill,
             assume that the target's move expanders will also take care
             of the non-legitimate constant.  */
          if (!y)
            y = y_cst;
          else
            y = use_anchored_address (y);
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
                                         MEM_ADDR_SPACE (x))
          && ! push_operand (x, GET_MODE (x))))
    x = validize_mem (x);

  if (MEM_P (y)
      && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
                                        MEM_ADDR_SPACE (y)))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));

  return last_insn;
}

/* Generate the body of an instruction to copy Y into X.
   It may be a list of insns, if one insn isn't enough.  */

rtx_insn *
gen_move_insn (rtx x, rtx y)
{
  rtx_insn *seq;

  start_sequence ();
  emit_move_insn_1 (x, y);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx_insn *
compress_float_constant (rtx x, rtx y)
{
  machine_mode dstmode = GET_MODE (x);
  machine_mode orig_srcmode = GET_MODE (y);
  machine_mode srcmode;
  const REAL_VALUE_TYPE *r;
  int oldcost, newcost;
  bool speed = optimize_insn_for_speed_p ();

  r = CONST_DOUBLE_REAL_VALUE (y);

  if (targetm.legitimate_constant_p (dstmode, y))
    oldcost = set_src_cost (y, orig_srcmode, speed);
  else
    oldcost = set_src_cost (force_const_mem (dstmode, y), dstmode, speed);

  FOR_EACH_MODE_UNTIL (srcmode, orig_srcmode)
    {
      enum insn_code ic;
      rtx trunc_y;
      rtx_insn *last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
        continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, r))
        continue;

      trunc_y = const_double_from_real_value (*r, srcmode);

      if (targetm.legitimate_constant_p (srcmode, trunc_y))
        {
          /* Skip if the target needs extra instructions to perform
             the extension.  */
          if (!insn_operand_matches (ic, 1, trunc_y))
            continue;
          /* This is valid, but may not be cheaper than the original.  */
          newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
                                  dstmode, speed);
          if (oldcost < newcost)
            continue;
        }
      else if (float_extend_from_mem[dstmode][srcmode])
        {
          trunc_y = force_const_mem (srcmode, trunc_y);
          /* This is valid, but may not be cheaper than the original.  */
          newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
                                  dstmode, speed);
          if (oldcost < newcost)
            continue;
          trunc_y = validize_mem (trunc_y);
        }
      else
        continue;

      /* For CSE's benefit, force the compressed constant pool entry
         into a new pseudo.  This constant may be used in different modes,
         and if not, combine will put things back together for us.  */
      trunc_y = force_reg (srcmode, trunc_y);

      /* If x is a hard register, perform the extension into a pseudo,
         so that e.g. stack realignment code is aware of it.  */
      rtx target = x;
      if (REG_P (x) && HARD_REGISTER_P (x))
        target = gen_reg_rtx (dstmode);

      emit_unop_insn (ic, target, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (target))
        set_unique_reg_note (last_insn, REG_EQUAL, y);

      if (target != x)
        return emit_move_insn (x, target);
      return last_insn;
    }

  return NULL;
}

/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, poly_int64 extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (Pmode, size, extra));
  else if (REG_P (size) && known_eq (extra, 0))
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (maybe_ne (extra, 0))
        temp = expand_binop (Pmode, add_optab, temp,
                             gen_int_mode (extra, Pmode),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

  if (STACK_GROWS_DOWNWARD)
    {
      temp = virtual_outgoing_args_rtx;
      if (maybe_ne (extra, 0) && below)
        temp = plus_constant (Pmode, temp, extra);
    }
  else
    {
      poly_int64 csize;
      if (poly_int_rtx_p (size, &csize))
        temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
                              -csize - (below ? 0 : extra));
      else if (maybe_ne (extra, 0) && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (Pmode, size,
                                                               extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (NARROWEST_INT_MODE, temp);
}

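/* Usage sketch (hypothetical): on a STACK_GROWS_DOWNWARD target,

     rtx addr = push_block (GEN_INT (32), 0, 0);

   decrements the stack pointer by 32 bytes and returns an address
   (based on virtual_outgoing_args_rtx) for the start of the block.  */
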
/* A utility routine that returns the base of an auto-inc memory, or NULL.  */

static rtx
mem_autoinc_base (rtx mem)
{
  if (MEM_P (mem))
    {
      rtx addr = XEXP (mem, 0);
      if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
        return XEXP (addr, 0);
    }
  return NULL;
}

/* A utility routine used here, in reload, and in try_split.  The insns
   after PREV up to and including LAST are known to adjust the stack,
   with a final value of END_ARGS_SIZE.  Iterate backward from LAST
   placing notes as appropriate.  PREV may be NULL, indicating the
   entire insn sequence prior to LAST should be scanned.

   The set of allowed stack pointer modifications is small:
     (1) One or more auto-inc style memory references (aka pushes),
     (2) One or more addition/subtraction with the SP as destination,
     (3) A single move insn with the SP as destination,
     (4) A call_pop insn,
     (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.

   Insns in the sequence that do not modify the SP are ignored,
   except for noreturn calls.

   The return value is the amount of adjustment that can be trivially
   verified, via immediate operand or auto-inc.  If the adjustment
   cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN.  */

poly_int64
find_args_size_adjust (rtx_insn *insn)
{
  rtx dest, set, pat;
  int i;

  pat = PATTERN (insn);
  set = NULL;

  /* Look for a call_pop pattern.  */
  if (CALL_P (insn))
    {
      /* We have to allow non-call_pop patterns for the case
         of emit_single_push_insn of a TLS address.  */
      if (GET_CODE (pat) != PARALLEL)
        return 0;

      /* All call_pop have a stack pointer adjust in the parallel.
         The call itself is always first, and the stack adjust is
         usually last, so search from the end.  */
      for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
        {
          set = XVECEXP (pat, 0, i);
          if (GET_CODE (set) != SET)
            continue;
          dest = SET_DEST (set);
          if (dest == stack_pointer_rtx)
            break;
        }

      /* We'd better have found the stack pointer adjust.  */
      if (i == 0)
        return 0;

      /* Fall through to process the extracted SET and DEST
         as if it was a standalone insn.  */
    }
  else if (GET_CODE (pat) == SET)
    set = pat;
  else if ((set = single_set (insn)) != NULL)
    ;
  else if (GET_CODE (pat) == PARALLEL)
    {
      /* ??? Some older ports use a parallel with a stack adjust
         and a store for a PUSH_ROUNDING pattern, rather than a
         PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
      /* ??? See h8300 and m68k, pushqi1.  */
      for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
        {
          set = XVECEXP (pat, 0, i);
          if (GET_CODE (set) != SET)
            continue;
          dest = SET_DEST (set);
          if (dest == stack_pointer_rtx)
            break;

          /* We do not expect an auto-inc of the sp in the parallel.  */
          gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
          gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
                               != stack_pointer_rtx);
        }
      if (i < 0)
        return 0;
    }
  else
    return 0;

  dest = SET_DEST (set);

  /* Look for direct modifications of the stack pointer.  */
  if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
    {
      /* Look for a trivial adjustment, otherwise assume nothing.  */
      /* Note that the SPU restore_stack_block pattern refers to
         the stack pointer in V4SImode.  Consider that non-trivial.  */
      poly_int64 offset;
      if (SCALAR_INT_MODE_P (GET_MODE (dest))
          && strip_offset (SET_SRC (set), &offset) == stack_pointer_rtx)
        return offset;
      /* ??? Reload can generate no-op moves, which will be cleaned
         up later.  Recognize it and continue searching.  */
      else if (rtx_equal_p (dest, SET_SRC (set)))
        return 0;
      else
        return HOST_WIDE_INT_MIN;
    }
  else
    {
      rtx mem, addr;

      /* Otherwise only think about autoinc patterns.  */
      if (mem_autoinc_base (dest) == stack_pointer_rtx)
        {
          mem = dest;
          gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
                               != stack_pointer_rtx);
        }
      else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
        mem = SET_SRC (set);
      else
        return 0;

      addr = XEXP (mem, 0);
      switch (GET_CODE (addr))
        {
        case PRE_INC:
        case POST_INC:
          return GET_MODE_SIZE (GET_MODE (mem));
        case PRE_DEC:
        case POST_DEC:
          return -GET_MODE_SIZE (GET_MODE (mem));
        case PRE_MODIFY:
        case POST_MODIFY:
          addr = XEXP (addr, 1);
          gcc_assert (GET_CODE (addr) == PLUS);
          gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
          return rtx_to_poly_int64 (XEXP (addr, 1));
        default:
          gcc_unreachable ();
        }
    }
}

poly_int64
fixup_args_size_notes (rtx_insn *prev, rtx_insn *last,
                       poly_int64 end_args_size)
{
  poly_int64 args_size = end_args_size;
  bool saw_unknown = false;
  rtx_insn *insn;

  for (insn = last; insn != prev; insn = PREV_INSN (insn))
    {
      if (!NONDEBUG_INSN_P (insn))
        continue;

      /* We might have existing REG_ARGS_SIZE notes, e.g. when pushing
         a call argument containing a TLS address that itself requires
         a call to __tls_get_addr.  The handling of stack_pointer_delta
         in emit_single_push_insn is supposed to ensure that any such
         notes are already correct.  */
      rtx note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
      gcc_assert (!note || known_eq (args_size, get_args_size (note)));

      poly_int64 this_delta = find_args_size_adjust (insn);
      if (known_eq (this_delta, 0))
        {
          if (!CALL_P (insn)
              || ACCUMULATE_OUTGOING_ARGS
              || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
            continue;
        }

      gcc_assert (!saw_unknown);
      if (known_eq (this_delta, HOST_WIDE_INT_MIN))
        saw_unknown = true;

      if (!note)
        add_args_size_note (insn, args_size);
      if (STACK_GROWS_DOWNWARD)
        this_delta = -poly_uint64 (this_delta);

      if (saw_unknown)
        args_size = HOST_WIDE_INT_MIN;
      else
        args_size -= this_delta;
    }

  return args_size;
}
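/* Worked example (editorial): assume the stack grows downward and three
   8-byte pushes bring stack_pointer_delta from 0 to 24.  Walking backward
   from the last push, fixup_args_size_notes attaches REG_ARGS_SIZE notes
   of 24, 16 and 8 respectively, each recording the args-size state after
   its insn, and returns 0 once the scan reaches PREV, i.e. the args size
   in effect before the whole sequence.  */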
#ifdef PUSH_ROUNDING
/* Emit single push insn.  */

static void
emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  poly_int64 rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;

  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = optab_handler (push_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];

      create_input_operand (&ops[0], x, mode);
      if (maybe_expand_insn (icode, 1, ops))
        return;
    }
  if (known_eq (GET_MODE_SIZE (mode), rounded_size))
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (targetm.calls.function_arg_padding (mode, type) == PAD_DOWNWARD)
    {
      emit_move_insn (stack_pointer_rtx,
                      expand_binop (Pmode,
                                    STACK_GROWS_DOWNWARD ? sub_optab
                                    : add_optab,
                                    stack_pointer_rtx,
                                    gen_int_mode (rounded_size, Pmode),
                                    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      poly_int64 offset = rounded_size - GET_MODE_SIZE (mode);
      if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
        /* We have already decremented the stack pointer, so get the
           previous value.  */
        offset += rounded_size;

      if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
        /* We have already incremented the stack pointer, so get the
           previous value.  */
        offset -= rounded_size;

      dest_addr = plus_constant (Pmode, stack_pointer_rtx, offset);
    }
  else
    {
      if (STACK_GROWS_DOWNWARD)
        /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
        dest_addr = plus_constant (Pmode, stack_pointer_rtx, -rounded_size);
      else
        /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
        dest_addr = plus_constant (Pmode, stack_pointer_rtx, rounded_size);

      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (cfun->tail_call_marked)
        /* Function incoming arguments may overlap with sibling call
           outgoing arguments and we cannot allow reordering of reads
           from function arguments with stores to outgoing arguments
           of sibling calls.  */
        set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
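/* Numeric sketch (editorial): with PUSH_ROUNDING rounding a 5-byte mode up
   to 8 bytes and PAD_DOWNWARD padding, rounded_size - GET_MODE_SIZE gives
   offset = 3, so the stack pointer is adjusted by 8 first and the value is
   then stored 3 bytes above the new stack pointer; on a POST_DEC target
   the already-performed decrement is compensated by adding rounded_size
   back into the offset.  */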
/* Emit and annotate a single push insn.  */

static void
emit_single_push_insn (machine_mode mode, rtx x, tree type)
{
  poly_int64 delta, old_delta = stack_pointer_delta;
  rtx_insn *prev = get_last_insn ();
  rtx_insn *last;

  emit_single_push_insn_1 (mode, x, type);

  /* Adjust stack_pointer_delta to describe the situation after the push
     we just performed.  Note that we must do this after the push rather
     than before the push in case calculating X needs pushes and pops of
     its own (e.g. if calling __tls_get_addr).  The REG_ARGS_SIZE notes
     for such pushes and pops must not include the effect of the future
     push of X.  */
  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));

  last = get_last_insn ();

  /* Notice the common case where we emitted exactly one insn.  */
  if (PREV_INSN (last) == prev)
    {
      add_args_size_note (last, stack_pointer_delta);
      return;
    }

  delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
  gcc_assert (known_eq (delta, HOST_WIDE_INT_MIN)
              || known_eq (delta, old_delta));
}
/* If reading SIZE bytes from X will end up reading from
   Y return the number of bytes that overlap.  Return -1
   if there is no overlap or -2 if we can't determine
   (for example when X and Y have different base registers).  */

static int
memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
{
  rtx tmp = plus_constant (Pmode, x, size);
  rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);

  if (!CONST_INT_P (sub))
    return -2;

  HOST_WIDE_INT val = INTVAL (sub);

  return IN_RANGE (val, 1, size) ? val : -1;
}
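/* Worked example (editorial): with X = sp+16, Y = sp+24 and SIZE = 16, the
   bytes read are [sp+16, sp+32), so TMP - Y folds to the constant 8, which
   lies in [1, 16] and is returned as the number of overlapping bytes.  Had
   the difference not folded to a CONST_INT (e.g. different base
   registers), -2 would signal that the overlap cannot be determined.  */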
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.
   Return true if successful.  May return false if asked to push a
   partial argument during a sibcall optimization (as specified by
   SIBCALL_P) and the incoming and outgoing pointers cannot be shown
   not to overlap.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
bool
emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
                unsigned int align, int partial, rtx reg, poly_int64 extra,
                rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
                rtx alignment_pad, bool sibcall_p)
{
  rtx xinner;
  pad_direction stack_direction
    = STACK_GROWS_DOWNWARD ? PAD_DOWNWARD : PAD_UPWARD;

  /* Decide where to pad the argument: PAD_DOWNWARD for below,
     PAD_UPWARD for above, or PAD_NONE for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  pad_direction where_pad = targetm.calls.function_arg_padding (mode, type);

  /* Invert direction if stack is post-decrement.  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != PAD_NONE)
      where_pad = (where_pad == PAD_DOWNWARD ? PAD_UPWARD : PAD_DOWNWARD);

  xinner = x;

  int nregs = partial / UNITS_PER_WORD;
  rtx *tmp_regs = NULL;
  int overlapping = 0;
  if (mode == BLKmode
      || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used;
      int offset;
      int skip;

      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      used = partial - offset;

      if (mode != BLKmode)
        {
          /* A value is to be stored in an insufficiently aligned
             stack slot; copy via a suitably aligned slot if
             necessary.  */
          size = gen_int_mode (GET_MODE_SIZE (mode), Pmode);
          if (!MEM_P (xinner))
            {
              temp = assign_temp (type, 1, 1);
              emit_move_insn (temp, xinner);
              xinner = temp;
            }
        }

      gcc_assert (size);

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && PUSH_ARGS
          && CONST_INT_P (size)
          && skip == 0
          && MEM_ALIGN (xinner) >= align
          && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((!targetm.slow_unaligned_access (word_mode, align))
              || align >= BIGGEST_ALIGNMENT
              || known_eq (PUSH_ROUNDING (align / BITS_PER_UNIT),
                           align / BITS_PER_UNIT))
          && known_eq (PUSH_ROUNDING (INTVAL (size)), INTVAL (size)))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (maybe_ne (extra, 0)
              && args_addr == 0
              && where_pad != PAD_NONE
              && where_pad != stack_direction)
            anti_adjust_stack (gen_int_mode (extra, Pmode));

          move_by_pieces (NULL, xinner, INTVAL (size) - used, align,
                          RETURN_BEGIN);
        }
      else
#endif /* PUSH_ROUNDING  */
        {
          rtx target;

          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (CONST_INT_P (size))
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     gen_int_mode (used, GET_MODE (size)),
                                     NULL_RTX, 0, OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          poly_int64 offset;
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == PAD_DOWNWARD);
              extra = 0;
            }
          else if (poly_int_rtx_p (args_so_far, &offset))
            temp = memory_address (BLKmode,
                                   plus_constant (Pmode, args_addr,
                                                  skip + offset));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (Pmode,
                                                  gen_rtx_PLUS (Pmode,
                                                                args_addr,
                                                                args_so_far),
                                                  skip));

          if (!ACCUMULATE_OUTGOING_ARGS)
            {
              /* If the source is referenced relative to the stack pointer,
                 copy it to another register to stabilize it.  We do not need
                 to do this if we know that we won't be changing sp.  */

              if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
                  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
                temp = copy_to_reg (temp);
            }

          target = gen_rtx_MEM (BLKmode, temp);

          /* We do *not* set_mem_attributes here, because incoming arguments
             may overlap with sibling call outgoing arguments and we cannot
             allow reordering of reads from function arguments with stores
             to outgoing arguments of sibling calls.  We do, however, want
             to record the alignment of the stack slot.  */
          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (target, align);

          /* If part should go in registers and pushing to that part would
             overwrite some of the values that need to go into regs, load the
             overlapping values into temporary pseudos to be moved into the
             hard regs at the end after the stack pushing has completed.
             We cannot load them directly into the hard regs here because
             they can be clobbered by the block move expansions.  */

          if (partial > 0 && reg != 0 && mode == BLKmode
              && GET_CODE (reg) != PARALLEL)
            {
              overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
              if (overlapping > 0)
                {
                  gcc_assert (overlapping % UNITS_PER_WORD == 0);
                  overlapping /= UNITS_PER_WORD;

                  tmp_regs = XALLOCAVEC (rtx, overlapping);

                  for (int i = 0; i < overlapping; i++)
                    tmp_regs[i] = gen_reg_rtx (word_mode);

                  for (int i = 0; i < overlapping; i++)
                    emit_move_insn (tmp_regs[i],
                                    operand_subword_force (target, i, mode));
                }
              else if (overlapping == -1)
                overlapping = 0;
              /* Could not determine whether there is overlap.
                 Fail the sibcall.  */
              else
                {
                  overlapping = 0;
                  if (sibcall_p)
                    return false;
                }
            }
          emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  This case is only supported
         for fixed-width modes.  */
      int size = GET_MODE_SIZE (mode).to_constant ();
      size /= UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # bytes of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (maybe_ne (extra, 0)
          && args_addr == 0
          && where_pad != PAD_NONE
          && where_pad != stack_direction)
        anti_adjust_stack (gen_int_mode (extra, Pmode));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
      offset /= UNITS_PER_WORD;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
      for (i = size - 1; i >= not_stack; i--)
        if (i >= not_stack + offset)
          if (!emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)),
                          reg_parm_stack_space, alignment_pad, sibcall_p))
            return false;
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (maybe_ne (extra, 0)
          && args_addr == 0
          && where_pad != PAD_NONE
          && where_pad != stack_direction)
        anti_adjust_stack (gen_int_mode (extra, Pmode));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
        emit_single_push_insn (mode, x, type);
      else
#endif
        {
          addr = simplify_gen_binary (PLUS, Pmode, args_addr, args_so_far);
          dest = gen_rtx_MEM (mode, memory_address (mode, addr));

          /* We do *not* set_mem_attributes here, because incoming arguments
             may overlap with sibling call outgoing arguments and we cannot
             allow reordering of reads from function arguments with stores
             to outgoing arguments of sibling calls.  We do, however, want
             to record the alignment of the stack slot.  */
          /* ALIGN may well be better aligned than TYPE, e.g. due to
             PARM_BOUNDARY.  Assume the caller isn't lying.  */
          set_mem_align (dest, align);

          emit_move_insn (dest, x);
        }
    }

  /* Move the partial arguments into the registers and any overlapping
     values that we moved into the pseudos in tmp_regs.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, x, type, -1);
      else
        {
          gcc_assert (partial % UNITS_PER_WORD == 0);
          move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);

          for (int i = 0; i < overlapping; i++)
            emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
                                         + nregs - overlapping + i),
                            tmp_regs[i]);
        }
    }

  if (maybe_ne (extra, 0) && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (gen_int_mode (extra, Pmode));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);

  return true;
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
          || x == 0
          /* Only registers can be subtargets.  */
          || !REG_P (x)
          /* Don't use hard regs to avoid extending their life.  */
          || REGNO (x) < FIRST_PSEUDO_REGISTER
          ? 0 : x);
}
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */

static bool
optimize_bitfield_assignment_op (poly_uint64 pbitsize,
                                 poly_uint64 pbitpos,
                                 poly_uint64 pbitregion_start,
                                 poly_uint64 pbitregion_end,
                                 machine_mode mode1, rtx str_rtx,
                                 tree to, tree src, bool reverse)
{
  /* str_mode is not guaranteed to be a scalar type.  */
  machine_mode str_mode = GET_MODE (str_rtx);
  unsigned int str_bitsize;
  tree op0, op1;
  rtx value, result;
  optab binop;
  gimple *srcstmt;
  enum tree_code code;

  unsigned HOST_WIDE_INT bitsize, bitpos, bitregion_start, bitregion_end;
  if (mode1 != VOIDmode
      || !pbitsize.is_constant (&bitsize)
      || !pbitpos.is_constant (&bitpos)
      || !pbitregion_start.is_constant (&bitregion_start)
      || !pbitregion_end.is_constant (&bitregion_end)
      || bitsize >= BITS_PER_WORD
      || !GET_MODE_BITSIZE (str_mode).is_constant (&str_bitsize)
      || str_bitsize > BITS_PER_WORD
      || TREE_SIDE_EFFECTS (to)
      || TREE_THIS_VOLATILE (to))
    return false;

  if (TREE_CODE (src) != SSA_NAME)
    return false;
  if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
    return false;

  srcstmt = get_gimple_for_ssa_name (src);
  if (!srcstmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
    return false;

  code = gimple_assign_rhs_code (srcstmt);

  op0 = gimple_assign_rhs1 (srcstmt);

  /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
     to find its initialization.  Hopefully the initialization will
     be from a bitfield load.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple *op0stmt = get_gimple_for_ssa_name (op0);

      /* We want to eventually have OP0 be the same as TO, which
         should be a bitfield.  */
      if (!op0stmt
          || !is_gimple_assign (op0stmt)
          || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
        return false;
      op0 = gimple_assign_rhs1 (op0stmt);
    }

  op1 = gimple_assign_rhs2 (srcstmt);

  if (!operand_equal_p (to, op0, 0))
    return false;

  if (MEM_P (str_rtx))
    {
      unsigned HOST_WIDE_INT offset1;

      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
        str_bitsize = BITS_PER_WORD;

      scalar_int_mode best_mode;
      if (!get_best_mode (bitsize, bitpos, bitregion_start, bitregion_end,
                          MEM_ALIGN (str_rtx), str_bitsize, false, &best_mode))
        return false;
      str_mode = best_mode;
      str_bitsize = GET_MODE_BITSIZE (best_mode);

      offset1 = bitpos;
      bitpos %= str_bitsize;
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
    }
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
    return false;

  /* If the bit field covers the whole REG/MEM, store_field
     will likely generate better code.  */
  if (bitsize >= str_bitsize)
    return false;

  /* We can't handle fields split across multiple entities.  */
  if (bitpos + bitsize > str_bitsize)
    return false;

  if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    bitpos = str_bitsize - bitpos - bitsize;

  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* For now, just optimize the case of the topmost bitfield
         where we don't need to do any masking and also
         1 bit bitfields where xor can be used.
         We might win by one instruction for the other bitfields
         too if insv/extv instructions aren't used, so that
         can be added later.  */
      if ((reverse || bitpos + bitsize != str_bitsize)
          && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
        break;

      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
                             TYPE_MODE (TREE_TYPE (op1)), value,
                             TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
         we can alias adjacent data.  */
      if (MEM_P (str_rtx))
        {
          str_rtx = shallow_copy_rtx (str_rtx);
          set_mem_alias_set (str_rtx, 0);
          set_mem_expr (str_rtx, 0);
        }

      if (bitsize == 1 && (reverse || bitpos + bitsize != str_bitsize))
        {
          value = expand_and (str_mode, value, const1_rtx, NULL);
          binop = xor_optab;
        }
      else
        binop = code == PLUS_EXPR ? add_optab : sub_optab;

      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
      if (reverse)
        value = flip_storage_order (str_mode, value);
      result = expand_binop (str_mode, binop, str_rtx,
                             value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
        emit_move_insn (str_rtx, result);
      return true;

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
        break;
      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
                             TYPE_MODE (TREE_TYPE (op1)), value,
                             TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
         we can alias adjacent data.  */
      if (MEM_P (str_rtx))
        {
          str_rtx = shallow_copy_rtx (str_rtx);
          set_mem_alias_set (str_rtx, 0);
          set_mem_expr (str_rtx, 0);
        }

      binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
      if (bitpos + bitsize != str_bitsize)
        {
          rtx mask = gen_int_mode ((HOST_WIDE_INT_1U << bitsize) - 1,
                                   str_mode);
          value = expand_and (str_mode, value, mask, NULL_RTX);
        }
      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
      if (reverse)
        value = flip_storage_order (str_mode, value);
      result = expand_binop (str_mode, binop, str_rtx,
                             value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
        emit_move_insn (str_rtx, result);
      return true;

    default:
      break;
    }

  return false;
}
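/* Example of the transformation (editorial sketch): for

     struct S { unsigned a : 4; unsigned b : 4; } s;
     s.b |= 3;

   the BIT_IOR_EXPR case can emit a single IOR of the containing word with
   the constant 3 masked and shifted into place (0x30), instead of an
   extract-modify-insert sequence.  */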
/* In the C++ memory model, consecutive bit fields in a structure are
   considered one memory location.

   Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
   returns the bit range of consecutive bits in which this COMPONENT_REF
   belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
   and *OFFSET may be adjusted in the process.

   If the access does not need to be restricted, 0 is returned in both
   *BITSTART and *BITEND.  */

void
get_bit_range (poly_uint64_pod *bitstart, poly_uint64_pod *bitend, tree exp,
               poly_int64_pod *bitpos, tree *offset)
{
  poly_int64 bitoffset;
  tree field, repr;

  gcc_assert (TREE_CODE (exp) == COMPONENT_REF);

  field = TREE_OPERAND (exp, 1);
  repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
  /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
     need to limit the range we can access.  */
  if (!repr)
    {
      *bitstart = *bitend = 0;
      return;
    }

  /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
     part of a larger bit field, then the representative does not serve any
     useful purpose.  This can occur in Ada.  */
  if (handled_component_p (TREE_OPERAND (exp, 0)))
    {
      machine_mode rmode;
      poly_int64 rbitsize, rbitpos;
      tree roffset;
      int unsignedp, reversep, volatilep = 0;
      get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
                           &roffset, &rmode, &unsignedp, &reversep,
                           &volatilep);
      if (!multiple_p (rbitpos, BITS_PER_UNIT))
        {
          *bitstart = *bitend = 0;
          return;
        }
    }

  /* Compute the adjustment to bitpos from the offset of the field
     relative to the representative.  DECL_FIELD_OFFSET of field and
     repr are the same by construction if they are not constants,
     see finish_bitfield_layout.  */
  poly_uint64 field_offset, repr_offset;
  if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
      && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
    bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
  else
    bitoffset = 0;
  bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
                - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));

  /* If the adjustment is larger than bitpos, we would have a negative bit
     position for the lower bound and this may wreak havoc later.  Adjust
     offset and bitpos to make the lower bound non-negative in that case.  */
  if (maybe_gt (bitoffset, *bitpos))
    {
      poly_int64 adjust_bits = upper_bound (bitoffset, *bitpos) - *bitpos;
      poly_int64 adjust_bytes = exact_div (adjust_bits, BITS_PER_UNIT);

      *bitpos += adjust_bits;
      if (*offset == NULL_TREE)
        *offset = size_int (-adjust_bytes);
      else
        *offset = size_binop (MINUS_EXPR, *offset, size_int (adjust_bytes));
      *bitstart = 0;
    }
  else
    *bitstart = *bitpos - bitoffset;

  *bitend = *bitstart + tree_to_poly_uint64 (DECL_SIZE (repr)) - 1;
}
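/* Example (editorial sketch): in

     struct S { char a; int b : 7; int c : 9; char d; };

   B and C share one DECL_BIT_FIELD_REPRESENTATIVE, so a store to C may
   only touch the bits of that representative; *BITSTART/*BITEND delimit
   the b/c bit range and never overlap A or D, as the C++ memory model
   requires.  */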
/* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
   in memory and has non-BLKmode.  DECL_RTL must not be a MEM; if
   DECL_RTL was not set yet, return NORTL.  */

static inline bool
addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
{
  if (TREE_CODE (addr) != ADDR_EXPR)
    return false;

  tree base = TREE_OPERAND (addr, 0);
  if (!DECL_P (base)
      || TREE_ADDRESSABLE (base)
      || DECL_MODE (base) == BLKmode)
    return false;

  if (!DECL_RTL_SET_P (base))
    return nortl;

  return (!MEM_P (DECL_RTL (base)));
}

/* Returns true if the MEM_REF REF refers to an object that does not
   reside in memory and has non-BLKmode.  */

static inline bool
mem_ref_refers_to_non_mem_p (tree ref)
{
  tree base = TREE_OPERAND (ref, 0);
  return addr_expr_of_non_mem_decl_p_1 (base, false);
}
/* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
   is true, try generating a nontemporal store.  */

void
expand_assignment (tree to, tree from, bool nontemporal)
{
  rtx to_rtx = 0;
  rtx result;
  machine_mode mode;
  unsigned int align;
  enum insn_code icode;

  /* Don't crash if the lhs of the assignment was erroneous.  */
  if (TREE_CODE (to) == ERROR_MARK)
    {
      expand_normal (from);
      return;
    }

  /* Optimize away no-op moves without side-effects.  */
  if (operand_equal_p (to, from, 0))
    return;

  /* Handle misaligned stores.  */
  mode = TYPE_MODE (TREE_TYPE (to));
  if ((TREE_CODE (to) == MEM_REF
       || TREE_CODE (to) == TARGET_MEM_REF)
      && mode != BLKmode
      && !mem_ref_refers_to_non_mem_p (to)
      && ((align = get_object_alignment (to))
          < GET_MODE_ALIGNMENT (mode))
      && (((icode = optab_handler (movmisalign_optab, mode))
           != CODE_FOR_nothing)
          || targetm.slow_unaligned_access (mode, align)))
    {
      rtx reg, mem;

      reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      reg = force_not_mem (reg);
      mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (TREE_CODE (to) == MEM_REF && REF_REVERSE_STORAGE_ORDER (to))
        reg = flip_storage_order (mode, reg);

      if (icode != CODE_FOR_nothing)
        {
          struct expand_operand ops[2];

          create_fixed_operand (&ops[0], mem);
          create_input_operand (&ops[1], reg, mode);
          /* The movmisalign<mode> pattern cannot fail, else the assignment
             would silently be omitted.  */
          expand_insn (icode, 2, ops);
        }
      else
        store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg,
                         false);
      return;
    }
  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  Same for (partially) storing into a non-memory object.  */
  if (handled_component_p (to)
      || (TREE_CODE (to) == MEM_REF
          && (REF_REVERSE_STORAGE_ORDER (to)
              || mem_ref_refers_to_non_mem_p (to)))
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      machine_mode mode1;
      poly_int64 bitsize, bitpos;
      poly_uint64 bitregion_start = 0;
      poly_uint64 bitregion_end = 0;
      tree offset;
      int unsignedp, reversep, volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
                                 &unsignedp, &reversep, &volatilep);

      /* Make sure bitpos is not negative, it can wreak havoc later.  */
      if (maybe_lt (bitpos, 0))
        {
          gcc_assert (offset == NULL_TREE);
          offset = size_int (bits_to_bytes_round_down (bitpos));
          bitpos = num_trailing_bits (bitpos);
        }

      if (TREE_CODE (to) == COMPONENT_REF
          && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
        get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
      /* The C++ memory model naturally applies to byte-aligned fields.
         However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
         BITSIZE are not byte-aligned, there is no need to limit the range
         we can access.  This can occur with packed structures in Ada.  */
      else if (maybe_gt (bitsize, 0)
               && multiple_p (bitsize, BITS_PER_UNIT)
               && multiple_p (bitpos, BITS_PER_UNIT))
        {
          bitregion_start = bitpos;
          bitregion_end = bitpos + bitsize - 1;
        }

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* If the field has a mode, we want to access it in the
         field's mode, not the computed mode.
         If a MEM has VOIDmode (external with incomplete type),
         use BLKmode for it instead.  */
      if (MEM_P (to_rtx))
        {
          if (mode1 != VOIDmode)
            to_rtx = adjust_address (to_rtx, mode1, 0);
          else if (GET_MODE (to_rtx) == VOIDmode)
            to_rtx = adjust_address (to_rtx, BLKmode, 0);
        }

      if (offset != 0)
        {
          machine_mode address_mode;
          rtx offset_rtx;

          if (!MEM_P (to_rtx))
            {
              /* We can get constant negative offsets into arrays with broken
                 user code.  Translate this to a trap instead of ICEing.  */
              gcc_assert (TREE_CODE (offset) == INTEGER_CST);
              expand_builtin_trap ();
              to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
            }

          offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
          address_mode = get_address_mode (to_rtx);
          if (GET_MODE (offset_rtx) != address_mode)
            {
              /* We cannot be sure that the RTL in offset_rtx is valid outside
                 of a memory address context, so force it into a register
                 before attempting to convert it to the desired mode.  */
              offset_rtx = force_operand (offset_rtx, NULL_RTX);
              offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
            }

          /* If we have an expression in OFFSET_RTX and a non-zero
             byte offset in BITPOS, adding the byte offset before the
             OFFSET_RTX results in better intermediate code, which makes
             later rtl optimization passes perform better.

             We prefer intermediate code like this:

             r124:DI=r123:DI+0x18
             [r124:DI]=r121:DI

             ... instead of ...

             r124:DI=r123:DI+0x10
             [r124:DI+0x8]=r121:DI

             This is only done for aligned data values, as these can
             be expected to result in single move instructions.  */
          poly_int64 bytepos;
          if (mode1 != VOIDmode
              && maybe_ne (bitpos, 0)
              && maybe_gt (bitsize, 0)
              && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
              && multiple_p (bitpos, bitsize)
              && multiple_p (bitsize, GET_MODE_ALIGNMENT (mode1))
              && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
            {
              to_rtx = adjust_address (to_rtx, mode1, bytepos);
              bitregion_start = 0;
              if (known_ge (bitregion_end, poly_uint64 (bitpos)))
                bitregion_end -= bitpos;
              bitpos = 0;
            }

          to_rtx = offset_address (to_rtx, offset_rtx,
                                   highest_pow2_factor_for_target (to,
                                                                   offset));
        }

      /* No action is needed if the target is not a memory and the field
         lies completely outside that target.  This can occur if the source
         code contains an out-of-bounds access to a small array.  */
      if (!MEM_P (to_rtx)
          && GET_MODE (to_rtx) != BLKmode
          && known_ge (bitpos, GET_MODE_PRECISION (GET_MODE (to_rtx))))
        {
          expand_normal (from);
          result = NULL;
        }
      /* Handle expand_expr of a complex value returning a CONCAT.  */
      else if (GET_CODE (to_rtx) == CONCAT)
        {
          machine_mode to_mode = GET_MODE (to_rtx);
          gcc_checking_assert (COMPLEX_MODE_P (to_mode));
          poly_int64 mode_bitsize = GET_MODE_BITSIZE (to_mode);
          unsigned short inner_bitsize = GET_MODE_UNIT_BITSIZE (to_mode);
          if (TYPE_MODE (TREE_TYPE (from)) == to_mode
              && known_eq (bitpos, 0)
              && known_eq (bitsize, mode_bitsize))
            result = store_expr (from, to_rtx, false, nontemporal, reversep);
          else if (TYPE_MODE (TREE_TYPE (from)) == GET_MODE_INNER (to_mode)
                   && known_eq (bitsize, inner_bitsize)
                   && (known_eq (bitpos, 0)
                       || known_eq (bitpos, inner_bitsize)))
            result = store_expr (from, XEXP (to_rtx, maybe_ne (bitpos, 0)),
                                 false, nontemporal, reversep);
          else if (known_le (bitpos + bitsize, inner_bitsize))
            result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
                                  bitregion_start, bitregion_end,
                                  mode1, from, get_alias_set (to),
                                  nontemporal, reversep);
          else if (known_ge (bitpos, inner_bitsize))
            result = store_field (XEXP (to_rtx, 1), bitsize,
                                  bitpos - inner_bitsize,
                                  bitregion_start, bitregion_end,
                                  mode1, from, get_alias_set (to),
                                  nontemporal, reversep);
          else if (known_eq (bitpos, 0) && known_eq (bitsize, mode_bitsize))
            {
              result = expand_normal (from);
              if (GET_CODE (result) == CONCAT)
                {
                  to_mode = GET_MODE_INNER (to_mode);
                  machine_mode from_mode = GET_MODE_INNER (GET_MODE (result));
                  rtx from_real
                    = simplify_gen_subreg (to_mode, XEXP (result, 0),
                                           from_mode, 0);
                  rtx from_imag
                    = simplify_gen_subreg (to_mode, XEXP (result, 1),
                                           from_mode, 0);
                  if (!from_real || !from_imag)
                    goto concat_store_slow;
                  emit_move_insn (XEXP (to_rtx, 0), from_real);
                  emit_move_insn (XEXP (to_rtx, 1), from_imag);
                }
              else
                {
                  rtx from_rtx
                    = simplify_gen_subreg (to_mode, result,
                                           TYPE_MODE (TREE_TYPE (from)), 0);
                  if (from_rtx)
                    {
                      emit_move_insn (XEXP (to_rtx, 0),
                                      read_complex_part (from_rtx, false));
                      emit_move_insn (XEXP (to_rtx, 1),
                                      read_complex_part (from_rtx, true));
                    }
                  else
                    {
                      machine_mode to_mode
                        = GET_MODE_INNER (GET_MODE (to_rtx));
                      rtx from_real
                        = simplify_gen_subreg (to_mode, result,
                                               TYPE_MODE (TREE_TYPE (from)),
                                               0);
                      rtx from_imag
                        = simplify_gen_subreg (to_mode, result,
                                               TYPE_MODE (TREE_TYPE (from)),
                                               GET_MODE_SIZE (to_mode));
                      if (!from_real || !from_imag)
                        goto concat_store_slow;
                      emit_move_insn (XEXP (to_rtx, 0), from_real);
                      emit_move_insn (XEXP (to_rtx, 1), from_imag);
                    }
                }
            }
          else
            {
            concat_store_slow:;
              rtx temp = assign_stack_temp (to_mode,
                                            GET_MODE_SIZE (GET_MODE (to_rtx)));
              write_complex_part (temp, XEXP (to_rtx, 0), false);
              write_complex_part (temp, XEXP (to_rtx, 1), true);
              result = store_field (temp, bitsize, bitpos,
                                    bitregion_start, bitregion_end,
                                    mode1, from, get_alias_set (to),
                                    nontemporal, reversep);
              emit_move_insn (XEXP (to_rtx, 0),
                              read_complex_part (temp, false));
              emit_move_insn (XEXP (to_rtx, 1),
                              read_complex_part (temp, true));
            }
        }
      /* For calls to functions returning variable length structures, if TO_RTX
         is not a MEM, go through a MEM because we must not create temporaries
         of the VLA type.  */
      else if (!MEM_P (to_rtx)
               && TREE_CODE (from) == CALL_EXPR
               && COMPLETE_TYPE_P (TREE_TYPE (from))
               && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) != INTEGER_CST)
        {
          rtx temp = assign_stack_temp (GET_MODE (to_rtx),
                                        GET_MODE_SIZE (GET_MODE (to_rtx)));
          result = store_field (temp, bitsize, bitpos, bitregion_start,
                                bitregion_end, mode1, from, get_alias_set (to),
                                nontemporal, reversep);
          emit_move_insn (to_rtx, temp);
        }
      else
        {
          if (MEM_P (to_rtx))
            {
              /* If the field is at offset zero, we could have been given the
                 DECL_RTX of the parent struct.  Don't munge it.  */
              to_rtx = shallow_copy_rtx (to_rtx);
              set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
              if (volatilep)
                MEM_VOLATILE_P (to_rtx) = 1;
            }

          gcc_checking_assert (known_ge (bitpos, 0));
          if (optimize_bitfield_assignment_op (bitsize, bitpos,
                                               bitregion_start, bitregion_end,
                                               mode1, to_rtx, to, from,
                                               reversep))
            result = NULL;
          else
            result = store_field (to_rtx, bitsize, bitpos,
                                  bitregion_start, bitregion_end,
                                  mode1, from, get_alias_set (to),
                                  nontemporal, reversep);
        }

      if (result)
        preserve_temp_slots (result);
      pop_temp_slots ();
      return;
    }
  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  The same is true for SSA names.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && COMPLETE_TYPE_P (TREE_TYPE (from))
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! (((VAR_P (to)
              || TREE_CODE (to) == PARM_DECL
              || TREE_CODE (to) == RESULT_DECL)
             && REG_P (DECL_RTL (to)))
            || TREE_CODE (to) == SSA_NAME))
    {
      rtx value;

      push_temp_slots ();
      value = expand_normal (from);

      if (to_rtx == 0)
        to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
        {
          if (GET_CODE (value) == PARALLEL)
            emit_group_move (to_rtx, value);
          else
            emit_group_load (to_rtx, value, TREE_TYPE (from),
                             int_size_in_bytes (TREE_TYPE (from)));
        }
      else if (GET_CODE (value) == PARALLEL)
        emit_group_store (to_rtx, value, TREE_TYPE (from),
                          int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
        {
          /* Handle calls that return BLKmode values in registers.  */
          if (REG_P (value))
            copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
          else
            emit_block_move (to_rtx, value, expr_size (from),
                             BLOCK_OP_NORMAL);
        }
      else
        {
          if (POINTER_TYPE_P (TREE_TYPE (to)))
            value = convert_memory_address_addr_space
              (as_a <scalar_int_mode> (GET_MODE (to_rtx)), value,
               TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));

          emit_move_insn (to_rtx, value);
        }

      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }
5367 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5369 /* Don't move directly into a return register. */
5370 if (TREE_CODE (to
) == RESULT_DECL
5371 && (REG_P (to_rtx
) || GET_CODE (to_rtx
) == PARALLEL
))
5377 /* If the source is itself a return value, it still is in a pseudo at
5378 this point so we can move it back to the return register directly. */
5380 && TYPE_MODE (TREE_TYPE (from
)) == BLKmode
5381 && TREE_CODE (from
) != CALL_EXPR
)
5382 temp
= copy_blkmode_to_reg (GET_MODE (to_rtx
), from
);
5384 temp
= expand_expr (from
, NULL_RTX
, GET_MODE (to_rtx
), EXPAND_NORMAL
);
5386 /* Handle calls that return values in multiple non-contiguous locations.
5387 The Irix 6 ABI has examples of this. */
5388 if (GET_CODE (to_rtx
) == PARALLEL
)
5390 if (GET_CODE (temp
) == PARALLEL
)
5391 emit_group_move (to_rtx
, temp
);
5393 emit_group_load (to_rtx
, temp
, TREE_TYPE (from
),
5394 int_size_in_bytes (TREE_TYPE (from
)));
5397 emit_move_insn (to_rtx
, temp
);
5399 preserve_temp_slots (to_rtx
);
5404 /* In case we are returning the contents of an object which overlaps
5405 the place the value is being stored, use a safe function when copying
5406 a value through a pointer into a structure value return block. */
5407 if (TREE_CODE (to
) == RESULT_DECL
5408 && TREE_CODE (from
) == INDIRECT_REF
5409 && ADDR_SPACE_GENERIC_P
5410 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from
, 0)))))
5411 && refs_may_alias_p (to
, from
)
5412 && cfun
->returns_struct
5413 && !cfun
->returns_pcc_struct
)
5418 size
= expr_size (from
);
5419 from_rtx
= expand_normal (from
);
5421 emit_block_move_via_libcall (XEXP (to_rtx
, 0), XEXP (from_rtx
, 0), size
);
5423 preserve_temp_slots (to_rtx
);
5428 /* Compute FROM and store the value in the rtx we got. */
5431 result
= store_expr (from
, to_rtx
, 0, nontemporal
, false);
5432 preserve_temp_slots (result
);
/* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
   succeeded, false otherwise.  */

bool
emit_storent_insn (rtx to, rtx from)
{
  struct expand_operand ops[2];
  machine_mode mode = GET_MODE (to);
  enum insn_code code = optab_handler (storent_optab, mode);

  if (code == CODE_FOR_nothing)
    return false;

  create_fixed_operand (&ops[0], to);
  create_input_operand (&ops[1], from, mode);
  return maybe_expand_insn (code, 2, ops);
}
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.

   If NONTEMPORAL is true, try using a nontemporal store instruction.

   If REVERSE is true, the store is to be done in reverse order.  */
rtx
store_expr (tree exp, rtx target, int call_param_p,
            bool nontemporal, bool reverse)
{
  rtx temp;
  rtx alt_rtl = NULL_RTX;
  location_t loc = curr_insn_location ();

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
         branch and an rvalue in the other.  Here, we resolve attempts to
         store the throw expression's nonexistent result.  */
      gcc_assert (!call_param_p);
      expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
         part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      return store_expr (TREE_OPERAND (exp, 1), target,
                         call_param_p, nontemporal, reverse);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
         test the condition, doing the appropriate assignment on either
         side.  This avoids the creation of unnecessary temporaries.
         For non-BLKmode, it is more efficient not to do this.  */

      rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1,
                 profile_probability::uninitialized ());
      store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
                  nontemporal, reverse);
      emit_jump_insn (targetm.gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
                  nontemporal, reverse);
      emit_label (lab2);
      OK_DEFER_POP;

      return NULL_RTX;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;
      scalar_int_mode outer_mode = subreg_unpromoted_mode (target);
      scalar_int_mode inner_mode = subreg_promoted_mode (target);

      /* We can do the conversion inside EXP, which will often result
         in some optimizations.  Do the conversion in two steps: first
         change the signedness, if needed, then the extend.  But don't
         do this if the type of EXP is a subtype of something else
         since then the conversion might involve more than just
         converting modes.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
          && TREE_TYPE (TREE_TYPE (exp)) == 0
          && GET_MODE_PRECISION (outer_mode)
             == TYPE_PRECISION (TREE_TYPE (exp)))
        {
          if (!SUBREG_CHECK_PROMOTED_SIGN (target,
                                           TYPE_UNSIGNED (TREE_TYPE (exp))))
            {
              /* Some types, e.g. Fortran's logical*4, won't have a signed
                 version, so use the mode instead.  */
              tree ntype
                = (signed_or_unsigned_type_for
                   (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
              if (ntype == NULL)
                ntype = lang_hooks.types.type_for_mode
                  (TYPE_MODE (TREE_TYPE (exp)),
                   SUBREG_PROMOTED_SIGN (target));

              exp = fold_convert_loc (loc, ntype, exp);
            }

          exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
                                  (inner_mode, SUBREG_PROMOTED_SIGN (target)),
                                  exp);

          inner_target = SUBREG_REG (target);
        }

      temp = expand_expr (exp, inner_target, VOIDmode,
                          call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
         sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
        {
          temp = convert_modes (outer_mode, TYPE_MODE (TREE_TYPE (exp)),
                                temp, SUBREG_PROMOTED_SIGN (target));
          temp = convert_modes (inner_mode, outer_mode, temp,
                                SUBREG_PROMOTED_SIGN (target));
        }

      convert_move (SUBREG_REG (target), temp,
                    SUBREG_PROMOTED_SIGN (target));

      return NULL_RTX;
    }
  else if ((TREE_CODE (exp) == STRING_CST
            || (TREE_CODE (exp) == MEM_REF
                && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
                && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
                   == STRING_CST
                && integer_zerop (TREE_OPERAND (exp, 1))))
           && !nontemporal && !call_param_p
           && MEM_P (target))
    {
      /* Optimize initialization of an array with a STRING_CST.  */
      HOST_WIDE_INT exp_len, str_copy_len;
      rtx dest_mem;
      tree str = TREE_CODE (exp) == STRING_CST
                 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);

      exp_len = int_expr_size (exp);
      if (exp_len <= 0)
        goto normal_expr;

      if (TREE_STRING_LENGTH (str) <= 0)
        goto normal_expr;

      str_copy_len = strlen (TREE_STRING_POINTER (str));
      if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
        goto normal_expr;

      str_copy_len = TREE_STRING_LENGTH (str);
      if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
          && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
        {
          str_copy_len += STORE_MAX_PIECES - 1;
          str_copy_len &= ~(STORE_MAX_PIECES - 1);
        }
      str_copy_len = MIN (str_copy_len, exp_len);
      if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
                                CONST_CAST (char *, TREE_STRING_POINTER (str)),
                                MEM_ALIGN (target), false))
        goto normal_expr;

      dest_mem = target;

      memop_ret retmode = exp_len > str_copy_len ? RETURN_END : RETURN_BEGIN;
      dest_mem = store_by_pieces (dest_mem,
                                  str_copy_len, builtin_strncpy_read_str,
                                  CONST_CAST (char *,
                                              TREE_STRING_POINTER (str)),
                                  MEM_ALIGN (target), false,
                                  retmode);
      if (exp_len > str_copy_len)
        clear_storage (adjust_address (dest_mem, BLKmode, 0),
                       GEN_INT (exp_len - str_copy_len),
                       BLOCK_OP_NORMAL);
      return NULL_RTX;
    }
  else
    {
      rtx tmp_target;

  normal_expr:
      /* If we want to use a nontemporal or a reverse order store, force the
         value into a register first.  */
      tmp_target = nontemporal || reverse ? NULL_RTX : target;
      temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
                               (call_param_p
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL),
                               &alt_rtl, false);
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
     only a word-sized value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    {
      if (GET_MODE_CLASS (GET_MODE (target))
          != GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp)))
          && known_eq (GET_MODE_BITSIZE (GET_MODE (target)),
                       GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)))))
        {
          rtx t = simplify_gen_subreg (GET_MODE (target), temp,
                                       TYPE_MODE (TREE_TYPE (exp)), 0);
          if (t)
            temp = t;
        }
      if (GET_MODE (temp) == VOIDmode)
        temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                              temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
    }
  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
                              || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
         but TARGET is not valid memory reference, TEMP will differ
         from TARGET although it is really the same location.  */
      && !(alt_rtl
           && rtx_equal_p (alt_rtl, target)
           && !side_effects_p (alt_rtl)
           && !side_effects_p (target))
      /* If there's nothing to copy, don't bother.  Don't call
         expr_size unless necessary, because some front-ends (C++)
         expr_size-hook must not be given objects that are not
         supposed to be bit-copied or bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
        {
          if (GET_MODE (target) == BLKmode)
            {
              /* Handle calls that return BLKmode values in registers.  */
              if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
                copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
              else
                store_bit_field (target,
                                 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
                                 0, 0, 0, GET_MODE (temp), temp, reverse);
            }
          else
            convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
        }

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
        {
          /* Handle copying a string constant into an array.  The string
             constant may be shorter than the array.  So copy just the string's
             actual length, and clear the rest.  First get the size of the data
             type of the string, which is actually the size of the target.  */
          rtx size = expr_size (exp);

          if (CONST_INT_P (size)
              && INTVAL (size) < TREE_STRING_LENGTH (exp))
            emit_block_move (target, temp, size,
                             (call_param_p
                              ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
          else
            {
              machine_mode pointer_mode
                = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
              machine_mode address_mode = get_address_mode (target);

              /* Compute the size of the data to copy from the string.  */
              tree copy_size
                = size_binop_loc (loc, MIN_EXPR,
                                  make_tree (sizetype, size),
                                  size_int (TREE_STRING_LENGTH (exp)));
              rtx copy_size_rtx
                = expand_expr (copy_size, NULL_RTX, VOIDmode,
                               (call_param_p
                                ? EXPAND_STACK_PARM : EXPAND_NORMAL));
              rtx_code_label *label = 0;

              /* Copy that much.  */
              copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
                                               TYPE_UNSIGNED (sizetype));
              emit_block_move (target, temp, copy_size_rtx,
                               (call_param_p
                                ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

              /* Figure out how much is left in TARGET that we have to clear.
                 Do all calculations in pointer_mode.  */
              poly_int64 const_copy_size;
              if (poly_int_rtx_p (copy_size_rtx, &const_copy_size))
                {
                  size = plus_constant (address_mode, size, -const_copy_size);
                  target = adjust_address (target, BLKmode, const_copy_size);
                }
              else
                {
                  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

                  if (GET_MODE (copy_size_rtx) != address_mode)
                    copy_size_rtx = convert_to_mode (address_mode,
                                                     copy_size_rtx,
                                                     TYPE_UNSIGNED (sizetype));

                  target = offset_address (target, copy_size_rtx,
                                           highest_pow2_factor (copy_size));
                  label = gen_label_rtx ();
                  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
                                           GET_MODE (size), 0, label);
                }

              if (size != const0_rtx)
                clear_storage (target, size, BLOCK_OP_NORMAL);

              if (label)
                emit_label (label);
            }
        }
      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
        {
          if (GET_CODE (temp) == PARALLEL)
            emit_group_move (target, temp);
          else
            emit_group_load (target, temp, TREE_TYPE (exp),
                             int_size_in_bytes (TREE_TYPE (exp)));
        }
      else if (GET_CODE (temp) == PARALLEL)
        emit_group_store (target, temp, TREE_TYPE (exp),
                          int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
        emit_block_move (target, temp, expr_size (exp),
                         (call_param_p
                          ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      /* If we emit a nontemporal store, there is nothing else to do.  */
      else if (nontemporal && emit_storent_insn (target, temp))
        ;
      else
        {
          if (reverse)
            temp = flip_storage_order (GET_MODE (target), temp);
          temp = force_operand (temp, target);
          if (temp != target)
            emit_move_insn (target, temp);
        }
    }

  return NULL_RTX;
}
/* Return true if field F of structure TYPE is a flexible array.  */

static bool
flexible_array_member_p (const_tree f, const_tree type)
{
  const_tree tf;

  tf = TREE_TYPE (f);
  return (DECL_CHAIN (f) == NULL
          && TREE_CODE (tf) == ARRAY_TYPE
          && TYPE_DOMAIN (tf)
          && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
          && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
          && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
          && int_size_in_bytes (type) >= 0);
}
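/* Example (editorial): in

     struct pkt { int len; char data[]; };

   DATA is a flexible array member: it is the last field and has array
   type with a zero lower bound and no upper bound, so
   flexible_array_member_p returns true for it and count_type_elements
   does not expect an initializer for it.  */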
/* If FOR_CTOR_P, return the number of top-level elements that a constructor
   must have in order for it to completely initialize a value of type TYPE.
   Return -1 if the number isn't known.

   If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */

static HOST_WIDE_INT
count_type_elements (const_tree type, bool for_ctor_p)
{
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
	tree nelts;

	nelts = array_type_nelts (type);
	if (nelts && tree_fits_uhwi_p (nelts))
	  {
	    unsigned HOST_WIDE_INT n;

	    n = tree_to_uhwi (nelts) + 1;
	    if (n == 0 || for_ctor_p)
	      return n;
	    else
	      return n * count_type_elements (TREE_TYPE (type), false);
	  }
	return for_ctor_p ? -1 : 1;
      }

    case RECORD_TYPE:
      {
	unsigned HOST_WIDE_INT n;
	tree f;

	n = 0;
	for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      if (!for_ctor_p)
		n += count_type_elements (TREE_TYPE (f), false);
	      else if (!flexible_array_member_p (f, type))
		/* Don't count flexible arrays, which are not supposed
		   to be initialized.  */
		n += 1;
	    }

	return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f;
	HOST_WIDE_INT n, m;

	gcc_assert (!for_ctor_p);
	/* Estimate the number of scalars in each field and pick the
	   maximum.  Other estimates would do instead; the idea is simply
	   to make sure that the estimate is not sensitive to the ordering
	   of the fields.  */
	n = 1;
	for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      m = count_type_elements (TREE_TYPE (f), false);
	      /* If the field doesn't span the whole union, add an extra
		 scalar for the rest.  */
	      if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
				    TYPE_SIZE (type)) != 1)
		m++;
	      if (n < m)
		n = m;
	    }
	return n;
      }

    case COMPLEX_TYPE:
      return 2;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT nelts;
	if (TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
	  return nelts;
	else
	  return -1;
      }

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
      return 1;

    case ERROR_MARK:
      return 0;

    default:
      gcc_unreachable ();
    }
}
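
/* Illustrative sketch added for exposition; not part of the original
   source.  For

     int a[4];              -- ARRAY_TYPE, array_type_nelts is 3, so n = 4
     struct { int x, y; };  -- RECORD_TYPE with two FIELD_DECLs

   count_type_elements returns 4 for the array in both modes (each element
   is a single scalar), and 2 for the struct: the number of scalars when
   !FOR_CTOR_P, and the number of top-level initializers a complete
   constructor must supply when FOR_CTOR_P.  */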
/* Helper for categorize_ctor_elements.  Identical interface.  */

static bool
categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			    HOST_WIDE_INT *p_unique_nz_elts,
			    HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  unsigned HOST_WIDE_INT idx;
  HOST_WIDE_INT nz_elts, unique_nz_elts, init_elts, num_fields;
  tree value, purpose, elt_type;

  /* Whether CTOR is a valid constant initializer, in accordance with what
     initializer_constant_valid_p does.  If inferred from the constructor
     elements, true until proven otherwise.  */
  bool const_from_elts_p = constructor_static_from_elts_p (ctor);
  bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);

  nz_elts = 0;
  unique_nz_elts = 0;
  init_elts = 0;
  num_fields = 0;
  elt_type = NULL_TREE;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
    {
      HOST_WIDE_INT mult = 1;

      if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lo_index = TREE_OPERAND (purpose, 0);
	  tree hi_index = TREE_OPERAND (purpose, 1);

	  if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
	    mult = (tree_to_uhwi (hi_index)
		    - tree_to_uhwi (lo_index) + 1);
	}
      num_fields += mult;
      elt_type = TREE_TYPE (value);

      switch (TREE_CODE (value))
	{
	case CONSTRUCTOR:
	  {
	    HOST_WIDE_INT nz = 0, unz = 0, ic = 0;

	    bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &unz,
							   &ic, p_complete);

	    nz_elts += mult * nz;
	    unique_nz_elts += unz;
	    init_elts += mult * ic;

	    if (const_from_elts_p && const_p)
	      const_p = const_elt_p;
	  }
	  break;

	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	  if (!initializer_zerop (value))
	    {
	      nz_elts += mult;
	      unique_nz_elts++;
	    }
	  init_elts += mult;
	  break;

	case STRING_CST:
	  nz_elts += mult * TREE_STRING_LENGTH (value);
	  unique_nz_elts += TREE_STRING_LENGTH (value);
	  init_elts += mult * TREE_STRING_LENGTH (value);
	  break;

	case COMPLEX_CST:
	  if (!initializer_zerop (TREE_REALPART (value)))
	    {
	      nz_elts += mult;
	      unique_nz_elts++;
	    }
	  if (!initializer_zerop (TREE_IMAGPART (value)))
	    {
	      nz_elts += mult;
	      unique_nz_elts++;
	    }
	  init_elts += 2 * mult;
	  break;

	case VECTOR_CST:
	  {
	    /* We can only construct constant-length vectors using
	       CONSTRUCTOR.  */
	    unsigned int nunits = VECTOR_CST_NELTS (value).to_constant ();
	    for (unsigned int i = 0; i < nunits; ++i)
	      {
		tree v = VECTOR_CST_ELT (value, i);
		if (!initializer_zerop (v))
		  {
		    nz_elts += mult;
		    unique_nz_elts++;
		  }
		init_elts += mult;
	      }
	  }
	  break;

	default:
	  {
	    HOST_WIDE_INT tc = count_type_elements (elt_type, false);
	    nz_elts += mult * tc;
	    unique_nz_elts += tc;
	    init_elts += mult * tc;

	    if (const_from_elts_p && const_p)
	      const_p
		= initializer_constant_valid_p (value,
						elt_type,
						TYPE_REVERSE_STORAGE_ORDER
						(TREE_TYPE (ctor)))
		  != NULL_TREE;
	  }
	  break;
	}
    }

  if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
						num_fields, elt_type))
    *p_complete = false;

  *p_nz_elts += nz_elts;
  *p_unique_nz_elts += unique_nz_elts;
  *p_init_elts += init_elts;

  return const_p;
}
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * the same, but counting RANGE_EXPRs as multiplier of 1 instead of
     high - low + 1 (this can be useful for callers to determine ctors
     that could be cheaply initialized with - perhaps nested - loops
     compared to copied from huge read-only data),
     and place it in *P_UNIQUE_NZ_ELTS;
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT.
   * whether the constructor is complete -- in the sense that every
     meaningful byte is explicitly given a value --
     and place it in *P_COMPLETE.

   Return whether or not CTOR is a valid static constant initializer, the same
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */

bool
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			  HOST_WIDE_INT *p_unique_nz_elts,
			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  *p_nz_elts = 0;
  *p_unique_nz_elts = 0;
  *p_init_elts = 0;
  *p_complete = true;

  return categorize_ctor_elements_1 (ctor, p_nz_elts, p_unique_nz_elts,
				     p_init_elts, p_complete);
}
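
/* Illustrative sketch added for exposition; not part of the original
   source.  For the constructor

     int a[100] = { [0 ... 99] = 7 };   -- a single RANGE_EXPR element

   *P_NZ_ELTS comes back as 100 (mult = hi - lo + 1) while *P_UNIQUE_NZ_ELTS
   is only 1, which is what lets callers spot initializers that are cheaper
   to emit as a loop than to copy from a 100-element read-only table.  */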
/* TYPE is initialized by a constructor with NUM_ELTS elements, the last
   of which had type LAST_TYPE.  Each element was itself a complete
   initializer, in the sense that every meaningful byte was explicitly
   given a value.  Return true if the same is true for the constructor
   as a whole.  */

bool
complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
			  const_tree last_type)
{
  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      if (num_elts == 0)
	return false;

      gcc_assert (num_elts == 1 && last_type);

      /* ??? We could look at each element of the union, and find the
	 largest element.  Which would avoid comparing the size of the
	 initialized element against any tail padding in the union.
	 Doesn't seem worth the effort...  */
      return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
    }

  return count_type_elements (type, true) == num_elts;
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, unz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &unz_elts, &init_elts,
				&complete_p);
      return !complete_p || nz_elts < init_elts / 4;
    }

  return initializer_zerop (exp);
}
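
/* Illustrative sketch added for exposition; not part of the original
   source.  For

     int a[8] = { 1, 2 };

   the constructor is incomplete (2 of 8 elements), so mostly_zeros_p
   returns nonzero via !complete_p.  For a complete constructor it returns
   nonzero only when nz_elts < init_elts / 4, i.e. at least three quarters
   of the initialized scalars are zero.  */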
/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, unz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &unz_elts, &init_elts,
				&complete_p);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.
   If REVERSE is true, the store is to be done in reverse order.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, poly_uint64 bitsize, poly_int64 bitpos,
			 poly_uint64 bitregion_start,
			 poly_uint64 bitregion_end,
			 machine_mode mode,
			 tree exp, int cleared,
			 alias_set_type alias_set, bool reverse)
{
  poly_int64 bytepos;
  poly_uint64 bytesize;
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
	 bit position are on a byte boundary.  */
      && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
      && maybe_ne (bitsize, 0U)
      && multiple_p (bitsize, BITS_PER_UNIT, &bytesize)
      /* If we have a nonzero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (known_eq (bitpos, 0) || MEM_P (target)))
    {
      if (MEM_P (target))
	{
	  machine_mode target_mode = GET_MODE (target);
	  if (target_mode != BLKmode
	      && !multiple_p (bitpos, GET_MODE_ALIGNMENT (target_mode)))
	    target_mode = BLKmode;
	  target = adjust_address (target, target_mode, bytepos);
	}

      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bytesize, reverse);
    }
  else
    store_field (target, bitsize, bitpos, bitregion_start, bitregion_end, mode,
		 exp, alias_set, false, reverse);
}
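
/* Illustrative sketch added for exposition; not part of the original
   source.  The recursion shortcut above fires for a nested constructor at
   a byte boundary, e.g.

     struct s { int x; struct { int y, z; } in; } v = { 1, { 2, 3 } };

   where the inner constructor starts at bitpos 32, a multiple of
   BITS_PER_UNIT, so store_constructor is re-entered directly with CLEARED
   still valid and no bit-field store is needed.  */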
/* Returns the number of FIELD_DECLs in TYPE.  */

static int
fields_length (const_tree type)
{
  tree t = TYPE_FIELDS (type);
  int count = 0;

  for (; t; t = DECL_CHAIN (t))
    if (TREE_CODE (t) == FIELD_DECL)
      ++count;

  return count;
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.
   If REVERSE is true, the store is to be done in reverse order.  */

static void
store_constructor (tree exp, rtx target, int cleared, poly_int64 size,
		   bool reverse)
{
  tree type = TREE_TYPE (exp);
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
  poly_int64 bitregion_end = known_gt (size, 0) ? size * BITS_PER_UNIT - 1 : 0;
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	tree field, value;

	/* The storage order is specified for every aggregate type.  */
	reverse = TYPE_REVERSE_STORAGE_ORDER (type);

	/* If size is zero or the target is already cleared, do nothing.  */
	if (known_eq (size, 0) || cleared)
	  cleared = 1;
	/* We either clear the aggregate or indicate the value is dead.  */
	else if ((TREE_CODE (type) == UNION_TYPE
		  || TREE_CODE (type) == QUAL_UNION_TYPE)
		 && ! CONSTRUCTOR_ELTS (exp))
	  /* If the constructor is empty, clear the union.  */
	  {
	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* If we are building a static constructor into a register,
	   set the initial value as zero so we can fold the value into
	   a constant.  But if more than one register is involved,
	   this probably loses.  */
	else if (REG_P (target) && TREE_STATIC (exp)
		 && known_le (GET_MODE_SIZE (GET_MODE (target)),
			      REGMODE_NATURAL_SIZE (GET_MODE (target))))
	  {
	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    cleared = 1;
	  }

	/* If the constructor has fewer fields than the structure or
	   if we are initializing the structure to mostly zeros, clear
	   the whole structure first.  Don't do this if TARGET is a
	   register whose mode size isn't equal to SIZE since
	   clear_storage can't handle this case.  */
	else if (known_size_p (size)
		 && (((int) CONSTRUCTOR_NELTS (exp) != fields_length (type))
		     || mostly_zeros_p (exp))
		 && (!REG_P (target)
		     || known_eq (GET_MODE_SIZE (GET_MODE (target)), size)))
	  {
	    clear_storage (target, gen_int_mode (size, Pmode),
			   BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (REG_P (target) && !cleared)
	  emit_clobber (target);

	/* Store each element of the constructor into the
	   corresponding field of TARGET.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
	  {
	    machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos = 0;
	    tree offset;
	    rtx to_rtx = target;

	    /* Just ignore missing fields.  We cleared the whole
	       structure, above, if any fields are missing.  */
	    if (field == 0)
	      continue;

	    if (cleared && initializer_zerop (value))
	      continue;

	    if (tree_fits_uhwi_p (DECL_SIZE (field)))
	      bitsize = tree_to_uhwi (DECL_SIZE (field));
	    else
	      gcc_unreachable ();

	    mode = DECL_MODE (field);
	    if (DECL_BIT_FIELD (field))
	      mode = VOIDmode;

	    offset = DECL_FIELD_OFFSET (field);
	    if (tree_fits_shwi_p (offset)
		&& tree_fits_shwi_p (bit_position (field)))
	      {
		bitpos = int_bit_position (field);
		offset = NULL_TREE;
	      }
	    else
	      gcc_unreachable ();

	    /* If this initializes a field that is smaller than a
	       word, at the start of a word, try to widen it to a full
	       word.  This special case allows us to output C++ member
	       function initializations in a form that the optimizers
	       can understand.  */
	    if (WORD_REGISTER_OPERATIONS
		&& REG_P (target)
		&& bitsize < BITS_PER_WORD
		&& bitpos % BITS_PER_WORD == 0
		&& GET_MODE_CLASS (mode) == MODE_INT
		&& TREE_CODE (value) == INTEGER_CST
		&& exp_size >= 0
		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	      {
		tree type = TREE_TYPE (value);

		if (TYPE_PRECISION (type) < BITS_PER_WORD)
		  {
		    type = lang_hooks.types.type_for_mode
		      (word_mode, TYPE_UNSIGNED (type));
		    value = fold_convert (type, value);
		    /* Make sure the bits beyond the original bitsize are zero
		       so that we can correctly avoid extra zeroing stores in
		       later constructor elements.  */
		    tree bitsize_mask
		      = wide_int_to_tree (type, wi::mask (bitsize, false,
							  BITS_PER_WORD));
		    value = fold_build2 (BIT_AND_EXPR, type, value,
					 bitsize_mask);
		  }

		if (BYTES_BIG_ENDIAN)
		  value
		    = fold_build2 (LSHIFT_EXPR, type, value,
				   build_int_cst (type,
						  BITS_PER_WORD - bitsize));
		bitsize = BITS_PER_WORD;
		mode = word_mode;
	      }

	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
		&& DECL_NONADDRESSABLE_P (field))
	      {
		to_rtx = copy_rtx (to_rtx);
		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	      }

	    store_constructor_field (to_rtx, bitsize, bitpos,
				     0, bitregion_end, mode,
				     value, cleared,
				     get_alias_set (TREE_TYPE (field)),
				     reverse);
	  }
	break;
      }
    case ARRAY_TYPE:
      {
	tree index, value;
	unsigned HOST_WIDE_INT i;
	int need_to_clear;
	tree domain;
	tree elttype = TREE_TYPE (type);
	int const_bounds_p;
	HOST_WIDE_INT minelt = 0;
	HOST_WIDE_INT maxelt = 0;

	/* The storage order is specified for every aggregate type.  */
	reverse = TYPE_REVERSE_STORAGE_ORDER (type);

	domain = TYPE_DOMAIN (type);
	const_bounds_p = (TYPE_MIN_VALUE (domain)
			  && TYPE_MAX_VALUE (domain)
			  && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
			  && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));

	/* If we have constant bounds for the range of the type, get them.  */
	if (const_bounds_p)
	  {
	    minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
	    maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
	  }

	/* If the constructor has fewer elements than the array, clear
	   the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT idx;
	    HOST_WIDE_INT count = 0, zero_count = 0;
	    need_to_clear = ! const_bounds_p;

	    /* This loop is a more accurate version of the loop in
	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
	       is also needed to check for missing elements.  */
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
	      {
		HOST_WIDE_INT this_node_count;

		if (need_to_clear)
		  break;

		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		  {
		    tree lo_index = TREE_OPERAND (index, 0);
		    tree hi_index = TREE_OPERAND (index, 1);

		    if (! tree_fits_uhwi_p (lo_index)
			|| ! tree_fits_uhwi_p (hi_index))
		      {
			need_to_clear = 1;
			break;
		      }

		    this_node_count = (tree_to_uhwi (hi_index)
				       - tree_to_uhwi (lo_index) + 1);
		  }
		else
		  this_node_count = 1;

		count += this_node_count;
		if (mostly_zeros_p (value))
		  zero_count += this_node_count;
	      }

	    /* Clear the entire array first if there are any missing
	       elements, or if the incidence of zero elements is >=
	       75%.  */
	    if (! need_to_clear
		&& (count < maxelt - minelt + 1
		    || 4 * zero_count >= 3 * count))
	      need_to_clear = 1;
	  }

	if (need_to_clear && maybe_gt (size, 0))
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, gen_int_mode (size, Pmode),
			     BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (!cleared && REG_P (target))
	  /* Inform later passes that the old value is dead.  */
	  emit_clobber (target);

	/* Store each element of the constructor into the
	   corresponding element of TARGET, determined by counting the
	   elements.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
	  {
	    machine_mode mode;
	    poly_int64 bitsize;
	    HOST_WIDE_INT bitpos;
	    rtx xtarget = target;

	    if (cleared && initializer_zerop (value))
	      continue;

	    mode = TYPE_MODE (elttype);
	    if (mode != BLKmode)
	      bitsize = GET_MODE_BITSIZE (mode);
	    else if (!poly_int_tree_p (TYPE_SIZE (elttype), &bitsize))
	      bitsize = -1;

	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	      {
		tree lo_index = TREE_OPERAND (index, 0);
		tree hi_index = TREE_OPERAND (index, 1);
		rtx index_r, pos_rtx;
		HOST_WIDE_INT lo, hi, count;
		tree position;

		/* If the range is constant and "small", unroll the loop.  */
		if (const_bounds_p
		    && tree_fits_shwi_p (lo_index)
		    && tree_fits_shwi_p (hi_index)
		    && (lo = tree_to_shwi (lo_index),
			hi = tree_to_shwi (hi_index),
			count = hi - lo + 1,
			(!MEM_P (target)
			 || count <= 2
			 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
			     && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
				 <= 40 * 8)))))
		  {
		    lo -= minelt;  hi -= minelt;
		    for (; lo <= hi; lo++)
		      {
			bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));

			if (MEM_P (target)
			    && !MEM_KEEP_ALIAS_SET_P (target)
			    && TREE_CODE (type) == ARRAY_TYPE
			    && TYPE_NONALIASED_COMPONENT (type))
			  {
			    target = copy_rtx (target);
			    MEM_KEEP_ALIAS_SET_P (target) = 1;
			  }

			store_constructor_field
			  (target, bitsize, bitpos, 0, bitregion_end,
			   mode, value, cleared,
			   get_alias_set (elttype), reverse);
		      }
		  }
		else
		  {
		    rtx_code_label *loop_start = gen_label_rtx ();
		    rtx_code_label *loop_end = gen_label_rtx ();
		    tree exit_cond;

		    expand_normal (hi_index);

		    index = build_decl (EXPR_LOCATION (exp),
					VAR_DECL, NULL_TREE, domain);
		    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
		    SET_DECL_RTL (index, index_r);
		    store_expr (lo_index, index_r, 0, false, reverse);

		    /* Build the head of the loop.  */
		    do_pending_stack_adjust ();
		    emit_label (loop_start);

		    /* Assign value to element index.  */
		    position =
		      fold_convert (ssizetype,
				    fold_build2 (MINUS_EXPR,
						 TREE_TYPE (index),
						 index,
						 TYPE_MIN_VALUE (domain)));

		    position =
			size_binop (MULT_EXPR, position,
				    fold_convert (ssizetype,
						  TYPE_SIZE_UNIT (elttype)));

		    pos_rtx = expand_normal (position);
		    xtarget = offset_address (target, pos_rtx,
					      highest_pow2_factor (position));
		    xtarget = adjust_address (xtarget, mode, 0);
		    if (TREE_CODE (value) == CONSTRUCTOR)
		      store_constructor (value, xtarget, cleared,
					 exact_div (bitsize, BITS_PER_UNIT),
					 reverse);
		    else
		      store_expr (value, xtarget, 0, false, reverse);

		    /* Generate a conditional jump to exit the loop.  */
		    exit_cond = build2 (LT_EXPR, integer_type_node,
					index, hi_index);
		    jumpif (exit_cond, loop_end,
			    profile_probability::uninitialized ());

		    /* Update the loop counter, and jump to the head of
		       the loop.  */
		    expand_assignment (index,
				       build2 (PLUS_EXPR, TREE_TYPE (index),
					       index, integer_one_node),
				       false);

		    emit_jump (loop_start);

		    /* Build the end of the loop.  */
		    emit_label (loop_end);
		  }
	      }
	    else if ((index != 0 && ! tree_fits_shwi_p (index))
		     || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
	      {
		tree position;

		if (index == 0)
		  index = ssize_int (1);

		if (minelt)
		  index = fold_convert (ssizetype,
					fold_build2 (MINUS_EXPR,
						     TREE_TYPE (index),
						     index,
						     TYPE_MIN_VALUE (domain)));

		position =
		  size_binop (MULT_EXPR, index,
			      fold_convert (ssizetype,
					    TYPE_SIZE_UNIT (elttype)));
		xtarget = offset_address (target,
					  expand_normal (position),
					  highest_pow2_factor (position));
		xtarget = adjust_address (xtarget, mode, 0);
		store_expr (value, xtarget, 0, false, reverse);
	      }
	    else
	      {
		if (index != 0)
		  bitpos = ((tree_to_shwi (index) - minelt)
			    * tree_to_uhwi (TYPE_SIZE (elttype)));
		else
		  bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));

		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
		    && TREE_CODE (type) == ARRAY_TYPE
		    && TYPE_NONALIASED_COMPONENT (type))
		  {
		    target = copy_rtx (target);
		    MEM_KEEP_ALIAS_SET_P (target) = 1;
		  }
		store_constructor_field (target, bitsize, bitpos, 0,
					 bitregion_end, mode, value,
					 cleared, get_alias_set (elttype),
					 reverse);
	      }
	  }
	break;
      }
    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;
	int i;
	int need_to_clear;
	insn_code icode = CODE_FOR_nothing;
	tree elt;
	tree elttype = TREE_TYPE (type);
	int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
	machine_mode eltmode = TYPE_MODE (elttype);
	HOST_WIDE_INT bitsize;
	HOST_WIDE_INT bitpos;
	rtvec vector = NULL;
	poly_uint64 n_elts;
	unsigned HOST_WIDE_INT const_n_elts;
	alias_set_type alias;
	bool vec_vec_init_p = false;
	machine_mode mode = GET_MODE (target);

	gcc_assert (eltmode != BLKmode);

	/* Try using vec_duplicate_optab for uniform vectors.  */
	if (!TREE_SIDE_EFFECTS (exp)
	    && VECTOR_MODE_P (mode)
	    && eltmode == GET_MODE_INNER (mode)
	    && ((icode = optab_handler (vec_duplicate_optab, mode))
		!= CODE_FOR_nothing)
	    && (elt = uniform_vector_p (exp)))
	  {
	    struct expand_operand ops[2];
	    create_output_operand (&ops[0], target, mode);
	    create_input_operand (&ops[1], expand_normal (elt), eltmode);
	    expand_insn (icode, 2, ops);
	    if (!rtx_equal_p (target, ops[0].value))
	      emit_move_insn (target, ops[0].value);
	    break;
	  }

	n_elts = TYPE_VECTOR_SUBPARTS (type);
	if (REG_P (target)
	    && VECTOR_MODE_P (mode)
	    && n_elts.is_constant (&const_n_elts))
	  {
	    machine_mode emode = eltmode;

	    if (CONSTRUCTOR_NELTS (exp)
		&& (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value))
		    == VECTOR_TYPE))
	      {
		tree etype = TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value);
		gcc_assert (known_eq (CONSTRUCTOR_NELTS (exp)
				      * TYPE_VECTOR_SUBPARTS (etype),
				      n_elts));
		emode = TYPE_MODE (etype);
	      }
	    icode = convert_optab_handler (vec_init_optab, mode, emode);
	    if (icode != CODE_FOR_nothing)
	      {
		unsigned int i, n = const_n_elts;

		if (emode != eltmode)
		  {
		    n = CONSTRUCTOR_NELTS (exp);
		    vec_vec_init_p = true;
		  }
		vector = rtvec_alloc (n);
		for (i = 0; i < n; i++)
		  RTVEC_ELT (vector, i) = CONST0_RTX (emode);
	      }
	  }

	/* If the constructor has fewer elements than the vector,
	   clear the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
	    tree value;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	      {
		tree sz = TYPE_SIZE (TREE_TYPE (value));
		int n_elts_here
		  = tree_to_uhwi (int_const_binop (TRUNC_DIV_EXPR, sz,
						   TYPE_SIZE (elttype)));

		count += n_elts_here;
		if (mostly_zeros_p (value))
		  zero_count += n_elts_here;
	      }

	    /* Clear the entire vector first if there are any missing elements,
	       or if the incidence of zero elements is >= 75%.  */
	    need_to_clear = (maybe_lt (count, n_elts)
			     || 4 * zero_count >= 3 * count);
	  }

	if (need_to_clear && maybe_gt (size, 0) && !vector)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (mode));
	    else
	      clear_storage (target, gen_int_mode (size, Pmode),
			     BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* Inform later passes that the old value is dead.  */
	if (!cleared && !vector && REG_P (target))
	  emit_move_insn (target, CONST0_RTX (mode));

	if (MEM_P (target))
	  alias = MEM_ALIAS_SET (target);
	else
	  alias = get_alias_set (elttype);

	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements.  */
	for (idx = 0, i = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
	     idx++, i += bitsize / elt_size)
	  {
	    HOST_WIDE_INT eltpos;
	    tree value = ce->value;

	    bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
	    if (cleared && initializer_zerop (value))
	      continue;

	    if (ce->index)
	      eltpos = tree_to_uhwi (ce->index);
	    else
	      eltpos = i;

	    if (vector)
	      {
		if (vec_vec_init_p)
		  {
		    gcc_assert (ce->index == NULL_TREE);
		    gcc_assert (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE);
		    eltpos = idx;
		  }
		else
		  gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
		RTVEC_ELT (vector, eltpos) = expand_normal (value);
	      }
	    else
	      {
		machine_mode value_mode
		  = (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
		     ? TYPE_MODE (TREE_TYPE (value)) : eltmode);
		bitpos = eltpos * elt_size;
		store_constructor_field (target, bitsize, bitpos, 0,
					 bitregion_end, value_mode,
					 value, cleared, alias, reverse);
	      }
	  }

	if (vector)
	  emit_insn (GEN_FCN (icode) (target,
				      gen_rtx_PARALLEL (mode, vector)));
	break;
      }

    default:
      gcc_unreachable ();
    }
}
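
#if 0
/* Illustrative sketch added for exposition; not part of the original
   source.  A standalone model of the clearing heuristic used twice in
   store_constructor above: clear the whole object first when elements
   are missing or when at least 75% of the initialized elements are zero.
   With count = 16 and zero_count = 13, 4 * 13 = 52 >= 3 * 16 = 48, so the
   object is cleared and only the 3 nonzero elements are stored.  */
static bool
sketch_should_clear_first (HOST_WIDE_INT count, HOST_WIDE_INT zero_count,
			   HOST_WIDE_INT total_elts)
{
  return count < total_elts || 4 * zero_count >= 3 * count;
}
#endif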
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   BITREGION_START is bitpos of the first bitfield in this region.
   BITREGION_END is the bitpos of the ending bitfield in this region.
   These two fields are 0, if the C++ memory model does not apply,
   or we are not interested in keeping track of bitfield regions.

   Always return const0_rtx unless we have something particular to
   return.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.

   If NONTEMPORAL is true, try generating a nontemporal store.

   If REVERSE is true, the store is to be done in reverse order.  */

static rtx
store_field (rtx target, poly_int64 bitsize, poly_int64 bitpos,
	     poly_uint64 bitregion_start, poly_uint64 bitregion_end,
	     machine_mode mode, tree exp,
	     alias_set_type alias_set, bool nontemporal, bool reverse)
{
  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  Don't do that for zero sized addressable lhs of
     calls.  */
  if (known_eq (bitsize, 0)
      && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
	  || TREE_CODE (exp) != CALL_EXPR))
    return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      gcc_assert (known_eq (bitpos, 0));
      return store_expr (exp, target, 0, nontemporal, reverse);
    }
  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  poly_int64 decl_bitsize;
  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode
	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
		|| !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode)))
	       && targetm.slow_unaligned_access (mode, MEM_ALIGN (target)))
	      || !multiple_p (bitpos, BITS_PER_UNIT)))
      || (known_size_p (bitsize)
	  && mode != BLKmode
	  && maybe_gt (GET_MODE_BITSIZE (mode), bitsize))
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (known_size_p (bitsize)
	  && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
	  && maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp))),
		       bitsize)
	  /* Except for initialization of full bytes from a CONSTRUCTOR, which
	     we will handle specially below.  */
	  && !(TREE_CODE (exp) == CONSTRUCTOR
	       && multiple_p (bitsize, BITS_PER_UNIT))
	  /* And except for bitwise copying of TREE_ADDRESSABLE types,
	     where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp)
	     includes some extra padding.  store_expr / expand_expr will in
	     that case call get_inner_reference that will have the bitsize
	     we check here and thus the block move will not clobber the
	     padding that shouldn't be clobbered.  In the future we could
	     replace the TREE_ADDRESSABLE check with a check that
	     get_base_address needs to live in memory.  */
	  && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
	      || TREE_CODE (exp) != COMPONENT_REF
	      || !multiple_p (bitsize, BITS_PER_UNIT)
	      || !multiple_p (bitpos, BITS_PER_UNIT)
	      || !poly_int_tree_p (DECL_SIZE (TREE_OPERAND (exp, 1)),
				   &decl_bitsize)
	      || maybe_ne (decl_bitsize, bitsize)))
      /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
	 decl we must use bitfield operations.  */
      || (known_size_p (bitsize)
	  && TREE_CODE (exp) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
    {
      rtx temp;
      gimple *nop_def;
      /* If EXP is a NOP_EXPR of precision less than its mode, then that
	 implies a mask operation.  If the precision is the same size as
	 the field we're storing into, that mask is redundant.  This is
	 particularly common with bit field assignments generated by the
	 C front end.  */
      nop_def = get_def_for_expr (exp, NOP_EXPR);
      if (nop_def)
	{
	  tree type = TREE_TYPE (exp);
	  if (INTEGRAL_TYPE_P (type)
	      && maybe_ne (TYPE_PRECISION (type),
			   GET_MODE_BITSIZE (TYPE_MODE (type)))
	      && known_eq (bitsize, TYPE_PRECISION (type)))
	    {
	      tree op = gimple_assign_rhs1 (nop_def);
	      type = TREE_TYPE (op);
	      if (INTEGRAL_TYPE_P (type)
		  && known_ge (TYPE_PRECISION (type), bitsize))
		exp = op;
	    }
	}

      temp = expand_normal (exp);

      /* We don't support variable-sized BLKmode bitfields, since our
	 handling of BLKmode is bound up with the ability to break
	 things into words.  */
      gcc_assert (mode != BLKmode || bitsize.is_constant ());

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (temp) == PARALLEL)
	{
	  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
	  machine_mode temp_mode = GET_MODE (temp);
	  if (temp_mode == BLKmode || temp_mode == VOIDmode)
	    temp_mode = smallest_int_mode_for_size (size * BITS_PER_UNIT);
	  rtx temp_target = gen_reg_rtx (temp_mode);
	  emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
	  temp = temp_target;
	}

      /* Handle calls that return BLKmode values in registers.  */
      else if (mode == BLKmode && REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
	{
	  rtx temp_target = gen_reg_rtx (GET_MODE (temp));
	  copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
	  temp = temp_target;
	}

      /* If the value has aggregate type and an integral mode then, if BITSIZE
	 is narrower than this mode and this is for big-endian data, we first
	 need to put the value into the low-order bits for store_bit_field,
	 except when MODE is BLKmode and BITSIZE larger than the word size
	 (see the handling of fields larger than a word in store_bit_field).
	 Moreover, the field may be not aligned on a byte boundary; in this
	 case, if it has reverse storage order, it needs to be accessed as a
	 scalar field with reverse storage order and we must first put the
	 value into target order.  */
      scalar_int_mode temp_mode;
      if (AGGREGATE_TYPE_P (TREE_TYPE (exp))
	  && is_int_mode (GET_MODE (temp), &temp_mode))
	{
	  HOST_WIDE_INT size = GET_MODE_BITSIZE (temp_mode);

	  reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp));

	  if (reverse)
	    temp = flip_storage_order (temp_mode, temp);

	  gcc_checking_assert (known_le (bitsize, size));
	  if (maybe_lt (bitsize, size)
	      && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN
	      /* Use of to_constant for BLKmode was checked above.  */
	      && !(mode == BLKmode && bitsize.to_constant () > BITS_PER_WORD))
	    temp = expand_shift (RSHIFT_EXPR, temp_mode, temp,
				 size - bitsize, NULL_RTX, 1);
	}

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the mode of TEMP and TARGET is BLKmode, both must be in memory
	 and BITPOS must be aligned on a byte boundary.  If so, we simply do
	 a block copy.  Likewise for a BLKmode-like TARGET.  */
      if (GET_MODE (temp) == BLKmode
	  && (GET_MODE (target) == BLKmode
	      || (MEM_P (target)
		  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
		  && multiple_p (bitpos, BITS_PER_UNIT)
		  && multiple_p (bitsize, BITS_PER_UNIT))))
	{
	  gcc_assert (MEM_P (target) && MEM_P (temp));
	  poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT);
	  poly_int64 bytesize = bits_to_bytes_round_up (bitsize);

	  target = adjust_address (target, VOIDmode, bytepos);
	  emit_block_move (target, temp,
			   gen_int_mode (bytesize, Pmode),
			   BLOCK_OP_NORMAL);

	  return const0_rtx;
	}

      /* If the mode of TEMP is still BLKmode and BITSIZE not larger than the
	 word size, we need to load the value (see again store_bit_field).  */
      if (GET_MODE (temp) == BLKmode && known_le (bitsize, BITS_PER_WORD))
	{
	  scalar_int_mode temp_mode = smallest_int_mode_for_size (bitsize);
	  temp = extract_bit_field (temp, bitsize, 0, 1, NULL_RTX, temp_mode,
				    temp_mode, false, NULL);
	}

      /* Store the value in the bitfield.  */
      gcc_checking_assert (known_ge (bitpos, 0));
      store_bit_field (target, bitsize, bitpos,
		       bitregion_start, bitregion_end,
		       mode, temp, reverse);

      return const0_rtx;
    }
  else
    {
      /* Now build a reference to just the desired component.  */
      rtx to_rtx = adjust_address (target, mode,
				   exact_div (bitpos, BITS_PER_UNIT));

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
	 into a target smaller than its type; handle that case now.  */
      if (TREE_CODE (exp) == CONSTRUCTOR && known_size_p (bitsize))
	{
	  poly_int64 bytesize = exact_div (bitsize, BITS_PER_UNIT);
	  store_constructor (exp, to_rtx, 0, bytesize, reverse);
	  return to_rtx;
	}

      return store_expr (exp, to_rtx, 0, nontemporal, reverse);
    }
}
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
   storage order of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
   Otherwise, it is a mode that can be used to access the field.

   If the field describes a variable-sized object, *PMODE is set to
   BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

tree
get_inner_reference (tree exp, poly_int64_pod *pbitsize,
		     poly_int64_pod *pbitpos, tree *poffset,
		     machine_mode *pmode, int *punsignedp,
		     int *preversep, int *pvolatilep)
{
  tree size_tree = 0;
  machine_mode mode = VOIDmode;
  bool blkmode_bitfield = false;
  tree offset = size_zero_node;
  poly_offset_int bit_offset = 0;

  /* First get the mode, signedness, storage order and size.  We do this from
     just the outermost expression.  */
  *pbitsize = -1;
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (exp, 1);
      size_tree = DECL_SIZE (field);
      if (flag_strict_volatile_bitfields > 0
	  && TREE_THIS_VOLATILE (exp)
	  && DECL_BIT_FIELD_TYPE (field)
	  && DECL_MODE (field) != BLKmode)
	/* Volatile bitfields should be accessed in the mode of the
	   field's type, not the mode computed based on the bit
	   field's size.  */
	mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
      else if (!DECL_BIT_FIELD (field))
	{
	  mode = DECL_MODE (field);
	  /* For vector fields re-check the target flags, as DECL_MODE
	     could have been set with different target flags than
	     the current function has.  */
	  if (mode == BLKmode
	      && VECTOR_TYPE_P (TREE_TYPE (field))
	      && VECTOR_MODE_P (TYPE_MODE_RAW (TREE_TYPE (field))))
	    mode = TYPE_MODE (TREE_TYPE (field));
	}
      else if (DECL_MODE (field) == BLKmode)
	blkmode_bitfield = true;

      *punsignedp = DECL_UNSIGNED (field);
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
		     || TYPE_UNSIGNED (TREE_TYPE (exp)));

      /* For vector types, with the correct size of access, use the mode of
	 inner type.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
	  && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
	mode = TYPE_MODE (TREE_TYPE (exp));
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! tree_fits_uhwi_p (size_tree))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_to_uhwi (size_tree);
    }

  *preversep = reverse_storage_order_for_component_p (exp);
  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset += wi::to_poly_offset (TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    /* If this field hasn't been filled in yet, don't go past it.
	       This should only happen when folding expressions made during
	       type construction.  */
	    if (this_offset == 0)
	      break;

	    offset = size_binop (PLUS_EXPR, offset, this_offset);
	    bit_offset += wi::to_poly_offset (DECL_FIELD_BIT_OFFSET (field));

	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound = array_ref_low_bound (exp);
	    tree unit_size = array_ref_element_size (exp);

	    /* We assume all arrays have sizes that are a multiple of a byte.
	       First subtract the lower bound, if any, in the type of the
	       index, then convert to sizetype and multiply by the size of
	       the array element.  */
	    if (! integer_zerop (low_bound))
	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				   index, low_bound);

	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (MULT_EXPR,
					     fold_convert (sizetype, index),
					     unit_size));
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  bit_offset += *pbitsize;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case MEM_REF:
	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      tree off = TREE_OPERAND (exp, 1);
	      if (!integer_zerop (off))
		{
		  poly_offset_int boff = mem_ref_offset (exp);
		  boff <<= LOG2_BITS_PER_UNIT;
		  bit_offset += boff;
		}
	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	    }
	  goto done;

	default:
	  goto done;
	}

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Make sure to handle overflow during
     this conversion.  */
  if (poly_int_tree_p (offset))
    {
      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset),
				      TYPE_PRECISION (sizetype));
      tem <<= LOG2_BITS_PER_UNIT;
      tem += bit_offset;
      if (tem.to_shwi (pbitpos))
	*poffset = offset = NULL_TREE;
    }

  /* Otherwise, split it up.  */
  if (offset)
    {
      /* Avoid returning a negative bitpos as this may wreak havoc later.  */
      if (!bit_offset.to_shwi (pbitpos) || maybe_lt (*pbitpos, 0))
	{
	  *pbitpos = num_trailing_bits (bit_offset.force_shwi ());
	  poly_offset_int bytes = bits_to_bytes_round_down (bit_offset);
	  offset = size_binop (PLUS_EXPR, offset,
			       build_int_cst (sizetype, bytes.force_shwi ()));
	}

      *poffset = offset;
    }

  /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
  if (mode == VOIDmode
      && blkmode_bitfield
      && multiple_p (*pbitpos, BITS_PER_UNIT)
      && multiple_p (*pbitsize, BITS_PER_UNIT))
    *pmode = BLKmode;
  else
    *pmode = mode;

  return exp;
}
/* Alignment in bits the TARGET of an assignment may be assumed to have.  */

static unsigned HOST_WIDE_INT
target_align (const_tree target)
{
  /* We might have a chain of nested references with intermediate misaligning
     bitfields components, so need to recurse to find out.  */

  unsigned HOST_WIDE_INT this_align, outer_align;

  switch (TREE_CODE (target))
    {
    case BIT_FIELD_REF:
      return 1;

    case COMPONENT_REF:
      this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MAX (this_align, outer_align);

    default:
      return TYPE_ALIGN (TREE_TYPE (target));
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value
	= simplify_gen_subreg (GET_MODE (value),
			       force_reg (GET_MODE (SUBREG_REG (value)),
					  force_operand (SUBREG_REG (value),
							 NULL_RTX)),
			       GET_MODE (SUBREG_REG (value)),
			       SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && CONST_INT_P (op2))
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (code == PLUS && CONST_INT_P (op2)
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && REG_P (XEXP (XEXP (value, 0), 0))
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1), 0),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);
	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }
  if (UNARY_P (value))
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      switch (code)
	{
	case ZERO_EXTEND:
	case SIGN_EXTEND:
	case TRUNCATE:
	case FLOAT_EXTEND:
	case FLOAT_TRUNCATE:
	  convert_move (target, op1, code == ZERO_EXTEND);
	  return target;

	case FIX:
	case UNSIGNED_FIX:
	  expand_fix (target, op1, code == UNSIGNED_FIX);
	  return target;

	case FLOAT:
	case UNSIGNED_FLOAT:
	  expand_float (target, op1, code == UNSIGNED_FLOAT);
	  return target;

	default:
	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
	}
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory reference to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

int
safe_from_p (const_rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
	      != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (MEM_P (x)
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }
  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_declaration:
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case tcc_constant:
      return 1;

    case tcc_exceptional:
      if (TREE_CODE (exp) == TREE_LIST)
	{
	  while (1)
	    {
	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
		return 0;
	      exp = TREE_CHAIN (exp);
	      if (!exp)
		return 1;
	      if (TREE_CODE (exp) != TREE_LIST)
		return safe_from_p (x, exp, 0);
	    }
	}
      else if (TREE_CODE (exp) == CONSTRUCTOR)
	{
	  constructor_elt *ce;
	  unsigned HOST_WIDE_INT idx;

	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
		|| !safe_from_p (x, ce->value, 0))
	      return 0;
	  return 1;
	}
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case tcc_statement:
      /* The only case we look at here is the DECL_INITIAL inside a
	 DECL_EXPR.  */
      return (TREE_CODE (exp) != DECL_EXPR
	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));

    case tcc_binary:
    case tcc_comparison:
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
	return 0;
      /* Fall through.  */

    case tcc_unary:
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL a that address if part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || !MEM_P (DECL_RTL (exp)))
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case MEM_REF:
	  if (MEM_P (x)
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || MEM_P (x))
	    return 0;
	  break;

	case WITH_CLEANUP_EXPR:
	case CLEANUP_POINT_EXPR:
	  /* Lowered by gimplify.c.  */
	  gcc_unreachable ();

	case SAVE_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = TREE_OPERAND_LENGTH (exp);
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      break;

    case tcc_type:
      /* Should never get a type here.  */
      gcc_unreachable ();
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (REG_P (exp_rtl)
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (MEM_P (x) && MEM_P (exp_rtl)
		    && true_dependence (exp_rtl, VOIDmode, x)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (const_tree exp)
{
  unsigned HOST_WIDE_INT ret;
  int trailing_zeros = tree_ctz (exp);
  if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
    return BIGGEST_ALIGNMENT;
  ret = HOST_WIDE_INT_1U << trailing_zeros;
  if (ret > BIGGEST_ALIGNMENT)
    return BIGGEST_ALIGNMENT;
  return ret;
}
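
#if 0
/* Illustrative sketch added for exposition; not part of the original
   source.  highest_pow2_factor reduces to this: for an expression known
   to be a multiple of, say, 24 (binary ...11000), tree_ctz reports 3
   trailing zero bits and the result is 1 << 3 = 8, capped at
   BIGGEST_ALIGNMENT, so a MEM using that address can be marked as at
   most 8-byte aligned.  */
static unsigned HOST_WIDE_INT
sketch_pow2_from_ctz (int trailing_zeros)
{
  unsigned HOST_WIDE_INT ret = HOST_WIDE_INT_1U << trailing_zeros;
  return ret > BIGGEST_ALIGNMENT ? BIGGEST_ALIGNMENT : ret;
}
#endif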
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree target, const_tree exp)
{
  unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
  unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);

  return MAX (factor, talign);
}
/* Convert the tree comparison code TCODE to the rtl one where the
   signedness is UNSIGNEDP.  */

static enum rtx_code
convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
{
  enum rtx_code code;
  switch (tcode)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;
    default:
      gcc_unreachable ();
    }
  return code;
}
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
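
/* Illustrative sketch added for exposition; not part of the original
   source.  For an expression such as x * x the two operand trees compare
   equal under operand_equal_p, so the code above expands the subexpression
   once and hands back a copy_rtx of the same rtl for the second operand
   instead of emitting the computation twice.  */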
/* Return a MEM that contains constant EXP.  DEFER is as for
   output_constant_def and MODIFIER is as for expand_expr.  */

static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
  rtx mem;

  mem = output_constant_def (exp, defer);
  if (modifier != EXPAND_INITIALIZER)
    mem = use_anchored_address (mem);
  return mem;
}
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, scalar_int_mode tmode,
			 enum expand_modifier modifier, addr_space_t as)
{
  rtx result, subtarget;
  tree inner, offset;
  poly_int64 bitsize, bitpos;
  int unsignedp, reversep, volatilep = 0;
  machine_mode mode1;

  /* If we are taking the address of a constant and are at the top level,
     we have to use output_constant_def since we can't call force_const_mem
     at top level.  */
  /* ??? This should be considered a front-end bug.  We should not be
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
     exception here is STRING_CST.  */
  if (CONSTANT_CLASS_P (exp))
    {
      result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
      return result;
    }

  /* Everything must be something allowed by is_gimple_addressable.  */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);

    case MEM_REF:
      {
	tree tem = TREE_OPERAND (exp, 0);
	if (!integer_zerop (TREE_OPERAND (exp, 1)))
	  tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
	return expand_expr (tem, target, tmode, modifier);
      }

    case TARGET_MEM_REF:
      return addr_for_mem_ref (exp, as, true);

    case CONST_DECL:
      /* Expand the initializer like constants above.  */
      result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
					   0, modifier), 0);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
      return result;

    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
	 the address is the same as the address of the parent object.  */
      offset = 0;
      bitpos = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
	 The expression is therefore always offset by the size of the
	 scalar type.  */
      offset = 0;
      bitpos = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;

    case COMPOUND_LITERAL_EXPR:
      /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
	 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
	 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
	 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
	 the initializers aren't gimplified.  */
      if (COMPOUND_LITERAL_EXPR_DECL (exp)
	  && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
	return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
					target, tmode, modifier, as);
      /* FALLTHRU */
    default:
7949 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7950 expand_expr, as that can have various side effects; LABEL_DECLs for
7951 example, may not have their DECL_RTL set yet. Expand the rtl of
7952 CONSTRUCTORs too, which should yield a memory reference for the
7953 constructor's contents. Assume language specific tree nodes can
7954 be expanded in some interesting way. */
7955 gcc_assert (TREE_CODE (exp
) < LAST_AND_UNUSED_TREE_CODE
);
7957 || TREE_CODE (exp
) == CONSTRUCTOR
7958 || TREE_CODE (exp
) == COMPOUND_LITERAL_EXPR
)
7960 result
= expand_expr (exp
, target
, tmode
,
7961 modifier
== EXPAND_INITIALIZER
7962 ? EXPAND_INITIALIZER
: EXPAND_CONST_ADDRESS
);
7964 /* If the DECL isn't in memory, then the DECL wasn't properly
7965 marked TREE_ADDRESSABLE, which will be either a front-end
7966 or a tree optimizer bug. */
7968 gcc_assert (MEM_P (result
));
7969 result
= XEXP (result
, 0);
7971 /* ??? Is this needed anymore? */
7973 TREE_USED (exp
) = 1;
7975 if (modifier
!= EXPAND_INITIALIZER
7976 && modifier
!= EXPAND_CONST_ADDRESS
7977 && modifier
!= EXPAND_SUM
)
7978 result
= force_operand (result
, target
);
7982 /* Pass FALSE as the last argument to get_inner_reference although
7983 we are expanding to RTL. The rationale is that we know how to
7984 handle "aligning nodes" here: we can just bypass them because
7985 they won't change the final object whose address will be returned
7986 (they actually exist only for that purpose). */
7987 inner
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode1
,
7988 &unsignedp
, &reversep
, &volatilep
);
7992 /* We must have made progress. */
7993 gcc_assert (inner
!= exp
);
7995 subtarget
= offset
|| maybe_ne (bitpos
, 0) ? NULL_RTX
: target
;
7996 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7997 inner alignment, force the inner to be sufficiently aligned. */
7998 if (CONSTANT_CLASS_P (inner
)
7999 && TYPE_ALIGN (TREE_TYPE (inner
)) < TYPE_ALIGN (TREE_TYPE (exp
)))
8001 inner
= copy_node (inner
);
8002 TREE_TYPE (inner
) = copy_node (TREE_TYPE (inner
));
8003 SET_TYPE_ALIGN (TREE_TYPE (inner
), TYPE_ALIGN (TREE_TYPE (exp
)));
8004 TYPE_USER_ALIGN (TREE_TYPE (inner
)) = 1;
8006 result
= expand_expr_addr_expr_1 (inner
, subtarget
, tmode
, modifier
, as
);
8012 if (modifier
!= EXPAND_NORMAL
)
8013 result
= force_operand (result
, NULL
);
8014 tmp
= expand_expr (offset
, NULL_RTX
, tmode
,
8015 modifier
== EXPAND_INITIALIZER
8016 ? EXPAND_INITIALIZER
: EXPAND_NORMAL
);
8018 /* expand_expr is allowed to return an object in a mode other
8019 than TMODE. If it did, we need to convert. */
8020 if (GET_MODE (tmp
) != VOIDmode
&& tmode
!= GET_MODE (tmp
))
8021 tmp
= convert_modes (tmode
, GET_MODE (tmp
),
8022 tmp
, TYPE_UNSIGNED (TREE_TYPE (offset
)));
8023 result
= convert_memory_address_addr_space (tmode
, result
, as
);
8024 tmp
= convert_memory_address_addr_space (tmode
, tmp
, as
);
8026 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8027 result
= simplify_gen_binary (PLUS
, tmode
, result
, tmp
);
8030 subtarget
= maybe_ne (bitpos
, 0) ? NULL_RTX
: target
;
8031 result
= expand_simple_binop (tmode
, PLUS
, result
, tmp
, subtarget
,
8032 1, OPTAB_LIB_WIDEN
);
8036 if (maybe_ne (bitpos
, 0))
8038 /* Someone beforehand should have rejected taking the address
8039 of an object that isn't byte-aligned. */
8040 poly_int64 bytepos
= exact_div (bitpos
, BITS_PER_UNIT
);
8041 result
= convert_memory_address_addr_space (tmode
, result
, as
);
8042 result
= plus_constant (tmode
, result
, bytepos
);
8043 if (modifier
< EXPAND_SUM
)
8044 result
= force_operand (result
, target
);
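/* Editorial illustration (not part of the original source): the
   IMAGPART_EXPR case above reduces taking the address of the imaginary
   part to the parent object's address plus a byte offset.  On a
   hypothetical target where double occupies 64 bits:

     _Complex double c;
     double *p = &__imag__ c;   // p == (double *) ((char *) &c + 8)

   The offset starts life as a bit position; the maybe_ne (bitpos, 0)
   code converts it with exact_div (bitpos, BITS_PER_UNIT) and folds it
   into the address with plus_constant.  */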
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
		       enum expand_modifier modifier)
{
  addr_space_t as = ADDR_SPACE_GENERIC;
  scalar_int_mode address_mode = Pmode;
  scalar_int_mode pointer_mode = ptr_mode;
  machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  if (POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      address_mode = targetm.addr_space.address_mode (as);
      pointer_mode = targetm.addr_space.pointer_mode (as);
    }

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  scalar_int_mode new_tmode = (tmode == pointer_mode
			       ? pointer_mode
			       : address_mode);

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
				    new_tmode, modifier, as);

  /* Despite expand_expr claims concerning ignoring TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = new_tmode;
  if (rmode != new_tmode)
    result = convert_memory_address_addr_space (new_tmode, result, as);

  return result;
}
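/* Editorial note (not from the original source): the "Weird Things"
   guarded against above arise from user code such as a hypothetical

     extern int a;
     short s = (short) (long) &a;

   where the mode requested for the address is an integer mode that is
   neither the pointer mode nor the address mode of the address space.
   The function therefore computes the address in NEW_TMODE, one of
   those two known-good modes, and lets the caller narrow afterwards.  */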
/* Generate code for computing CONSTRUCTOR EXP.
   An rtx for the computed value is returned.  If AVOID_TEMP_MEM
   is TRUE, instead of creating a temporary variable in memory
   NULL is returned and the caller needs to handle it differently.  */

static rtx
expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
		    bool avoid_temp_mem)
{
  tree type = TREE_TYPE (exp);
  machine_mode mode = TYPE_MODE (type);

  /* Try to avoid creating a temporary at all.  This is possible
     if all of the initializer is zero.
     FIXME: try to handle all [0..255] initializers we can handle
     with memset.  */
  if (TREE_STATIC (exp)
      && !TREE_ADDRESSABLE (exp)
      && target != 0 && mode == BLKmode
      && all_zeros_p (exp))
    {
      clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
      return target;
    }

  /* All elts simple constants => refer to a constant in memory.  But
     if this is a non-BLKmode mode, let it store a field at a time
     since that should make a CONST_INT, CONST_WIDE_INT or
     CONST_DOUBLE when we fold.  Likewise, if we have a target we can
     use, it is best to store directly into the target unless the type
     is large enough that memcpy will be used.  If we are making an
     initializer and all operands are constant, put it in memory as
     well.

     FIXME: Avoid trying to fill vector constructors piece-meal.
     Output them with output_constant_def below unless we're sure
     they're zeros.  This should go away when vector initializers
     are treated like VECTOR_CST instead of arrays.  */
  if ((TREE_STATIC (exp)
       && ((mode == BLKmode
	    && ! (target != 0 && safe_from_p (target, exp, 1)))
	   || TREE_ADDRESSABLE (exp)
	   || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
	       && (! can_move_by_pieces
		     (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
		      TYPE_ALIGN (type)))
	       && ! mostly_zeros_p (exp))))
      || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
	  && TREE_CONSTANT (exp)))
    {
      rtx constructor;

      if (avoid_temp_mem)
	return NULL_RTX;

      constructor = expand_expr_constant (exp, 1, modifier);

      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM)
	constructor = validize_mem (constructor);

      return constructor;
    }

  /* Handle calls that pass values in multiple non-contiguous
     locations.  The Irix 6 ABI has examples of this.  */
  if (target == 0 || ! safe_from_p (target, exp, 1)
      || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
    {
      if (avoid_temp_mem)
	return NULL_RTX;

      target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
    }

  store_constructor (exp, target, 0, int_expr_size (exp), false);
  return target;
}
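/* Editorial illustration (not from the original source): the all_zeros_p
   fast path above means that an all-zero constant aggregate initializer
   such as a hypothetical

     struct S { int x[64]; };
     void f (struct S *d) { *d = (struct S) { 0 }; }

   is expanded as a single clear_storage call (typically a memset or an
   inline clearing sequence) rather than by storing each element.  */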
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.

   If INNER_REFERENCE_P is true, we are expanding an inner reference.
   In this case, we don't adjust a returned MEM rtx that wouldn't be
   sufficiently aligned for its mode; instead, it's up to the caller
   to deal with it afterwards.  This is used to make sure that unaligned
   base objects for which out-of-bounds accesses are supported, for
   example record types with trailing arrays, aren't realigned behind
   the back of the caller.
   The normal operating mode is to pass FALSE for this parameter.  */
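/* Editorial illustration (not from the original source): under
   EXPAND_SUM the returned rtx may be an unfinished address-style sum.
   Expanding the address of arr[i] might, on a hypothetical 64-bit
   target with 4-byte ints, come back as

     (plus:DI (mult:DI (reg:DI 100) (const_int 4))
	      (symbol_ref:DI "arr"))

   rather than a pseudo holding the computed value; the caller decides
   whether to use it directly as a memory address or to force it into a
   register.  The exact modes and register numbers shown here are
   hypothetical.  */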
rtx
expand_expr_real (tree exp, rtx target, machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl,
		  bool inner_reference_p)
{
  rtx ret;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
			    inner_reference_p);
  return ret;
}
/* Try to expand the conditional expression which is represented by
   TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds
   return the rtl reg which represents the result.  Otherwise return
   NULL_RTX.  */

static rtx
expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
			      tree treeop1 ATTRIBUTE_UNUSED,
			      tree treeop2 ATTRIBUTE_UNUSED)
{
  rtx insn;
  rtx op00, op01, op1, op2;
  enum rtx_code comparison_code;
  machine_mode comparison_mode;
  gimple *srcstmt;
  rtx temp;
  tree type = TREE_TYPE (treeop1);
  int unsignedp = TYPE_UNSIGNED (type);
  machine_mode mode = TYPE_MODE (type);
  machine_mode orig_mode = mode;
  static bool expanding_cond_expr_using_cmove = false;

  /* Conditional move expansion can end up TERing two operands which,
     when recursively hitting conditional expressions can result in
     exponential behavior if the cmove expansion ultimately fails.
     It's hardly profitable to TER a cmove into a cmove so avoid doing
     that by failing early if we end up recursing.  */
  if (expanding_cond_expr_using_cmove)
    return NULL_RTX;

  /* If we cannot do a conditional move on the mode, try doing it
     with the promoted mode.  */
  if (!can_conditionally_move_p (mode))
    {
      mode = promote_mode (type, mode, &unsignedp);
      if (!can_conditionally_move_p (mode))
	return NULL_RTX;
      temp = assign_temp (type, 0, 0); /* Use promoted mode for temp.  */
    }
  else
    temp = assign_temp (type, 0, 1);

  expanding_cond_expr_using_cmove = true;
  start_sequence ();
  expand_operands (treeop1, treeop2,
		   temp, &op1, &op2, EXPAND_NORMAL);

  if (TREE_CODE (treeop0) == SSA_NAME
      && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
      enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
      op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
      op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
      comparison_mode = TYPE_MODE (type);
      unsignedp = TYPE_UNSIGNED (type);
      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
    }
  else if (COMPARISON_CLASS_P (treeop0))
    {
      tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
      enum tree_code cmpcode = TREE_CODE (treeop0);
      op00 = expand_normal (TREE_OPERAND (treeop0, 0));
      op01 = expand_normal (TREE_OPERAND (treeop0, 1));
      unsignedp = TYPE_UNSIGNED (type);
      comparison_mode = TYPE_MODE (type);
      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
    }
  else
    {
      op00 = expand_normal (treeop0);
      op01 = const0_rtx;
      comparison_code = NE;
      comparison_mode = GET_MODE (op00);
      if (comparison_mode == VOIDmode)
	comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
    }
  expanding_cond_expr_using_cmove = false;

  if (GET_MODE (op1) != mode)
    op1 = gen_lowpart (mode, op1);

  if (GET_MODE (op2) != mode)
    op2 = gen_lowpart (mode, op2);

  /* Try to emit the conditional move.  */
  insn = emit_conditional_move (temp, comparison_code,
				op00, op01, comparison_mode,
				op1, op2, mode,
				unsignedp);

  /* If we could do the conditional move, emit the sequence,
     and return.  */
  if (insn)
    {
      rtx_insn *seq = get_insns ();
      end_sequence ();
      emit_insn (seq);
      return convert_modes (orig_mode, mode, temp, 0);
    }

  /* Otherwise discard the sequence and fall back to code with
     branches.  */
  end_sequence ();
  return NULL_RTX;
}
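/* Editorial illustration (not from the original source): for source like

     int f (int a, int b, int c, int d) { return a < b ? c : d; }

   this routine expands the COND_EXPR branch-free on targets that can
   conditionally move: both arms are evaluated into registers, the
   comparison a < b is emitted, and a single conditional move selects
   between them, avoiding the two-label jump sequence otherwise used.  */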
static rtx
expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
		    enum expand_modifier modifier)
{
  rtx op0, op1, op2, temp;
  rtx_code_label *lab;
  tree type;
  int unsignedp;
  machine_mode mode;
  scalar_int_mode int_mode;
  enum tree_code code = ops->code;
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  bool reduce_bit_field;
  location_t loc = ops->location;
  tree treeop0, treeop1, treeop2;
#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field			  \
				 ? reduce_to_bit_field_precision ((expr), \
								  target, \
								  type)	  \
				 : (expr))

  type = ops->type;
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = ops->op0;
  treeop1 = ops->op1;
  treeop2 = ops->op2;

  /* We should be called only on simple (binary or unary) expressions,
     exactly those that are valid in gimple expressions that aren't
     GIMPLE_SINGLE_RHS (or invalid).  */
  gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
	      || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
	      || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);

  ignore = (target == const0_rtx
	    || ((CONVERT_EXPR_CODE_P (code)
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* We should be called only if we need the result.  */
  gcc_assert (!ignore);

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (INTEGRAL_TYPE_P (type)
		      && !type_has_mode_precision_p (type));

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;

  switch (code)
    {
    case NON_LVALUE_EXPR:
    case PAREN_EXPR:
    CASE_CONVERT:
      if (treeop0 == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (treeop0);

	  /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attribute.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (treeop0, target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, type, 0);
	      return result;
	    }

	  if (target == 0)
	    {
	      if (TYPE_MODE (type) != BLKmode)
		target = gen_reg_rtx (TYPE_MODE (type));
	      else
		target = assign_temp (type, 1, 1);
	    }

	  if (MEM_P (target))
	    /* Store data into beginning of memory target.  */
	    store_expr (treeop0,
			adjust_address (target, TYPE_MODE (valtype), 0),
			modifier == EXPAND_STACK_PARM,
			false, TYPE_REVERSE_STORAGE_ORDER (type));

	  else
	    {
	      gcc_assert (REG_P (target)
			  && !TYPE_REVERSE_STORAGE_ORDER (type));

	      /* Store this field into a union of the proper type.  */
	      poly_uint64 op0_size
		= tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (treeop0)));
	      poly_uint64 union_size = GET_MODE_BITSIZE (mode);
	      store_field (target,
			   /* The conversion must be constructed so that
			      we know at compile time how many bits
			      to preserve.  */
			   ordered_min (op0_size, union_size),
			   0, 0, 0, TYPE_MODE (valtype), treeop0, 0,
			   false, false);
	    }

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
	{
	  op0 = expand_expr (treeop0, target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return REDUCE_BIT_FIELD (op0);
	}

      op0 = expand_expr (treeop0, NULL_RTX, mode,
			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
      if (GET_MODE (op0) == mode)
	;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      else if (CONSTANT_P (op0))
	{
	  tree inner_type = TREE_TYPE (treeop0);
	  machine_mode inner_mode = GET_MODE (op0);

	  if (inner_mode == VOIDmode)
	    inner_mode = TYPE_MODE (inner_type);

	  if (modifier == EXPAND_INITIALIZER)
	    op0 = lowpart_subreg (mode, op0, inner_mode);
	  else
	    op0 = convert_modes (mode, inner_mode, op0,
				 TYPE_UNSIGNED (inner_type));
	}

      else if (modifier == EXPAND_INITIALIZER)
	op0 = gen_rtx_fmt_e (TYPE_UNSIGNED (TREE_TYPE (treeop0))
			     ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      else if (target == 0)
	op0 = convert_to_mode (mode, op0,
			       TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      else
	{
	  convert_move (target, op0,
			TYPE_UNSIGNED (TREE_TYPE (treeop0)));
	  op0 = target;
	}

      return REDUCE_BIT_FIELD (op0);
    case ADDR_SPACE_CONVERT_EXPR:
      {
	tree treeop0_type = TREE_TYPE (treeop0);

	gcc_assert (POINTER_TYPE_P (type));
	gcc_assert (POINTER_TYPE_P (treeop0_type));

	addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
	addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));

	/* Conversions between pointers to the same address space should
	   have been implemented via CONVERT_EXPR / NOP_EXPR.  */
	gcc_assert (as_to != as_from);

	op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);

	/* Ask target code to handle conversion between pointers
	   to overlapping address spaces.  */
	if (targetm.addr_space.subset_p (as_to, as_from)
	    || targetm.addr_space.subset_p (as_from, as_to))
	  op0 = targetm.addr_space.convert (op0, treeop0_type, type);
	else
	  {
	    /* For disjoint address spaces, converting anything but a null
	       pointer invokes undefined behavior.  We truncate or extend the
	       value as if we'd converted via integers, which handles 0 as
	       required, and all others as the programmer likely expects.  */
#ifndef POINTERS_EXTEND_UNSIGNED
	    const int POINTERS_EXTEND_UNSIGNED = 1;
#endif
	    op0 = convert_modes (mode, TYPE_MODE (treeop0_type),
				 op0, POINTERS_EXTEND_UNSIGNED);
	  }

	gcc_assert (op0);
	return op0;
      }
    case POINTER_PLUS_EXPR:
      /* Even though the sizetype mode and the pointer's mode can be different
	 expand is able to handle this correctly and get the correct result out
	 of the PLUS_EXPR code.  */
      /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
	 if sizetype precision is smaller than pointer precision.  */
      if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
	treeop1 = fold_convert_loc (loc, type,
				    fold_convert_loc (loc, ssizetype,
						      treeop1));
      /* If sizetype precision is larger than pointer precision, truncate the
	 offset to have matching modes.  */
      else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
	treeop1 = fold_convert_loc (loc, type, treeop1);
      /* FALLTHRU */

    case PLUS_EXPR:
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (treeop0) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
	  && VAR_P (treeop1)
	  && (DECL_RTL (treeop1) == frame_pointer_rtx
	      || DECL_RTL (treeop1) == stack_pointer_rtx
	      || DECL_RTL (treeop1) == arg_pointer_rtx))
	gcc_unreachable ();

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  if (TREE_CODE (treeop0) == INTEGER_CST
	      && HWI_COMPUTABLE_MODE_P (mode)
	      && TREE_CONSTANT (treeop1))
	    {
	      rtx constant_part;
	      HOST_WIDE_INT wc;
	      machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));

	      op1 = expand_expr (treeop1, subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use wi::shwi to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      wc = TREE_INT_CST_LOW (treeop0);
	      constant_part =
		immed_wide_int_const (wi::shwi (wc, wmode), wmode);
	      op1 = plus_constant (mode, op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return REDUCE_BIT_FIELD (op1);
	    }

	  else if (TREE_CODE (treeop1) == INTEGER_CST
		   && HWI_COMPUTABLE_MODE_P (mode)
		   && TREE_CONSTANT (treeop0))
	    {
	      rtx constant_part;
	      HOST_WIDE_INT wc;
	      machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));

	      op0 = expand_expr (treeop0, subtarget, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (treeop1, NULL_RTX,
				     VOIDmode, modifier);
		  /* Return a PLUS if modifier says it's OK.  */
		  if (modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    return simplify_gen_binary (PLUS, mode, op0, op1);
		  goto binop2;
		}
	      /* Use wi::shwi to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      wc = TREE_INT_CST_LOW (treeop1);
	      constant_part
		= immed_wide_int_const (wi::shwi (wc, wmode), wmode);
	      op0 = plus_constant (mode, op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return REDUCE_BIT_FIELD (op0);
	    }
	}

      /* Use TER to expand pointer addition of a negated value
	 as pointer subtraction.  */
      if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
	   || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
	       && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
	  && TREE_CODE (treeop1) == SSA_NAME
	  && TYPE_MODE (TREE_TYPE (treeop0))
	     == TYPE_MODE (TREE_TYPE (treeop1)))
	{
	  gimple *def = get_def_for_expr (treeop1, NEGATE_EXPR);
	  if (def)
	    {
	      treeop1 = gimple_assign_rhs1 (def);
	      code = MINUS_EXPR;
	      goto do_minus;
	    }
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if (modifier != EXPAND_INITIALIZER
	  && (modifier != EXPAND_SUM || mode != ptr_mode))
	{
	  expand_operands (treeop0, treeop1,
			   subtarget, &op0, &op1, modifier);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}

      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, modifier);
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
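      /* Editorial illustration (not from the original source): the
	 plus_constant path above is what turns a hypothetical

	   int arr[10];
	   int *p = &arr[3];

	 into a single (symbol_ref "arr") offset by 12 at compile time,
	 rather than an explicit addition performed at run time.  */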
    case POINTER_DIFF_EXPR:
    case MINUS_EXPR:
    do_minus:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (treeop0)
	  && really_constant_p (treeop1))
	{
	  expand_operands (treeop0, treeop1,
			   NULL_RTX, &op0, &op1, modifier);
	  return simplify_gen_binary (MINUS, mode, op0, op1);
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if (modifier != EXPAND_INITIALIZER
	  && (modifier != EXPAND_SUM || mode != ptr_mode))
	goto binop;

      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (CONST_INT_P (op1))
	{
	  op1 = negate_rtx (mode, op1);
	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
	}

      goto binop2;
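      /* Editorial illustration (not from the original source): the
	 symbolic-difference path above supports initializers such as a
	 hypothetical

	   static char a, b;
	   static long d = &b - &a;

	 by returning (minus (symbol_ref "b") (symbol_ref "a")) for the
	 assembler to resolve, since no insns can be emitted while
	 expanding an initializer.  */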
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
      op2 = expand_normal (treeop2);
      target = expand_widen_pattern_expr (ops, op0, op1, op2,
					  target, unsignedp);
      return target;

    case WIDEN_MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (treeop0) == INTEGER_CST)
	std::swap (treeop0, treeop1);

      /* First, check if we have a multiplication of one signed and one
	 unsigned operand.  */
      if (TREE_CODE (treeop1) != INTEGER_CST
	  && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
	      != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
	{
	  machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
	  this_optab = usmul_widen_optab;
	  if (find_widening_optab_handler (this_optab, mode, innermode)
	      != CODE_FOR_nothing)
	    {
	      if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
		expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
				 EXPAND_NORMAL);
	      else
		expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
				 EXPAND_NORMAL);
	      /* op0 and op1 might still be constant, despite the above
		 != INTEGER_CST check.  Handle it.  */
	      if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
		{
		  op0 = convert_modes (mode, innermode, op0, true);
		  op1 = convert_modes (mode, innermode, op1, false);
		  return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
							target, unsignedp));
		}
	      goto binop3;
	    }
	}
      /* Check for a multiplication with matching signedness.  */
      else if ((TREE_CODE (treeop1) == INTEGER_CST
		&& int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
	       || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
		   == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
	{
	  tree op0type = TREE_TYPE (treeop0);
	  machine_mode innermode = TYPE_MODE (op0type);
	  bool zextend_p = TYPE_UNSIGNED (op0type);
	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

	  if (TREE_CODE (treeop0) != INTEGER_CST)
	    {
	      if (find_widening_optab_handler (this_optab, mode, innermode)
		  != CODE_FOR_nothing)
		{
		  expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
				   EXPAND_NORMAL);
		  /* op0 and op1 might still be constant, despite the above
		     != INTEGER_CST check.  Handle it.  */
		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
		    {
		     widen_mult_const:
		      op0 = convert_modes (mode, innermode, op0, zextend_p);
		      op1
			= convert_modes (mode, innermode, op1,
					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
		      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
							    target,
							    unsignedp));
		    }
		  temp = expand_widening_mult (mode, op0, op1, target,
					       unsignedp, this_optab);
		  return REDUCE_BIT_FIELD (temp);
		}
	      if (find_widening_optab_handler (other_optab, mode, innermode)
		  != CODE_FOR_nothing
		  && innermode == word_mode)
		{
		  rtx htem, hipart;
		  op0 = expand_normal (treeop0);
		  op1 = expand_normal (treeop1);
		  /* op0 and op1 might be constants, despite the above
		     != INTEGER_CST check.  Handle it.  */
		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
		    goto widen_mult_const;
		  if (TREE_CODE (treeop1) == INTEGER_CST)
		    op1 = convert_modes (mode, word_mode, op1,
					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  hipart = gen_highpart (word_mode, temp);
		  htem = expand_mult_highpart_adjust (word_mode, hipart,
						      op0, op1, hipart,
						      zextend_p);
		  if (htem != hipart)
		    emit_move_insn (hipart, htem);
		  return REDUCE_BIT_FIELD (temp);
		}
	    }
	}
      treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
      treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));

    case MULT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_mult" doesn't support sat/no-sat fixed-point
	 multiplications.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (treeop0) == INTEGER_CST)
	std::swap (treeop0, treeop1);

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && tree_fits_shwi_p (treeop1))
	{
	  tree exp1 = treeop1;

	  op0 = expand_expr (treeop0, subtarget, VOIDmode,
			     EXPAND_SUM);

	  if (!REG_P (op0))
	    op0 = force_operand (op0, NULL_RTX);
	  if (!REG_P (op0))
	    op0 = copy_to_mode_reg (mode, op0);

	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
			       gen_int_mode (tree_to_shwi (exp1),
					     TYPE_MODE (TREE_TYPE (exp1)))));
	}

      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      {
	/* If this is a fixed-point operation, then we cannot use the code
	   below because "expand_divmod" doesn't support sat/no-sat fixed-point
	   divisions.  */
	if (ALL_FIXED_POINT_MODE_P (mode))
	  goto binop;

	if (modifier == EXPAND_STACK_PARM)
	  target = 0;
	/* Possible optimization: compute the dividend with EXPAND_SUM
	   then if the divisor is constant can optimize the case
	   where some terms of the dividend have coeffs divisible by it.  */
	expand_operands (treeop0, treeop1,
			 subtarget, &op0, &op1, EXPAND_NORMAL);
	bool mod_p = code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR
		     || code == CEIL_MOD_EXPR || code == ROUND_MOD_EXPR;
	if (SCALAR_INT_MODE_P (mode)
	    && optimize >= 2
	    && get_range_pos_neg (treeop0) == 1
	    && get_range_pos_neg (treeop1) == 1)
	  {
	    /* If both arguments are known to be positive when interpreted
	       as signed, we can expand it as both signed and unsigned
	       division or modulo.  Choose the cheaper sequence in that case.  */
	    bool speed_p = optimize_insn_for_speed_p ();
	    do_pending_stack_adjust ();
	    start_sequence ();
	    rtx uns_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 1);
	    rtx_insn *uns_insns = get_insns ();
	    end_sequence ();
	    start_sequence ();
	    rtx sgn_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 0);
	    rtx_insn *sgn_insns = get_insns ();
	    end_sequence ();
	    unsigned uns_cost = seq_cost (uns_insns, speed_p);
	    unsigned sgn_cost = seq_cost (sgn_insns, speed_p);

	    /* If costs are the same then use as tie breaker the costs
	       computed for the other optimization goal (size vs. speed).  */
	    if (uns_cost == sgn_cost)
	      {
		uns_cost = seq_cost (uns_insns, !speed_p);
		sgn_cost = seq_cost (sgn_insns, !speed_p);
	      }

	    if (uns_cost < sgn_cost || (uns_cost == sgn_cost && unsignedp))
	      {
		emit_insn (uns_insns);
		return uns_ret;
	      }
	    emit_insn (sgn_insns);
	    return sgn_ret;
	  }
	return expand_divmod (mod_p, code, mode, op0, op1, target, unsignedp);
      }
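      /* Editorial illustration (not from the original source): in a
	 hypothetical

	   int f (int x, int y) { return (x & 0xffff) / (y & 0xff); }

	 both operands are provably non-negative, so the code above
	 expands the division both as signed and as unsigned, compares
	 the seq_cost of the two candidate sequences, and keeps the
	 cheaper one; on many targets the unsigned form needs fewer
	 fixup instructions.  */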
    case MULT_HIGHPART_EXPR:
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
      gcc_assert (temp);
      return temp;

    case FIXED_CONVERT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);

      if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
	   && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
	  || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
      else
	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
      return target;
    case FIX_TRUNC_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
				op0);
      expand_float (target, op0,
		    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      return target;

    case NEGATE_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode,
			  optab_for_tree_code (NEGATE_EXPR, type,
					       optab_default),
			  op0, target, 0);
      gcc_assert (temp);
      return REDUCE_BIT_FIELD (temp);
    case ABS_EXPR:
    case ABSU_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, treeop0, 1));
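      /* Editorial note (not from the original source): when the operand
	 type is unsigned, |x| == x, so the early return above avoids
	 ever emitting a compare-and-negate sequence for it.  */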
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || (MEM_P (target) && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (REG_P (target)
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      expand_operands (treeop0, treeop1,
		       target, &op0, &op1, EXPAND_NORMAL);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = optab_for_tree_code (code, type, optab_default);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* For vector MIN <x, y>, expand it as a VEC_COND_EXPR <x <= y, x, y>
	 and similarly for MAX <x, y>.  */
      if (VECTOR_TYPE_P (type))
	{
	  tree t0 = make_tree (type, op0);
	  tree t1 = make_tree (type, op1);
	  tree comparison = build2 (code == MIN_EXPR ? LE_EXPR : GE_EXPR,
				    type, t0, t1);
	  return expand_vec_cond_expr (type, comparison, t0, t1,
				       original_target);
	}

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (! REG_P (target))
	target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
	std::swap (op0, op1);

      /* We generate better code and avoid problems with op1 mentioning
	 target by forcing op1 into a pseudo if it isn't a constant.  */
      if (! CONSTANT_P (op1))
	op1 = force_reg (mode, op1);

      {
	enum rtx_code comparison_code;
	rtx cmpop1 = op1;

	if (code == MAX_EXPR)
	  comparison_code = unsignedp ? GEU : GE;
	else
	  comparison_code = unsignedp ? LEU : LE;

	/* Canonicalize to comparisons against 0.  */
	if (op1 == const1_rtx)
	  {
	    /* Converting (a >= 1 ? a : 1) into (a >= 0 ? a : 1)
	       or (a != 0 ? a : 1) for unsigned.
	       For MIN we are safe converting (a <= 1 ? a : 1)
	       into (a <= 0 ? a : 1)  */
	    cmpop1 = const0_rtx;
	    if (code == MAX_EXPR)
	      comparison_code = unsignedp ? NE : GT;
	  }
	if (op1 == constm1_rtx && !unsignedp)
	  {
	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
	    cmpop1 = const0_rtx;
	    if (code == MIN_EXPR)
	      comparison_code = LT;
	  }

	/* Use a conditional move if possible.  */
	if (can_conditionally_move_p (mode))
	  {
	    rtx insn;

	    start_sequence ();

	    /* Try to emit the conditional move.  */
	    insn = emit_conditional_move (target, comparison_code,
					  op0, cmpop1, mode,
					  op0, op1, mode,
					  unsignedp);

	    /* If we could do the conditional move, emit the sequence,
	       and return.  */
	    if (insn)
	      {
		rtx_insn *seq = get_insns ();
		end_sequence ();
		emit_insn (seq);
		return target;
	      }

	    /* Otherwise discard the sequence and fall back to code with
	       a branch.  */
	    end_sequence ();
	  }

	if (target != op0)
	  emit_move_insn (target, op0);

	lab = gen_label_rtx ();
	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
				 unsignedp, mode, NULL_RTX, NULL, lab,
				 profile_probability::uninitialized ());
      }
      emit_move_insn (target, op1);
      emit_label (lab);
      return target;
    case BIT_NOT_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* In case we have to reduce the result to bitfield precision
	 for unsigned bitfield expand this as XOR with a proper constant
	 instead.  */
      if (reduce_bit_field && TYPE_UNSIGNED (type))
	{
	  int_mode = SCALAR_INT_TYPE_MODE (type);
	  wide_int mask = wi::mask (TYPE_PRECISION (type),
				    false, GET_MODE_PRECISION (int_mode));

	  temp = expand_binop (int_mode, xor_optab, op0,
			       immed_wide_int_const (mask, int_mode),
			       target, 1, OPTAB_LIB_WIDEN);
	}
      else
	temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      goto binop;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
		  || type_has_mode_precision_p (type));
      /* fall through */

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      {
	/* If this is a fixed-point operation, then we cannot use the code
	   below because "expand_shift" doesn't support sat/no-sat fixed-point
	   shifts.  */
	if (ALL_FIXED_POINT_MODE_P (mode))
	  goto binop;

	if (! safe_from_p (subtarget, treeop1, 1))
	  subtarget = 0;
	if (modifier == EXPAND_STACK_PARM)
	  target = 0;
	op0 = expand_expr (treeop0, subtarget,
			   VOIDmode, EXPAND_NORMAL);

	/* Left shift optimization when shifting across word_size boundary.

	   If mode == GET_MODE_WIDER_MODE (word_mode), then normally
	   there isn't a native instruction to support this wide mode
	   left shift.  Given the scenario below:

	    Type A = (Type) B  << C

	    |<		 T	    >|
	    | dest_high  |  dest_low |

			 | word_size |

	   If the shift amount C causes part of B to be shifted across the
	   word size boundary, i.e. part of B is shifted into the high half
	   of the destination register while part of B remains in the low
	   half, then GCC will use the following left shift expand logic:

	   1. Initialize dest_low to B.
	   2. Initialize every bit of dest_high to the sign bit of B.
	   3. Logical left shift dest_low by C bits to finalize dest_low.
	      The value of dest_low before this shift is kept in a temp D.
	   4. Logical left shift dest_high by C.
	   5. Logical right shift D by (word_size - C).
	   6. Or the result of 4 and 5 to finalize dest_high.

	   While, by checking gimple statements, if operand B is
	   coming from signed extension, then we can simplify the above
	   expand logic into:

	      1. dest_high = src_low >> (word_size - C).
	      2. dest_low = src_low << C.

	   We can use one arithmetic right shift to finish all the
	   purpose of steps 2, 4, 5, 6, thus we reduce the steps
	   needed from 6 into 2.

	   The case is similar for zero extension, except that we
	   initialize dest_high to zero rather than copies of the sign
	   bit from B.  Furthermore, we need to use a logical right shift
	   in this case.

	   The choice of sign-extension versus zero-extension is
	   determined entirely by whether or not B is signed and is
	   independent of the current setting of unsignedp.  */

	temp = NULL_RTX;
	if (code == LSHIFT_EXPR
	    && target
	    && REG_P (target)
	    && GET_MODE_2XWIDER_MODE (word_mode).exists (&int_mode)
	    && mode == int_mode
	    && TREE_CONSTANT (treeop1)
	    && TREE_CODE (treeop0) == SSA_NAME)
	  {
	    gimple *def = SSA_NAME_DEF_STMT (treeop0);
	    if (is_gimple_assign (def)
		&& gimple_assign_rhs_code (def) == NOP_EXPR)
	      {
		scalar_int_mode rmode = SCALAR_INT_TYPE_MODE
		  (TREE_TYPE (gimple_assign_rhs1 (def)));

		if (GET_MODE_SIZE (rmode) < GET_MODE_SIZE (int_mode)
		    && TREE_INT_CST_LOW (treeop1) < GET_MODE_BITSIZE (word_mode)
		    && ((TREE_INT_CST_LOW (treeop1) + GET_MODE_BITSIZE (rmode))
			>= GET_MODE_BITSIZE (word_mode)))
		  {
		    rtx_insn *seq, *seq_old;
		    poly_uint64 high_off = subreg_highpart_offset (word_mode,
								   int_mode);
		    bool extend_unsigned
		      = TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (def)));
		    rtx low = lowpart_subreg (word_mode, op0, int_mode);
		    rtx dest_low = lowpart_subreg (word_mode, target, int_mode);
		    rtx dest_high = simplify_gen_subreg (word_mode, target,
							 int_mode, high_off);
		    HOST_WIDE_INT ramount = (BITS_PER_WORD
					     - TREE_INT_CST_LOW (treeop1));
		    tree rshift = build_int_cst (TREE_TYPE (treeop1), ramount);

		    start_sequence ();
		    /* dest_high = src_low >> (word_size - C).  */
		    temp = expand_variable_shift (RSHIFT_EXPR, word_mode, low,
						  rshift, dest_high,
						  extend_unsigned);
		    if (temp != dest_high)
		      emit_move_insn (dest_high, temp);

		    /* dest_low = src_low << C.  */
		    temp = expand_variable_shift (LSHIFT_EXPR, word_mode, low,
						  treeop1, dest_low, unsignedp);
		    if (temp != dest_low)
		      emit_move_insn (dest_low, temp);

		    seq = get_insns ();
		    end_sequence ();
		    temp = target;

		    if (have_insn_for (ASHIFT, int_mode))
		      {
			bool speed_p = optimize_insn_for_speed_p ();
			start_sequence ();
			rtx ret_old = expand_variable_shift (code, int_mode,
							     op0, treeop1,
							     target,
							     unsignedp);

			seq_old = get_insns ();
			end_sequence ();
			if (seq_cost (seq, speed_p)
			    >= seq_cost (seq_old, speed_p))
			  {
			    seq = seq_old;
			    temp = ret_old;
			  }
		      }
		    emit_insn (seq);
		  }
	      }
	  }

	if (temp == NULL_RTX)
	  temp = expand_variable_shift (code, mode, op0, treeop1, target,
					unsignedp);
	if (code == LSHIFT_EXPR)
	  temp = REDUCE_BIT_FIELD (temp);
	return temp;
      }
      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      {
	temp = do_store_flag (ops,
			      modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			      tmode != VOIDmode ? tmode : mode);
	if (temp)
	  return temp;

	/* Use a compare and a jump for BLKmode comparisons, or for function
	   type comparisons if have_canonicalize_funcptr_for_compare.  */

	if ((target == 0
	     || modifier == EXPAND_STACK_PARM
	     || ! safe_from_p (target, treeop0, 1)
	     || ! safe_from_p (target, treeop1, 1)
	     /* Make sure we don't have a hard reg (such as function's return
		value) live across basic blocks, if not optimizing.  */
	     || (!optimize && REG_P (target)
		 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	emit_move_insn (target, const0_rtx);

	rtx_code_label *lab1 = gen_label_rtx ();
	jumpifnot_1 (code, treeop0, treeop1, lab1,
		     profile_probability::uninitialized ());

	if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
	  emit_move_insn (target, constm1_rtx);
	else
	  emit_move_insn (target, const1_rtx);

	emit_label (lab1);
	return target;
      }
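      /* Editorial illustration (not from the original source): for a
	 hypothetical

	   int f (int a, int b) { return a < b; }

	 do_store_flag may emit a single set-on-condition instruction
	 after the compare.  The jump-based fallback above instead
	 materializes 0, conditionally skips, then stores 1 (or -1 for a
	 signed 1-bit type, whose only values are 0 and -1).  */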
    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
      op0 = expand_normal (treeop0);
      op1 = expand_normal (treeop1);

      if (!target)
	target = gen_reg_rtx (TYPE_MODE (type));
      else
	/* If target overlaps with op1, then either we need to force
	   op1 into a pseudo (if target also overlaps with op0),
	   or write the complex parts in reverse order.  */
	switch (GET_CODE (target))
	  {
	  case CONCAT:
	    if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
	      {
		if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
		  {
		  complex_expr_force_op1:
		    temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
		    emit_move_insn (temp, op1);
		    op1 = temp;
		    break;
		  }
	      complex_expr_swap_order:
		/* Move the imaginary (op1) and real (op0) parts to their
		   location.  */
		write_complex_part (target, op1, true);
		write_complex_part (target, op0, false);

		return target;
	      }
	    break;
	  case MEM:
	    temp = adjust_address_nv (target,
				      GET_MODE_INNER (GET_MODE (target)), 0);
	    if (reg_overlap_mentioned_p (temp, op1))
	      {
		scalar_mode imode = GET_MODE_INNER (GET_MODE (target));
		temp = adjust_address_nv (target, imode,
					  GET_MODE_SIZE (imode));
		if (reg_overlap_mentioned_p (temp, op0))
		  goto complex_expr_force_op1;
		goto complex_expr_swap_order;
	      }
	    break;
	  default:
	    if (reg_overlap_mentioned_p (target, op1))
	      {
		if (reg_overlap_mentioned_p (target, op0))
		  goto complex_expr_force_op1;
		goto complex_expr_swap_order;
	      }
	    break;
	  }

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);

      return target;
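      /* Editorial note (not from the original source): the overlap checks
	 above matter for self-referencing constructions such as a
	 hypothetical swap of a complex value's parts, where writing the
	 real part first would clobber an rtx still needed to compute the
	 imaginary part.  Writing the parts in reverse order, or forcing
	 op1 into a fresh pseudo, keeps both source values live.  */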
    case WIDEN_SUM_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
					    target, unsignedp);
	return target;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
      {
	op0 = expand_normal (treeop0);
	temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
					  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      {
	op0 = expand_normal (treeop0);
	/* The signedness is determined from input operand.  */
	temp = expand_widen_pattern_expr
	  (ops, op0, NULL_RTX, NULL_RTX,
	   target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));

	gcc_assert (temp);
	return temp;
      }

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
      target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
					  target, unsignedp);
      gcc_assert (target);
      return target;
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      mode = TYPE_MODE (TREE_TYPE (treeop0));
      goto binop;

    case VEC_PACK_TRUNC_EXPR:
      if (VECTOR_BOOLEAN_TYPE_P (type)
	  && VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (treeop0))
	  && mode == TYPE_MODE (TREE_TYPE (treeop0))
	  && SCALAR_INT_MODE_P (mode))
	{
	  struct expand_operand eops[4];
	  machine_mode imode = TYPE_MODE (TREE_TYPE (treeop0));
	  expand_operands (treeop0, treeop1,
			   subtarget, &op0, &op1, EXPAND_NORMAL);
	  this_optab = vec_pack_sbool_trunc_optab;
	  enum insn_code icode = optab_handler (this_optab, imode);
	  create_output_operand (&eops[0], target, mode);
	  create_convert_operand_from (&eops[1], op0, imode, false);
	  create_convert_operand_from (&eops[2], op1, imode, false);
	  temp = GEN_INT (TYPE_VECTOR_SUBPARTS (type).to_constant ());
	  create_input_operand (&eops[3], temp, imode);
	  expand_insn (icode, 4, eops);
	  return eops[0].value;
	}
      mode = TYPE_MODE (TREE_TYPE (treeop0));
      goto binop;

    case VEC_PACK_FLOAT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (treeop0));
      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, EXPAND_NORMAL);
      this_optab = optab_for_tree_code (code, TREE_TYPE (treeop0),
					optab_default);
      target = expand_binop (mode, this_optab, op0, op1, target,
			     TYPE_UNSIGNED (TREE_TYPE (treeop0)),
			     OPTAB_LIB_WIDEN);
      gcc_assert (target);
      return target;
    case VEC_PERM_EXPR:
      {
	expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
	vec_perm_builder sel;
	if (TREE_CODE (treeop2) == VECTOR_CST
	    && tree_to_vec_perm_builder (&sel, treeop2))
	  {
	    machine_mode sel_mode = TYPE_MODE (TREE_TYPE (treeop2));
	    temp = expand_vec_perm_const (mode, op0, op1, sel,
					  sel_mode, target);
	  }
	else
	  {
	    op2 = expand_normal (treeop2);
	    temp = expand_vec_perm_var (mode, op0, op1, op2, target);
	  }
	gcc_assert (temp);
	return temp;
      }
    case DOT_PROD_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;
	tree oprnd2 = treeop2;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	target = expand_widen_pattern_expr (ops, op0, op1, op2,
					    target, unsignedp);
	return target;
      }

    case SAD_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;
	tree oprnd2 = treeop2;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	target = expand_widen_pattern_expr (ops, op0, op1, op2,
					    target, unsignedp);
	return target;
      }

    case REALIGN_LOAD_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;
	tree oprnd2 = treeop2;

	this_optab = optab_for_tree_code (code, type, optab_default);
	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
				  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }
    case COND_EXPR:
      {
	/* A COND_EXPR with its type being VOID_TYPE represents a
	   conditional jump and is handled in
	   expand_gimple_cond_expr.  */
	gcc_assert (!VOID_TYPE_P (type));

	/* Note that COND_EXPRs whose type is a structure or union
	   are required to be constructed to contain assignments of
	   a temporary variable, so that we can evaluate them here
	   for side effect only.  If type is void, we must do likewise.  */

	gcc_assert (!TREE_ADDRESSABLE (type)
		    && !ignore
		    && TREE_TYPE (treeop1) != void_type_node
		    && TREE_TYPE (treeop2) != void_type_node);

	temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
	if (temp)
	  return temp;

	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (modifier != EXPAND_STACK_PARM
	    && original_target
	    && safe_from_p (original_target, treeop0, 1)
	    && GET_MODE (original_target) == mode
	    && !MEM_P (original_target))
	  temp = original_target;
	else
	  temp = assign_temp (type, 0, 1);

	do_pending_stack_adjust ();
	NO_DEFER_POP;
	rtx_code_label *lab0 = gen_label_rtx ();
	rtx_code_label *lab1 = gen_label_rtx ();
	jumpifnot (treeop0, lab0,
		   profile_probability::uninitialized ());
	store_expr (treeop1, temp,
		    modifier == EXPAND_STACK_PARM,
		    false, false);

	emit_jump_insn (targetm.gen_jump (lab1));
	emit_barrier ();
	emit_label (lab0);
	store_expr (treeop2, temp,
		    modifier == EXPAND_STACK_PARM,
		    false, false);

	emit_label (lab1);
	OK_DEFER_POP;
	return temp;
      }

    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
      return target;
    case VEC_DUPLICATE_EXPR:
      op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
      target = expand_vector_broadcast (mode, op0);
      gcc_assert (target);
      return target;

    case VEC_SERIES_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, modifier);
      return expand_vec_series_expr (mode, op0, op1, target);

    case BIT_INSERT_EXPR:
      {
	unsigned bitpos = tree_to_uhwi (treeop2);
	unsigned bitsize;
	if (INTEGRAL_TYPE_P (TREE_TYPE (treeop1)))
	  bitsize = TYPE_PRECISION (TREE_TYPE (treeop1));
	else
	  bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (treeop1)));
	rtx op0 = expand_normal (treeop0);
	rtx op1 = expand_normal (treeop1);
	rtx dst = gen_reg_rtx (mode);
	emit_move_insn (dst, op0);
	store_bit_field (dst, bitsize, bitpos, 0, 0,
			 TYPE_MODE (TREE_TYPE (treeop1)), op1, false);
	return dst;
      }
    default:
      gcc_unreachable ();
    }

  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (treeop0, treeop1,
		   subtarget, &op0, &op1, EXPAND_NORMAL);
 binop2:
  this_optab = optab_for_tree_code (code, type, optab_default);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  /* Bitwise operations do not need bitfield reduction as we expect their
     operands to be properly truncated.  */
  if (code == BIT_XOR_EXPR
      || code == BIT_AND_EXPR
      || code == BIT_IOR_EXPR)
    return temp;
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
/* Return TRUE if expression STMT is suitable for replacement.
   Never consider memory loads as replaceable, because those don't ever lead
   into constant expressions.  */

static bool
stmt_is_replaceable_p (gimple *stmt)
{
  if (ssa_is_replaceable_p (stmt))
    {
      /* Don't move around loads.  */
      if (!gimple_assign_single_p (stmt)
	  || is_gimple_val (gimple_assign_rhs1 (stmt)))
	return true;
    }

  return false;
}
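/* Editorial note (not from the original source): the load restriction
   above means that for gimple like

     tmp_1 = *p_2;        (a memory load: not replaceable)
     tmp_3 = tmp_1 + 4;   (register arithmetic: replaceable)

   the first statement is never forwarded into its use during expansion,
   since moving a load across other statements could change the value
   read; pure register arithmetic has no such hazard.  */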
rtx
expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl,
		    bool inner_reference_p)
{
  rtx op0, op1, temp, decl_rtl;
  tree type;
  int unsignedp;
  machine_mode mode, dmode;
  enum tree_code code = TREE_CODE (exp);
  rtx subtarget, original_target;
  int ignore;
  tree context;
  bool reduce_bit_field;
  location_t loc = EXPR_LOCATION (exp);
  struct separate_ops ops;
  tree treeop0, treeop1, treeop2;
  tree ssa_name = NULL_TREE;
  gimple *g;

  type = TREE_TYPE (exp);
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = treeop1 = treeop2 = NULL_TREE;
  if (!VL_EXP_CLASS_P (exp))
    switch (TREE_CODE_LENGTH (code))
      {
      default:
      case 3: treeop2 = TREE_OPERAND (exp, 2); /* FALLTHRU */
      case 2: treeop1 = TREE_OPERAND (exp, 1); /* FALLTHRU */
      case 1: treeop0 = TREE_OPERAND (exp, 0); /* FALLTHRU */
      case 0: break;
      }
  ops.code = code;
  ops.type = type;
  ops.op0 = treeop0;
  ops.op1 = treeop1;
  ops.op2 = treeop2;
  ops.location = loc;

  ignore = (target == const0_rtx
	    || ((CONVERT_EXPR_CODE_P (code)
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (!ignore
		      && INTEGRAL_TYPE_P (type)
		      && !type_has_mode_precision_p (type));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (MEM_P (temp))
	    copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == tcc_unary
	  || code == BIT_FIELD_REF
	  || code == COMPONENT_REF
	  || code == INDIRECT_REF)
	return expand_expr (treeop0, const0_rtx, VOIDmode,
			    modifier);

      else if (TREE_CODE_CLASS (code) == tcc_binary
	       || TREE_CODE_CLASS (code) == tcc_comparison
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);

	temp = label_rtx (exp);
	temp = gen_rtx_LABEL_REF (Pmode, temp);

	if (function != current_function_decl
	    && function != 0)
	  LABEL_REF_NONLOCAL_P (temp) = 1;

	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
	return temp;
      }
    case SSA_NAME:
      /* ??? ivopts calls expander, without any preparation from
	 out-of-ssa.  So fake instructions as if this was an access to the
	 base variable.  This unnecessarily allocates a pseudo, see how we can
	 reuse it, if partition base vars have it set already.  */
      if (!currently_expanding_to_rtl)
	{
	  tree var = SSA_NAME_VAR (exp);
	  if (var && DECL_RTL_SET_P (var))
	    return DECL_RTL (var);
	  return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
			      LAST_VIRTUAL_REGISTER + 1);
	}

      g = get_gimple_for_ssa_name (exp);
      /* For EXPAND_INITIALIZER try harder to get something simpler.  */
      if (g == NULL
	  && modifier == EXPAND_INITIALIZER
	  && !SSA_NAME_IS_DEFAULT_DEF (exp)
	  && (optimize || !SSA_NAME_VAR (exp)
	      || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
	  && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
	g = SSA_NAME_DEF_STMT (exp);
      if (g)
	{
	  rtx r;
	  location_t saved_loc = curr_insn_location ();
	  location_t loc = gimple_location (g);
	  if (loc != UNKNOWN_LOCATION)
	    set_curr_insn_location (loc);
	  ops.code = gimple_assign_rhs_code (g);
	  switch (get_gimple_rhs_class (ops.code))
	    {
	    case GIMPLE_TERNARY_RHS:
	      ops.op2 = gimple_assign_rhs3 (g);
	      /* FALLTHRU */
	    case GIMPLE_BINARY_RHS:
	      ops.op1 = gimple_assign_rhs2 (g);

	      /* Try to expand conditional compare.  */
	      if (targetm.gen_ccmp_first)
		{
		  gcc_checking_assert (targetm.gen_ccmp_next != NULL);
		  r = expand_ccmp_expr (g, mode);
		  if (r)
		    break;
		}
	      /* FALLTHRU */
	    case GIMPLE_UNARY_RHS:
	      ops.op0 = gimple_assign_rhs1 (g);
	      ops.type = TREE_TYPE (gimple_assign_lhs (g));
	      ops.location = loc;
	      r = expand_expr_real_2 (&ops, target, tmode, modifier);
	      break;
	    case GIMPLE_SINGLE_RHS:
	      {
		r = expand_expr_real (gimple_assign_rhs1 (g), target,
				      tmode, modifier, alt_rtl,
				      inner_reference_p);
		break;
	      }
	    default:
	      gcc_unreachable ();
	    }
	  set_curr_insn_location (saved_loc);
	  if (REG_P (r) && !REG_EXPR (r))
	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
	  return r;
	}

      ssa_name = exp;
      decl_rtl = get_rtx_for_ssa_name (ssa_name);
      exp = SSA_NAME_VAR (ssa_name);
      goto expand_decl_rtl;
    case PARM_DECL:
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	layout_decl (exp, 0);

      /* fall through */

    case FUNCTION_DECL:
    case RESULT_DECL:
      decl_rtl = DECL_RTL (exp);
    expand_decl_rtl:
      gcc_assert (decl_rtl);

      /* DECL_MODE might change when TYPE_MODE depends on attribute target
	 settings for VECTOR_TYPE_P that might switch for the function.  */
      if (currently_expanding_to_rtl
	  && code == VAR_DECL && MEM_P (decl_rtl)
	  && VECTOR_TYPE_P (type) && exp && DECL_MODE (exp) != mode)
	decl_rtl = change_address (decl_rtl, TYPE_MODE (type), 0);
      else
	decl_rtl = copy_rtx (decl_rtl);

      /* Record writes to register variables.  */
      if (modifier == EXPAND_WRITE
	  && REG_P (decl_rtl)
	  && HARD_REGISTER_P (decl_rtl))
	add_to_hard_reg_set (&crtl->asm_clobbers,
			     GET_MODE (decl_rtl), REGNO (decl_rtl));

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (exp)
	TREE_USED (exp) = 1;

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Variables inherited from containing functions should have
	 been lowered by this point.  */
      if (exp)
	context = decl_function_context (exp);
      gcc_assert (!exp
		  || SCOPE_FILE_SCOPE_P (context)
		  || context == current_function_decl
		  || TREE_STATIC (exp)
		  || DECL_EXTERNAL (exp)
		  /* ??? C++ creates functions that are not TREE_STATIC.  */
		  || TREE_CODE (exp) == FUNCTION_DECL);

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 ??? We aren't parsing while expanding anymore.  */

      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
	temp = validize_mem (decl_rtl);

      /* If DECL_RTL is memory, we are in the normal case and the
	 address is not valid, get the address into a register.  */

      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
	{
	  if (alt_rtl)
	    *alt_rtl = decl_rtl;
	  decl_rtl = use_anchored_address (decl_rtl);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_SUM
	      && !memory_address_addr_space_p (exp ? DECL_MODE (exp)
					       : GET_MODE (decl_rtl),
					       XEXP (decl_rtl, 0),
					       MEM_ADDR_SPACE (decl_rtl)))
	    temp = replace_equiv_address (decl_rtl,
					  copy_rtx (XEXP (decl_rtl, 0)));
	}

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (exp && MEM_P (temp) && REG_P (XEXP (temp, 0)))
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}
      if (exp)
        dmode = DECL_MODE (exp);
      else
        dmode = TYPE_MODE (TREE_TYPE (ssa_name));

      /* If the mode of DECL_RTL does not match that of the decl,
         there are two cases: we are dealing with a BLKmode value
         that is returned in a register, or we are dealing with
         a promoted value.  In the latter case, return a SUBREG
         of the wanted mode, but mark it so that we know that it
         was already extended.  */
      if (REG_P (decl_rtl)
          && dmode != BLKmode
          && GET_MODE (decl_rtl) != dmode)
        {
          machine_mode pmode;

          /* Get the signedness to be used for this variable.  Ensure we get
             the same mode we got when the variable was declared.  */
          if (code != SSA_NAME)
            pmode = promote_decl_mode (exp, &unsignedp);
          else if ((g = SSA_NAME_DEF_STMT (ssa_name))
                   && gimple_code (g) == GIMPLE_CALL
                   && !gimple_call_internal_p (g))
            pmode = promote_function_mode (type, mode, &unsignedp,
                                           gimple_call_fntype (g), 2);
          else
            pmode = promote_ssa_mode (ssa_name, &unsignedp);
          gcc_assert (GET_MODE (decl_rtl) == pmode);

          temp = gen_lowpart_SUBREG (mode, decl_rtl);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_SET (temp, unsignedp);
          return temp;
        }

      return decl_rtl;
    case INTEGER_CST:
      {
        /* Given that TYPE_PRECISION (type) is not always equal to
           GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
           the former to the latter according to the signedness of the
           type.  */
        scalar_int_mode mode = SCALAR_INT_TYPE_MODE (type);
        temp = immed_wide_int_const
          (wi::to_wide (exp, GET_MODE_PRECISION (mode)), mode);
        return temp;
      }

    case VECTOR_CST:
      {
        tree tmp = NULL_TREE;
        if (VECTOR_MODE_P (mode))
          return const_vector_from_tree (exp);
        scalar_int_mode int_mode;
        if (is_int_mode (mode, &int_mode))
          {
            if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
              return const_scalar_mask_from_tree (int_mode, exp);
            else
              {
                tree type_for_mode
                  = lang_hooks.types.type_for_mode (int_mode, 1);
                if (type_for_mode)
                  tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR,
                                        type_for_mode, exp);
              }
          }
        if (!tmp)
          {
            vec<constructor_elt, va_gc> *v;
            /* Constructors need to be fixed-length.  FIXME.  */
            unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
            vec_alloc (v, nunits);
            for (unsigned int i = 0; i < nunits; ++i)
              CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
            tmp = build_constructor (type, v);
          }
        return expand_expr (tmp, ignore ? const0_rtx : target,
                            tmode, modifier);
      }
    case CONST_DECL:
      if (modifier == EXPAND_WRITE)
        {
          /* Writing into CONST_DECL is always invalid, but handle it
             gracefully.  */
          addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
          scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
          op0 = expand_expr_addr_expr_1 (exp, NULL_RTX, address_mode,
                                         EXPAND_NORMAL, as);
          op0 = memory_address_addr_space (mode, op0, as);
          temp = gen_rtx_MEM (mode, op0);
          set_mem_addr_space (temp, as);
          return temp;
        }
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
         which will be turned into memory by reload if necessary.

         We used to force a register so that loop.c could see it.  But
         this does not allow gen_* patterns to perform optimizations with
         the constants.  It also produces two insns in cases like "x = 1.0;".
         On most machines, floating-point constants are not permitted in
         many insns, so we'd end up copying it to a register in any case.

         Now, we do the copying in expand_binop, if appropriate.  */
      return const_double_from_real_value (TREE_REAL_CST (exp),
                                           TYPE_MODE (TREE_TYPE (exp)));

    case FIXED_CST:
      return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
                                           TYPE_MODE (TREE_TYPE (exp)));
    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
        {
          machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
          rtx rtarg, itarg;

          rtarg = XEXP (original_target, 0);
          itarg = XEXP (original_target, 1);

          /* Move the real and imaginary parts separately.  */
          op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
          op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);

          if (op0 != rtarg)
            emit_move_insn (rtarg, op0);
          if (op1 != itarg)
            emit_move_insn (itarg, op1);

          return original_target;
        }

      /* fall through */
    case STRING_CST:
      temp = expand_expr_constant (exp, 1, modifier);

      /* temp contains a constant address.
         On RISC machines where a constant address isn't valid,
         make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM
          && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
                                            MEM_ADDR_SPACE (temp)))
        return replace_equiv_address (temp,
                                      copy_rtx (XEXP (temp, 0)));
      return temp;

    case POLY_INT_CST:
      return immed_wide_int_const (poly_int_cst_value (exp), mode);
    case SAVE_EXPR:
      {
        tree val = treeop0;
        rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
                                      inner_reference_p);

        if (!SAVE_EXPR_RESOLVED_P (exp))
          {
            /* We can indeed still hit this case, typically via builtin
               expanders calling save_expr immediately before expanding
               something.  Assume this means that we only have to deal
               with non-BLKmode values.  */
            gcc_assert (GET_MODE (ret) != BLKmode);

            val = build_decl (curr_insn_location (),
                              VAR_DECL, NULL, TREE_TYPE (exp));
            DECL_ARTIFICIAL (val) = 1;
            DECL_IGNORED_P (val) = 1;
            treeop0 = val;
            TREE_OPERAND (exp, 0) = treeop0;
            SAVE_EXPR_RESOLVED_P (exp) = 1;

            if (!CONSTANT_P (ret))
              ret = copy_to_reg (ret);
            SET_DECL_RTL (val, ret);
          }

        return ret;
      }
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
         subexpressions.  */
      if (ignore)
        {
          unsigned HOST_WIDE_INT idx;
          tree value;

          FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
            expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);

          return const0_rtx;
        }

      return expand_constructor (exp, target, modifier, false);
    case TARGET_MEM_REF:
      {
        addr_space_t as
          = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
        enum insn_code icode;
        unsigned int align;

        op0 = addr_for_mem_ref (exp, as, true);
        op0 = memory_address_addr_space (mode, op0, as);
        temp = gen_rtx_MEM (mode, op0);
        set_mem_attributes (temp, exp, 0);
        set_mem_addr_space (temp, as);
        align = get_object_alignment (exp);
        if (modifier != EXPAND_WRITE
            && modifier != EXPAND_MEMORY
            && mode != BLKmode
            && align < GET_MODE_ALIGNMENT (mode)
            /* If the target does not have special handling for unaligned
               loads of mode then it can use regular moves for them.  */
            && ((icode = optab_handler (movmisalign_optab, mode))
                != CODE_FOR_nothing))
          {
            struct expand_operand ops[2];

            /* We've already validated the memory, and we're creating a
               new pseudo destination.  The predicates really can't fail,
               nor can the generator.  */
            create_output_operand (&ops[0], NULL_RTX, mode);
            create_fixed_operand (&ops[1], temp);
            expand_insn (icode, 2, ops);
            temp = ops[0].value;
          }
        return temp;
      }
    case MEM_REF:
      {
        const bool reverse = REF_REVERSE_STORAGE_ORDER (exp);
        addr_space_t as
          = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
        machine_mode address_mode;
        tree base = TREE_OPERAND (exp, 0);
        gimple *def_stmt;
        enum insn_code icode;
        unsigned int align;
        /* Handle expansion of non-aliased memory with non-BLKmode.  That
           might end up in a register.  */
        if (mem_ref_refers_to_non_mem_p (exp))
          {
            poly_int64 offset = mem_ref_offset (exp).force_shwi ();
            base = TREE_OPERAND (base, 0);
            poly_uint64 type_size;
            if (known_eq (offset, 0)
                && !reverse
                && poly_int_tree_p (TYPE_SIZE (type), &type_size)
                && known_eq (GET_MODE_BITSIZE (DECL_MODE (base)), type_size))
              return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
                                  target, tmode, modifier);
            if (TYPE_MODE (type) == BLKmode)
              {
                temp = assign_stack_temp (DECL_MODE (base),
                                          GET_MODE_SIZE (DECL_MODE (base)));
                store_expr (base, temp, 0, false, false);
                temp = adjust_address (temp, BLKmode, offset);
                set_mem_size (temp, int_size_in_bytes (type));
                return temp;
              }
            exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
                          bitsize_int (offset * BITS_PER_UNIT));
            REF_REVERSE_STORAGE_ORDER (exp) = reverse;
            return expand_expr (exp, target, tmode, modifier);
          }
        address_mode = targetm.addr_space.address_mode (as);
        base = TREE_OPERAND (exp, 0);
        if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
          {
            tree mask = gimple_assign_rhs2 (def_stmt);
            base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
                           gimple_assign_rhs1 (def_stmt), mask);
            TREE_OPERAND (exp, 0) = base;
          }
        align = get_object_alignment (exp);
        op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
        op0 = memory_address_addr_space (mode, op0, as);
        if (!integer_zerop (TREE_OPERAND (exp, 1)))
          {
            rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
            op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
            op0 = memory_address_addr_space (mode, op0, as);
          }
        temp = gen_rtx_MEM (mode, op0);
        set_mem_attributes (temp, exp, 0);
        set_mem_addr_space (temp, as);
        if (TREE_THIS_VOLATILE (exp))
          MEM_VOLATILE_P (temp) = 1;
        if (modifier != EXPAND_WRITE
            && modifier != EXPAND_MEMORY
            && !inner_reference_p
            && mode != BLKmode
            && align < GET_MODE_ALIGNMENT (mode))
          {
            if ((icode = optab_handler (movmisalign_optab, mode))
                != CODE_FOR_nothing)
              {
                struct expand_operand ops[2];

                /* We've already validated the memory, and we're creating a
                   new pseudo destination.  The predicates really can't fail,
                   nor can the generator.  */
                create_output_operand (&ops[0], NULL_RTX, mode);
                create_fixed_operand (&ops[1], temp);
                expand_insn (icode, 2, ops);
                temp = ops[0].value;
              }
            else if (targetm.slow_unaligned_access (mode, align))
              temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
                                        0, TYPE_UNSIGNED (TREE_TYPE (exp)),
                                        (modifier == EXPAND_STACK_PARM
                                         ? NULL_RTX : target),
                                        mode, mode, false, alt_rtl);
          }
        if (reverse
            && modifier != EXPAND_MEMORY
            && modifier != EXPAND_WRITE)
          temp = flip_storage_order (mode, temp);
        return temp;
      }
    case ARRAY_REF:

      {
        tree array = treeop0;
        tree index = treeop1;
        tree init;

        /* Fold an expression like: "foo"[2].
           This is not done in fold so it won't happen inside &.
           Don't fold if this is for wide characters since it's too
           difficult to do correctly and this is a very rare case.  */

        if (modifier != EXPAND_CONST_ADDRESS
            && modifier != EXPAND_INITIALIZER
            && modifier != EXPAND_MEMORY)
          {
            tree t = fold_read_from_constant_string (exp);

            if (t)
              return expand_expr (t, target, tmode, modifier);
          }

        /* If this is a constant index into a constant array,
           just get the value from the array.  Handle both the cases when
           we have an explicit constructor and when our operand is a variable
           that was declared const.  */

        if (modifier != EXPAND_CONST_ADDRESS
            && modifier != EXPAND_INITIALIZER
            && modifier != EXPAND_MEMORY
            && TREE_CODE (array) == CONSTRUCTOR
            && ! TREE_SIDE_EFFECTS (array)
            && TREE_CODE (index) == INTEGER_CST)
          {
            unsigned HOST_WIDE_INT ix;
            tree field, value;

            FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
                                      field, value)
              if (tree_int_cst_equal (field, index))
                {
                  if (!TREE_SIDE_EFFECTS (value))
                    return expand_expr (fold (value), target, tmode, modifier);
                  break;
                }
          }

        else if (optimize >= 1
                 && modifier != EXPAND_CONST_ADDRESS
                 && modifier != EXPAND_INITIALIZER
                 && modifier != EXPAND_MEMORY
                 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
                 && TREE_CODE (index) == INTEGER_CST
                 && (VAR_P (array) || TREE_CODE (array) == CONST_DECL)
                 && (init = ctor_for_folding (array)) != error_mark_node)
          {
            if (init == NULL_TREE)
              {
                tree value = build_zero_cst (type);
                if (TREE_CODE (value) == CONSTRUCTOR)
                  {
                    /* If VALUE is a CONSTRUCTOR, this optimization is only
                       useful if this doesn't store the CONSTRUCTOR into
                       memory.  If it does, it is more efficient to just
                       load the data from the array directly.  */
                    rtx ret = expand_constructor (value, target,
                                                  modifier, true);
                    if (ret == NULL_RTX)
                      value = NULL_TREE;
                  }

                if (value)
                  return expand_expr (value, target, tmode, modifier);
              }
            else if (TREE_CODE (init) == CONSTRUCTOR)
              {
                unsigned HOST_WIDE_INT ix;
                tree field, value;

                FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
                                          field, value)
                  if (tree_int_cst_equal (field, index))
                    {
                      if (TREE_SIDE_EFFECTS (value))
                        break;

                      if (TREE_CODE (value) == CONSTRUCTOR)
                        {
                          /* If VALUE is a CONSTRUCTOR, this
                             optimization is only useful if
                             this doesn't store the CONSTRUCTOR
                             into memory.  If it does, it is more
                             efficient to just load the data from
                             the array directly.  */
                          rtx ret = expand_constructor (value, target,
                                                        modifier, true);
                          if (ret == NULL_RTX)
                            break;
                        }

                      return
                        expand_expr (fold (value), target, tmode, modifier);
                    }
              }
            else if (TREE_CODE (init) == STRING_CST)
              {
                tree low_bound = array_ref_low_bound (exp);
                tree index1 = fold_convert_loc (loc, sizetype, treeop1);

                /* Optimize the special case of a zero lower bound.

                   We convert the lower bound to sizetype to avoid problems
                   with constant folding.  E.g. suppose the lower bound is
                   1 and its mode is QI.  Without the conversion
                      (ARRAY + (INDEX - (unsigned char)1))
                   becomes
                      (ARRAY + (-(unsigned char)1) + INDEX)
                   which becomes
                      (ARRAY + 255 + INDEX).  Oops!  */
                if (!integer_zerop (low_bound))
                  index1 = size_diffop_loc (loc, index1,
                                            fold_convert_loc (loc, sizetype,
                                                              low_bound));

                if (tree_fits_uhwi_p (index1)
                    && compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
                  {
                    tree type = TREE_TYPE (TREE_TYPE (init));
                    scalar_int_mode mode;

                    if (is_int_mode (TYPE_MODE (type), &mode)
                        && GET_MODE_SIZE (mode) == 1)
                      return gen_int_mode (TREE_STRING_POINTER (init)
                                           [TREE_INT_CST_LOW (index1)],
                                           mode);
                  }
              }
          }
      }
      goto normal_inner_ref;
    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
         appropriate field if it is present.  */
      if (TREE_CODE (treeop0) == CONSTRUCTOR)
        {
          unsigned HOST_WIDE_INT idx;
          tree field, value;
          scalar_int_mode field_mode;

          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
                                    idx, field, value)
            if (field == treeop1
                /* We can normally use the value of the field in the
                   CONSTRUCTOR.  However, if this is a bitfield in
                   an integral mode that we can fit in a HOST_WIDE_INT,
                   we must mask only the number of bits in the bitfield,
                   since this is done implicitly by the constructor.  If
                   the bitfield does not meet either of those conditions,
                   we can't do this optimization.  */
                && (! DECL_BIT_FIELD (field)
                    || (is_int_mode (DECL_MODE (field), &field_mode)
                        && (GET_MODE_PRECISION (field_mode)
                            <= HOST_BITS_PER_WIDE_INT))))
              {
                if (DECL_BIT_FIELD (field)
                    && modifier == EXPAND_STACK_PARM)
                  target = 0;
                op0 = expand_expr (value, target, tmode, modifier);
                if (DECL_BIT_FIELD (field))
                  {
                    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
                    scalar_int_mode imode
                      = SCALAR_INT_TYPE_MODE (TREE_TYPE (field));

                    if (TYPE_UNSIGNED (TREE_TYPE (field)))
                      {
                        op1 = gen_int_mode ((HOST_WIDE_INT_1 << bitsize) - 1,
                                            imode);
                        op0 = expand_and (imode, op0, op1, target);
                      }
                    else
                      {
                        int count = GET_MODE_PRECISION (imode) - bitsize;

                        op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                        op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                      }
                  }

                return op0;
              }
        }
      goto normal_inner_ref;
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
    normal_inner_ref:
      {
        machine_mode mode1, mode2;
        poly_int64 bitsize, bitpos, bytepos;
        tree offset;
        int reversep, volatilep = 0, must_force_mem;
        tree tem
          = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
                                 &unsignedp, &reversep, &volatilep);
        rtx orig_op0, memloc;
        bool clear_mem_expr = false;

        /* If we got back the original object, something is wrong.  Perhaps
           we are evaluating an expression too early.  In any event, don't
           infinitely recurse.  */
        gcc_assert (tem != exp);

        /* If TEM's type is a union of variable size, pass TARGET to the inner
           computation, since it will need a temporary and TARGET is known
           to have to do.  This occurs in unchecked conversion in Ada.  */
        orig_op0 = op0
          = expand_expr_real (tem,
                              (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
                               && COMPLETE_TYPE_P (TREE_TYPE (tem))
                               && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
                                   != INTEGER_CST)
                               && modifier != EXPAND_STACK_PARM
                               ? target : NULL_RTX),
                              VOIDmode,
                              modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
                              NULL, true);

        /* If the field has a mode, we want to access it in the
           field's mode, not the computed mode.
           If a MEM has VOIDmode (external with incomplete type),
           use BLKmode for it instead.  */
        if (MEM_P (op0))
          {
            if (mode1 != VOIDmode)
              op0 = adjust_address (op0, mode1, 0);
            else if (GET_MODE (op0) == VOIDmode)
              op0 = adjust_address (op0, BLKmode, 0);
          }

        mode2
          = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);

        /* Make sure bitpos is not negative, it can wreak havoc later.  */
        if (maybe_lt (bitpos, 0))
          {
            gcc_checking_assert (offset == NULL_TREE);
            offset = size_int (bits_to_bytes_round_down (bitpos));
            bitpos = num_trailing_bits (bitpos);
          }

        /* If we have either an offset, a BLKmode result, or a reference
           outside the underlying object, we must force it to memory.
           Such a case can occur in Ada if we have unchecked conversion
           of an expression from a scalar type to an aggregate type or
           for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
           passed a partially uninitialized object or a view-conversion
           to a larger size.  */
        must_force_mem = (offset
                          || mode1 == BLKmode
                          || (mode == BLKmode
                              && !int_mode_for_size (bitsize, 1).exists ())
                          || maybe_gt (bitpos + bitsize,
                                       GET_MODE_BITSIZE (mode2)));

        /* Handle CONCAT first.  */
        if (GET_CODE (op0) == CONCAT && !must_force_mem)
          {
            if (known_eq (bitpos, 0)
                && known_eq (bitsize, GET_MODE_BITSIZE (GET_MODE (op0)))
                && COMPLEX_MODE_P (mode1)
                && COMPLEX_MODE_P (GET_MODE (op0))
                && (GET_MODE_PRECISION (GET_MODE_INNER (mode1))
                    == GET_MODE_PRECISION (GET_MODE_INNER (GET_MODE (op0)))))
              {
                if (reversep)
                  op0 = flip_storage_order (GET_MODE (op0), op0);
                if (mode1 != GET_MODE (op0))
                  {
                    rtx parts[2];
                    for (int i = 0; i < 2; i++)
                      {
                        rtx op = read_complex_part (op0, i != 0);
                        if (GET_CODE (op) == SUBREG)
                          op = force_reg (GET_MODE (op), op);
                        rtx temp = gen_lowpart_common (GET_MODE_INNER (mode1),
                                                       op);
                        if (temp)
                          op = temp;
                        else
                          {
                            if (!REG_P (op) && !MEM_P (op))
                              op = force_reg (GET_MODE (op), op);
                            op = gen_lowpart (GET_MODE_INNER (mode1), op);
                          }
                        parts[i] = op;
                      }
                    op0 = gen_rtx_CONCAT (mode1, parts[0], parts[1]);
                  }
                return op0;
              }
            if (known_eq (bitpos, 0)
                && known_eq (bitsize,
                             GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))))
                && maybe_ne (bitsize, 0))
              {
                op0 = XEXP (op0, 0);
                mode2 = GET_MODE (op0);
              }
            else if (known_eq (bitpos,
                               GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))))
                     && known_eq (bitsize,
                                  GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1))))
                     && maybe_ne (bitpos, 0)
                     && maybe_ne (bitsize, 0))
              {
                op0 = XEXP (op0, 1);
                bitpos = 0;
                mode2 = GET_MODE (op0);
              }
            else
              /* Otherwise force into memory.  */
              must_force_mem = 1;
          }

        /* If this is a constant, put it in a register if it is a legitimate
           constant and we don't need a memory reference.  */
        if (CONSTANT_P (op0)
            && mode2 != BLKmode
            && targetm.legitimate_constant_p (mode2, op0)
            && !must_force_mem)
          op0 = force_reg (mode2, op0);

        /* Otherwise, if this is a constant, try to force it to the constant
           pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
           is a legitimate constant.  */
        else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
          op0 = validize_mem (memloc);

        /* Otherwise, if this is a constant or the object is not in memory
           and need be, put it there.  */
        else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
          {
            memloc = assign_temp (TREE_TYPE (tem), 1, 1);
            emit_move_insn (memloc, op0);
            op0 = memloc;
            clear_mem_expr = true;
          }

        if (offset)
          {
            machine_mode address_mode;
            rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
                                          EXPAND_SUM);

            gcc_assert (MEM_P (op0));

            address_mode = get_address_mode (op0);
            if (GET_MODE (offset_rtx) != address_mode)
              {
                /* We cannot be sure that the RTL in offset_rtx is valid outside
                   of a memory address context, so force it into a register
                   before attempting to convert it to the desired mode.  */
                offset_rtx = force_operand (offset_rtx, NULL_RTX);
                offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
              }

            /* See the comment in expand_assignment for the rationale.  */
            if (mode1 != VOIDmode
                && maybe_ne (bitpos, 0)
                && maybe_gt (bitsize, 0)
                && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
                && multiple_p (bitpos, bitsize)
                && multiple_p (bitsize, GET_MODE_ALIGNMENT (mode1))
                && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
              {
                op0 = adjust_address (op0, mode1, bytepos);
                bitpos = 0;
              }

            op0 = offset_address (op0, offset_rtx,
                                  highest_pow2_factor (offset));
          }

        /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
           record its alignment as BIGGEST_ALIGNMENT.  */
        if (MEM_P (op0)
            && known_eq (bitpos, 0)
            && offset != 0
            && is_aligning_offset (offset, tem))
          set_mem_align (op0, BIGGEST_ALIGNMENT);

        /* Don't forget about volatility even if this is a bitfield.  */
        if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
          {
            if (op0 == orig_op0)
              op0 = copy_rtx (op0);

            MEM_VOLATILE_P (op0) = 1;
          }

        /* In cases where an aligned union has an unaligned object
           as a field, we might be extracting a BLKmode value from
           an integer-mode (e.g., SImode) object.  Handle this case
           by doing the extract into an object as wide as the field
           (which we know to be the width of a basic mode), then
           storing into memory, and changing the mode to BLKmode.  */
        if (mode1 == VOIDmode
            || REG_P (op0) || GET_CODE (op0) == SUBREG
            || (mode1 != BLKmode && ! direct_load[(int) mode1]
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
                && modifier != EXPAND_CONST_ADDRESS
                && modifier != EXPAND_INITIALIZER
                && modifier != EXPAND_MEMORY)
            /* If the bitfield is volatile and the bitsize
               is narrower than the access size of the bitfield,
               we need to extract bitfields from the access.  */
            || (volatilep && TREE_CODE (exp) == COMPONENT_REF
                && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
                && mode1 != BLKmode
                && maybe_lt (bitsize, GET_MODE_SIZE (mode1) * BITS_PER_UNIT))
            /* If the field isn't aligned enough to fetch as a memref,
               fetch it as a bit field.  */
            || (mode1 != BLKmode
                && (((MEM_P (op0)
                      ? MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
                        || !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode1))
                      : TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
                        || !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode)))
                     && modifier != EXPAND_MEMORY
                     && ((modifier == EXPAND_CONST_ADDRESS
                          || modifier == EXPAND_INITIALIZER)
                         ? STRICT_ALIGNMENT
                         : targetm.slow_unaligned_access (mode1,
                                                          MEM_ALIGN (op0))))
                    || !multiple_p (bitpos, BITS_PER_UNIT)))
            /* If the type and the field are a constant size and the
               size of the type isn't the same size as the bitfield,
               we must use bitfield operations.  */
            || (known_size_p (bitsize)
                && TYPE_SIZE (TREE_TYPE (exp))
                && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
                && maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp))),
                             bitsize)))
          {
            machine_mode ext_mode = mode;

            if (ext_mode == BLKmode
                && ! (target != 0 && MEM_P (op0)
                      && MEM_P (target)
                      && multiple_p (bitpos, BITS_PER_UNIT)))
              ext_mode = int_mode_for_size (bitsize, 1).else_blk ();

            if (ext_mode == BLKmode)
              {
                if (target == 0)
                  target = assign_temp (type, 1, 1);

                /* ??? Unlike the similar test a few lines below, this one is
                   very likely obsolete.  */
                if (known_eq (bitsize, 0))
                  return target;

                /* In this case, BITPOS must start at a byte boundary and
                   TARGET, if specified, must be a MEM.  */
                gcc_assert (MEM_P (op0)
                            && (!target || MEM_P (target)));

                bytepos = exact_div (bitpos, BITS_PER_UNIT);
                poly_int64 bytesize = bits_to_bytes_round_up (bitsize);
                emit_block_move (target,
                                 adjust_address (op0, VOIDmode, bytepos),
                                 gen_int_mode (bytesize, Pmode),
                                 (modifier == EXPAND_STACK_PARM
                                  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

                return target;
              }

            /* If we have nothing to extract, the result will be 0 for targets
               with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise.  Always
               return 0 for the sake of consistency, as reading a zero-sized
               bitfield is valid in Ada and the value is fully specified.  */
            if (known_eq (bitsize, 0))
              return const0_rtx;

            op0 = validize_mem (op0);

            if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
              mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

            /* If the result has a record type and the extraction is done in
               an integral mode, then the field may be not aligned on a byte
               boundary; in this case, if it has reverse storage order, it
               needs to be extracted as a scalar field with reverse storage
               order and put back into memory order afterwards.  */
            if (TREE_CODE (type) == RECORD_TYPE
                && GET_MODE_CLASS (ext_mode) == MODE_INT)
              reversep = TYPE_REVERSE_STORAGE_ORDER (type);

            gcc_checking_assert (known_ge (bitpos, 0));
            op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
                                     (modifier == EXPAND_STACK_PARM
                                      ? NULL_RTX : target),
                                     ext_mode, ext_mode, reversep, alt_rtl);

            /* If the result has a record type and the mode of OP0 is an
               integral mode then, if BITSIZE is narrower than this mode
               and this is for big-endian data, we must put the field
               into the high-order bits.  And we must also put it back
               into memory order if it has been previously reversed.  */
            scalar_int_mode op0_mode;
            if (TREE_CODE (type) == RECORD_TYPE
                && is_int_mode (GET_MODE (op0), &op0_mode))
              {
                HOST_WIDE_INT size = GET_MODE_BITSIZE (op0_mode);

                gcc_checking_assert (known_le (bitsize, size));
                if (maybe_lt (bitsize, size)
                    && reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
                  op0 = expand_shift (LSHIFT_EXPR, op0_mode, op0,
                                      size - bitsize, op0, 1);

                if (reversep)
                  op0 = flip_storage_order (op0_mode, op0);
              }

            /* If the result type is BLKmode, store the data into a temporary
               of the appropriate type, but with the mode corresponding to the
               mode for the data we have (op0's mode).  */
            if (mode == BLKmode)
              {
                rtx new_rtx
                  = assign_stack_temp_for_type (ext_mode,
                                                GET_MODE_BITSIZE (ext_mode),
                                                type);
                emit_move_insn (new_rtx, op0);
                op0 = copy_rtx (new_rtx);
                PUT_MODE (op0, BLKmode);
              }

            return op0;
          }

        /* If the result is BLKmode, use that to access the object
           now as well.  */
        if (mode == BLKmode)
          mode1 = BLKmode;

        /* Get a reference to just this component.  */
        bytepos = bits_to_bytes_round_down (bitpos);
        if (modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          op0 = adjust_address_nv (op0, mode1, bytepos);
        else
          op0 = adjust_address (op0, mode1, bytepos);

        if (op0 == orig_op0)
          op0 = copy_rtx (op0);

        /* Don't set memory attributes if the base expression is
           SSA_NAME that got expanded as a MEM.  In that case, we should
           just honor its original memory attributes.  */
        if (TREE_CODE (tem) != SSA_NAME || !MEM_P (orig_op0))
          set_mem_attributes (op0, exp, 0);

        if (REG_P (XEXP (op0, 0)))
          mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

        /* If op0 is a temporary because the original expressions was forced
           to memory, clear MEM_EXPR so that the original expression cannot
           be marked as addressable through MEM_EXPR of the temporary.  */
        if (clear_mem_expr)
          set_mem_expr (op0, NULL_TREE);

        MEM_VOLATILE_P (op0) |= volatilep;

        if (reversep
            && modifier != EXPAND_MEMORY
            && modifier != EXPAND_WRITE)
          op0 = flip_storage_order (mode1, op0);

        if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
            || modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_INITIALIZER)
          return op0;

        if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        convert_move (target, op0, unsignedp);
        return target;
      }
    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);

    case CALL_EXPR:
      /* All valid uses of __builtin_va_arg_pack () are removed during
         inlining.  */
      if (CALL_EXPR_VA_ARG_PACK (exp))
        error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      {
        tree fndecl = get_callee_fndecl (exp), attr;

        if (fndecl
            /* Don't diagnose the error attribute in thunks, those are
               artificially created.  */
            && !CALL_FROM_THUNK_P (exp)
            && (attr = lookup_attribute ("error",
                                         DECL_ATTRIBUTES (fndecl))) != NULL)
          {
            const char *ident = lang_hooks.decl_printable_name (fndecl, 1);
            error ("%Kcall to %qs declared with attribute error: %s", exp,
                   identifier_to_locale (ident),
                   TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
          }
        if (fndecl
            /* Don't diagnose the warning attribute in thunks, those are
               artificially created.  */
            && !CALL_FROM_THUNK_P (exp)
            && (attr = lookup_attribute ("warning",
                                         DECL_ATTRIBUTES (fndecl))) != NULL)
          {
            const char *ident = lang_hooks.decl_printable_name (fndecl, 1);
            warning_at (tree_nonartificial_location (exp),
                        OPT_Wattribute_warning,
                        "%Kcall to %qs declared with attribute warning: %s",
                        exp, identifier_to_locale (ident),
                        TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
          }

        /* Check for a built-in function.  */
        if (fndecl && fndecl_built_in_p (fndecl))
          {
            gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
            return expand_builtin (exp, target, subtarget, tmode, ignore);
          }
      }
      return expand_call (exp, target, ignore);
    case VIEW_CONVERT_EXPR:
      op0 = NULL_RTX;

      /* If we are converting to BLKmode, try to avoid an intermediate
         temporary by fetching an inner memory reference.  */
      if (mode == BLKmode
          && poly_int_tree_p (TYPE_SIZE (type))
          && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
          && handled_component_p (treeop0))
        {
          machine_mode mode1;
          poly_int64 bitsize, bitpos, bytepos;
          tree offset;
          int unsignedp, reversep, volatilep = 0;
          tree tem
            = get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1,
                                   &unsignedp, &reversep, &volatilep);

          /* ??? We should work harder and deal with non-zero offsets.  */
          if (!offset
              && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
              && !reversep
              && known_size_p (bitsize)
              && known_eq (wi::to_poly_offset (TYPE_SIZE (type)), bitsize))
            {
              /* See the normal_inner_ref case for the rationale.  */
              rtx orig_op0
                = expand_expr_real (tem,
                                    (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
                                     && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
                                         != INTEGER_CST)
                                     && modifier != EXPAND_STACK_PARM
                                     ? target : NULL_RTX),
                                    VOIDmode,
                                    modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
                                    NULL, true);

              if (MEM_P (orig_op0))
                {
                  op0 = orig_op0;

                  /* Get a reference to just this component.  */
                  if (modifier == EXPAND_CONST_ADDRESS
                      || modifier == EXPAND_SUM
                      || modifier == EXPAND_INITIALIZER)
                    op0 = adjust_address_nv (op0, mode, bytepos);
                  else
                    op0 = adjust_address (op0, mode, bytepos);

                  if (op0 == orig_op0)
                    op0 = copy_rtx (op0);

                  set_mem_attributes (op0, treeop0, 0);
                  if (REG_P (XEXP (op0, 0)))
                    mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

                  MEM_VOLATILE_P (op0) |= volatilep;
                }
            }
        }

      if (!op0)
        op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
                                NULL, inner_reference_p);

      /* If the input and output modes are both the same, we are done.  */
      if (mode == GET_MODE (op0))
        ;
      /* If neither mode is BLKmode, and both modes are the same size
         then we can use gen_lowpart.  */
      else if (mode != BLKmode
               && GET_MODE (op0) != BLKmode
               && known_eq (GET_MODE_PRECISION (mode),
                            GET_MODE_PRECISION (GET_MODE (op0)))
               && !COMPLEX_MODE_P (GET_MODE (op0)))
        {
          if (GET_CODE (op0) == SUBREG)
            op0 = force_reg (GET_MODE (op0), op0);
          temp = gen_lowpart_common (mode, op0);
          if (temp)
            op0 = temp;
          else
            {
              if (!REG_P (op0) && !MEM_P (op0))
                op0 = force_reg (GET_MODE (op0), op0);
              op0 = gen_lowpart (mode, op0);
            }
        }
      /* If both types are integral, convert from one mode to the other.  */
      else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
        op0 = convert_modes (mode, GET_MODE (op0), op0,
                             TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      /* If the output type is a bit-field type, do an extraction.  */
      else if (reduce_bit_field)
        return extract_bit_field (op0, TYPE_PRECISION (type), 0,
                                  TYPE_UNSIGNED (type), NULL_RTX,
                                  mode, mode, false, NULL);
      /* As a last resort, spill op0 to memory, and reload it in a
         different mode.  */
      else if (!MEM_P (op0))
        {
          /* If the operand is not a MEM, force it into memory.  Since we
             are going to be changing the mode of the MEM, don't call
             force_const_mem for constants because we don't allow pool
             constants to change mode.  */
          tree inner_type = TREE_TYPE (treeop0);

          gcc_assert (!TREE_ADDRESSABLE (exp));

          if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
            target
              = assign_stack_temp_for_type
                (TYPE_MODE (inner_type),
                 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);

          emit_move_insn (target, op0);
          op0 = target;
        }

      /* If OP0 is (now) a MEM, we need to deal with alignment issues.  If the
         output type is such that the operand is known to be aligned, indicate
         that it is.  Otherwise, we need only be concerned about alignment for
         non-BLKmode results.  */
      if (MEM_P (op0))
        {
          enum insn_code icode;

          if (modifier != EXPAND_WRITE
              && modifier != EXPAND_MEMORY
              && !inner_reference_p
              && mode != BLKmode
              && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
            {
              /* If the target does have special handling for unaligned
                 loads of mode then use them.  */
              if ((icode = optab_handler (movmisalign_optab, mode))
                  != CODE_FOR_nothing)
                {
                  rtx reg;

                  op0 = adjust_address (op0, mode, 0);
                  /* We've already validated the memory, and we're creating a
                     new pseudo destination.  The predicates really can't
                     fail.  */
                  reg = gen_reg_rtx (mode);

                  /* Nor can the insn generator.  */
                  rtx_insn *insn = GEN_FCN (icode) (reg, op0);
                  emit_insn (insn);
                  return reg;
                }
              else if (STRICT_ALIGNMENT)
                {
                  poly_uint64 mode_size = GET_MODE_SIZE (mode);
                  poly_uint64 temp_size = mode_size;
                  if (GET_MODE (op0) != BLKmode)
                    temp_size = upper_bound (temp_size,
                                             GET_MODE_SIZE (GET_MODE (op0)));
                  rtx new_rtx
                    = assign_stack_temp_for_type (mode, temp_size, type);
                  rtx new_with_op0_mode
                    = adjust_address (new_rtx, GET_MODE (op0), 0);

                  gcc_assert (!TREE_ADDRESSABLE (exp));

                  if (GET_MODE (op0) == BLKmode)
                    {
                      rtx size_rtx = gen_int_mode (mode_size, Pmode);
                      emit_block_move (new_with_op0_mode, op0, size_rtx,
                                       (modifier == EXPAND_STACK_PARM
                                        ? BLOCK_OP_CALL_PARM
                                        : BLOCK_OP_NORMAL));
                    }
                  else
                    emit_move_insn (new_with_op0_mode, op0);

                  op0 = new_rtx;
                }
            }

          op0 = adjust_address (op0, mode, 0);
        }

      return op0;
    case MODIFY_EXPR:
      {
        tree lhs = treeop0;
        tree rhs = treeop1;
        gcc_assert (ignore);

        /* Check for |= or &= of a bitfield of size one into another bitfield
           of size 1.  In this case, (unless we need the result of the
           assignment) we can do this more efficiently with a
           test followed by an assignment, if necessary.

           ??? At this point, we can't get a BIT_FIELD_REF here.  But if
           things change so we do, this code should be enhanced to
           support it.  */
        if (TREE_CODE (lhs) == COMPONENT_REF
            && (TREE_CODE (rhs) == BIT_IOR_EXPR
                || TREE_CODE (rhs) == BIT_AND_EXPR)
            && TREE_OPERAND (rhs, 0) == lhs
            && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
            && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
            && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
          {
            rtx_code_label *label = gen_label_rtx ();
            int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
            profile_probability prob = profile_probability::uninitialized ();
            if (value)
              jumpifnot (TREE_OPERAND (rhs, 1), label, prob);
            else
              jumpif (TREE_OPERAND (rhs, 1), label, prob);
            expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
                               false);
            do_pending_stack_adjust ();
            emit_label (label);
            return const0_rtx;
          }

        expand_assignment (lhs, rhs, false);
        return const0_rtx;
      }

    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);

    case REALPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, true);
    case RETURN_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case SWITCH_EXPR:
    case ASM_EXPR:
      /* Expanded in cfgexpand.c.  */
      gcc_unreachable ();

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONDITIONAL_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case COMPOUND_LITERAL_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
         initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
         have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (treeop0, original_target, tmode,
                               modifier, alt_rtl, inner_reference_p);

    default:
      return expand_expr_real_2 (&ops, target, tmode, modifier);
    }
}
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.
   TYPE is known to be a partial integer type.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type. */
  poly_int64 const_exp;
  if (poly_int_rtx_p (exp, &const_exp))
    {
      tree t = build_int_cst_type (type, const_exp);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (exp));
      rtx mask = immed_wide_int_const
        (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
      return expand_and (mode, exp, mask, target);
    }
  else
    {
      scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (exp));
      int count = GET_MODE_PRECISION (mode) - prec;
      exp = expand_shift (LSHIFT_EXPR, mode, exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, mode, exp, count, target, 0);
    }
}
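
/* Illustrative sketch, not part of GCC: the same two reductions performed
   on plain C integers, assuming a 32-bit int and 0 < n < 32.  The names
   are hypothetical and exist only for exposition.  */

static unsigned int
example_reduce_unsigned (unsigned int x, int n)
{
  /* The TYPE_UNSIGNED path above: mask off everything beyond the field,
     as expand_and does with wi::mask.  E.g. (0x1ff, 8) yields 0xff.  */
  return x & ((1u << n) - 1);
}

static int
example_reduce_signed (int x, int n)
{
  /* The signed path above: shift the field's sign bit up to the mode's
     sign bit, then arithmetic-shift back so it is replicated.  E.g.
     (0xff, 8) yields -1.  This relies on the usual arithmetic right
     shift of negative values, which C leaves implementation-defined.  */
  int count = 32 - n;
  return ((int) ((unsigned int) x << count)) >> count;
}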
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (const_tree offset, const_tree exp)
{
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
      || compare_tree_int (TREE_OPERAND (offset, 1),
                           BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || !pow2p_hwi (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1))
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
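
/* Illustrative sketch, not part of GCC: the source-level idiom the
   predicate above recognizes.  Adding ((-(uintptr) base) & (align - 1))
   to BASE rounds the address up to the next ALIGN boundary, so the
   resulting access is known to be ALIGN-aligned.  Hypothetical name;
   assumes ALIGN is a power of two and unsigned long can hold a pointer.  */

static char *
example_aligning_address (char *base, unsigned long align)
{
  /* This is the BIT_AND_EXPR (NEGATE_EXPR (ADDR_EXPR base), align - 1)
     shape checked for above, written in C.  */
  unsigned long offset = (0ul - (unsigned long) base) & (align - 1);
  return base + offset;  /* now a multiple of ALIGN */
}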
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the (possibly
   non-constant) offset in bytes within the string that ARG is accessing.
   If MEM_SIZE is non-zero the storage size of the memory is returned.
   If DECL is non-zero the constant declaration is returned if available.  */

tree
string_constant (tree arg, tree *ptr_offset, tree *mem_size, tree *decl)
{
  tree array;
  STRIP_NOPS (arg);

  /* Non-constant index into the character array in an ARRAY_REF
     expression or null.  */
  tree varidx = NULL_TREE;

  poly_int64 base_off = 0;

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      arg = TREE_OPERAND (arg, 0);
      tree ref = arg;
      if (TREE_CODE (arg) == ARRAY_REF)
        {
          tree idx = TREE_OPERAND (arg, 1);
          if (TREE_CODE (idx) != INTEGER_CST)
            {
              /* From a pointer (but not array) argument extract the variable
                 index to prevent get_addr_base_and_unit_offset() from failing
                 due to it.  Use it later to compute the non-constant offset
                 into the string and return it to the caller.  */
              varidx = idx;
              ref = TREE_OPERAND (arg, 0);

              if (TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE)
                return NULL_TREE;

              if (!integer_zerop (array_ref_low_bound (arg)))
                return NULL_TREE;

              if (!integer_onep (array_ref_element_size (arg)))
                return NULL_TREE;
            }
        }
      array = get_addr_base_and_unit_offset (ref, &base_off);
      if (!array
          || (TREE_CODE (array) != VAR_DECL
              && TREE_CODE (array) != CONST_DECL
              && TREE_CODE (array) != STRING_CST))
        return NULL_TREE;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      tree offset;
      tree str = string_constant (arg0, &offset, mem_size, decl);
      if (!str)
        {
          str = string_constant (arg1, &offset, mem_size, decl);
          arg1 = arg0;
        }

      if (str)
        {
          /* Avoid pointers to arrays (see bug 86622).  */
          if (POINTER_TYPE_P (TREE_TYPE (arg))
              && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == ARRAY_TYPE
              && !(decl && !*decl)
              && !(decl && tree_fits_uhwi_p (DECL_SIZE_UNIT (*decl))
                   && mem_size && tree_fits_uhwi_p (*mem_size)
                   && tree_int_cst_equal (*mem_size, DECL_SIZE_UNIT (*decl))))
            return NULL_TREE;

          tree type = TREE_TYPE (offset);
          arg1 = fold_convert (type, arg1);
          *ptr_offset = fold_build2 (PLUS_EXPR, type, offset, arg1);
          return str;
        }
      return NULL_TREE;
    }
  else if (TREE_CODE (arg) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (arg);
      if (!is_gimple_assign (stmt))
        return NULL_TREE;

      tree rhs1 = gimple_assign_rhs1 (stmt);
      tree_code code = gimple_assign_rhs_code (stmt);
      if (code == ADDR_EXPR)
        return string_constant (rhs1, ptr_offset, mem_size, decl);
      else if (code != POINTER_PLUS_EXPR)
        return NULL_TREE;

      tree offset;
      if (tree str = string_constant (rhs1, &offset, mem_size, decl))
        {
          /* Avoid pointers to arrays (see bug 86622).  */
          if (POINTER_TYPE_P (TREE_TYPE (rhs1))
              && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs1))) == ARRAY_TYPE
              && !(decl && !*decl)
              && !(decl && tree_fits_uhwi_p (DECL_SIZE_UNIT (*decl))
                   && mem_size && tree_fits_uhwi_p (*mem_size)
                   && tree_int_cst_equal (*mem_size, DECL_SIZE_UNIT (*decl))))
            return NULL_TREE;

          tree rhs2 = gimple_assign_rhs2 (stmt);
          tree type = TREE_TYPE (offset);
          rhs2 = fold_convert (type, rhs2);
          *ptr_offset = fold_build2 (PLUS_EXPR, type, offset, rhs2);
          return str;
        }
      return NULL_TREE;
    }
  else if (DECL_P (arg))
    array = arg;
  else
    return NULL_TREE;

  tree offset = wide_int_to_tree (sizetype, base_off);
  if (varidx)
    {
      if (TREE_CODE (TREE_TYPE (array)) != ARRAY_TYPE)
        return NULL_TREE;

      gcc_assert (TREE_CODE (arg) == ARRAY_REF);
      tree chartype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (arg, 0)));
      if (TREE_CODE (chartype) != INTEGER_TYPE)
        return NULL_TREE;

      offset = fold_convert (sizetype, varidx);
    }

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      if (mem_size)
        *mem_size = TYPE_SIZE_UNIT (TREE_TYPE (array));
      if (decl)
        *decl = NULL_TREE;
      gcc_checking_assert (tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (array)))
                           >= TREE_STRING_LENGTH (array));
      return array;
    }

  if (!VAR_P (array) && TREE_CODE (array) != CONST_DECL)
    return NULL_TREE;

  tree init = ctor_for_folding (array);

  /* Handle variables initialized with string literals.  */
  if (!init || init == error_mark_node)
    return NULL_TREE;
  if (TREE_CODE (init) == CONSTRUCTOR)
    {
      /* Convert the 64-bit constant offset to a wider type to avoid
         overflow.  */
      offset_int wioff;
      if (!base_off.is_constant (&wioff))
        return NULL_TREE;

      wioff *= BITS_PER_UNIT;
      if (!wi::fits_uhwi_p (wioff))
        return NULL_TREE;

      base_off = wioff.to_uhwi ();
      unsigned HOST_WIDE_INT fieldoff = 0;
      init = fold_ctor_reference (NULL_TREE, init, base_off, 0, array,
                                  &fieldoff);
      HOST_WIDE_INT cstoff;
      if (!base_off.is_constant (&cstoff))
        return NULL_TREE;

      cstoff = (cstoff - fieldoff) / BITS_PER_UNIT;
      tree off = build_int_cst (sizetype, cstoff);
      if (varidx)
        offset = fold_build2 (PLUS_EXPR, TREE_TYPE (offset), offset, off);
      else
        offset = off;
    }

  if (!init)
    return NULL_TREE;

  *ptr_offset = offset;

  tree eltype = TREE_TYPE (init);
  tree initsize = TYPE_SIZE_UNIT (eltype);
  if (mem_size)
    *mem_size = initsize;
  if (decl)
    *decl = array;

  if (TREE_CODE (init) == INTEGER_CST
      && (TREE_CODE (TREE_TYPE (array)) == INTEGER_TYPE
          || TYPE_MAIN_VARIANT (eltype) == char_type_node))
    {
      /* For a reference to (address of) a single constant character,
         store the native representation of the character in CHARBUF.
         If the reference is to an element of an array or a member
         of a struct, only consider narrow characters until ctors
         for wide character arrays are transformed to STRING_CSTs
         like those for narrow arrays.  */
      unsigned char charbuf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (init, charbuf, sizeof charbuf, 0);
      if (len > 0)
        {
          /* Construct a string literal with elements of ELTYPE and
             the representation above.  Then strip
             the ADDR_EXPR (ARRAY_REF (...)) around the STRING_CST.  */
          init = build_string_literal (len, (char *) charbuf, eltype);
          init = TREE_OPERAND (TREE_OPERAND (init, 0), 0);
        }
    }

  if (TREE_CODE (init) != STRING_CST)
    return NULL_TREE;

  gcc_checking_assert (tree_to_shwi (initsize) >= TREE_STRING_LENGTH (init));

  return init;
}
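
/* A usage sketch (illustrative, not from this file): when expanding
   something like __builtin_strlen (s + i) with s bound to "hello",
   a caller would do

     tree off, size;
     tree str = string_constant (src, &off, &size, NULL);

   and, on success, get the STRING_CST "hello" back, with *PTR_OFFSET
   the (possibly non-constant) tree for the byte offset I and *MEM_SIZE
   the size of the backing storage (6 bytes here), which is what lets
   the builtin folders bound their answers.  */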
/* Compute the modular multiplicative inverse of A modulo M
   using extended Euclid's algorithm.  Assumes A and M are coprime.  */

static wide_int
mod_inv (const wide_int &a, const wide_int &b)
{
  /* Verify the assumption.  */
  gcc_checking_assert (wi::eq_p (wi::gcd (a, b), 1));

  unsigned int p = a.get_precision () + 1;
  gcc_checking_assert (b.get_precision () + 1 == p);
  wide_int c = wide_int::from (a, p, UNSIGNED);
  wide_int d = wide_int::from (b, p, UNSIGNED);
  wide_int x0 = wide_int::from (0, p, UNSIGNED);
  wide_int x1 = wide_int::from (1, p, UNSIGNED);

  if (wi::eq_p (b, 1))
    return wide_int::from (1, p, UNSIGNED);

  while (wi::gt_p (c, 1, UNSIGNED))
    {
      wide_int t = d;
      wide_int q = wi::divmod_trunc (c, d, UNSIGNED, &d);
      c = t;
      wide_int s = x0;
      x0 = wi::sub (x1, wi::mul (q, x0));
      x1 = s;
    }
  if (wi::lt_p (x1, 0, SIGNED))
    x1 += d;
  return x1;
}
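
/* Illustrative sketch, not part of GCC: for the power-of-two modulus
   used by the mod-compare optimizations below (M = 1 << prec), the same
   inverse can also be computed without extended Euclid, via Newton's
   iteration x <- x * (2 - a * x), which doubles the number of correct
   low bits each step.  Hypothetical name; assumes A is odd (hence
   coprime with 2^32) and a 32-bit unsigned int.  E.g.
   example_mod_inv_pow2 (3) == 0xaaaaaaab, and 3u * 0xaaaaaaabu == 1
   in 32-bit arithmetic.  */

static unsigned int
example_mod_inv_pow2 (unsigned int a)
{
  unsigned int x = a;   /* correct to 3 bits: a * a == 1 (mod 8) */
  x *= 2 - a * x;       /* 6 bits */
  x *= 2 - a * x;       /* 12 bits */
  x *= 2 - a * x;       /* 24 bits */
  x *= 2 - a * x;       /* 48 >= 32 bits */
  return x;
}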
/* Optimize x % C1 == C2 for signed modulo if C1 is a power of two and C2
   is non-zero.  With C3 being ((1 << (prec - 1)) | (C1 - 1)), optimize:
   for C2 > 0 to x & C3 == C2
   for C2 < 0 to x & C3 == (C2 & C3).  */

static enum tree_code
maybe_optimize_pow2p_mod_cmp (enum tree_code code, tree *arg0, tree *arg1)
{
  gimple *stmt = get_def_for_expr (*arg0, TRUNC_MOD_EXPR);
  tree treeop0 = gimple_assign_rhs1 (stmt);
  tree treeop1 = gimple_assign_rhs2 (stmt);
  tree type = TREE_TYPE (*arg0);
  scalar_int_mode mode;
  if (!is_a <scalar_int_mode> (TYPE_MODE (type), &mode))
    return code;
  if (GET_MODE_BITSIZE (mode) != TYPE_PRECISION (type)
      || TYPE_PRECISION (type) <= 1
      || TYPE_UNSIGNED (type)
      /* Signed x % c == 0 should have been optimized into unsigned modulo
         earlier.  */
      || integer_zerop (*arg1)
      /* If c is known to be non-negative, modulo will be expanded as unsigned
         modulo.  */
      || get_range_pos_neg (treeop0) == 1)
    return code;

  /* x % c == d where d < 0 && d <= -c should be always false.  */
  if (tree_int_cst_sgn (*arg1) == -1
      && -wi::to_widest (treeop1) >= wi::to_widest (*arg1))
    return code;

  int prec = TYPE_PRECISION (type);
  wide_int w = wi::to_wide (treeop1) - 1;
  w |= wi::shifted_mask (0, prec - 1, true, prec);
  tree c3 = wide_int_to_tree (type, w);
  tree c4 = *arg1;
  if (tree_int_cst_sgn (*arg1) == -1)
    c4 = wide_int_to_tree (type, w & wi::to_wide (*arg1));

  rtx op0 = expand_normal (treeop0);
  treeop0 = make_tree (TREE_TYPE (treeop0), op0);

  bool speed_p = optimize_insn_for_speed_p ();

  do_pending_stack_adjust ();

  location_t loc = gimple_location (stmt);
  struct separate_ops ops;
  ops.code = TRUNC_MOD_EXPR;
  ops.location = loc;
  ops.type = TREE_TYPE (treeop0);
  ops.op0 = treeop0;
  ops.op1 = treeop1;
  ops.op2 = NULL_TREE;
  start_sequence ();
  rtx mor = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (ops.type),
                                EXPAND_NORMAL);
  rtx_insn *moinsns = get_insns ();
  end_sequence ();

  unsigned mocost = seq_cost (moinsns, speed_p);
  mocost += rtx_cost (mor, mode, EQ, 0, speed_p);
  mocost += rtx_cost (expand_normal (*arg1), mode, EQ, 1, speed_p);

  ops.code = BIT_AND_EXPR;
  ops.location = loc;
  ops.type = TREE_TYPE (treeop0);
  ops.op0 = treeop0;
  ops.op1 = c3;
  ops.op2 = NULL_TREE;
  start_sequence ();
  rtx mur = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (ops.type),
                                EXPAND_NORMAL);
  rtx_insn *muinsns = get_insns ();
  end_sequence ();

  unsigned mucost = seq_cost (muinsns, speed_p);
  mucost += rtx_cost (mur, mode, EQ, 0, speed_p);
  mucost += rtx_cost (expand_normal (c4), mode, EQ, 1, speed_p);

  if (mocost <= mucost)
    {
      emit_insn (moinsns);
      *arg0 = make_tree (TREE_TYPE (*arg0), mor);
      return code;
    }

  emit_insn (muinsns);
  *arg0 = make_tree (TREE_TYPE (*arg0), mur);
  *arg1 = c4;
  return code;
}
11714 /* Attempt to optimize unsigned (X % C1) == C2 (or (X % C1) != C2).
11716 (X - C2) * C3 <= C4 (or >), where
11717 C3 is modular multiplicative inverse of C1 and 1<<prec and
11718 C4 is ((1<<prec) - 1) / C1 or ((1<<prec) - 1) / C1 - 1 (the latter
11719 if C2 > ((1<<prec) - 1) % C1).
11720 If C1 is even, S = ctz (C1) and C2 is 0, use
11721 ((X * C3) r>> S) <= C4, where C3 is modular multiplicative
11722 inverse of C1>>S and 1<<prec and C4 is (((1<<prec) - 1) / (C1>>S)) >> S.
11724 For signed (X % C1) == 0 if C1 is odd to (all operations in it
11726 (X * C3) + C4 <= 2 * C4, where
11727 C3 is modular multiplicative inverse of (unsigned) C1 and 1<<prec and
11728 C4 is ((1<<(prec - 1) - 1) / C1).
11729 If C1 is even, S = ctz(C1), use
11730 ((X * C3) + C4) r>> S <= (C4 >> (S - 1))
11731 where C3 is modular multiplicative inverse of (unsigned)(C1>>S) and 1<<prec
11732 and C4 is ((1<<(prec - 1) - 1) / (C1>>S)) & (-1<<S).
11734 See the Hacker's Delight book, section 10-17. */
static enum tree_code
maybe_optimize_mod_cmp (enum tree_code code, tree *arg0, tree *arg1)
{
  gcc_checking_assert (code == EQ_EXPR || code == NE_EXPR);
  gcc_checking_assert (TREE_CODE (*arg1) == INTEGER_CST);

  gimple *stmt = get_def_for_expr (*arg0, TRUNC_MOD_EXPR);
  if (stmt == NULL)
    return code;

  tree treeop0 = gimple_assign_rhs1 (stmt);
  tree treeop1 = gimple_assign_rhs2 (stmt);
  if (TREE_CODE (treeop0) != SSA_NAME
      || TREE_CODE (treeop1) != INTEGER_CST
      /* Don't optimize the undefined behavior case x % 0;
	 x % 1 should have been optimized into zero, punt if
	 it makes it here for whatever reason;
	 x % -c should have been optimized into x % c.  */
      || compare_tree_int (treeop1, 2) <= 0
      /* Likewise x % c == d where d >= c should be always false.  */
      || tree_int_cst_le (treeop1, *arg1))
    return code;

  /* Unsigned x % pow2 is handled right already, for signed
     modulo handle it in maybe_optimize_pow2p_mod_cmp.  */
  if (integer_pow2p (treeop1))
    return maybe_optimize_pow2p_mod_cmp (code, arg0, arg1);

  tree type = TREE_TYPE (*arg0);
  scalar_int_mode mode;
  if (!is_a <scalar_int_mode> (TYPE_MODE (type), &mode))
    return code;
  if (GET_MODE_BITSIZE (mode) != TYPE_PRECISION (type)
      || TYPE_PRECISION (type) <= 1)
    return code;

  signop sgn = UNSIGNED;
  /* If both operands are known to have the sign bit clear, handle
     even the signed modulo case as unsigned.  treeop1 is always
     positive >= 2, checked above.  */
  if (!TYPE_UNSIGNED (type) && get_range_pos_neg (treeop0) != 1)
    sgn = SIGNED;

  if (!TYPE_UNSIGNED (type))
    {
      if (tree_int_cst_sgn (*arg1) == -1)
	return code;
      type = unsigned_type_for (type);
      if (!type || TYPE_MODE (type) != TYPE_MODE (TREE_TYPE (*arg0)))
	return code;
    }

  int prec = TYPE_PRECISION (type);
  wide_int w = wi::to_wide (treeop1);
  int shift = wi::ctz (w);
  /* Unsigned (X % C1) == C2 is equivalent to (X - C2) % C1 == 0 if
     C2 <= -1U % C1, because for any Z >= 0U - C2 in that case (Z % C1) != 0.
     If C1 is odd, we can handle all cases by subtracting
     C4 below.  We could handle even the even C1 and C2 > -1U % C1 cases
     e.g. by testing for overflow on the subtraction, punt on that for now
     though.  */
  if ((sgn == SIGNED || shift) && !integer_zerop (*arg1))
    {
      if (sgn == SIGNED)
	return code;
      wide_int x = wi::umod_trunc (wi::mask (prec, false, prec), w);
      if (wi::gtu_p (wi::to_wide (*arg1), x))
	return code;
    }

  imm_use_iterator imm_iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, treeop0)
    {
      gimple *use_stmt = USE_STMT (use_p);
      /* Punt if treeop0 is used in the same bb in a division
	 or another modulo with the same divisor.  We should expect
	 the division and modulo combined together.  */
      if (use_stmt == stmt
	  || gimple_bb (use_stmt) != gimple_bb (stmt))
	continue;
      if (!is_gimple_assign (use_stmt)
	  || (gimple_assign_rhs_code (use_stmt) != TRUNC_DIV_EXPR
	      && gimple_assign_rhs_code (use_stmt) != TRUNC_MOD_EXPR))
	continue;
      if (gimple_assign_rhs1 (use_stmt) != treeop0
	  || !operand_equal_p (gimple_assign_rhs2 (use_stmt), treeop1, 0))
	continue;
      return code;
    }

  w = wi::lrshift (w, shift);
  wide_int a = wide_int::from (w, prec + 1, UNSIGNED);
  wide_int b = wi::shifted_mask (prec, 1, false, prec + 1);
  wide_int m = wide_int::from (mod_inv (a, b), prec, UNSIGNED);
  tree c3 = wide_int_to_tree (type, m);
  tree c5 = NULL_TREE;
  wide_int d, e;
  if (sgn == UNSIGNED)
    {
      d = wi::divmod_trunc (wi::mask (prec, false, prec), w, UNSIGNED, &e);
      /* Use <= floor ((1<<prec) - 1) / C1 only if C2 <= ((1<<prec) - 1) % C1,
	 otherwise use < or subtract one from C4.  E.g. for
	 x % 3U == 0 we transform this into x * 0xaaaaaaab <= 0x55555555, but
	 x % 3U == 1 already needs to be
	 (x - 1) * 0xaaaaaaabU <= 0x55555554.  */
      if (!shift && wi::gtu_p (wi::to_wide (*arg1), e))
	d -= 1;
      if (shift)
	d = wi::lrshift (d, shift);
    }
  else
    {
      e = wi::udiv_trunc (wi::mask (prec - 1, false, prec), w);
      if (!shift)
	d = wi::lshift (e, 1);
      else
	{
	  e = wi::bit_and (e, wi::mask (shift, true, prec));
	  d = wi::lrshift (e, shift - 1);
	}
      c5 = wide_int_to_tree (type, e);
    }
  tree c4 = wide_int_to_tree (type, d);

  rtx op0 = expand_normal (treeop0);
  treeop0 = make_tree (TREE_TYPE (treeop0), op0);

  bool speed_p = optimize_insn_for_speed_p ();

  do_pending_stack_adjust ();

  location_t loc = gimple_location (stmt);
  struct separate_ops ops;
  ops.code = TRUNC_MOD_EXPR;
  ops.location = loc;
  ops.type = TREE_TYPE (treeop0);
  ops.op0 = treeop0;
  ops.op1 = treeop1;
  ops.op2 = NULL_TREE;

  start_sequence ();
  rtx mor = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (ops.type),
				EXPAND_NORMAL);
  rtx_insn *moinsns = get_insns ();
  end_sequence ();

  unsigned mocost = seq_cost (moinsns, speed_p);
  mocost += rtx_cost (mor, mode, EQ, 0, speed_p);
  mocost += rtx_cost (expand_normal (*arg1), mode, EQ, 1, speed_p);

  tree t = fold_convert_loc (loc, type, treeop0);
  if (!integer_zerop (*arg1))
    t = fold_build2_loc (loc, MINUS_EXPR, type, t, fold_convert (type, *arg1));
  t = fold_build2_loc (loc, MULT_EXPR, type, t, c3);
  if (sgn == SIGNED)
    t = fold_build2_loc (loc, PLUS_EXPR, type, t, c5);
  if (shift)
    {
      tree s = build_int_cst (NULL_TREE, shift);
      t = fold_build2_loc (loc, RROTATE_EXPR, type, t, s);
    }

  start_sequence ();
  rtx mur = expand_normal (t);
  rtx_insn *muinsns = get_insns ();
  end_sequence ();

  unsigned mucost = seq_cost (muinsns, speed_p);
  mucost += rtx_cost (mur, mode, LE, 0, speed_p);
  mucost += rtx_cost (expand_normal (c4), mode, LE, 1, speed_p);

  if (mocost <= mucost)
    {
      emit_insn (moinsns);
      *arg0 = make_tree (TREE_TYPE (*arg0), mor);
      return code;
    }

  emit_insn (muinsns);
  *arg0 = make_tree (type, mur);
  *arg1 = c4;
  return code == EQ_EXPR ? LE_EXPR : GT_EXPR;
}
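
/* Illustrative example of the even-C1 case above (editor's note, assuming
   prec == 32): for x % 6U == 0, S == 1 and C1>>S == 3, so C3 == 0xaaaaaaab
   (the inverse of 3 modulo 1<<32) and C4 == (0xffffffff / 3) >> 1
   == 0x2aaaaaaa, giving

     (x * 0xaaaaaaab r>> 1) <= 0x2aaaaaaa

   where r>> is a rotate.  The rotate moves the low S bits, which are
   nonzero whenever x is not a multiple of 1<<S, into the most significant
   positions, making the comparison fail for those values.  */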
/* Generate code to calculate OPS, an exploded comparison expression,
   using a store-flag instruction and return an rtx for the result.
   OPS reflects a comparison.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
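
/* As a sketch of that fallback (illustrative only; the real sequence is
   emitted by emit_store_flag_force):

     target = 1;
     if (op0 <cond> op1) goto done;
     target = 0;
   done:

   i.e. a set/jump/set sequence standing in for a missing scc insn.  */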
static rtx
do_store_flag (sepops ops, rtx target, machine_mode mode)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  machine_mode operand_mode;
  int unsignedp;
  rtx op0, op1;
  rtx subtarget = target;
  location_t loc = ops->location;

  arg0 = ops->op0;
  arg1 = ops->op1;

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
  if (targetm.have_canonicalize_funcptr_for_compare ()
      && ((POINTER_TYPE_P (TREE_TYPE (arg0))
	   && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (arg0))))
	  || (POINTER_TYPE_P (TREE_TYPE (arg1))
	      && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (arg1))))))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* For vector typed comparisons emit code to generate the desired
     all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
     expander for this.  */
  if (TREE_CODE (ops->type) == VECTOR_TYPE)
    {
      tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
      if (VECTOR_BOOLEAN_TYPE_P (ops->type)
	  && expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type, ops->code))
	return expand_vec_cmp_expr (ops->type, ifexp, target);
      else
	{
	  tree if_true = constant_boolean_node (true, ops->type);
	  tree if_false = constant_boolean_node (false, ops->type);
	  return expand_vec_cond_expr (ops->type, ifexp, if_true,
				       if_false, target);
	}
    }

  /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
     into (x - C2) * C3 < C4.  */
  if ((ops->code == EQ_EXPR || ops->code == NE_EXPR)
      && TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == INTEGER_CST)
    {
      enum tree_code code = maybe_optimize_mod_cmp (ops->code, &arg0, &arg1);
      if (code != ops->code)
	{
	  struct separate_ops nops = *ops;
	  nops.code = ops->code = code;
	  nops.op0 = arg0;
	  nops.op1 = arg1;
	  nops.type = TREE_TYPE (arg0);
	  return do_store_flag (&nops, target, mode);
	}
    }

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (ops->code)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      std::swap (arg0, arg1);
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
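
  /* For instance (illustrative), with a single-bit mask the folder turns

       (x & 8) != 0   into   (x >> 3) & 1
       (x & 8) == 0   into   ((x >> 3) & 1) ^ 1

     which avoids the scc instruction entirely.  */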
  if ((code == NE || code == EQ)
      && integer_zerop (arg1)
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
    {
      gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
      if (srcstmt
	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
	{
	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
				       gimple_assign_rhs1 (srcstmt),
				       gimple_assign_rhs2 (srcstmt));
	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
	  if (temp)
	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
	}
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
				operand_mode, unsignedp,
				(TYPE_PRECISION (ops->type) == 1
				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   DEFAULT_PROBABILITY is the probability of jumping to the default
   label.  */
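
/* The five operands passed to the casesi pattern below are, in order:
   the index, the lower bound, the range (upper bound minus lower bound),
   the jump-table label and the out-of-range label; see the
   create_*_operand calls below.  */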
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label, rtx default_label, rtx fallback_label,
	    profile_probability default_probability)
{
  struct expand_operand ops[5];
  scalar_int_mode index_mode = SImode;
  rtx op1, op2, index;

  if (! targetm.have_casesi ())
    return 0;

  /* The index must be some form of integer.  Convert it to SImode.  */
  scalar_int_mode omode = SCALAR_INT_TYPE_MODE (index_type);
  if (GET_MODE_BITSIZE (omode) > GET_MODE_BITSIZE (index_mode))
    {
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
	emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
				 omode, 1, default_label,
				 default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (omode != index_mode)
	{
	  index_type = lang_hooks.types.type_for_mode (index_mode, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
				  ? default_label
				  : fallback_label));
  expand_jump_insn (targetm.code_for_casesi, 5, ops);
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */
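
/* In outline (illustrative), the dispatch emitted below computes

     addr = table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE)

   loads the target label from *addr, and jumps to it.  */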
static void
do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
	      rtx default_label, profile_probability default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
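
  /* Concretely (illustrative): for a switch over case values 5..9,
     RANGE is 4 and INDEX is the original value minus 5.  An original
     value of 3 yields (unsigned) -2, which compares GTU against 4 and
     takes the default label, so one comparison checks both bounds.  */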
  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			     default_label, default_probability);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    {
      unsigned int width;

      /* We know the value of INDEX is between 0 and RANGE.  If we have a
	 sign-extended subreg, and RANGE does not have the sign bit set, then
	 we have a value that is valid for both sign and zero extension.  In
	 this case, we get better code if we sign extend.  */
      if (GET_CODE (index) == SUBREG
	  && SUBREG_PROMOTED_VAR_P (index)
	  && SUBREG_PROMOTED_SIGNED_P (index)
	  && ((width = GET_MODE_PRECISION (as_a <scalar_int_mode> (mode)))
	      <= HOST_BITS_PER_WIDE_INT)
	  && ! (UINTVAL (range) & (HOST_WIDE_INT_1U << (width - 1))))
	index = convert_to_mode (Pmode, index, 0);
      else
	index = convert_to_mode (Pmode, index, 1);
    }

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = simplify_gen_binary (MULT, Pmode, index,
			       gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
					     Pmode));
  index = simplify_gen_binary (PLUS, Pmode, index,
			       gen_rtx_LABEL_REF (Pmode, table_label));

#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (targetm.gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label,
	       profile_probability default_probability)
{
  rtx index;

  if (! targetm.have_tablejump ())
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label, default_probability);
  return 1;
}
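
/* Note (illustrative): switch statement expansion (expand_case in stmt.c)
   is the expected caller of try_casesi and try_tablejump above; it tries
   casesi first and falls back to a tablejump when the target provides no
   casesi pattern.  */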
/* Return a CONST_VECTOR rtx representing vector mask for
   a VECTOR_CST of booleans.  */
static rtx
const_vector_mask_from_tree (tree exp)
{
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  machine_mode inner = GET_MODE_INNER (mode);

  rtx_vector_builder builder (mode, VECTOR_CST_NPATTERNS (exp),
			      VECTOR_CST_NELTS_PER_PATTERN (exp));
  unsigned int count = builder.encoded_nelts ();
  for (unsigned int i = 0; i < count; ++i)
    {
      tree elt = VECTOR_CST_ELT (exp, i);
      gcc_assert (TREE_CODE (elt) == INTEGER_CST);
      if (integer_zerop (elt))
	builder.quick_push (CONST0_RTX (inner));
      else if (integer_onep (elt)
	       || integer_minus_onep (elt))
	builder.quick_push (CONSTM1_RTX (inner));
      else
	gcc_unreachable ();
    }

  return builder.build ();
}
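
/* Note (illustrative): the rtx_vector_builder used above mirrors the
   compressed VECTOR_CST encoding: only VECTOR_CST_NPATTERNS interleaved
   patterns of at most VECTOR_CST_NELTS_PER_PATTERN elements each are
   pushed, and builder.build () extrapolates the remaining elements,
   which is what lets this work for variable-length vectors too.  */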
/* EXP is a VECTOR_CST in which each element is either all-zeros or all-ones.
   Return a constant scalar rtx of mode MODE in which bit X is set if element
   X of EXP is nonzero.  */
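
/* For example (illustrative): a 4-element boolean VECTOR_CST
   { -1, 0, 0, -1 } yields the scalar constant 0b1001, i.e. bit 0 for
   element 0 and bit 3 for element 3.  */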
static rtx
const_scalar_mask_from_tree (scalar_int_mode mode, tree exp)
{
  wide_int res = wi::zero (GET_MODE_PRECISION (mode));
  tree elt;

  /* The result has a fixed number of bits so the input must too.  */
  unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
  for (unsigned int i = 0; i < nunits; ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);
      gcc_assert (TREE_CODE (elt) == INTEGER_CST);
      if (integer_all_onesp (elt))
	res = wi::set_bit (res, i);
      else
	gcc_assert (integer_zerop (elt));
    }

  return immed_wide_int_const (res, mode);
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
    return const_vector_mask_from_tree (exp);

  machine_mode inner = GET_MODE_INNER (mode);

  rtx_vector_builder builder (mode, VECTOR_CST_NPATTERNS (exp),
			      VECTOR_CST_NELTS_PER_PATTERN (exp));
  unsigned int count = builder.encoded_nelts ();
  for (unsigned int i = 0; i < count; ++i)
    {
      tree elt = VECTOR_CST_ELT (exp, i);
      if (TREE_CODE (elt) == REAL_CST)
	builder.quick_push (const_double_from_real_value (TREE_REAL_CST (elt),
							  inner));
      else if (TREE_CODE (elt) == FIXED_CST)
	builder.quick_push (CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
							  inner));
      else
	builder.quick_push (immed_wide_int_const (wi::to_poly_wide (elt),
						  inner));
    }
  return builder.build ();
}
/* Build a decl for a personality function given a language prefix.  */
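
/* For example (illustrative): for LANG "gxx" with DWARF2 unwind info
   this builds a decl for "__gxx_personality_v0", the C++ personality
   routine; with setjmp/longjmp exceptions it would be
   "__gxx_personality_sj0".  */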
tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
				   long_long_unsigned_type_node,
				   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}
/* Returns a tree for the size of EXP in bytes.  */

static tree
tree_expr_size (const_tree exp)
{
  if (DECL_P (exp)
      && DECL_SIZE_UNIT (exp) != 0)
    return DECL_SIZE_UNIT (exp);
  else
    return size_in_bytes (TREE_TYPE (exp));
}
/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}
/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

static HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  if (size == 0 || !tree_fits_shwi_p (size))
    return -1;

  return tree_to_shwi (size);
}
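
/* For example (illustrative): on a target with 32-bit int, int_expr_size
   of an int-typed expression returns 4, while an expression whose type has
   variable size (e.g. a VLA) makes tree_fits_shwi_p fail and yields -1.  */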