1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
38 #include "diagnostic.h"
40 #include "fold-const.h"
41 #include "stor-layout.h"
45 #include "insn-attr.h"
50 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
52 #include "optabs-tree.h"
55 #include "langhooks.h"
56 #include "common/common-target.h"
57 #include "tree-ssa-live.h"
58 #include "tree-outof-ssa.h"
59 #include "tree-ssa-address.h"
61 #include "tree-chkp.h"
66 /* If this is nonzero, we do not bother generating VOLATILE
67 around volatile memory references, and we are willing to
68 output indirect addresses. If cse is to follow, we reject
69 indirect addresses so a useful potential cse is generated;
70 if it is used only once, instruction combination will produce
71 the same indirect address eventually. */
74 static bool block_move_libcall_safe_for_call_parm (void);
75 static bool emit_block_move_via_movmem (rtx
, rtx
, rtx
, unsigned, unsigned, HOST_WIDE_INT
,
76 unsigned HOST_WIDE_INT
, unsigned HOST_WIDE_INT
,
77 unsigned HOST_WIDE_INT
);
78 static void emit_block_move_via_loop (rtx
, rtx
, rtx
, unsigned);
79 static void clear_by_pieces (rtx
, unsigned HOST_WIDE_INT
, unsigned int);
80 static rtx_insn
*compress_float_constant (rtx
, rtx
);
81 static rtx
get_subtarget (rtx
);
82 static void store_constructor_field (rtx
, unsigned HOST_WIDE_INT
,
83 HOST_WIDE_INT
, unsigned HOST_WIDE_INT
,
84 unsigned HOST_WIDE_INT
, machine_mode
,
85 tree
, int, alias_set_type
, bool);
86 static void store_constructor (tree
, rtx
, int, HOST_WIDE_INT
, bool);
87 static rtx
store_field (rtx
, HOST_WIDE_INT
, HOST_WIDE_INT
,
88 unsigned HOST_WIDE_INT
, unsigned HOST_WIDE_INT
,
89 machine_mode
, tree
, alias_set_type
, bool, bool);
91 static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree
, const_tree
);
93 static int is_aligning_offset (const_tree
, const_tree
);
94 static rtx
reduce_to_bit_field_precision (rtx
, rtx
, tree
);
95 static rtx
do_store_flag (sepops
, rtx
, machine_mode
);
97 static void emit_single_push_insn (machine_mode
, rtx
, tree
);
99 static void do_tablejump (rtx
, machine_mode
, rtx
, rtx
, rtx
,
100 profile_probability
);
101 static rtx
const_vector_from_tree (tree
);
102 static rtx
const_scalar_mask_from_tree (tree
);
103 static tree
tree_expr_size (const_tree
);
104 static HOST_WIDE_INT
int_expr_size (tree
);
107 /* This is run to set up which modes can be used
108 directly in memory and to initialize the block move optab. It is run
109 at the beginning of compilation and when the target is reinitialized. */
112 init_expr_target (void)
119 /* Try indexing by frame ptr and try by stack ptr.
120 It is known that on the Convex the stack ptr isn't a valid index.
121 With luck, one or the other is valid on any machine. */
122 mem
= gen_rtx_MEM (word_mode
, stack_pointer_rtx
);
123 mem1
= gen_rtx_MEM (word_mode
, frame_pointer_rtx
);
125 /* A scratch register we can modify in-place below to avoid
126 useless RTL allocations. */
127 reg
= gen_rtx_REG (word_mode
, LAST_VIRTUAL_REGISTER
+ 1);
129 rtx_insn
*insn
= as_a
<rtx_insn
*> (rtx_alloc (INSN
));
130 pat
= gen_rtx_SET (NULL_RTX
, NULL_RTX
);
131 PATTERN (insn
) = pat
;
133 for (machine_mode mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
134 mode
= (machine_mode
) ((int) mode
+ 1))
138 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
139 PUT_MODE (mem
, mode
);
140 PUT_MODE (mem1
, mode
);
142 /* See if there is some register that can be used in this mode and
143 directly loaded or stored from memory. */
145 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
146 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
147 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
150 if (! HARD_REGNO_MODE_OK (regno
, mode
))
153 set_mode_and_regno (reg
, mode
, regno
);
156 SET_DEST (pat
) = reg
;
157 if (recog (pat
, insn
, &num_clobbers
) >= 0)
158 direct_load
[(int) mode
] = 1;
160 SET_SRC (pat
) = mem1
;
161 SET_DEST (pat
) = reg
;
162 if (recog (pat
, insn
, &num_clobbers
) >= 0)
163 direct_load
[(int) mode
] = 1;
166 SET_DEST (pat
) = mem
;
167 if (recog (pat
, insn
, &num_clobbers
) >= 0)
168 direct_store
[(int) mode
] = 1;
171 SET_DEST (pat
) = mem1
;
172 if (recog (pat
, insn
, &num_clobbers
) >= 0)
173 direct_store
[(int) mode
] = 1;
177 mem
= gen_rtx_MEM (VOIDmode
, gen_raw_REG (Pmode
, LAST_VIRTUAL_REGISTER
+ 1));
179 opt_scalar_float_mode mode_iter
;
180 FOR_EACH_MODE_IN_CLASS (mode_iter
, MODE_FLOAT
)
182 scalar_float_mode mode
= mode_iter
.require ();
183 scalar_float_mode srcmode
;
184 FOR_EACH_MODE_UNTIL (srcmode
, mode
)
188 ic
= can_extend_p (mode
, srcmode
, 0);
189 if (ic
== CODE_FOR_nothing
)
192 PUT_MODE (mem
, srcmode
);
194 if (insn_operand_matches (ic
, 1, mem
))
195 float_extend_from_mem
[mode
][srcmode
] = true;
200 /* This is run at the start of compiling a function. */
205 memset (&crtl
->expr
, 0, sizeof (crtl
->expr
));
208 /* Copy data from FROM to TO, where the machine modes are not the same.
209 Both modes may be integer, or both may be floating, or both may be
211 UNSIGNEDP should be nonzero if FROM is an unsigned type.
212 This causes zero-extension instead of sign-extension. */
215 convert_move (rtx to
, rtx from
, int unsignedp
)
217 machine_mode to_mode
= GET_MODE (to
);
218 machine_mode from_mode
= GET_MODE (from
);
219 int to_real
= SCALAR_FLOAT_MODE_P (to_mode
);
220 int from_real
= SCALAR_FLOAT_MODE_P (from_mode
);
224 /* rtx code for making an equivalent value. */
225 enum rtx_code equiv_code
= (unsignedp
< 0 ? UNKNOWN
226 : (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
));
229 gcc_assert (to_real
== from_real
);
230 gcc_assert (to_mode
!= BLKmode
);
231 gcc_assert (from_mode
!= BLKmode
);
233 /* If the source and destination are already the same, then there's
238 /* If FROM is a SUBREG that indicates that we have already done at least
239 the required extension, strip it. We don't handle such SUBREGs as
242 scalar_int_mode to_int_mode
;
243 if (GET_CODE (from
) == SUBREG
244 && SUBREG_PROMOTED_VAR_P (from
)
245 && is_a
<scalar_int_mode
> (to_mode
, &to_int_mode
)
246 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from
)))
247 >= GET_MODE_PRECISION (to_int_mode
))
248 && SUBREG_CHECK_PROMOTED_SIGN (from
, unsignedp
))
249 from
= gen_lowpart (to_int_mode
, from
), from_mode
= to_int_mode
;
251 gcc_assert (GET_CODE (to
) != SUBREG
|| !SUBREG_PROMOTED_VAR_P (to
));
253 if (to_mode
== from_mode
254 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
256 emit_move_insn (to
, from
);
260 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
262 gcc_assert (GET_MODE_BITSIZE (from_mode
) == GET_MODE_BITSIZE (to_mode
));
264 if (VECTOR_MODE_P (to_mode
))
265 from
= simplify_gen_subreg (to_mode
, from
, GET_MODE (from
), 0);
267 to
= simplify_gen_subreg (from_mode
, to
, GET_MODE (to
), 0);
269 emit_move_insn (to
, from
);
273 if (GET_CODE (to
) == CONCAT
&& GET_CODE (from
) == CONCAT
)
275 convert_move (XEXP (to
, 0), XEXP (from
, 0), unsignedp
);
276 convert_move (XEXP (to
, 1), XEXP (from
, 1), unsignedp
);
286 gcc_assert ((GET_MODE_PRECISION (from_mode
)
287 != GET_MODE_PRECISION (to_mode
))
288 || (DECIMAL_FLOAT_MODE_P (from_mode
)
289 != DECIMAL_FLOAT_MODE_P (to_mode
)));
291 if (GET_MODE_PRECISION (from_mode
) == GET_MODE_PRECISION (to_mode
))
292 /* Conversion between decimal float and binary float, same size. */
293 tab
= DECIMAL_FLOAT_MODE_P (from_mode
) ? trunc_optab
: sext_optab
;
294 else if (GET_MODE_PRECISION (from_mode
) < GET_MODE_PRECISION (to_mode
))
299 /* Try converting directly if the insn is supported. */
301 code
= convert_optab_handler (tab
, to_mode
, from_mode
);
302 if (code
!= CODE_FOR_nothing
)
304 emit_unop_insn (code
, to
, from
,
305 tab
== sext_optab
? FLOAT_EXTEND
: FLOAT_TRUNCATE
);
309 /* Otherwise use a libcall. */
310 libcall
= convert_optab_libfunc (tab
, to_mode
, from_mode
);
312 /* Is this conversion implemented yet? */
313 gcc_assert (libcall
);
316 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
318 insns
= get_insns ();
320 emit_libcall_block (insns
, to
, value
,
321 tab
== trunc_optab
? gen_rtx_FLOAT_TRUNCATE (to_mode
,
323 : gen_rtx_FLOAT_EXTEND (to_mode
, from
));
327 /* Handle pointer conversion. */ /* SPEE 900220. */
328 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
332 if (GET_MODE_PRECISION (from_mode
) > GET_MODE_PRECISION (to_mode
))
339 if (convert_optab_handler (ctab
, to_mode
, from_mode
)
342 emit_unop_insn (convert_optab_handler (ctab
, to_mode
, from_mode
),
348 /* Targets are expected to provide conversion insns between PxImode and
349 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
350 if (GET_MODE_CLASS (to_mode
) == MODE_PARTIAL_INT
)
352 scalar_int_mode full_mode
353 = smallest_int_mode_for_size (GET_MODE_BITSIZE (to_mode
));
355 gcc_assert (convert_optab_handler (trunc_optab
, to_mode
, full_mode
)
356 != CODE_FOR_nothing
);
358 if (full_mode
!= from_mode
)
359 from
= convert_to_mode (full_mode
, from
, unsignedp
);
360 emit_unop_insn (convert_optab_handler (trunc_optab
, to_mode
, full_mode
),
364 if (GET_MODE_CLASS (from_mode
) == MODE_PARTIAL_INT
)
367 scalar_int_mode full_mode
368 = smallest_int_mode_for_size (GET_MODE_BITSIZE (from_mode
));
369 convert_optab ctab
= unsignedp
? zext_optab
: sext_optab
;
370 enum insn_code icode
;
372 icode
= convert_optab_handler (ctab
, full_mode
, from_mode
);
373 gcc_assert (icode
!= CODE_FOR_nothing
);
375 if (to_mode
== full_mode
)
377 emit_unop_insn (icode
, to
, from
, UNKNOWN
);
381 new_from
= gen_reg_rtx (full_mode
);
382 emit_unop_insn (icode
, new_from
, from
, UNKNOWN
);
384 /* else proceed to integer conversions below. */
385 from_mode
= full_mode
;
389 /* Make sure both are fixed-point modes or both are not. */
390 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode
) ==
391 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode
));
392 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode
))
394 /* If we widen from_mode to to_mode and they are in the same class,
395 we won't saturate the result.
396 Otherwise, always saturate the result to play safe. */
397 if (GET_MODE_CLASS (from_mode
) == GET_MODE_CLASS (to_mode
)
398 && GET_MODE_SIZE (from_mode
) < GET_MODE_SIZE (to_mode
))
399 expand_fixed_convert (to
, from
, 0, 0);
401 expand_fixed_convert (to
, from
, 0, 1);
405 /* Now both modes are integers. */
407 /* Handle expanding beyond a word. */
408 if (GET_MODE_PRECISION (from_mode
) < GET_MODE_PRECISION (to_mode
)
409 && GET_MODE_PRECISION (to_mode
) > BITS_PER_WORD
)
416 machine_mode lowpart_mode
;
417 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
419 /* Try converting directly if the insn is supported. */
420 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
423 /* If FROM is a SUBREG, put it into a register. Do this
424 so that we always generate the same set of insns for
425 better cse'ing; if an intermediate assignment occurred,
426 we won't be doing the operation directly on the SUBREG. */
427 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
428 from
= force_reg (from_mode
, from
);
429 emit_unop_insn (code
, to
, from
, equiv_code
);
432 /* Next, try converting via full word. */
433 else if (GET_MODE_PRECISION (from_mode
) < BITS_PER_WORD
434 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
435 != CODE_FOR_nothing
))
437 rtx word_to
= gen_reg_rtx (word_mode
);
440 if (reg_overlap_mentioned_p (to
, from
))
441 from
= force_reg (from_mode
, from
);
444 convert_move (word_to
, from
, unsignedp
);
445 emit_unop_insn (code
, to
, word_to
, equiv_code
);
449 /* No special multiword conversion insn; do it by hand. */
452 /* Since we will turn this into a no conflict block, we must ensure
453 the source does not overlap the target so force it into an isolated
454 register when maybe so. Likewise for any MEM input, since the
455 conversion sequence might require several references to it and we
456 must ensure we're getting the same value every time. */
458 if (MEM_P (from
) || reg_overlap_mentioned_p (to
, from
))
459 from
= force_reg (from_mode
, from
);
461 /* Get a copy of FROM widened to a word, if necessary. */
462 if (GET_MODE_PRECISION (from_mode
) < BITS_PER_WORD
)
463 lowpart_mode
= word_mode
;
465 lowpart_mode
= from_mode
;
467 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
469 lowpart
= gen_lowpart (lowpart_mode
, to
);
470 emit_move_insn (lowpart
, lowfrom
);
472 /* Compute the value to put in each remaining word. */
474 fill_value
= const0_rtx
;
476 fill_value
= emit_store_flag_force (gen_reg_rtx (word_mode
),
477 LT
, lowfrom
, const0_rtx
,
478 lowpart_mode
, 0, -1);
480 /* Fill the remaining words. */
481 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
483 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
484 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
486 gcc_assert (subword
);
488 if (fill_value
!= subword
)
489 emit_move_insn (subword
, fill_value
);
492 insns
= get_insns ();
499 /* Truncating multi-word to a word or less. */
500 if (GET_MODE_PRECISION (from_mode
) > BITS_PER_WORD
501 && GET_MODE_PRECISION (to_mode
) <= BITS_PER_WORD
)
504 && ! MEM_VOLATILE_P (from
)
505 && direct_load
[(int) to_mode
]
506 && ! mode_dependent_address_p (XEXP (from
, 0),
507 MEM_ADDR_SPACE (from
)))
509 || GET_CODE (from
) == SUBREG
))
510 from
= force_reg (from_mode
, from
);
511 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
515 /* Now follow all the conversions between integers
516 no more than a word long. */
518 /* For truncation, usually we can just refer to FROM in a narrower mode. */
519 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
520 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode
, from_mode
))
523 && ! MEM_VOLATILE_P (from
)
524 && direct_load
[(int) to_mode
]
525 && ! mode_dependent_address_p (XEXP (from
, 0),
526 MEM_ADDR_SPACE (from
)))
528 || GET_CODE (from
) == SUBREG
))
529 from
= force_reg (from_mode
, from
);
530 if (REG_P (from
) && REGNO (from
) < FIRST_PSEUDO_REGISTER
531 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
532 from
= copy_to_reg (from
);
533 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
537 /* Handle extension. */
538 if (GET_MODE_PRECISION (to_mode
) > GET_MODE_PRECISION (from_mode
))
540 /* Convert directly if that works. */
541 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
544 emit_unop_insn (code
, to
, from
, equiv_code
);
549 machine_mode intermediate
;
553 /* Search for a mode to convert via. */
554 FOR_EACH_MODE_FROM (intermediate
, from_mode
)
555 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
557 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
558 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode
, intermediate
)))
559 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
560 != CODE_FOR_nothing
))
562 convert_move (to
, convert_to_mode (intermediate
, from
,
563 unsignedp
), unsignedp
);
567 /* No suitable intermediate mode.
568 Generate what we need with shifts. */
569 shift_amount
= (GET_MODE_PRECISION (to_mode
)
570 - GET_MODE_PRECISION (from_mode
));
571 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
572 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
574 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
577 emit_move_insn (to
, tmp
);
582 /* Support special truncate insns for certain modes. */
583 if (convert_optab_handler (trunc_optab
, to_mode
,
584 from_mode
) != CODE_FOR_nothing
)
586 emit_unop_insn (convert_optab_handler (trunc_optab
, to_mode
, from_mode
),
591 /* Handle truncation of volatile memrefs, and so on;
592 the things that couldn't be truncated directly,
593 and for which there was no special instruction.
595 ??? Code above formerly short-circuited this, for most integer
596 mode pairs, with a force_reg in from_mode followed by a recursive
597 call to this routine. Appears always to have been wrong. */
598 if (GET_MODE_PRECISION (to_mode
) < GET_MODE_PRECISION (from_mode
))
600 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
601 emit_move_insn (to
, temp
);
605 /* Mode combination is not recognized. */
609 /* Return an rtx for a value that would result
610 from converting X to mode MODE.
611 Both X and MODE may be floating, or both integer.
612 UNSIGNEDP is nonzero if X is an unsigned value.
613 This can be done by referring to a part of X in place
614 or by copying to a new temporary with conversion. */
617 convert_to_mode (machine_mode mode
, rtx x
, int unsignedp
)
619 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
622 /* Return an rtx for a value that would result
623 from converting X from mode OLDMODE to mode MODE.
624 Both modes may be floating, or both integer.
625 UNSIGNEDP is nonzero if X is an unsigned value.
627 This can be done by referring to a part of X in place
628 or by copying to a new temporary with conversion.
630 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
633 convert_modes (machine_mode mode
, machine_mode oldmode
, rtx x
, int unsignedp
)
636 scalar_int_mode int_mode
;
638 /* If FROM is a SUBREG that indicates that we have already done at least
639 the required extension, strip it. */
641 if (GET_CODE (x
) == SUBREG
642 && SUBREG_PROMOTED_VAR_P (x
)
643 && is_a
<scalar_int_mode
> (mode
, &int_mode
)
644 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (int_mode
)
645 && SUBREG_CHECK_PROMOTED_SIGN (x
, unsignedp
))
646 x
= gen_lowpart (int_mode
, SUBREG_REG (x
));
648 if (GET_MODE (x
) != VOIDmode
)
649 oldmode
= GET_MODE (x
);
654 if (CONST_SCALAR_INT_P (x
)
655 && is_int_mode (mode
, &int_mode
))
657 /* If the caller did not tell us the old mode, then there is not
658 much to do with respect to canonicalization. We have to
659 assume that all the bits are significant. */
660 if (GET_MODE_CLASS (oldmode
) != MODE_INT
)
661 oldmode
= MAX_MODE_INT
;
662 wide_int w
= wide_int::from (rtx_mode_t (x
, oldmode
),
663 GET_MODE_PRECISION (int_mode
),
664 unsignedp
? UNSIGNED
: SIGNED
);
665 return immed_wide_int_const (w
, int_mode
);
668 /* We can do this with a gen_lowpart if both desired and current modes
669 are integer, and this is either a constant integer, a register, or a
671 scalar_int_mode int_oldmode
;
672 if (is_int_mode (mode
, &int_mode
)
673 && is_int_mode (oldmode
, &int_oldmode
)
674 && GET_MODE_PRECISION (int_mode
) <= GET_MODE_PRECISION (int_oldmode
)
675 && ((MEM_P (x
) && !MEM_VOLATILE_P (x
) && direct_load
[(int) int_mode
])
677 && (!HARD_REGISTER_P (x
)
678 || HARD_REGNO_MODE_OK (REGNO (x
), int_mode
))
679 && TRULY_NOOP_TRUNCATION_MODES_P (int_mode
, GET_MODE (x
)))))
680 return gen_lowpart (int_mode
, x
);
682 /* Converting from integer constant into mode is always equivalent to an
684 if (VECTOR_MODE_P (mode
) && GET_MODE (x
) == VOIDmode
)
686 gcc_assert (GET_MODE_BITSIZE (mode
) == GET_MODE_BITSIZE (oldmode
));
687 return simplify_gen_subreg (mode
, x
, oldmode
, 0);
690 temp
= gen_reg_rtx (mode
);
691 convert_move (temp
, x
, unsignedp
);
695 /* Return the largest alignment we can use for doing a move (or store)
696 of MAX_PIECES. ALIGN is the largest alignment we could use. */
699 alignment_for_piecewise_move (unsigned int max_pieces
, unsigned int align
)
703 tmode
= mode_for_size (max_pieces
* BITS_PER_UNIT
, MODE_INT
, 1);
704 if (align
>= GET_MODE_ALIGNMENT (tmode
))
705 align
= GET_MODE_ALIGNMENT (tmode
);
708 machine_mode tmode
, xmode
;
710 xmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
711 FOR_EACH_MODE_IN_CLASS (tmode
, MODE_INT
)
713 if (GET_MODE_SIZE (tmode
) > max_pieces
714 || SLOW_UNALIGNED_ACCESS (tmode
, align
))
719 align
= MAX (align
, GET_MODE_ALIGNMENT (xmode
));
725 /* Return the widest integer mode no wider than SIZE. If no such mode
726 can be found, return VOIDmode. */
729 widest_int_mode_for_size (unsigned int size
)
731 machine_mode tmode
, mode
= VOIDmode
;
733 FOR_EACH_MODE_IN_CLASS (tmode
, MODE_INT
)
734 if (GET_MODE_SIZE (tmode
) < size
)
740 /* Determine whether an operation OP on LEN bytes with alignment ALIGN can
741 and should be performed piecewise. */
744 can_do_by_pieces (unsigned HOST_WIDE_INT len
, unsigned int align
,
745 enum by_pieces_operation op
)
747 return targetm
.use_by_pieces_infrastructure_p (len
, align
, op
,
748 optimize_insn_for_speed_p ());
751 /* Determine whether the LEN bytes can be moved by using several move
752 instructions. Return nonzero if a call to move_by_pieces should
756 can_move_by_pieces (unsigned HOST_WIDE_INT len
, unsigned int align
)
758 return can_do_by_pieces (len
, align
, MOVE_BY_PIECES
);
761 /* Return number of insns required to perform operation OP by pieces
762 for L bytes. ALIGN (in bits) is maximum alignment we can assume. */
764 unsigned HOST_WIDE_INT
765 by_pieces_ninsns (unsigned HOST_WIDE_INT l
, unsigned int align
,
766 unsigned int max_size
, by_pieces_operation op
)
768 unsigned HOST_WIDE_INT n_insns
= 0;
770 align
= alignment_for_piecewise_move (MOVE_MAX_PIECES
, align
);
772 while (max_size
> 1 && l
> 0)
775 enum insn_code icode
;
777 mode
= widest_int_mode_for_size (max_size
);
779 if (mode
== VOIDmode
)
781 unsigned int modesize
= GET_MODE_SIZE (mode
);
783 icode
= optab_handler (mov_optab
, mode
);
784 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
786 unsigned HOST_WIDE_INT n_pieces
= l
/ modesize
;
794 case COMPARE_BY_PIECES
:
795 int batch
= targetm
.compare_by_pieces_branch_ratio (mode
);
796 int batch_ops
= 4 * batch
- 1;
797 unsigned HOST_WIDE_INT full
= n_pieces
/ batch
;
798 n_insns
+= full
* batch_ops
;
799 if (n_pieces
% batch
!= 0)
812 /* Used when performing piecewise block operations, holds information
813 about one of the memory objects involved. The member functions
814 can be used to generate code for loading from the object and
815 updating the address when iterating. */
819 /* The object being referenced, a MEM. Can be NULL_RTX to indicate
822 /* The address of the object. Can differ from that seen in the
823 MEM rtx if we copied the address to a register. */
825 /* Nonzero if the address on the object has an autoincrement already,
826 signifies whether that was an increment or decrement. */
827 signed char m_addr_inc
;
828 /* Nonzero if we intend to use autoinc without the address already
829 having autoinc form. We will insert add insns around each memory
830 reference, expecting later passes to form autoinc addressing modes.
831 The only supported options are predecrement and postincrement. */
832 signed char m_explicit_inc
;
833 /* True if we have either of the two possible cases of using
836 /* True if this is an address to be used for load operations rather
840 /* Optionally, a function to obtain constants for any given offset into
841 the objects, and data associated with it. */
842 by_pieces_constfn m_constfn
;
845 pieces_addr (rtx
, bool, by_pieces_constfn
, void *);
846 rtx
adjust (machine_mode
, HOST_WIDE_INT
);
847 void increment_address (HOST_WIDE_INT
);
848 void maybe_predec (HOST_WIDE_INT
);
849 void maybe_postinc (HOST_WIDE_INT
);
850 void decide_autoinc (machine_mode
, bool, HOST_WIDE_INT
);
857 /* Initialize a pieces_addr structure from an object OBJ. IS_LOAD is
858 true if the operation to be performed on this object is a load
859 rather than a store. For stores, OBJ can be NULL, in which case we
860 assume the operation is a stack push. For loads, the optional
861 CONSTFN and its associated CFNDATA can be used in place of the
864 pieces_addr::pieces_addr (rtx obj
, bool is_load
, by_pieces_constfn constfn
,
866 : m_obj (obj
), m_is_load (is_load
), m_constfn (constfn
), m_cfndata (cfndata
)
872 rtx addr
= XEXP (obj
, 0);
873 rtx_code code
= GET_CODE (addr
);
875 bool dec
= code
== PRE_DEC
|| code
== POST_DEC
;
876 bool inc
= code
== PRE_INC
|| code
== POST_INC
;
879 m_addr_inc
= dec
? -1 : 1;
881 /* While we have always looked for these codes here, the code
882 implementing the memory operation has never handled them.
883 Support could be added later if necessary or beneficial. */
884 gcc_assert (code
!= PRE_INC
&& code
!= POST_DEC
);
892 if (STACK_GROWS_DOWNWARD
)
898 gcc_assert (constfn
!= NULL
);
902 gcc_assert (is_load
);
905 /* Decide whether to use autoinc for an address involved in a memory op.
906 MODE is the mode of the accesses, REVERSE is true if we've decided to
907 perform the operation starting from the end, and LEN is the length of
908 the operation. Don't override an earlier decision to set m_auto. */
911 pieces_addr::decide_autoinc (machine_mode
ARG_UNUSED (mode
), bool reverse
,
914 if (m_auto
|| m_obj
== NULL_RTX
)
917 bool use_predec
= (m_is_load
918 ? USE_LOAD_PRE_DECREMENT (mode
)
919 : USE_STORE_PRE_DECREMENT (mode
));
920 bool use_postinc
= (m_is_load
921 ? USE_LOAD_POST_INCREMENT (mode
)
922 : USE_STORE_POST_INCREMENT (mode
));
923 machine_mode addr_mode
= get_address_mode (m_obj
);
925 if (use_predec
&& reverse
)
927 m_addr
= copy_to_mode_reg (addr_mode
,
928 plus_constant (addr_mode
,
933 else if (use_postinc
&& !reverse
)
935 m_addr
= copy_to_mode_reg (addr_mode
, m_addr
);
939 else if (CONSTANT_P (m_addr
))
940 m_addr
= copy_to_mode_reg (addr_mode
, m_addr
);
943 /* Adjust the address to refer to the data at OFFSET in MODE. If we
944 are using autoincrement for this address, we don't add the offset,
945 but we still modify the MEM's properties. */
948 pieces_addr::adjust (machine_mode mode
, HOST_WIDE_INT offset
)
951 return m_constfn (m_cfndata
, offset
, mode
);
952 if (m_obj
== NULL_RTX
)
955 return adjust_automodify_address (m_obj
, mode
, m_addr
, offset
);
957 return adjust_address (m_obj
, mode
, offset
);
960 /* Emit an add instruction to increment the address by SIZE. */
963 pieces_addr::increment_address (HOST_WIDE_INT size
)
965 rtx amount
= gen_int_mode (size
, GET_MODE (m_addr
));
966 emit_insn (gen_add2_insn (m_addr
, amount
));
969 /* If we are supposed to decrement the address after each access, emit code
970 to do so now. Increment by SIZE (which has should have the correct sign
974 pieces_addr::maybe_predec (HOST_WIDE_INT size
)
976 if (m_explicit_inc
>= 0)
978 gcc_assert (HAVE_PRE_DECREMENT
);
979 increment_address (size
);
982 /* If we are supposed to decrement the address after each access, emit code
983 to do so now. Increment by SIZE. */
986 pieces_addr::maybe_postinc (HOST_WIDE_INT size
)
988 if (m_explicit_inc
<= 0)
990 gcc_assert (HAVE_POST_INCREMENT
);
991 increment_address (size
);
994 /* This structure is used by do_op_by_pieces to describe the operation
1000 pieces_addr m_to
, m_from
;
1001 unsigned HOST_WIDE_INT m_len
;
1002 HOST_WIDE_INT m_offset
;
1003 unsigned int m_align
;
1004 unsigned int m_max_size
;
1007 /* Virtual functions, overriden by derived classes for the specific
1009 virtual void generate (rtx
, rtx
, machine_mode
) = 0;
1010 virtual bool prepare_mode (machine_mode
, unsigned int) = 0;
1011 virtual void finish_mode (machine_mode
)
1016 op_by_pieces_d (rtx
, bool, rtx
, bool, by_pieces_constfn
, void *,
1017 unsigned HOST_WIDE_INT
, unsigned int);
1021 /* The constructor for an op_by_pieces_d structure. We require two
1022 objects named TO and FROM, which are identified as loads or stores
1023 by TO_LOAD and FROM_LOAD. If FROM is a load, the optional FROM_CFN
1024 and its associated FROM_CFN_DATA can be used to replace loads with
1025 constant values. LEN describes the length of the operation. */
1027 op_by_pieces_d::op_by_pieces_d (rtx to
, bool to_load
,
1028 rtx from
, bool from_load
,
1029 by_pieces_constfn from_cfn
,
1030 void *from_cfn_data
,
1031 unsigned HOST_WIDE_INT len
,
1033 : m_to (to
, to_load
, NULL
, NULL
),
1034 m_from (from
, from_load
, from_cfn
, from_cfn_data
),
1035 m_len (len
), m_max_size (MOVE_MAX_PIECES
+ 1)
1037 int toi
= m_to
.get_addr_inc ();
1038 int fromi
= m_from
.get_addr_inc ();
1039 if (toi
>= 0 && fromi
>= 0)
1041 else if (toi
<= 0 && fromi
<= 0)
1046 m_offset
= m_reverse
? len
: 0;
1047 align
= MIN (to
? MEM_ALIGN (to
) : align
,
1048 from
? MEM_ALIGN (from
) : align
);
1050 /* If copying requires more than two move insns,
1051 copy addresses to registers (to make displacements shorter)
1052 and use post-increment if available. */
1053 if (by_pieces_ninsns (len
, align
, m_max_size
, MOVE_BY_PIECES
) > 2)
1055 /* Find the mode of the largest comparison. */
1056 machine_mode mode
= widest_int_mode_for_size (m_max_size
);
1058 m_from
.decide_autoinc (mode
, m_reverse
, len
);
1059 m_to
.decide_autoinc (mode
, m_reverse
, len
);
1062 align
= alignment_for_piecewise_move (MOVE_MAX_PIECES
, align
);
1066 /* This function contains the main loop used for expanding a block
1067 operation. First move what we can in the largest integer mode,
1068 then go to successively smaller modes. For every access, call
1069 GENFUN with the two operands and the EXTRA_DATA. */
1072 op_by_pieces_d::run ()
1074 while (m_max_size
> 1 && m_len
> 0)
1076 machine_mode mode
= widest_int_mode_for_size (m_max_size
);
1078 if (mode
== VOIDmode
)
1081 if (prepare_mode (mode
, m_align
))
1083 unsigned int size
= GET_MODE_SIZE (mode
);
1084 rtx to1
= NULL_RTX
, from1
;
1086 while (m_len
>= size
)
1091 to1
= m_to
.adjust (mode
, m_offset
);
1092 from1
= m_from
.adjust (mode
, m_offset
);
1094 m_to
.maybe_predec (-(HOST_WIDE_INT
)size
);
1095 m_from
.maybe_predec (-(HOST_WIDE_INT
)size
);
1097 generate (to1
, from1
, mode
);
1099 m_to
.maybe_postinc (size
);
1100 m_from
.maybe_postinc (size
);
1111 m_max_size
= GET_MODE_SIZE (mode
);
1114 /* The code above should have handled everything. */
1115 gcc_assert (!m_len
);
1118 /* Derived class from op_by_pieces_d, providing support for block move
1121 class move_by_pieces_d
: public op_by_pieces_d
1123 insn_gen_fn m_gen_fun
;
1124 void generate (rtx
, rtx
, machine_mode
);
1125 bool prepare_mode (machine_mode
, unsigned int);
1128 move_by_pieces_d (rtx to
, rtx from
, unsigned HOST_WIDE_INT len
,
1130 : op_by_pieces_d (to
, false, from
, true, NULL
, NULL
, len
, align
)
1133 rtx
finish_endp (int);
1136 /* Return true if MODE can be used for a set of copies, given an
1137 alignment ALIGN. Prepare whatever data is necessary for later
1138 calls to generate. */
1141 move_by_pieces_d::prepare_mode (machine_mode mode
, unsigned int align
)
1143 insn_code icode
= optab_handler (mov_optab
, mode
);
1144 m_gen_fun
= GEN_FCN (icode
);
1145 return icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
);
1148 /* A callback used when iterating for a compare_by_pieces_operation.
1149 OP0 and OP1 are the values that have been loaded and should be
1150 compared in MODE. If OP0 is NULL, this means we should generate a
1151 push; otherwise EXTRA_DATA holds a pointer to a pointer to the insn
1152 gen function that should be used to generate the mode. */
1155 move_by_pieces_d::generate (rtx op0
, rtx op1
,
1156 machine_mode mode ATTRIBUTE_UNUSED
)
1158 #ifdef PUSH_ROUNDING
1159 if (op0
== NULL_RTX
)
1161 emit_single_push_insn (mode
, op1
, NULL
);
1165 emit_insn (m_gen_fun (op0
, op1
));
1168 /* Perform the final adjustment at the end of a string to obtain the
1169 correct return value for the block operation. If ENDP is 1 return
1170 memory at the end ala mempcpy, and if ENDP is 2 return memory the
1171 end minus one byte ala stpcpy. */
1174 move_by_pieces_d::finish_endp (int endp
)
1176 gcc_assert (!m_reverse
);
1179 m_to
.maybe_postinc (-1);
1182 return m_to
.adjust (QImode
, m_offset
);
1185 /* Generate several move instructions to copy LEN bytes from block FROM to
1186 block TO. (These are MEM rtx's with BLKmode).
1188 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1189 used to push FROM to the stack.
1191 ALIGN is maximum stack alignment we can assume.
1193 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1194 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1198 move_by_pieces (rtx to
, rtx from
, unsigned HOST_WIDE_INT len
,
1199 unsigned int align
, int endp
)
1201 #ifndef PUSH_ROUNDING
1206 move_by_pieces_d
data (to
, from
, len
, align
);
1211 return data
.finish_endp (endp
);
1216 /* Derived class from op_by_pieces_d, providing support for block move
1219 class store_by_pieces_d
: public op_by_pieces_d
1221 insn_gen_fn m_gen_fun
;
1222 void generate (rtx
, rtx
, machine_mode
);
1223 bool prepare_mode (machine_mode
, unsigned int);
1226 store_by_pieces_d (rtx to
, by_pieces_constfn cfn
, void *cfn_data
,
1227 unsigned HOST_WIDE_INT len
, unsigned int align
)
1228 : op_by_pieces_d (to
, false, NULL_RTX
, true, cfn
, cfn_data
, len
, align
)
1231 rtx
finish_endp (int);
1234 /* Return true if MODE can be used for a set of stores, given an
1235 alignment ALIGN. Prepare whatever data is necessary for later
1236 calls to generate. */
1239 store_by_pieces_d::prepare_mode (machine_mode mode
, unsigned int align
)
1241 insn_code icode
= optab_handler (mov_optab
, mode
);
1242 m_gen_fun
= GEN_FCN (icode
);
1243 return icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
);
1246 /* A callback used when iterating for a store_by_pieces_operation.
1247 OP0 and OP1 are the values that have been loaded and should be
1248 compared in MODE. If OP0 is NULL, this means we should generate a
1249 push; otherwise EXTRA_DATA holds a pointer to a pointer to the insn
1250 gen function that should be used to generate the mode. */
1253 store_by_pieces_d::generate (rtx op0
, rtx op1
, machine_mode
)
1255 emit_insn (m_gen_fun (op0
, op1
));
1258 /* Perform the final adjustment at the end of a string to obtain the
1259 correct return value for the block operation. If ENDP is 1 return
1260 memory at the end ala mempcpy, and if ENDP is 2 return memory the
1261 end minus one byte ala stpcpy. */
1264 store_by_pieces_d::finish_endp (int endp
)
1266 gcc_assert (!m_reverse
);
1269 m_to
.maybe_postinc (-1);
1272 return m_to
.adjust (QImode
, m_offset
);
1275 /* Determine whether the LEN bytes generated by CONSTFUN can be
1276 stored to memory using several move instructions. CONSTFUNDATA is
1277 a pointer which will be passed as argument in every CONSTFUN call.
1278 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
1279 a memset operation and false if it's a copy of a constant string.
1280 Return nonzero if a call to store_by_pieces should succeed. */
1283 can_store_by_pieces (unsigned HOST_WIDE_INT len
,
1284 rtx (*constfun
) (void *, HOST_WIDE_INT
, machine_mode
),
1285 void *constfundata
, unsigned int align
, bool memsetp
)
1287 unsigned HOST_WIDE_INT l
;
1288 unsigned int max_size
;
1289 HOST_WIDE_INT offset
= 0;
1291 enum insn_code icode
;
1293 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
1294 rtx cst ATTRIBUTE_UNUSED
;
1299 if (!targetm
.use_by_pieces_infrastructure_p (len
, align
,
1303 optimize_insn_for_speed_p ()))
1306 align
= alignment_for_piecewise_move (STORE_MAX_PIECES
, align
);
1308 /* We would first store what we can in the largest integer mode, then go to
1309 successively smaller modes. */
1312 reverse
<= (HAVE_PRE_DECREMENT
|| HAVE_POST_DECREMENT
);
1316 max_size
= STORE_MAX_PIECES
+ 1;
1317 while (max_size
> 1 && l
> 0)
1319 mode
= widest_int_mode_for_size (max_size
);
1321 if (mode
== VOIDmode
)
1324 icode
= optab_handler (mov_optab
, mode
);
1325 if (icode
!= CODE_FOR_nothing
1326 && align
>= GET_MODE_ALIGNMENT (mode
))
1328 unsigned int size
= GET_MODE_SIZE (mode
);
1335 cst
= (*constfun
) (constfundata
, offset
, mode
);
1336 if (!targetm
.legitimate_constant_p (mode
, cst
))
1346 max_size
= GET_MODE_SIZE (mode
);
1349 /* The code above should have handled everything. */
1356 /* Generate several move instructions to store LEN bytes generated by
1357 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
1358 pointer which will be passed as argument in every CONSTFUN call.
1359 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
1360 a memset operation and false if it's a copy of a constant string.
1361 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1362 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1366 store_by_pieces (rtx to
, unsigned HOST_WIDE_INT len
,
1367 rtx (*constfun
) (void *, HOST_WIDE_INT
, machine_mode
),
1368 void *constfundata
, unsigned int align
, bool memsetp
, int endp
)
1372 gcc_assert (endp
!= 2);
1376 gcc_assert (targetm
.use_by_pieces_infrastructure_p
1378 memsetp
? SET_BY_PIECES
: STORE_BY_PIECES
,
1379 optimize_insn_for_speed_p ()));
1381 store_by_pieces_d
data (to
, constfun
, constfundata
, len
, align
);
1385 return data
.finish_endp (endp
);
1390 /* Callback routine for clear_by_pieces.
1391 Return const0_rtx unconditionally. */
1394 clear_by_pieces_1 (void *, HOST_WIDE_INT
, machine_mode
)
1399 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
1400 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
1403 clear_by_pieces (rtx to
, unsigned HOST_WIDE_INT len
, unsigned int align
)
1408 store_by_pieces_d
data (to
, clear_by_pieces_1
, NULL
, len
, align
);
1412 /* Context used by compare_by_pieces_genfn. It stores the fail label
1413 to jump to in case of miscomparison, and for branch ratios greater than 1,
1414 it stores an accumulator and the current and maximum counts before
1415 emitting another branch. */
1417 class compare_by_pieces_d
: public op_by_pieces_d
1419 rtx_code_label
*m_fail_label
;
1421 int m_count
, m_batch
;
1423 void generate (rtx
, rtx
, machine_mode
);
1424 bool prepare_mode (machine_mode
, unsigned int);
1425 void finish_mode (machine_mode
);
1427 compare_by_pieces_d (rtx op0
, rtx op1
, by_pieces_constfn op1_cfn
,
1428 void *op1_cfn_data
, HOST_WIDE_INT len
, int align
,
1429 rtx_code_label
*fail_label
)
1430 : op_by_pieces_d (op0
, true, op1
, true, op1_cfn
, op1_cfn_data
, len
, align
)
1432 m_fail_label
= fail_label
;
1436 /* A callback used when iterating for a compare_by_pieces_operation.
1437 OP0 and OP1 are the values that have been loaded and should be
1438 compared in MODE. DATA holds a pointer to the compare_by_pieces_data
1439 context structure. */
1442 compare_by_pieces_d::generate (rtx op0
, rtx op1
, machine_mode mode
)
1446 rtx temp
= expand_binop (mode
, sub_optab
, op0
, op1
, NULL_RTX
,
1447 true, OPTAB_LIB_WIDEN
);
1449 temp
= expand_binop (mode
, ior_optab
, m_accumulator
, temp
, temp
,
1450 true, OPTAB_LIB_WIDEN
);
1451 m_accumulator
= temp
;
1453 if (++m_count
< m_batch
)
1457 op0
= m_accumulator
;
1459 m_accumulator
= NULL_RTX
;
1461 do_compare_rtx_and_jump (op0
, op1
, NE
, true, mode
, NULL_RTX
, NULL
,
1462 m_fail_label
, profile_probability::uninitialized ());
1465 /* Return true if MODE can be used for a set of moves and comparisons,
1466 given an alignment ALIGN. Prepare whatever data is necessary for
1467 later calls to generate. */
1470 compare_by_pieces_d::prepare_mode (machine_mode mode
, unsigned int align
)
1472 insn_code icode
= optab_handler (mov_optab
, mode
);
1473 if (icode
== CODE_FOR_nothing
1474 || align
< GET_MODE_ALIGNMENT (mode
)
1475 || !can_compare_p (EQ
, mode
, ccp_jump
))
1477 m_batch
= targetm
.compare_by_pieces_branch_ratio (mode
);
1480 m_accumulator
= NULL_RTX
;
1485 /* Called after expanding a series of comparisons in MODE. If we have
1486 accumulated results for which we haven't emitted a branch yet, do
1490 compare_by_pieces_d::finish_mode (machine_mode mode
)
1492 if (m_accumulator
!= NULL_RTX
)
1493 do_compare_rtx_and_jump (m_accumulator
, const0_rtx
, NE
, true, mode
,
1494 NULL_RTX
, NULL
, m_fail_label
,
1495 profile_probability::uninitialized ());
1498 /* Generate several move instructions to compare LEN bytes from blocks
1499 ARG0 and ARG1. (These are MEM rtx's with BLKmode).
1501 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1502 used to push FROM to the stack.
1504 ALIGN is maximum stack alignment we can assume.
1506 Optionally, the caller can pass a constfn and associated data in A1_CFN
1507 and A1_CFN_DATA. describing that the second operand being compared is a
1508 known constant and how to obtain its data. */
1511 compare_by_pieces (rtx arg0
, rtx arg1
, unsigned HOST_WIDE_INT len
,
1512 rtx target
, unsigned int align
,
1513 by_pieces_constfn a1_cfn
, void *a1_cfn_data
)
1515 rtx_code_label
*fail_label
= gen_label_rtx ();
1516 rtx_code_label
*end_label
= gen_label_rtx ();
1518 if (target
== NULL_RTX
1519 || !REG_P (target
) || REGNO (target
) < FIRST_PSEUDO_REGISTER
)
1520 target
= gen_reg_rtx (TYPE_MODE (integer_type_node
));
1522 compare_by_pieces_d
data (arg0
, arg1
, a1_cfn
, a1_cfn_data
, len
, align
,
1527 emit_move_insn (target
, const0_rtx
);
1528 emit_jump (end_label
);
1530 emit_label (fail_label
);
1531 emit_move_insn (target
, const1_rtx
);
1532 emit_label (end_label
);
1537 /* Emit code to move a block Y to a block X. This may be done with
1538 string-move instructions, with multiple scalar move instructions,
1539 or with a library call.
1541 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1542 SIZE is an rtx that says how long they are.
1543 ALIGN is the maximum alignment we can assume they have.
1544 METHOD describes what kind of copy this is, and what mechanisms may be used.
1545 MIN_SIZE is the minimal size of block to move
1546 MAX_SIZE is the maximal size of block to move, if it can not be represented
1547 in unsigned HOST_WIDE_INT, than it is mask of all ones.
1549 Return the address of the new block, if memcpy is called and returns it,
1553 emit_block_move_hints (rtx x
, rtx y
, rtx size
, enum block_op_methods method
,
1554 unsigned int expected_align
, HOST_WIDE_INT expected_size
,
1555 unsigned HOST_WIDE_INT min_size
,
1556 unsigned HOST_WIDE_INT max_size
,
1557 unsigned HOST_WIDE_INT probable_max_size
)
1564 if (CONST_INT_P (size
) && INTVAL (size
) == 0)
1569 case BLOCK_OP_NORMAL
:
1570 case BLOCK_OP_TAILCALL
:
1571 may_use_call
= true;
1574 case BLOCK_OP_CALL_PARM
:
1575 may_use_call
= block_move_libcall_safe_for_call_parm ();
1577 /* Make inhibit_defer_pop nonzero around the library call
1578 to force it to pop the arguments right away. */
1582 case BLOCK_OP_NO_LIBCALL
:
1583 may_use_call
= false;
1590 gcc_assert (MEM_P (x
) && MEM_P (y
));
1591 align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1592 gcc_assert (align
>= BITS_PER_UNIT
);
1594 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1595 block copy is more efficient for other large modes, e.g. DCmode. */
1596 x
= adjust_address (x
, BLKmode
, 0);
1597 y
= adjust_address (y
, BLKmode
, 0);
1599 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1600 can be incorrect is coming from __builtin_memcpy. */
1601 if (CONST_INT_P (size
))
1603 x
= shallow_copy_rtx (x
);
1604 y
= shallow_copy_rtx (y
);
1605 set_mem_size (x
, INTVAL (size
));
1606 set_mem_size (y
, INTVAL (size
));
1609 if (CONST_INT_P (size
) && can_move_by_pieces (INTVAL (size
), align
))
1610 move_by_pieces (x
, y
, INTVAL (size
), align
, 0);
1611 else if (emit_block_move_via_movmem (x
, y
, size
, align
,
1612 expected_align
, expected_size
,
1613 min_size
, max_size
, probable_max_size
))
1615 else if (may_use_call
1616 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
))
1617 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y
)))
1619 /* Since x and y are passed to a libcall, mark the corresponding
1620 tree EXPR as addressable. */
1621 tree y_expr
= MEM_EXPR (y
);
1622 tree x_expr
= MEM_EXPR (x
);
1624 mark_addressable (y_expr
);
1626 mark_addressable (x_expr
);
1627 retval
= emit_block_copy_via_libcall (x
, y
, size
,
1628 method
== BLOCK_OP_TAILCALL
);
1632 emit_block_move_via_loop (x
, y
, size
, align
);
1634 if (method
== BLOCK_OP_CALL_PARM
)
1641 emit_block_move (rtx x
, rtx y
, rtx size
, enum block_op_methods method
)
1643 unsigned HOST_WIDE_INT max
, min
= 0;
1644 if (GET_CODE (size
) == CONST_INT
)
1645 min
= max
= UINTVAL (size
);
1647 max
= GET_MODE_MASK (GET_MODE (size
));
1648 return emit_block_move_hints (x
, y
, size
, method
, 0, -1,
1652 /* A subroutine of emit_block_move. Returns true if calling the
1653 block move libcall will not clobber any parameters which may have
1654 already been placed on the stack. */
1657 block_move_libcall_safe_for_call_parm (void)
1659 #if defined (REG_PARM_STACK_SPACE)
1663 /* If arguments are pushed on the stack, then they're safe. */
1667 /* If registers go on the stack anyway, any argument is sure to clobber
1668 an outgoing argument. */
1669 #if defined (REG_PARM_STACK_SPACE)
1670 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1671 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1672 depend on its argument. */
1674 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn
? NULL_TREE
: TREE_TYPE (fn
)))
1675 && REG_PARM_STACK_SPACE (fn
) != 0)
1679 /* If any argument goes in memory, then it might clobber an outgoing
1682 CUMULATIVE_ARGS args_so_far_v
;
1683 cumulative_args_t args_so_far
;
1686 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1687 INIT_CUMULATIVE_ARGS (args_so_far_v
, TREE_TYPE (fn
), NULL_RTX
, 0, 3);
1688 args_so_far
= pack_cumulative_args (&args_so_far_v
);
1690 arg
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
1691 for ( ; arg
!= void_list_node
; arg
= TREE_CHAIN (arg
))
1693 machine_mode mode
= TYPE_MODE (TREE_VALUE (arg
));
1694 rtx tmp
= targetm
.calls
.function_arg (args_so_far
, mode
,
1696 if (!tmp
|| !REG_P (tmp
))
1698 if (targetm
.calls
.arg_partial_bytes (args_so_far
, mode
, NULL
, 1))
1700 targetm
.calls
.function_arg_advance (args_so_far
, mode
,
1707 /* A subroutine of emit_block_move. Expand a movmem pattern;
1708 return true if successful. */
1711 emit_block_move_via_movmem (rtx x
, rtx y
, rtx size
, unsigned int align
,
1712 unsigned int expected_align
, HOST_WIDE_INT expected_size
,
1713 unsigned HOST_WIDE_INT min_size
,
1714 unsigned HOST_WIDE_INT max_size
,
1715 unsigned HOST_WIDE_INT probable_max_size
)
1717 int save_volatile_ok
= volatile_ok
;
1720 if (expected_align
< align
)
1721 expected_align
= align
;
1722 if (expected_size
!= -1)
1724 if ((unsigned HOST_WIDE_INT
)expected_size
> probable_max_size
)
1725 expected_size
= probable_max_size
;
1726 if ((unsigned HOST_WIDE_INT
)expected_size
< min_size
)
1727 expected_size
= min_size
;
1730 /* Since this is a move insn, we don't care about volatility. */
1733 /* Try the most limited insn first, because there's no point
1734 including more than one in the machine description unless
1735 the more limited one has some advantage. */
1737 FOR_EACH_MODE_IN_CLASS (mode
, MODE_INT
)
1739 enum insn_code code
= direct_optab_handler (movmem_optab
, mode
);
1741 if (code
!= CODE_FOR_nothing
1742 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1743 here because if SIZE is less than the mode mask, as it is
1744 returned by the macro, it will definitely be less than the
1745 actual mode mask. Since SIZE is within the Pmode address
1746 space, we limit MODE to Pmode. */
1747 && ((CONST_INT_P (size
)
1748 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1749 <= (GET_MODE_MASK (mode
) >> 1)))
1750 || max_size
<= (GET_MODE_MASK (mode
) >> 1)
1751 || GET_MODE_BITSIZE (mode
) >= GET_MODE_BITSIZE (Pmode
)))
1753 struct expand_operand ops
[9];
1756 /* ??? When called via emit_block_move_for_call, it'd be
1757 nice if there were some way to inform the backend, so
1758 that it doesn't fail the expansion because it thinks
1759 emitting the libcall would be more efficient. */
1760 nops
= insn_data
[(int) code
].n_generator_args
;
1761 gcc_assert (nops
== 4 || nops
== 6 || nops
== 8 || nops
== 9);
1763 create_fixed_operand (&ops
[0], x
);
1764 create_fixed_operand (&ops
[1], y
);
1765 /* The check above guarantees that this size conversion is valid. */
1766 create_convert_operand_to (&ops
[2], size
, mode
, true);
1767 create_integer_operand (&ops
[3], align
/ BITS_PER_UNIT
);
1770 create_integer_operand (&ops
[4], expected_align
/ BITS_PER_UNIT
);
1771 create_integer_operand (&ops
[5], expected_size
);
1775 create_integer_operand (&ops
[6], min_size
);
1776 /* If we can not represent the maximal size,
1777 make parameter NULL. */
1778 if ((HOST_WIDE_INT
) max_size
!= -1)
1779 create_integer_operand (&ops
[7], max_size
);
1781 create_fixed_operand (&ops
[7], NULL
);
1785 /* If we can not represent the maximal size,
1786 make parameter NULL. */
1787 if ((HOST_WIDE_INT
) probable_max_size
!= -1)
1788 create_integer_operand (&ops
[8], probable_max_size
);
1790 create_fixed_operand (&ops
[8], NULL
);
1792 if (maybe_expand_insn (code
, nops
, ops
))
1794 volatile_ok
= save_volatile_ok
;
1800 volatile_ok
= save_volatile_ok
;
1804 /* A subroutine of emit_block_move. Copy the data via an explicit
1805 loop. This is used only when libcalls are forbidden. */
1806 /* ??? It'd be nice to copy in hunks larger than QImode. */
1809 emit_block_move_via_loop (rtx x
, rtx y
, rtx size
,
1810 unsigned int align ATTRIBUTE_UNUSED
)
1812 rtx_code_label
*cmp_label
, *top_label
;
1813 rtx iter
, x_addr
, y_addr
, tmp
;
1814 machine_mode x_addr_mode
= get_address_mode (x
);
1815 machine_mode y_addr_mode
= get_address_mode (y
);
1816 machine_mode iter_mode
;
1818 iter_mode
= GET_MODE (size
);
1819 if (iter_mode
== VOIDmode
)
1820 iter_mode
= word_mode
;
1822 top_label
= gen_label_rtx ();
1823 cmp_label
= gen_label_rtx ();
1824 iter
= gen_reg_rtx (iter_mode
);
1826 emit_move_insn (iter
, const0_rtx
);
1828 x_addr
= force_operand (XEXP (x
, 0), NULL_RTX
);
1829 y_addr
= force_operand (XEXP (y
, 0), NULL_RTX
);
1830 do_pending_stack_adjust ();
1832 emit_jump (cmp_label
);
1833 emit_label (top_label
);
1835 tmp
= convert_modes (x_addr_mode
, iter_mode
, iter
, true);
1836 x_addr
= simplify_gen_binary (PLUS
, x_addr_mode
, x_addr
, tmp
);
1838 if (x_addr_mode
!= y_addr_mode
)
1839 tmp
= convert_modes (y_addr_mode
, iter_mode
, iter
, true);
1840 y_addr
= simplify_gen_binary (PLUS
, y_addr_mode
, y_addr
, tmp
);
1842 x
= change_address (x
, QImode
, x_addr
);
1843 y
= change_address (y
, QImode
, y_addr
);
1845 emit_move_insn (x
, y
);
1847 tmp
= expand_simple_binop (iter_mode
, PLUS
, iter
, const1_rtx
, iter
,
1848 true, OPTAB_LIB_WIDEN
);
1850 emit_move_insn (iter
, tmp
);
1852 emit_label (cmp_label
);
1854 emit_cmp_and_jump_insns (iter
, size
, LT
, NULL_RTX
, iter_mode
,
1856 profile_probability::guessed_always ()
1857 .apply_scale (9, 10));
1860 /* Expand a call to memcpy or memmove or memcmp, and return the result.
1861 TAILCALL is true if this is a tail call. */
1864 emit_block_op_via_libcall (enum built_in_function fncode
, rtx dst
, rtx src
,
1865 rtx size
, bool tailcall
)
1867 rtx dst_addr
, src_addr
;
1868 tree call_expr
, dst_tree
, src_tree
, size_tree
;
1869 machine_mode size_mode
;
1871 dst_addr
= copy_addr_to_reg (XEXP (dst
, 0));
1872 dst_addr
= convert_memory_address (ptr_mode
, dst_addr
);
1873 dst_tree
= make_tree (ptr_type_node
, dst_addr
);
1875 src_addr
= copy_addr_to_reg (XEXP (src
, 0));
1876 src_addr
= convert_memory_address (ptr_mode
, src_addr
);
1877 src_tree
= make_tree (ptr_type_node
, src_addr
);
1879 size_mode
= TYPE_MODE (sizetype
);
1880 size
= convert_to_mode (size_mode
, size
, 1);
1881 size
= copy_to_mode_reg (size_mode
, size
);
1882 size_tree
= make_tree (sizetype
, size
);
1884 /* It is incorrect to use the libcall calling conventions for calls to
1885 memcpy/memmove/memcmp because they can be provided by the user. */
1886 tree fn
= builtin_decl_implicit (fncode
);
1887 call_expr
= build_call_expr (fn
, 3, dst_tree
, src_tree
, size_tree
);
1888 CALL_EXPR_TAILCALL (call_expr
) = tailcall
;
1890 return expand_call (call_expr
, NULL_RTX
, false);
1893 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
1894 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
1895 otherwise return null. */
1898 expand_cmpstrn_or_cmpmem (insn_code icode
, rtx target
, rtx arg1_rtx
,
1899 rtx arg2_rtx
, tree arg3_type
, rtx arg3_rtx
,
1900 HOST_WIDE_INT align
)
1902 machine_mode insn_mode
= insn_data
[icode
].operand
[0].mode
;
1904 if (target
&& (!REG_P (target
) || HARD_REGISTER_P (target
)))
1907 struct expand_operand ops
[5];
1908 create_output_operand (&ops
[0], target
, insn_mode
);
1909 create_fixed_operand (&ops
[1], arg1_rtx
);
1910 create_fixed_operand (&ops
[2], arg2_rtx
);
1911 create_convert_operand_from (&ops
[3], arg3_rtx
, TYPE_MODE (arg3_type
),
1912 TYPE_UNSIGNED (arg3_type
));
1913 create_integer_operand (&ops
[4], align
);
1914 if (maybe_expand_insn (icode
, 5, ops
))
1915 return ops
[0].value
;
1919 /* Expand a block compare between X and Y with length LEN using the
1920 cmpmem optab, placing the result in TARGET. LEN_TYPE is the type
1921 of the expression that was used to calculate the length. ALIGN
1922 gives the known minimum common alignment. */
1925 emit_block_cmp_via_cmpmem (rtx x
, rtx y
, rtx len
, tree len_type
, rtx target
,
1928 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
1929 implementing memcmp because it will stop if it encounters two
1931 insn_code icode
= direct_optab_handler (cmpmem_optab
, SImode
);
1933 if (icode
== CODE_FOR_nothing
)
1936 return expand_cmpstrn_or_cmpmem (icode
, target
, x
, y
, len_type
, len
, align
);
1939 /* Emit code to compare a block Y to a block X. This may be done with
1940 string-compare instructions, with multiple scalar instructions,
1941 or with a library call.
1943 Both X and Y must be MEM rtx's. LEN is an rtx that says how long
1944 they are. LEN_TYPE is the type of the expression that was used to
1947 If EQUALITY_ONLY is true, it means we don't have to return the tri-state
1948 value of a normal memcmp call, instead we can just compare for equality.
1949 If FORCE_LIBCALL is true, we should emit a call to memcmp rather than
1952 Optionally, the caller can pass a constfn and associated data in Y_CFN
1953 and Y_CFN_DATA. describing that the second operand being compared is a
1954 known constant and how to obtain its data.
1955 Return the result of the comparison, or NULL_RTX if we failed to
1956 perform the operation. */
1959 emit_block_cmp_hints (rtx x
, rtx y
, rtx len
, tree len_type
, rtx target
,
1960 bool equality_only
, by_pieces_constfn y_cfn
,
1965 if (CONST_INT_P (len
) && INTVAL (len
) == 0)
1968 gcc_assert (MEM_P (x
) && MEM_P (y
));
1969 unsigned int align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1970 gcc_assert (align
>= BITS_PER_UNIT
);
1972 x
= adjust_address (x
, BLKmode
, 0);
1973 y
= adjust_address (y
, BLKmode
, 0);
1976 && CONST_INT_P (len
)
1977 && can_do_by_pieces (INTVAL (len
), align
, COMPARE_BY_PIECES
))
1978 result
= compare_by_pieces (x
, y
, INTVAL (len
), target
, align
,
1981 result
= emit_block_cmp_via_cmpmem (x
, y
, len
, len_type
, target
, align
);
1986 /* Copy all or part of a value X into registers starting at REGNO.
1987 The number of registers to be filled is NREGS. */
1990 move_block_to_reg (int regno
, rtx x
, int nregs
, machine_mode mode
)
1995 if (CONSTANT_P (x
) && !targetm
.legitimate_constant_p (mode
, x
))
1996 x
= validize_mem (force_const_mem (mode
, x
));
1998 /* See if the machine can do this with a load multiple insn. */
1999 if (targetm
.have_load_multiple ())
2001 rtx_insn
*last
= get_last_insn ();
2002 rtx first
= gen_rtx_REG (word_mode
, regno
);
2003 if (rtx_insn
*pat
= targetm
.gen_load_multiple (first
, x
,
2010 delete_insns_since (last
);
2013 for (int i
= 0; i
< nregs
; i
++)
2014 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
2015 operand_subword_force (x
, i
, mode
));
2018 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2019 The number of registers to be filled is NREGS. */
2022 move_block_from_reg (int regno
, rtx x
, int nregs
)
2027 /* See if the machine can do this with a store multiple insn. */
2028 if (targetm
.have_store_multiple ())
2030 rtx_insn
*last
= get_last_insn ();
2031 rtx first
= gen_rtx_REG (word_mode
, regno
);
2032 if (rtx_insn
*pat
= targetm
.gen_store_multiple (x
, first
,
2039 delete_insns_since (last
);
2042 for (int i
= 0; i
< nregs
; i
++)
2044 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
2048 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      scalar_int_mode imode;
      if (int_mode_for_mode (GET_MODE (orig_src)).exists (&imode))
        {
          src = gen_reg_rtx (imode);
          emit_move_insn (gen_lowpart (GET_MODE (orig_src), src), orig_src);
        }
      else
        {
          src = assign_stack_temp (GET_MODE (orig_src), ssize);
          emit_move_insn (src, orig_src);
        }
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (COMPLEX_MODE_P (mode)
               && GET_MODE (src) == mode
               && bytelen == GET_MODE_SIZE (mode))
        /* Let emit_move_complex do the bulk of the work.  */
        tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
          unsigned int elt = bytepos / slen0;
          unsigned int subpos = bytepos % slen0;

          if (subpos + bytelen <= slen0)
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, elt);
              if (subpos != 0
                  || subpos + bytelen != slen0
                  || (!CONSTANT_P (tmps[i])
                      && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode)))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             subpos * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode, false,
                                             NULL);
            }
          else
            {
              rtx mem;

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode, false,
                                           NULL);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
        {
          HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

          if (len == ssize)
            tmps[i] = src;
          else
            {
              rtx first, second;

              /* TODO: const_wide_int can have sizes other than this...  */
              gcc_assert (2 * len == ssize);
              split_double (src, &first, &second);
              if (i)
                tmps[i] = second;
              else
                tmps[i] = first;
            }
        }
      else if (REG_P (src) && GET_MODE (src) == mode)
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode, false, NULL);

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                shift, tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
        continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        {
          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
        }
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      scalar_int_mode imode;
      if (int_mode_for_mode (GET_MODE (orig_dst)).exists (&imode))
        {
          dst = gen_reg_rtx (imode);
          emit_group_store (dst, src, type, ssize);
          dst = gen_lowpart (GET_MODE (orig_dst), dst);
        }
      else
        {
          dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
          emit_group_store (dst, src, type, ssize);
        }
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
        {
          tmps[i] = gen_reg_rtx (GET_MODE (reg));
          emit_move_insn (tmps[i], reg);
        }
      else
        tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */
      temp = assign_stack_temp (GET_MODE (dst), ssize);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      machine_mode outer = GET_MODE (dst);
      machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
        dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
         of the destination mode, use a paradoxical subreg to
         initialize the destination.  */
      if (start < finish)
        {
          inner = GET_MODE (tmps[start]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[start],
                                          inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  start++;
                }
            }
        }

      /* If the first element wasn't the low part, try the last.  */
      if (!done
          && start < finish - 1)
        {
          inner = GET_MODE (tmps[finish - 1]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[finish - 1],
                                          inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  finish--;
                }
            }
        }

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
        emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        adj_bytelen = ssize - bytepos;
      else
        adj_bytelen = bytelen;

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + adj_bytelen
              <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              machine_mode dest_mode = GET_MODE (dest);
              machine_mode tmp_mode = GET_MODE (tmps[i]);

              gcc_assert (bytepos == 0 && XVECLEN (src, 0));

              if (GET_MODE_ALIGNMENT (dest_mode)
                  >= GET_MODE_ALIGNMENT (tmp_mode))
                {
                  dest = assign_stack_temp (dest_mode,
                                            GET_MODE_SIZE (dest_mode));
                  emit_move_insn (adjust_address (dest,
                                                  tmp_mode,
                                                  bytepos),
                                  tmps[i]);
                  dst = dest;
                }
              else
                {
                  dest = assign_stack_temp (tmp_mode,
                                            GET_MODE_SIZE (tmp_mode));
                  emit_move_insn (dest, tmps[i]);
                  dst = adjust_address (dest, dest_mode, bytepos);
                }
              break;
            }
        }

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      shift, tmps[i], 0);
            }

          /* Make sure not to write past the end of the struct.  */
          store_bit_field (dest,
                           adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                           bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
                           VOIDmode, tmps[i], false);
        }

      /* Optimize the access just a bit.  */
      else if (MEM_P (dest)
               && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
                   || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
               && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
               && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);

      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         0, 0, mode, tmps[i], false);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Return a form of X that does not use a PARALLEL.  TYPE is the type
   of the value stored in X.  */

rtx
maybe_emit_group_store (rtx x, tree type)
{
  machine_mode mode = TYPE_MODE (type);
  gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
  if (GET_CODE (x) == PARALLEL)
    {
      rtx result = gen_reg_rtx (mode);
      emit_group_store (result, x, type, int_size_in_bytes (type));
      return result;
    }
  return x;
}
/* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.

   This is used on targets that return BLKmode values in registers.  */

static void
copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
  machine_mode mode = GET_MODE (srcreg);
  machine_mode tmode = GET_MODE (target);
  machine_mode copy_mode;

  /* BLKmode registers created in the back-end shouldn't have survived.  */
  gcc_assert (mode != BLKmode);

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least end of
     the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* We can use a single move if we have an exact mode for the size.  */
  else if (MEM_P (target)
           && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
               || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
           && bytes == GET_MODE_SIZE (mode))
    {
      emit_move_insn (adjust_address (target, mode, 0), srcreg);
      return;
    }

  /* And if we additionally have the same mode for a register.  */
  else if (REG_P (target)
           && GET_MODE (target) == mode
           && bytes == GET_MODE_SIZE (mode))
    {
      emit_move_insn (target, srcreg);
      return;
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */
  if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
    {
      srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
      mode = word_mode;
    }

  /* Copy the structure BITSIZE bits at a time.  If the target lives in
     memory, take care of not reading/writing past its end by selecting
     a copy mode suited to BITSIZE.  This should always be possible given
     how it is computed.

     If the target lives in register, make sure not to select a copy mode
     larger than the mode of the register.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */

  copy_mode = word_mode;
  if (MEM_P (target))
    {
      opt_scalar_int_mode mem_mode = int_mode_for_size (bitsize, 1);
      if (mem_mode.exists ())
        copy_mode = mem_mode.require ();
    }
  else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
    copy_mode = tmode;

  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
        dst = target;
      else if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, copy_mode, copy_mode,
                                          false, NULL),
                       false);
    }
}
/* Copy BLKmode value SRC into a register of mode MODE.  Return the
   register if it contains any data, otherwise return null.

   This is used on targets that return BLKmode values in registers.  */

rtx
copy_blkmode_to_reg (machine_mode mode, tree src)
{
  int i, n_regs;
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
  unsigned int bitsize;
  rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
  machine_mode dst_mode;

  gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);

  x = expand_normal (src);

  bytes = int_size_in_bytes (TREE_TYPE (src));
  if (bytes == 0)
    return NULL_RTX;

  /* If the structure doesn't take up a whole number of words, see
     whether the register value should be padded on the left or on
     the right.  Set PADDING_CORRECTION to the number of padding
     bits needed on the left side.

     In most ABIs, the structure will be returned at the least end of
     the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (TREE_TYPE (src))
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
                                           * BITS_PER_UNIT));

  n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  dst_words = XALLOCAVEC (rtx, n_regs);
  bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);

  /* Copy the structure BITSIZE bits at a time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new destination pseudo each time xbitpos is
         on a word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        {
          /* Generate an appropriate register.  */
          dst_word = gen_reg_rtx (word_mode);
          dst_words[xbitpos / BITS_PER_WORD] = dst_word;

          /* Clear the destination before we move anything into it.  */
          emit_move_insn (dst_word, CONST0_RTX (word_mode));
        }

      /* We need a new source operand each time bitpos is on a word
         boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);

      /* Use bitpos for the source extraction (left justified) and
         xbitpos for the destination store (right justified).  */
      store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
                       0, 0, word_mode,
                       extract_bit_field (src_word, bitsize,
                                          bitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,
                                          false, NULL),
                       false);
    }

  if (mode == BLKmode)
    {
      /* Find the smallest integer mode large enough to hold the
         entire structure.  */
      FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
        /* Have we found a large enough mode?  */
        if (GET_MODE_SIZE (mode) >= bytes)
          break;

      /* A suitable mode should have been found.  */
      gcc_assert (mode != VOIDmode);
    }

  if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
    dst_mode = word_mode;
  else
    dst_mode = mode;
  dst = gen_reg_rtx (dst_mode);

  for (i = 0; i < n_regs; i++)
    emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);

  if (mode != dst_mode)
    dst = gen_lowpart (mode, dst);

  return dst;
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
  gcc_assert (REG_P (reg));

  if (!HARD_REGISTER_P (reg))
    return;

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add a CLOBBER expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
    }
}
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the code of the expression on the RHS is CODE.  Return
   NULL otherwise.  */

static gimple *
get_def_for_expr (tree name, enum tree_code code)
{
  gimple *def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || gimple_assign_rhs_code (def_stmt) != code)
    return NULL;

  return def_stmt;
}

/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the class of the expression on the RHS is CLASS.  Return
   NULL otherwise.  */

static gimple *
get_def_for_expr_class (tree name, enum tree_code_class tclass)
{
  gimple *def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
    return NULL;

  return def_stmt;
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
                     unsigned int expected_align, HOST_WIDE_INT expected_size,
                     unsigned HOST_WIDE_INT min_size,
                     unsigned HOST_WIDE_INT max_size,
                     unsigned HOST_WIDE_INT probable_max_size)
{
  machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (mode != BLKmode
      && CONST_INT_P (size)
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
        {
          emit_move_insn (object, zero);
          return NULL;
        }

      if (COMPLEX_MODE_P (mode))
        {
          zero = CONST0_RTX (GET_MODE_INNER (mode));
          if (zero != NULL)
            {
              write_complex_part (object, zero, 0);
              write_complex_part (object, zero, 1);
              return NULL;
            }
        }
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (CONST_INT_P (size)
      && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
                                                 CLEAR_BY_PIECES,
                                                 optimize_insn_for_speed_p ()))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align,
                                   expected_align, expected_size,
                                   min_size, max_size, probable_max_size))
    ;
  else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
    return set_storage_via_libcall (object, size, const0_rtx,
                                    method == BLOCK_OP_TAILCALL);
  else
    gcc_unreachable ();

  return NULL;
}

rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return clear_storage_hints (object, size, method, 0, -1, min, max, max);
}
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
  tree call_expr, fn, object_tree, size_tree, val_tree;
  machine_mode size_mode;

  object = copy_addr_to_reg (XEXP (object, 0));
  object_tree = make_tree (ptr_type_node, object);

  if (!CONST_INT_P (val))
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
  val_tree = make_tree (integer_type_node, val);

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);
  size_tree = make_tree (sizetype, size);

  /* It is incorrect to use the libcall calling conventions for calls to
     memset because it can be provided by the user.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMSET);
  call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  return expand_call (call_expr, NULL_RTX, false);
}
/* Expand a setmem pattern; return true if successful.  */

bool
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
                        unsigned int expected_align, HOST_WIDE_INT expected_size,
                        unsigned HOST_WIDE_INT min_size,
                        unsigned HOST_WIDE_INT max_size,
                        unsigned HOST_WIDE_INT probable_max_size)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  machine_mode mode;

  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
    {
      if ((unsigned HOST_WIDE_INT) expected_size > max_size)
        expected_size = max_size;
      if ((unsigned HOST_WIDE_INT) expected_size < min_size)
        expected_size = min_size;
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
    {
      enum insn_code code = direct_optab_handler (setmem_optab, mode);

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  Since SIZE is within the Pmode address
             space, we limit MODE to Pmode.  */
          && ((CONST_INT_P (size)
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || max_size <= (GET_MODE_MASK (mode) >> 1)
              || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
        {
          struct expand_operand ops[9];
          unsigned int nops;

          nops = insn_data[(int) code].n_generator_args;
          gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

          create_fixed_operand (&ops[0], object);
          /* The check above guarantees that this size conversion is valid.  */
          create_convert_operand_to (&ops[1], size, mode, true);
          create_convert_operand_from (&ops[2], val, byte_mode, true);
          create_integer_operand (&ops[3], align / BITS_PER_UNIT);
          if (nops >= 6)
            {
              create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
              create_integer_operand (&ops[5], expected_size);
            }
          if (nops >= 8)
            {
              create_integer_operand (&ops[6], min_size);
              /* If we cannot represent the maximal size,
                 make parameter NULL.  */
              if ((HOST_WIDE_INT) max_size != -1)
                create_integer_operand (&ops[7], max_size);
              else
                create_fixed_operand (&ops[7], NULL);
            }
          if (nops == 9)
            {
              /* If we cannot represent the maximal size,
                 make parameter NULL.  */
              if ((HOST_WIDE_INT) probable_max_size != -1)
                create_integer_operand (&ops[8], probable_max_size);
              else
                create_fixed_operand (&ops[8], NULL);
            }
          if (maybe_expand_insn (code, nops, ops))
            return true;
        }
    }

  return false;
}
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  machine_mode cmode, imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
                                         imag_p ? GET_MODE_SIZE (imode) : 0),
                      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && REG_NREGS (cplx) % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
                                      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
        {
          emit_move_insn (part, val);
          return;
        }
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val,
                   false);
}
/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

rtx
read_complex_part (rtx cplx, bool imag_p)
{
  machine_mode cmode, imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
        {
          tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
          if (CONSTANT_CLASS_P (part))
            return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
        }
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
                              imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && REG_NREGS (cplx) % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
                                     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
        return ret;
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
                            true, NULL_RTX, imode, imode, false, NULL);
}
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (machine_mode new_mode,
                       machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (push_operand (x, GET_MODE (x)))
    {
      ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
      MEM_COPY_ATTRIBUTES (ret, x);
    }
  else if (MEM_P (x))
    {
      /* We don't have to worry about changing the address since the
         size in bytes is supposed to be the same.  */
      if (reload_in_progress)
        {
          /* Copy the MEM to change the mode and move any
             substitutions from the old MEM to the new one.  */
          ret = adjust_address_nv (x, new_mode, 0);
          copy_replacements (x, ret);
        }
      else
        ret = adjust_address (x, new_mode, 0);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
         that the new mode is ok for a hard register.  If we were to use
         simplify_gen_subreg, we would create the subreg, but would
         probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
         we want.  Which is needed for CCmodes on some targets.  */
      if (force)
        ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
        ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx_insn *
emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
{
  scalar_int_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  if (!int_mode_for_mode (mode).exists (&imode))
    return NULL;

  /* The target must support moves in this mode.  */
  code = optab_handler (mov_optab, imode);
  if (code == CODE_FOR_nothing)
    return NULL;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL;
  return emit_insn (GEN_FCN (code) (x, y));
}
/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
   Return an equivalent MEM that does not use an auto-increment.  */

rtx
emit_move_resolve_push (machine_mode mode, rtx x)
{
  enum rtx_code code = GET_CODE (XEXP (x, 0));
  HOST_WIDE_INT adjust;
  rtx temp;

  adjust = GET_MODE_SIZE (mode);
#ifdef PUSH_ROUNDING
  adjust = PUSH_ROUNDING (adjust);
#endif
  if (code == PRE_DEC || code == POST_DEC)
    adjust = -adjust;
  else if (code == PRE_MODIFY || code == POST_MODIFY)
    {
      rtx expr = XEXP (XEXP (x, 0), 1);
      HOST_WIDE_INT val;

      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
      gcc_assert (CONST_INT_P (XEXP (expr, 1)));
      val = INTVAL (XEXP (expr, 1));
      if (GET_CODE (expr) == MINUS)
        val = -val;
      gcc_assert (adjust == val || adjust == -val);
      adjust = val;
    }

  /* Do not use anti_adjust_stack, since we don't want to update
     stack_pointer_delta.  */
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
                              gen_int_mode (adjust, Pmode), stack_pointer_rtx,
                              0, OPTAB_LIB_WIDEN);
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);

  switch (code)
    {
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      temp = stack_pointer_rtx;
      break;
    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
      temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
      break;
    default:
      gcc_unreachable ();
    }

  return replace_equiv_address (x, temp);
}
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
   X is known to satisfy push_operand, and MODE is known to be complex.
   Returns the last instruction emitted.  */

rtx_insn *
emit_move_complex_push (machine_mode mode, rtx x, rtx y)
{
  machine_mode submode = GET_MODE_INNER (mode);
  bool imag_first;

#ifdef PUSH_ROUNDING
  unsigned int submodesize = GET_MODE_SIZE (submode);

  /* In case we output to the stack, but the size is smaller than the
     machine can push exactly, we need to use move instructions.  */
  if (PUSH_ROUNDING (submodesize) != submodesize)
    {
      x = emit_move_resolve_push (mode, x);
      return emit_move_insn (x, y);
    }
#endif

  /* Note that the real part always precedes the imag part in memory
     regardless of machine's endianness.  */
  switch (GET_CODE (XEXP (x, 0)))
    {
    case PRE_DEC:
    case POST_DEC:
      imag_first = true;
      break;

    case PRE_INC:
    case POST_INC:
      imag_first = false;
      break;

    default:
      gcc_unreachable ();
    }

  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                  read_complex_part (y, imag_first));
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                         read_complex_part (y, !imag_first));
}
/* A subroutine of emit_move_complex.  Perform the move from Y to X
   via two moves of the parts.  Returns the last instruction emitted.  */

rtx_insn *
emit_move_complex_parts (rtx x, rtx y)
{
  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_clobber (x);

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);

  return get_last_insn ();
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be complex.  Returns the last instruction emitted.  */

static rtx_insn *
emit_move_complex (machine_mode mode, rtx x, rtx y)
{
  bool try_int;

  /* Need to take special care for pushes, to maintain proper ordering
     of the data, and possibly extra padding.  */
  if (push_operand (x, mode))
    return emit_move_complex_push (mode, x, y);

  /* See if we can coerce the target into moving both values at once, except
     for floating point where we favor moving as parts if this is easy.  */
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
      && !(REG_P (x)
           && HARD_REGISTER_P (x)
           && REG_NREGS (x) == 1)
      && !(REG_P (y)
           && HARD_REGISTER_P (y)
           && REG_NREGS (y) == 1))
    try_int = false;
  /* Not possible if the values are inherently not adjacent.  */
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
    try_int = false;
  /* Is possible if both are registers (or subregs of registers).  */
  else if (register_operand (x, mode) && register_operand (y, mode))
    try_int = true;
  /* If one of the operands is a memory, and alignment constraints
     are friendly enough, we may be able to do combined memory operations.
     We do not attempt this if Y is a constant because that combination is
     usually better with the by-parts thing below.  */
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
           && (!STRICT_ALIGNMENT
               || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
    try_int = true;
  else
    try_int = false;

  if (try_int)
    {
      rtx_insn *ret;

      /* For memory to memory moves, optimal behavior can be had with the
         existing block move logic.  */
      if (MEM_P (x) && MEM_P (y))
        {
          emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
                           BLOCK_OP_NO_LIBCALL);
          return get_last_insn ();
        }

      ret = emit_move_via_integer (mode, x, y, true);
      if (ret)
        return ret;
    }

  return emit_move_complex_parts (x, y);
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx_insn *
emit_move_ccmode (machine_mode mode, rtx x, rtx y)
{
  rtx_insn *ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = optab_handler (mov_optab, CCmode);
      if (code != CODE_FOR_nothing)
        {
          x = emit_move_change_mode (CCmode, mode, x, true);
          y = emit_move_change_mode (CCmode, mode, y, true);
          return emit_insn (GEN_FCN (code) (x, y));
        }
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}
/* Return true if word I of OP lies entirely in the
   undefined bits of a paradoxical subreg.  */

static bool
undefined_operand_subword_p (const_rtx op, int i)
{
  machine_mode innermode, innermostmode;
  int offset;
  if (GET_CODE (op) != SUBREG)
    return false;
  innermode = GET_MODE (op);
  innermostmode = GET_MODE (SUBREG_REG (op));
  offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
  /* The SUBREG_BYTE represents offset, as if the value were stored in
     memory, except for a paradoxical subreg where we define
     SUBREG_BYTE to be 0; undo this exception as in
     simplify_subreg.  */
  if (SUBREG_BYTE (op) == 0
      && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
    {
      int difference = (GET_MODE_SIZE (innermostmode)
                        - GET_MODE_SIZE (innermode));
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  if (offset >= GET_MODE_SIZE (innermostmode)
      || offset <= -GET_MODE_SIZE (word_mode))
    return true;
  return false;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is any multi-word or full-word mode that lacks a move_insn
   pattern.  Note that you will get better code if you define such
   patterns, even if they must turn into multiple assembler instructions.  */

static rtx_insn *
emit_move_multi_word (machine_mode mode, rtx x, rtx y)
{
  rtx_insn *last_insn = 0;
  rtx_insn *seq;
  rtx inner;
  bool need_clobber;
  int i;

  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);

  /* If X is a push on the stack, do the push now and replace
     X with a reference to the stack pointer.  */
  if (push_operand (x, mode))
    x = emit_move_resolve_push (mode, x);

  /* If we are in reload, see if either operand is a MEM whose address
     is scheduled for replacement.  */
  if (reload_in_progress && MEM_P (x)
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
    x = replace_equiv_address_nv (x, inner);
  if (reload_in_progress && MEM_P (y)
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
    y = replace_equiv_address_nv (y, inner);

  start_sequence ();

  need_clobber = false;
  for (i = 0;
       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
       i++)
    {
      rtx xpart = operand_subword (x, i, 1, mode);
      rtx ypart;

      /* Do not generate code for a move if it would come entirely
         from the undefined bits of a paradoxical subreg.  */
      if (undefined_operand_subword_p (y, i))
        continue;

      ypart = operand_subword (y, i, 1, mode);

      /* If we can't get a part of Y, put Y into memory if it is a
         constant.  Otherwise, force it into a register.  Then we must
         be able to get a part of Y.  */
      if (ypart == 0 && CONSTANT_P (y))
        {
          y = use_anchored_address (force_const_mem (mode, y));
          ypart = operand_subword (y, i, 1, mode);
        }
      else if (ypart == 0)
        ypart = operand_subword_force (y, i, mode);

      gcc_assert (xpart && ypart);

      need_clobber |= (GET_CODE (xpart) == SUBREG);

      last_insn = emit_move_insn (xpart, ypart);
    }

  seq = get_insns ();
  end_sequence ();

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.
     We never want to emit such a clobber after reload.  */
  if (x != y
      && ! (reload_in_progress || reload_completed)
      && need_clobber != 0)
    emit_clobber (x);

  emit_insn (seq);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx_insn *
emit_move_insn_1 (rtx x, rtx y)
{
  machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = optab_handler (mov_optab, mode);
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
      || ALL_FIXED_POINT_MODE_P (mode))
    {
      rtx_insn *result = emit_move_via_integer (mode, x, y, true);

      /* If we can't find an integer mode, use multi words.  */
      if (result)
        return result;
      else
        return emit_move_multi_word (mode, x, y);
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);

      if (ret)
        {
          if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
            return ret;
        }
    }

  return emit_move_multi_word (mode, x, y);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx_insn *
emit_move_insn (rtx x, rtx y)
{
  machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx_insn *last_insn;
  rtx set;

  gcc_assert (mode != BLKmode
              && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      y_cst = y;

      if (!targetm.legitimate_constant_p (mode, y))
        {
          y = force_const_mem (mode, y);

          /* If the target's cannot_force_const_mem prevented the spill,
             assume that the target's move expanders will also take care
             of the non-legitimate constant.  */
          if (!y)
            y = y_cst;
          else
            y = use_anchored_address (y);
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
                                         MEM_ADDR_SPACE (x))
          && ! push_operand (x, GET_MODE (x))))
    x = validize_mem (x);

  if (MEM_P (y)
      && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
                                        MEM_ADDR_SPACE (y)))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));

  return last_insn;
}
/* Generate the body of an instruction to copy Y into X.
   It may be a list of insns, if one insn isn't enough.  */

rtx_insn *
gen_move_insn (rtx x, rtx y)
{
  rtx_insn *seq;

  start_sequence ();
  emit_move_insn_1 (x, y);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx_insn *
compress_float_constant (rtx x, rtx y)
{
  machine_mode dstmode = GET_MODE (x);
  machine_mode orig_srcmode = GET_MODE (y);
  machine_mode srcmode;
  const REAL_VALUE_TYPE *r;
  int oldcost, newcost;
  bool speed = optimize_insn_for_speed_p ();

  r = CONST_DOUBLE_REAL_VALUE (y);

  if (targetm.legitimate_constant_p (dstmode, y))
    oldcost = set_src_cost (y, orig_srcmode, speed);
  else
    oldcost = set_src_cost (force_const_mem (dstmode, y), dstmode, speed);

  FOR_EACH_MODE_UNTIL (srcmode, orig_srcmode)
    {
      enum insn_code ic;
      rtx trunc_y;
      rtx_insn *last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
        continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, r))
        continue;

      trunc_y = const_double_from_real_value (*r, srcmode);

      if (targetm.legitimate_constant_p (srcmode, trunc_y))
        {
          /* Skip if the target needs extra instructions to perform
             the extension.  */
          if (!insn_operand_matches (ic, 1, trunc_y))
            continue;
          /* This is valid, but may not be cheaper than the original.  */
          newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
                                  dstmode, speed);
          if (oldcost < newcost)
            continue;
        }
      else if (float_extend_from_mem[dstmode][srcmode])
        {
          trunc_y = force_const_mem (srcmode, trunc_y);
          /* This is valid, but may not be cheaper than the original.  */
          newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
                                  dstmode, speed);
          if (oldcost < newcost)
            continue;
          trunc_y = validize_mem (trunc_y);
        }
      else
        continue;

      /* For CSE's benefit, force the compressed constant pool entry
         into a new pseudo.  This constant may be used in different modes,
         and if not, combine will put things back together for us.  */
      trunc_y = force_reg (srcmode, trunc_y);

      /* If x is a hard register, perform the extension into a pseudo,
         so that e.g. stack realignment code is aware of it.  */
      rtx target = x;
      if (REG_P (x) && HARD_REGISTER_P (x))
        target = gen_reg_rtx (dstmode);

      emit_unop_insn (ic, target, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (target))
        set_unique_reg_note (last_insn, REG_EQUAL, y);

      if (target != x)
        return emit_move_insn (x, target);
      return last_insn;
    }

  return NULL;
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (Pmode, size, extra));
  else if (REG_P (size) && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp,
                             gen_int_mode (extra, Pmode),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

  if (STACK_GROWS_DOWNWARD)
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
        temp = plus_constant (Pmode, temp, extra);
    }
  else
    {
      if (CONST_INT_P (size))
        temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
                              -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (Pmode, size,
                                                               extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
/* A utility routine that returns the base of an auto-inc memory, or NULL.  */

static rtx
mem_autoinc_base (rtx mem)
{
  if (MEM_P (mem))
    {
      rtx addr = XEXP (mem, 0);
      if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
        return XEXP (addr, 0);
    }
  return NULL;
}
/* A utility routine used here, in reload, and in try_split.  The insns
   after PREV up to and including LAST are known to adjust the stack,
   with a final value of END_ARGS_SIZE.  Iterate backward from LAST
   placing notes as appropriate.  PREV may be NULL, indicating the
   entire insn sequence prior to LAST should be scanned.

   The set of allowed stack pointer modifications is small:
     (1) One or more auto-inc style memory references (aka pushes),
     (2) One or more addition/subtraction with the SP as destination,
     (3) A single move insn with the SP as destination,
     (4) A call_pop insn,
     (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.

   Insns in the sequence that do not modify the SP are ignored,
   except for noreturn calls.

   The return value is the amount of adjustment that can be trivially
   verified, via immediate operand or auto-inc.  If the adjustment
   cannot be trivially extracted, the return value is INT_MIN.  */

HOST_WIDE_INT
find_args_size_adjust (rtx_insn *insn)
{
  rtx dest, set, pat;
  int i;

  pat = PATTERN (insn);
  set = NULL;

  /* Look for a call_pop pattern.  */
  if (CALL_P (insn))
    {
      /* We have to allow non-call_pop patterns for the case
         of emit_single_push_insn of a TLS address.  */
      if (GET_CODE (pat) != PARALLEL)
        return 0;

      /* All call_pop have a stack pointer adjust in the parallel.
         The call itself is always first, and the stack adjust is
         usually last, so search from the end.  */
      for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
        {
          set = XVECEXP (pat, 0, i);
          if (GET_CODE (set) != SET)
            continue;
          dest = SET_DEST (set);
          if (dest == stack_pointer_rtx)
            break;
        }
      /* We'd better have found the stack pointer adjust.  */
      if (i == 0)
        return 0;
      /* Fall through to process the extracted SET and DEST
         as if it was a standalone insn.  */
    }
  else if (GET_CODE (pat) == SET)
    set = pat;
  else if ((set = single_set (insn)) != NULL)
    ;
  else if (GET_CODE (pat) == PARALLEL)
    {
      /* ??? Some older ports use a parallel with a stack adjust
         and a store for a PUSH_ROUNDING pattern, rather than a
         PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
      /* ??? See h8300 and m68k, pushqi1.  */
      for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
        {
          set = XVECEXP (pat, 0, i);
          if (GET_CODE (set) != SET)
            continue;
          dest = SET_DEST (set);
          if (dest == stack_pointer_rtx)
            break;

          /* We do not expect an auto-inc of the sp in the parallel.  */
          gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
          gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
                               != stack_pointer_rtx);
        }
      if (i < 0)
        return 0;
    }
  else
    return 0;

  dest = SET_DEST (set);

  /* Look for direct modifications of the stack pointer.  */
  if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
    {
      /* Look for a trivial adjustment, otherwise assume nothing.  */
      /* Note that the SPU restore_stack_block pattern refers to
         the stack pointer in V4SImode.  Consider that non-trivial.  */
      if (SCALAR_INT_MODE_P (GET_MODE (dest))
          && GET_CODE (SET_SRC (set)) == PLUS
          && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
          && CONST_INT_P (XEXP (SET_SRC (set), 1)))
        return INTVAL (XEXP (SET_SRC (set), 1));
      /* ??? Reload can generate no-op moves, which will be cleaned
         up later.  Recognize it and continue searching.  */
      else if (rtx_equal_p (dest, SET_SRC (set)))
        return 0;
      else
        return HOST_WIDE_INT_MIN;
    }
  else
    {
      rtx mem, addr;

      /* Otherwise only think about autoinc patterns.  */
      if (mem_autoinc_base (dest) == stack_pointer_rtx)
        {
          mem = dest;
          gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
                               != stack_pointer_rtx);
        }
      else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
        mem = SET_SRC (set);
      else
        return 0;

      addr = XEXP (mem, 0);
      switch (GET_CODE (addr))
        {
        case PRE_INC:
        case POST_INC:
          return GET_MODE_SIZE (GET_MODE (mem));
        case PRE_DEC:
        case POST_DEC:
          return -GET_MODE_SIZE (GET_MODE (mem));
        case PRE_MODIFY:
        case POST_MODIFY:
          addr = XEXP (addr, 1);
          gcc_assert (GET_CODE (addr) == PLUS);
          gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
          gcc_assert (CONST_INT_P (XEXP (addr, 1)));
          return INTVAL (XEXP (addr, 1));
        default:
          gcc_unreachable ();
        }
    }
}
/* A utility routine used here, in reload, and in try_split.  The insns
   after PREV up to and including LAST are known to adjust the stack,
   with a final value of END_ARGS_SIZE.  Iterate backward from LAST,
   placing REG_ARGS_SIZE notes as appropriate.  PREV may be NULL,
   indicating that the entire insn sequence prior to LAST should be
   scanned.

   The return value is the args size at PREV, or INT_MIN if any of the
   adjustments could not be determined.  */

int
fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
{
  int args_size = end_args_size;
  bool saw_unknown = false;
  rtx_insn *insn;

  for (insn = last; insn != prev; insn = PREV_INSN (insn))
    {
      HOST_WIDE_INT this_delta;

      if (!NONDEBUG_INSN_P (insn))
	continue;

      this_delta = find_args_size_adjust (insn);
      if (this_delta == 0)
	{
	  if (!CALL_P (insn)
	      || ACCUMULATE_OUTGOING_ARGS
	      || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
	    continue;
	}

      gcc_assert (!saw_unknown);
      if (this_delta == HOST_WIDE_INT_MIN)
	saw_unknown = true;

      add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
      if (STACK_GROWS_DOWNWARD)
	this_delta = -(unsigned HOST_WIDE_INT) this_delta;

      args_size -= this_delta;
    }

  return saw_unknown ? INT_MIN : args_size;
}
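
/* For example, scanning backward over two SImode pushes with
   END_ARGS_SIZE == 8 attaches REG_ARGS_SIZE (8) to the second push
   and REG_ARGS_SIZE (4) to the first: on a STACK_GROWS_DOWNWARD
   target each push adjusts sp by -4, which increases the outstanding
   args size by 4.  */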
#ifdef PUSH_ROUNDING
/* Emit single push insn.  */

static void
emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is a push pattern, use it.  Otherwise try the old way of
     throwing a MEM representing the push operation to the move expander.  */
  icode = optab_handler (push_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];

      create_input_operand (&ops[0], x, mode);
      if (maybe_expand_insn (icode, 1, ops))
	return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
		      expand_binop (Pmode,
				    STACK_GROWS_DOWNWARD ? sub_optab
				    : add_optab,
				    stack_pointer_rtx,
				    gen_int_mode (rounded_size, Pmode),
				    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
      if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
	/* We have already decremented the stack pointer, so get the
	   previous value.  */
	offset += (HOST_WIDE_INT) rounded_size;

      if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
	/* We have already incremented the stack pointer, so get the
	   previous value.  */
	offset -= (HOST_WIDE_INT) rounded_size;

      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				gen_int_mode (offset, Pmode));
    }
  else
    {
      if (STACK_GROWS_DOWNWARD)
	/* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
	dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				  gen_int_mode (-(HOST_WIDE_INT) rounded_size,
						Pmode));
      else
	/* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
	dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				  gen_int_mode (rounded_size, Pmode));

      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (cfun->tail_call_marked)
	/* Function incoming arguments may overlap with sibling call
	   outgoing arguments and we cannot allow reordering of reads
	   from function arguments with stores to outgoing arguments
	   of sibling calls.  */
	set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
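
/* Illustration of the three DEST_ADDR shapes chosen above, assuming a
   STACK_GROWS_DOWNWARD target with STACK_PUSH_CODE == PRE_DEC and
   PUSH_ROUNDING rounding sizes up to 4 bytes:

     exact-size SImode push:  (mem:SI (pre_dec:P (reg sp)))
     downward-padded HImode:  sp -= 4 first, then store to
			      (mem:HI (plus:P (reg sp) (const_int 2)))
     upward-padded HImode:    (mem:HI (pre_modify:P (reg sp)
				(plus:P (reg sp) (const_int -4))))  */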
/* Emit and annotate a single push insn.  */

static void
emit_single_push_insn (machine_mode mode, rtx x, tree type)
{
  int delta, old_delta = stack_pointer_delta;
  rtx_insn *prev = get_last_insn ();
  rtx_insn *last;

  emit_single_push_insn_1 (mode, x, type);

  last = get_last_insn ();

  /* Notice the common case where we emitted exactly one insn.  */
  if (PREV_INSN (last) == prev)
    {
      add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
      return;
    }

  delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
  gcc_assert (delta == INT_MIN || delta == old_delta);
}
#endif
/* If reading SIZE bytes from X will end up reading from
   Y return the number of bytes that overlap.  Return -1
   if there is no overlap or -2 if we can't determine
   (for example when X and Y have different base registers).  */

static int
memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
{
  rtx tmp = plus_constant (Pmode, x, size);
  rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);

  if (!CONST_INT_P (sub))
    return -2;

  HOST_WIDE_INT val = INTVAL (sub);

  return IN_RANGE (val, 1, size) ? val : -1;
}
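
/* Worked example: with X == sp, Y == sp + 8 and SIZE == 16, TMP is
   sp + 16 and SUB simplifies to (const_int 8); 8 lies in [1, 16], so
   8 overlapping bytes are reported.  With Y == sp + 16, SUB is 0 and
   -1 (no overlap) is returned.  If Y uses a different base register,
   SUB does not fold to a CONST_INT and -2 (unknown) is returned.  */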
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.
   Return true if successful.  May return false if asked to push a
   partial argument during a sibcall optimization (as specified by
   SIBCALL_P) and the incoming and outgoing pointers cannot be shown
   not to overlap.

   ALIGN (in bits) is the maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
bool
emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
		unsigned int align, int partial, rtx reg, int extra,
		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
		rtx alignment_pad, bool sibcall_p)
{
  rtx xinner;
  enum direction stack_direction = STACK_GROWS_DOWNWARD ? downward : upward;

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x;

  int nregs = partial / UNITS_PER_WORD;
  rtx *tmp_regs = NULL;
  int overlapping = 0;

  if (mode == BLKmode
      || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used;
      int offset;
      int skip;

      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      used = partial - offset;

      if (mode != BLKmode)
	{
	  /* A value is to be stored in an insufficiently aligned
	     stack slot; copy via a suitably aligned slot if
	     necessary.  */
	  size = GEN_INT (GET_MODE_SIZE (mode));
	  if (!MEM_P (xinner))
	    {
	      temp = assign_temp (type, 1, 1);
	      emit_move_insn (temp, xinner);
	      xinner = temp;
	    }
	}

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && PUSH_ARGS
	  && CONST_INT_P (size)
	  && skip == 0
	  && MEM_ALIGN (xinner) >= align
	  && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
	      || align >= BIGGEST_ALIGNMENT
	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
		  == (align / BITS_PER_UNIT)))
	  && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
	}
      else
#endif /* PUSH_ROUNDING  */
	{
	  rtx target;

	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (CONST_INT_P (size))
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     gen_int_mode (used, GET_MODE (size)),
				     NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (CONST_INT_P (args_so_far))
	    temp = memory_address (BLKmode,
				   plus_constant (Pmode, args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (Pmode,
						  gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));

	  if (!ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If the source is referenced relative to the stack pointer,
		 copy it to another register to stabilize it.  We do not need
		 to do this if we know that we won't be changing sp.  */

	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
		temp = copy_to_reg (temp);
	    }

	  target = gen_rtx_MEM (BLKmode, temp);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (target, align);

	  /* If part should go in registers and pushing to that part would
	     overwrite some of the values that need to go into regs, load the
	     overlapping values into temporary pseudos to be moved into the
	     hard regs at the end after the stack pushing has completed.
	     We cannot load them directly into the hard regs here because
	     they can be clobbered by the block move expansions.  */

	  if (partial > 0 && reg != 0 && mode == BLKmode
	      && GET_CODE (reg) != PARALLEL)
	    {
	      overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
	      if (overlapping > 0)
		{
		  gcc_assert (overlapping % UNITS_PER_WORD == 0);
		  overlapping /= UNITS_PER_WORD;

		  tmp_regs = XALLOCAVEC (rtx, overlapping);

		  for (int i = 0; i < overlapping; i++)
		    tmp_regs[i] = gen_reg_rtx (word_mode);

		  for (int i = 0; i < overlapping; i++)
		    emit_move_insn (tmp_regs[i],
				    operand_subword_force (target, i, mode));
		}
	      else if (overlapping == -1)
		overlapping = 0;
	      /* Could not determine whether there is overlap.
		 Fail the sibcall.  */
	      else
		{
		  overlapping = 0;
		  if (sibcall_p)
		    return false;
		}
	    }
	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # bytes of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
      offset /= UNITS_PER_WORD;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
      for (i = size - 1; i >= not_stack; i--)
	if (i >= not_stack + offset)
	  if (!emit_push_insn (operand_subword_force (x, i, mode),
			       word_mode, NULL_TREE, NULL_RTX, align, 0,
			       NULL_RTX, 0, args_addr,
			       GEN_INT (args_offset + ((i - not_stack + skip)
						       * UNITS_PER_WORD)),
			       reg_parm_stack_space, alignment_pad, sibcall_p))
	    return false;
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
	emit_single_push_insn (mode, x, type);
      else
#endif
	{
	  if (CONST_INT_P (args_so_far))
	    addr
	      = memory_address (mode,
				plus_constant (Pmode, args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
						       args_so_far));
	  dest = gen_rtx_MEM (mode, addr);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (dest, align);

	  emit_move_insn (dest, x);
	}
    }

  /* Move the partial arguments into the registers and any overlapping
     values that we moved into the pseudos in tmp_regs.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x, type, -1);
      else
	{
	  gcc_assert (partial % UNITS_PER_WORD == 0);
	  move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);

	  for (int i = 0; i < overlapping; i++)
	    emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
					 + nregs - overlapping + i),
			    tmp_regs[i]);
	}
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);

  return true;
}
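
/* Typical uses of the above: a BLKmode struct argument goes through
   the block-copy branch (move_by_pieces or emit_block_move), a wide
   scalar split between registers and the stack goes through the
   "scalar partly in registers" branch word by word, and a plain
   word-sized argument on a PUSH_ARGS target reduces to a single
   emit_single_push_insn.  */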
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
	  || x == 0
	  /* Only registers can be subtargets.  */
	  || !REG_P (x)
	  /* Don't use hard regs to avoid extending their life.  */
	  || REGNO (x) < FIRST_PSEUDO_REGISTER
	  ? 0 : x);
}
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */

static bool
optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
				 unsigned HOST_WIDE_INT bitpos,
				 unsigned HOST_WIDE_INT bitregion_start,
				 unsigned HOST_WIDE_INT bitregion_end,
				 machine_mode mode1, rtx str_rtx,
				 tree to, tree src, bool reverse)
{
  machine_mode str_mode = GET_MODE (str_rtx);
  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
  tree op0, op1;
  rtx value, result;
  optab binop;
  gimple *srcstmt;
  enum tree_code code;

  if (mode1 != VOIDmode
      || bitsize >= BITS_PER_WORD
      || str_bitsize > BITS_PER_WORD
      || TREE_SIDE_EFFECTS (to)
      || TREE_THIS_VOLATILE (to))
    return false;

  STRIP_NOPS (src);
  if (TREE_CODE (src) != SSA_NAME)
    return false;
  if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
    return false;

  srcstmt = get_gimple_for_ssa_name (src);
  if (!srcstmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
    return false;

  code = gimple_assign_rhs_code (srcstmt);

  op0 = gimple_assign_rhs1 (srcstmt);

  /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
     to find its initialization.  Hopefully the initialization will
     be from a bitfield load.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple *op0stmt = get_gimple_for_ssa_name (op0);

      /* We want to eventually have OP0 be the same as TO, which
	 should be a bitfield.  */
      if (!op0stmt
	  || !is_gimple_assign (op0stmt)
	  || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
	return false;
      op0 = gimple_assign_rhs1 (op0stmt);
    }

  op1 = gimple_assign_rhs2 (srcstmt);

  if (!operand_equal_p (to, op0, 0))
    return false;

  if (MEM_P (str_rtx))
    {
      unsigned HOST_WIDE_INT offset1;

      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
	str_mode = word_mode;
      str_mode = get_best_mode (bitsize, bitpos,
				bitregion_start, bitregion_end,
				MEM_ALIGN (str_rtx), str_mode, 0);
      if (str_mode == VOIDmode)
	return false;
      str_bitsize = GET_MODE_BITSIZE (str_mode);

      offset1 = bitpos;
      bitpos %= str_bitsize;
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
    }
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
    return false;
  else
    gcc_assert (!reverse);

  /* If the bit field covers the whole REG/MEM, store_field
     will likely generate better code.  */
  if (bitsize >= str_bitsize)
    return false;

  /* We can't handle fields split across multiple entities.  */
  if (bitpos + bitsize > str_bitsize)
    return false;

  if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    bitpos = str_bitsize - bitpos - bitsize;

  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* For now, just optimize the case of the topmost bitfield
	 where we don't need to do any masking and also
	 1 bit bitfields where xor can be used.
	 We might win by one instruction for the other bitfields
	 too if insv/extv instructions aren't used, so that
	 can be added later.  */
      if ((reverse || bitpos + bitsize != str_bitsize)
	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
	break;

      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      if (bitsize == 1 && (reverse || bitpos + bitsize != str_bitsize))
	{
	  value = expand_and (str_mode, value, const1_rtx, NULL);
	  binop = xor_optab;
	}
      else
	binop = code == PLUS_EXPR ? add_optab : sub_optab;

      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
      if (reverse)
	value = flip_storage_order (str_mode, value);
      result = expand_binop (str_mode, binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
	break;
      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
      if (bitpos + bitsize != str_bitsize)
	{
	  rtx mask = gen_int_mode ((HOST_WIDE_INT_1U << bitsize) - 1,
				   str_mode);
	  value = expand_and (str_mode, value, mask, NULL_RTX);
	}
      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
      if (reverse)
	value = flip_storage_order (str_mode, value);
      result = expand_binop (str_mode, binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    default:
      break;
    }

  return false;
}
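
/* Example of what this catches: given
   "struct S { unsigned lo : 4; unsigned hi : 28; } s;" on a
   little-endian 32-bit target, "s.hi += 1" updates the topmost
   bitfield, so the addition is done directly on the containing word
   after shifting the addend left by 4; carries simply fall off the
   top of the word.  A 1-bit field toggled with a constant is handled
   with xor, and "s.f |= mask" with a constant mask becomes a
   full-word IOR after masking and shifting.  Everything else falls
   back to the generic store_field path.  */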
/* In the C++ memory model, consecutive bit fields in a structure are
   considered one memory location.

   Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
   returns the bit range of consecutive bits in which this COMPONENT_REF
   belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
   and *OFFSET may be adjusted in the process.

   If the access does not need to be restricted, 0 is returned in both
   *BITSTART and *BITEND.  */

void
get_bit_range (unsigned HOST_WIDE_INT *bitstart,
	       unsigned HOST_WIDE_INT *bitend,
	       tree exp,
	       HOST_WIDE_INT *bitpos,
	       tree *offset)
{
  HOST_WIDE_INT bitoffset;
  tree field, repr;

  gcc_assert (TREE_CODE (exp) == COMPONENT_REF);

  field = TREE_OPERAND (exp, 1);
  repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
  /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
     need to limit the range we can access.  */
  if (!repr)
    {
      *bitstart = *bitend = 0;
      return;
    }

  /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
     part of a larger bit field, then the representative does not serve any
     useful purpose.  This can occur in Ada.  */
  if (handled_component_p (TREE_OPERAND (exp, 0)))
    {
      machine_mode rmode;
      HOST_WIDE_INT rbitsize, rbitpos;
      tree roffset;
      int unsignedp, reversep, volatilep = 0;
      get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
			   &roffset, &rmode, &unsignedp, &reversep,
			   &volatilep);
      if ((rbitpos % BITS_PER_UNIT) != 0)
	{
	  *bitstart = *bitend = 0;
	  return;
	}
    }

  /* Compute the adjustment to bitpos from the offset of the field
     relative to the representative.  DECL_FIELD_OFFSET of field and
     repr are the same by construction if they are not constants,
     see finish_bitfield_layout.  */
  if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
      && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
    bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
		 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
  else
    bitoffset = 0;
  bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
		- tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));

  /* If the adjustment is larger than bitpos, we would have a negative bit
     position for the lower bound and this may wreak havoc later.  Adjust
     offset and bitpos to make the lower bound non-negative in that case.  */
  if (bitoffset > *bitpos)
    {
      HOST_WIDE_INT adjust = bitoffset - *bitpos;
      gcc_assert ((adjust % BITS_PER_UNIT) == 0);

      *bitpos += adjust;
      if (*offset == NULL_TREE)
	*offset = size_int (-adjust / BITS_PER_UNIT);
      else
	*offset
	  = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
      *bitstart = 0;
    }
  else
    *bitstart = *bitpos - bitoffset;

  *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
}
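
/* Example: in "struct S { char c; int f1 : 3; int f2 : 9; }" the
   representative covers the f1/f2 group, so a store to s.f1 gets a
   bit range spanning f2 as well, and the store may not touch bits
   outside [*BITSTART, *BITEND] -- in particular it must not perform a
   wider read-modify-write that would race with a concurrent store to
   the separate memory location s.c.  */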
/* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
   in memory and has non-BLKmode.  DECL_RTL must not be a MEM; if
   DECL_RTL was not set yet, return NORTL.  */

static bool
addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
{
  if (TREE_CODE (addr) != ADDR_EXPR)
    return false;

  tree base = TREE_OPERAND (addr, 0);

  if (!DECL_P (base)
      || TREE_ADDRESSABLE (base)
      || DECL_MODE (base) == BLKmode)
    return false;

  if (!DECL_RTL_SET_P (base))
    return nortl;

  return (!MEM_P (DECL_RTL (base)));
}

/* Returns true if the MEM_REF REF refers to an object that does not
   reside in memory and has non-BLKmode.  */

static bool
mem_ref_refers_to_non_mem_p (tree ref)
{
  tree base = TREE_OPERAND (ref, 0);
  return addr_expr_of_non_mem_decl_p_1 (base, false);
}
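
/* E.g. gimple lowering may produce "MEM[(int *)&v] = x" even when the
   non-addressable local V ends up living in a pseudo register; these
   predicates let expand_assignment detect that case and avoid
   treating the access as a real memory store.  */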
/* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
   is true, try generating a nontemporal store.  */

void
expand_assignment (tree to, tree from, bool nontemporal)
{
  rtx to_rtx = 0;
  rtx result;
  machine_mode mode;
  unsigned int align;
  enum insn_code icode;

  /* Don't crash if the lhs of the assignment was erroneous.  */
  if (TREE_CODE (to) == ERROR_MARK)
    {
      expand_normal (from);
      return;
    }

  /* Optimize away no-op moves without side-effects.  */
  if (operand_equal_p (to, from, 0))
    return;

  /* Handle misaligned stores.  */
  mode = TYPE_MODE (TREE_TYPE (to));
  if ((TREE_CODE (to) == MEM_REF
       || TREE_CODE (to) == TARGET_MEM_REF)
      && mode != BLKmode
      && !mem_ref_refers_to_non_mem_p (to)
      && ((align = get_object_alignment (to))
	  < GET_MODE_ALIGNMENT (mode))
      && (((icode = optab_handler (movmisalign_optab, mode))
	   != CODE_FOR_nothing)
	  || SLOW_UNALIGNED_ACCESS (mode, align)))
    {
      rtx reg, mem;

      reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      reg = force_not_mem (reg);
      mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (TREE_CODE (to) == MEM_REF && REF_REVERSE_STORAGE_ORDER (to))
	reg = flip_storage_order (mode, reg);

      if (icode != CODE_FOR_nothing)
	{
	  struct expand_operand ops[2];

	  create_fixed_operand (&ops[0], mem);
	  create_input_operand (&ops[1], reg, mode);
	  /* The movmisalign<mode> pattern cannot fail, else the assignment
	     would silently be omitted.  */
	  expand_insn (icode, 2, ops);
	}
      else
	store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg,
			 false);
      return;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  Same for (partially) storing into a non-memory object.  */
  if (handled_component_p (to)
      || (TREE_CODE (to) == MEM_REF
	  && (REF_REVERSE_STORAGE_ORDER (to)
	      || mem_ref_refers_to_non_mem_p (to)))
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      unsigned HOST_WIDE_INT bitregion_start = 0;
      unsigned HOST_WIDE_INT bitregion_end = 0;
      tree offset;
      int unsignedp, reversep, volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &reversep, &volatilep);

      /* Make sure bitpos is not negative, it can wreak havoc later.  */
      if (bitpos < 0)
	{
	  gcc_assert (offset == NULL_TREE);
	  offset = size_int (bitpos >> LOG2_BITS_PER_UNIT);
	  bitpos &= BITS_PER_UNIT - 1;
	}

      if (TREE_CODE (to) == COMPONENT_REF
	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
	get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
      /* The C++ memory model naturally applies to byte-aligned fields.
	 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
	 BITSIZE are not byte-aligned, there is no need to limit the range
	 we can access.  This can occur with packed structures in Ada.  */
      else if (bitsize > 0
	       && bitsize % BITS_PER_UNIT == 0
	       && bitpos % BITS_PER_UNIT == 0)
	{
	  bitregion_start = bitpos;
	  bitregion_end = bitpos + bitsize - 1;
	}

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* If the field has a mode, we want to access it in the
	 field's mode, not the computed mode.
	 If a MEM has VOIDmode (external with incomplete type),
	 use BLKmode for it instead.  */
      if (MEM_P (to_rtx))
	{
	  if (mode1 != VOIDmode)
	    to_rtx = adjust_address (to_rtx, mode1, 0);
	  else if (GET_MODE (to_rtx) == VOIDmode)
	    to_rtx = adjust_address (to_rtx, BLKmode, 0);
	}

      if (offset != 0)
	{
	  machine_mode address_mode;
	  rtx offset_rtx;

	  if (!MEM_P (to_rtx))
	    {
	      /* We can get constant negative offsets into arrays with broken
		 user code.  Translate this to a trap instead of ICEing.  */
	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
	      expand_builtin_trap ();
	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
	    }

	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
	  address_mode = get_address_mode (to_rtx);
	  if (GET_MODE (offset_rtx) != address_mode)
	    {
	      /* We cannot be sure that the RTL in offset_rtx is valid outside
		 of a memory address context, so force it into a register
		 before attempting to convert it to the desired mode.  */
	      offset_rtx = force_operand (offset_rtx, NULL_RTX);
	      offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
	    }

	  /* If we have an expression in OFFSET_RTX and a non-zero
	     byte offset in BITPOS, adding the byte offset before the
	     OFFSET_RTX results in better intermediate code, which makes
	     later rtl optimization passes perform better.

	     We prefer intermediate code like this:

	     r124:DI=r123:DI+0x18
	     [r124:DI]=r121:DI

	     ... instead of ...

	     r124:DI=r123:DI+0x10
	     [r124:DI+0x8]=r121:DI

	     This is only done for aligned data values, as these can
	     be expected to result in single move instructions.  */
	  if (mode1 != VOIDmode
	      && bitpos != 0
	      && bitsize > 0
	      && (bitpos % bitsize) == 0
	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
	      && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
	    {
	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
	      bitregion_start = 0;
	      if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
		bitregion_end -= bitpos;
	      bitpos = 0;
	    }

	  to_rtx = offset_address (to_rtx, offset_rtx,
				   highest_pow2_factor_for_target (to,
								   offset));
	}

      /* No action is needed if the target is not a memory and the field
	 lies completely outside that target.  This can occur if the source
	 code contains an out-of-bounds access to a small array.  */
      if (!MEM_P (to_rtx)
	  && GET_MODE (to_rtx) != BLKmode
	  && (unsigned HOST_WIDE_INT) bitpos
	     >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
	{
	  expand_normal (from);
	  result = NULL;
	}
      /* Handle expand_expr of a complex value returning a CONCAT.  */
      else if (GET_CODE (to_rtx) == CONCAT)
	{
	  unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
	  if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
	      && bitpos == 0
	      && bitsize == mode_bitsize)
	    result = store_expr (from, to_rtx, false, nontemporal, reversep);
	  else if (bitsize == mode_bitsize / 2
		   && (bitpos == 0 || bitpos == mode_bitsize / 2))
	    result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
				 nontemporal, reversep);
	  else if (bitpos + bitsize <= mode_bitsize / 2)
	    result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
				  bitregion_start, bitregion_end,
				  mode1, from, get_alias_set (to),
				  nontemporal, reversep);
	  else if (bitpos >= mode_bitsize / 2)
	    result = store_field (XEXP (to_rtx, 1), bitsize,
				  bitpos - mode_bitsize / 2,
				  bitregion_start, bitregion_end,
				  mode1, from, get_alias_set (to),
				  nontemporal, reversep);
	  else if (bitpos == 0 && bitsize == mode_bitsize)
	    {
	      rtx from_rtx;
	      result = expand_normal (from);
	      from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
					      TYPE_MODE (TREE_TYPE (from)), 0);
	      emit_move_insn (XEXP (to_rtx, 0),
			      read_complex_part (from_rtx, false));
	      emit_move_insn (XEXP (to_rtx, 1),
			      read_complex_part (from_rtx, true));
	    }
	  else
	    {
	      rtx temp = assign_stack_temp (GET_MODE (to_rtx),
					    GET_MODE_SIZE (GET_MODE (to_rtx)));
	      write_complex_part (temp, XEXP (to_rtx, 0), false);
	      write_complex_part (temp, XEXP (to_rtx, 1), true);
	      result = store_field (temp, bitsize, bitpos,
				    bitregion_start, bitregion_end,
				    mode1, from, get_alias_set (to),
				    nontemporal, reversep);
	      emit_move_insn (XEXP (to_rtx, 0),
			      read_complex_part (temp, false));
	      emit_move_insn (XEXP (to_rtx, 1),
			      read_complex_part (temp, true));
	    }
	}
      else
	{
	  if (MEM_P (to_rtx))
	    {
	      /* If the field is at offset zero, we could have been given the
		 DECL_RTX of the parent struct.  Don't munge it.  */
	      to_rtx = shallow_copy_rtx (to_rtx);
	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
	      if (volatilep)
		MEM_VOLATILE_P (to_rtx) = 1;
	    }

	  if (optimize_bitfield_assignment_op (bitsize, bitpos,
					       bitregion_start, bitregion_end,
					       mode1, to_rtx, to, from,
					       reversep))
	    result = NULL;
	  else
	    result = store_field (to_rtx, bitsize, bitpos,
				  bitregion_start, bitregion_end,
				  mode1, from, get_alias_set (to),
				  nontemporal, reversep);
	}

      if (result)
	preserve_temp_slots (result);
      pop_temp_slots ();
      return;
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  The same is true for SSA names.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && COMPLETE_TYPE_P (TREE_TYPE (from))
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! (((VAR_P (to)
	      || TREE_CODE (to) == PARM_DECL
	      || TREE_CODE (to) == RESULT_DECL)
	     && REG_P (DECL_RTL (to)))
	    || TREE_CODE (to) == SSA_NAME))
    {
      rtx value;
      rtx bounds;

      push_temp_slots ();
      value = expand_normal (from);

      /* Split value and bounds to store them separately.  */
      chkp_split_slot (value, &value, &bounds);

      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	{
	  if (GET_CODE (value) == PARALLEL)
	    emit_group_move (to_rtx, value);
	  else
	    emit_group_load (to_rtx, value, TREE_TYPE (from),
			     int_size_in_bytes (TREE_TYPE (from)));
	}
      else if (GET_CODE (value) == PARALLEL)
	emit_group_store (to_rtx, value, TREE_TYPE (from),
			  int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
	{
	  /* Handle calls that return BLKmode values in registers.  */
	  if (REG_P (value))
	    copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
	  else
	    emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
	}
      else
	{
	  if (POINTER_TYPE_P (TREE_TYPE (to)))
	    value = convert_memory_address_addr_space
	      (GET_MODE (to_rtx), value,
	       TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));

	  emit_move_insn (to_rtx, value);
	}

      /* Store bounds if required.  */
      if (bounds
	  && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
	{
	  gcc_assert (MEM_P (to_rtx));
	  chkp_emit_bounds_store (bounds, value, to_rtx);
	}

      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.  */
  to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();

      /* If the source is itself a return value, it still is in a pseudo at
	 this point so we can move it back to the return register directly.  */
      if (REG_P (to_rtx)
	  && TYPE_MODE (TREE_TYPE (from)) == BLKmode
	  && TREE_CODE (from) != CALL_EXPR)
	temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
      else
	temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	{
	  if (GET_CODE (temp) == PARALLEL)
	    emit_group_move (to_rtx, temp);
	  else
	    emit_group_load (to_rtx, temp, TREE_TYPE (from),
			     int_size_in_bytes (TREE_TYPE (from)));
	}
      else if (temp)
	emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL
      && TREE_CODE (from) == INDIRECT_REF
      && ADDR_SPACE_GENERIC_P
	   (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
      && refs_may_alias_p (to, from)
      && cfun->returns_struct
      && !cfun->returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_normal (from);

      emit_block_move_via_libcall (XEXP (to_rtx, 0), XEXP (from_rtx, 0), size);

      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, false, to);
  preserve_temp_slots (result);
  pop_temp_slots ();
}
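
/* Usage sketch: for "__imag__ c = x" where C is a complex value held
   in registers, TO_RTX is a CONCAT and the store lands in
   XEXP (to_rtx, 1) via the CONCAT branch above; for "p->f = x" with a
   packed, under-aligned P, the misaligned-store path at the top is
   taken when the target provides a movmisalign pattern or unaligned
   accesses are slow.  */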
/* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
   succeeded, false otherwise.  */

bool
emit_storent_insn (rtx to, rtx from)
{
  struct expand_operand ops[2];
  machine_mode mode = GET_MODE (to);
  enum insn_code code = optab_handler (storent_optab, mode);

  if (code == CODE_FOR_nothing)
    return false;

  create_fixed_operand (&ops[0], to);
  create_input_operand (&ops[1], from, mode);
  return maybe_expand_insn (code, 2, ops);
}
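
/* The storent optab maps to the target's nontemporal-store pattern
   (e.g. the movnti family on x86); on targets that have none, callers
   such as store_expr_with_bounds below simply fall back to an
   ordinary move.  */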
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.

   If NONTEMPORAL is true, try using a nontemporal store instruction.

   If REVERSE is true, the store is to be done in reverse order.

   If BTARGET is not NULL then computed bounds of EXP are
   associated with BTARGET.  */

rtx
store_expr_with_bounds (tree exp, rtx target, int call_param_p,
			bool nontemporal, bool reverse, tree btarget)
{
  rtx temp;
  rtx alt_rtl = NULL_RTX;
  location_t loc = curr_insn_location ();

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
	 branch and an rvalue in the other.  Here, we resolve attempts to
	 store the throw expression's nonexistent result.  */
      gcc_assert (!call_param_p);
      expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
				     call_param_p, nontemporal, reverse,
				     btarget);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1,
		 profile_probability::uninitialized ());
      store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
			      nontemporal, reverse, btarget);
      emit_jump_insn (targetm.gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
			      nontemporal, reverse, btarget);
      emit_label (lab2);
      OK_DEFER_POP;

      return NULL_RTX;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* We can do the conversion inside EXP, which will often result
	 in some optimizations.  Do the conversion in two steps: first
	 change the signedness, if needed, then the extend.  But don't
	 do this if the type of EXP is a subtype of something else
	 since then the conversion might involve more than just
	 converting modes.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0
	  && GET_MODE_PRECISION (GET_MODE (target))
	     == TYPE_PRECISION (TREE_TYPE (exp)))
	{
	  if (!SUBREG_CHECK_PROMOTED_SIGN (target,
					   TYPE_UNSIGNED (TREE_TYPE (exp))))
	    {
	      /* Some types, e.g. Fortran's logical*4, won't have a signed
		 version, so use the mode instead.  */
	      tree ntype
		= (signed_or_unsigned_type_for
		   (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
	      if (ntype == NULL)
		ntype = lang_hooks.types.type_for_mode
		  (TYPE_MODE (TREE_TYPE (exp)),
		   SUBREG_PROMOTED_SIGN (target));

	      exp = fold_convert_loc (loc, ntype, exp);
	    }

	  exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
				  (GET_MODE (SUBREG_REG (target)),
				   SUBREG_PROMOTED_SIGN (target)),
				  exp);

	  inner_target = SUBREG_REG (target);
	}

      temp = expand_expr (exp, inner_target, VOIDmode,
			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* Handle bounds returned by call.  */
      if (TREE_CODE (exp) == CALL_EXPR)
	{
	  rtx bounds;
	  chkp_split_slot (temp, &temp, &bounds);
	  if (bounds && btarget)
	    {
	      gcc_assert (TREE_CODE (btarget) == SSA_NAME);
	      rtx tmp = targetm.calls.load_returned_bounds (bounds);
	      chkp_set_rtl_bounds (btarget, tmp);
	    }
	}

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	{
	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
				temp, SUBREG_PROMOTED_SIGN (target));
	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
				GET_MODE (target), temp,
				SUBREG_PROMOTED_SIGN (target));
	}

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_SIGN (target));

      return NULL_RTX;
    }
  else if ((TREE_CODE (exp) == STRING_CST
	    || (TREE_CODE (exp) == MEM_REF
		&& TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		   == STRING_CST
		&& integer_zerop (TREE_OPERAND (exp, 1))))
	   && !nontemporal && !call_param_p
	   && MEM_P (target))
    {
      /* Optimize initialization of an array with a STRING_CST.  */
      HOST_WIDE_INT exp_len, str_copy_len;
      rtx dest_mem;
      tree str = TREE_CODE (exp) == STRING_CST
		 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);

      exp_len = int_expr_size (exp);
      if (exp_len <= 0)
	goto normal_expr;

      if (TREE_STRING_LENGTH (str) <= 0)
	goto normal_expr;

      str_copy_len = strlen (TREE_STRING_POINTER (str));
      if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
	goto normal_expr;

      str_copy_len = TREE_STRING_LENGTH (str);
      if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
	  && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
	{
	  str_copy_len += STORE_MAX_PIECES - 1;
	  str_copy_len &= ~(STORE_MAX_PIECES - 1);
	}
      str_copy_len = MIN (str_copy_len, exp_len);
      if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
				CONST_CAST (char *, TREE_STRING_POINTER (str)),
				MEM_ALIGN (target), false))
	goto normal_expr;

      dest_mem = target;

      dest_mem = store_by_pieces (dest_mem,
				  str_copy_len, builtin_strncpy_read_str,
				  CONST_CAST (char *,
					      TREE_STRING_POINTER (str)),
				  MEM_ALIGN (target), false,
				  exp_len > str_copy_len ? 1 : 0);
      if (exp_len > str_copy_len)
	clear_storage (adjust_address (dest_mem, BLKmode, 0),
		       GEN_INT (exp_len - str_copy_len),
		       BLOCK_OP_NORMAL);
      return NULL_RTX;
    }
  else
    {
      rtx tmp_target;

  normal_expr:
      /* If we want to use a nontemporal or a reverse order store, force the
	 value into a register first.  */
      tmp_target = nontemporal || reverse ? NULL_RTX : target;
      temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL),
			       &alt_rtl, false);

      /* Handle bounds returned by call.  */
      if (TREE_CODE (exp) == CALL_EXPR)
	{
	  rtx bounds;
	  chkp_split_slot (temp, &temp, &bounds);
	  if (bounds && btarget)
	    {
	      gcc_assert (TREE_CODE (btarget) == SSA_NAME);
	      rtx tmp = targetm.calls.load_returned_bounds (bounds);
	      chkp_set_rtl_bounds (btarget, tmp);
	    }
	}
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
     only a word-sized value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
	 but TARGET is not valid memory reference, TEMP will differ
	 from TARGET although it is really the same location.  */
      && !(alt_rtl
	   && rtx_equal_p (alt_rtl, target)
	   && !side_effects_p (alt_rtl)
	   && !side_effects_p (target))
      /* If there's nothing to copy, don't bother.  Don't call
	 expr_size unless necessary, because some front-ends (C++)
	 expr_size-hook must not be given objects that are not
	 supposed to be bit-copied or bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
	{
	  if (GET_MODE (target) == BLKmode)
	    {
	      /* Handle calls that return BLKmode values in registers.  */
	      if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
		copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
	      else
		store_bit_field (target,
				 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
				 0, 0, 0, GET_MODE (temp), temp, reverse);
	    }
	  else
	    convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.  The string
	     constant may be shorter than the array.  So copy just the string's
	     actual length, and clear the rest.  First get the size of the data
	     type of the string, which is actually the size of the target.  */
	  rtx size = expr_size (exp);

	  if (CONST_INT_P (size)
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     (call_param_p
			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	  else
	    {
	      machine_mode pointer_mode
		= targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
	      machine_mode address_mode = get_address_mode (target);

	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop_loc (loc, MIN_EXPR,
				  make_tree (sizetype, size),
				  size_int (TREE_STRING_LENGTH (exp)));
	      rtx copy_size_rtx
		= expand_expr (copy_size, NULL_RTX, VOIDmode,
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL));
	      rtx_code_label *label = 0;

	      /* Copy that much.  */
	      copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
					       TYPE_UNSIGNED (sizetype));
	      emit_block_move (target, temp, copy_size_rtx,
			       (call_param_p
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in pointer_mode.  */
	      if (CONST_INT_P (copy_size_rtx))
		{
		  size = plus_constant (address_mode, size,
					-INTVAL (copy_size_rtx));
		  target = adjust_address (target, BLKmode,
					   INTVAL (copy_size_rtx));
		}
	      else
		{
		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  if (GET_MODE (copy_size_rtx) != address_mode)
		    copy_size_rtx = convert_to_mode (address_mode,
						     copy_size_rtx,
						     TYPE_UNSIGNED (sizetype));

		  target = offset_address (target, copy_size_rtx,
					   highest_pow2_factor (copy_size));
		  label = gen_label_rtx ();
		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, label);
		}

	      if (size != const0_rtx)
		clear_storage (target, size, BLOCK_OP_NORMAL);

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	{
	  if (GET_CODE (temp) == PARALLEL)
	    emit_group_move (target, temp);
	  else
	    emit_group_load (target, temp, TREE_TYPE (exp),
			     int_size_in_bytes (TREE_TYPE (exp)));
	}
      else if (GET_CODE (temp) == PARALLEL)
	emit_group_store (target, temp, TREE_TYPE (exp),
			  int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 (call_param_p
			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      /* If we emit a nontemporal store, there is nothing else to do.  */
      else if (nontemporal && emit_storent_insn (target, temp))
	;
      else
	{
	  if (reverse)
	    temp = flip_storage_order (GET_MODE (target), temp);
	  temp = force_operand (temp, target);
	  if (temp != target)
	    emit_move_insn (target, temp);
	}
    }

  return NULL_RTX;
}
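
/* Example of the STRING_CST fast path above: for a local
   "char buf[64] = "ab";" the first bytes are stored by pieces
   directly from the string literal, and clear_storage then zeroes the
   remaining tail of the array, avoiding a block copy from a full-size
   constant image.  */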
/* Same as store_expr_with_bounds but ignoring bounds of EXP.  */

rtx
store_expr (tree exp, rtx target, int call_param_p, bool nontemporal,
	    bool reverse)
{
  return store_expr_with_bounds (exp, target, call_param_p, nontemporal,
				 reverse, NULL);
}
/* Return true if field F of structure TYPE is a flexible array.  */

static bool
flexible_array_member_p (const_tree f, const_tree type)
{
  const_tree tf;

  tf = TREE_TYPE (f);
  return (DECL_CHAIN (f) == NULL
	  && TREE_CODE (tf) == ARRAY_TYPE
	  && TYPE_DOMAIN (tf)
	  && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
	  && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
	  && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
	  && int_size_in_bytes (type) >= 0);
}
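
/* E.g. in "struct s { int n; char data[]; };" the trailing DATA member
   satisfies all the conditions above (last field, array type, zero
   lower bound, no upper bound), so it is treated as a flexible array
   and ignored when counting initializable elements.  */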
/* If FOR_CTOR_P, return the number of top-level elements that a constructor
   must have in order for it to completely initialize a value of type TYPE.
   Return -1 if the number isn't known.

   If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */

static HOST_WIDE_INT
count_type_elements (const_tree type, bool for_ctor_p)
{
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
	tree nelts;

	nelts = array_type_nelts (type);
	if (nelts && tree_fits_uhwi_p (nelts))
	  {
	    unsigned HOST_WIDE_INT n;

	    n = tree_to_uhwi (nelts) + 1;
	    if (n == 0 || for_ctor_p)
	      return n;
	    else
	      return n * count_type_elements (TREE_TYPE (type), false);
	  }
	return for_ctor_p ? -1 : 1;
      }

    case RECORD_TYPE:
      {
	unsigned HOST_WIDE_INT n;
	tree f;

	n = 0;
	for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      if (!for_ctor_p)
		n += count_type_elements (TREE_TYPE (f), false);
	      else if (!flexible_array_member_p (f, type))
		/* Don't count flexible arrays, which are not supposed
		   to be initialized.  */
		n += 1;
	    }

	return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f;
	HOST_WIDE_INT n, m;

	gcc_assert (!for_ctor_p);
	/* Estimate the number of scalars in each field and pick the
	   maximum.  Other estimates would do instead; the idea is simply
	   to make sure that the estimate is not sensitive to the ordering
	   of the fields.  */
	n = 1;
	for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      m = count_type_elements (TREE_TYPE (f), false);
	      /* If the field doesn't span the whole union, add an extra
		 scalar for the rest.  */
	      if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
				    TYPE_SIZE (type)) != 1)
		m++;
	      if (n < m)
		n = m;
	    }
	return n;
      }

    case COMPLEX_TYPE:
      return 2;

    case VECTOR_TYPE:
      return TYPE_VECTOR_SUBPARTS (type);

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
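      return 1;

    default:
      gcc_unreachable ();
    }
}

/* Example: for "struct p { int x; int y; } a[4]" a constructor needs
   4 top-level elements to be complete (FOR_CTOR_P), while the scalar
   estimate (!FOR_CTOR_P) is 4 * 2 == 8.  */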
/* Helper for categorize_ctor_elements.  Identical interface.  */

static bool
categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			    HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  unsigned HOST_WIDE_INT idx;
  HOST_WIDE_INT nz_elts, init_elts, num_fields;
  tree value, purpose, elt_type;

  /* Whether CTOR is a valid constant initializer, in accordance with what
     initializer_constant_valid_p does.  If inferred from the constructor
     elements, true until proven otherwise.  */
  bool const_from_elts_p = constructor_static_from_elts_p (ctor);
  bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);

  nz_elts = 0;
  init_elts = 0;
  num_fields = 0;
  elt_type = NULL_TREE;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
    {
      HOST_WIDE_INT mult = 1;

      if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lo_index = TREE_OPERAND (purpose, 0);
	  tree hi_index = TREE_OPERAND (purpose, 1);

	  if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
	    mult = (tree_to_uhwi (hi_index)
		    - tree_to_uhwi (lo_index) + 1);
	}
      num_fields += mult;
      elt_type = TREE_TYPE (value);

      switch (TREE_CODE (value))
	{
	case CONSTRUCTOR:
	  {
	    HOST_WIDE_INT nz = 0, ic = 0;

	    bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
							   p_complete);

	    nz_elts += mult * nz;
	    init_elts += mult * ic;

	    if (const_from_elts_p && const_p)
	      const_p = const_elt_p;
	  }
	  break;

	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	  if (!initializer_zerop (value))
	    nz_elts += mult;
	  init_elts += mult;
	  break;

	case STRING_CST:
	  nz_elts += mult * TREE_STRING_LENGTH (value);
	  init_elts += mult * TREE_STRING_LENGTH (value);
	  break;

	case COMPLEX_CST:
	  if (!initializer_zerop (TREE_REALPART (value)))
	    nz_elts += mult;
	  if (!initializer_zerop (TREE_IMAGPART (value)))
	    nz_elts += mult;
	  init_elts += mult;
	  break;

	case VECTOR_CST:
	  {
	    unsigned i;
	    for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
	      {
		tree v = VECTOR_CST_ELT (value, i);
		if (!initializer_zerop (v))
		  nz_elts += mult;
		init_elts += mult;
	      }
	  }
	  break;

	default:
	  {
	    HOST_WIDE_INT tc = count_type_elements (elt_type, false);
	    nz_elts += mult * tc;
	    init_elts += mult * tc;

	    if (const_from_elts_p && const_p)
	      const_p
		= initializer_constant_valid_p (value,
						elt_type,
						TYPE_REVERSE_STORAGE_ORDER
						(TREE_TYPE (ctor)))
		  != NULL_TREE;
	  }
	  break;
	}
    }

  if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
						num_fields, elt_type))
    *p_complete = false;

  *p_nz_elts += nz_elts;
  *p_init_elts += init_elts;

  return const_p;
}
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT.
   * whether the constructor is complete -- in the sense that every
     meaningful byte is explicitly given a value --
     and place it in *P_COMPLETE.

   Return whether or not CTOR is a valid static constant initializer, the same
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */

bool
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  *p_nz_elts = 0;
  *p_init_elts = 0;
  *p_complete = true;

  return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
}
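
/* Example: for "int v[4] = { 1, 0, 2, 0 }" this returns
   *P_NZ_ELTS == 2, *P_INIT_ELTS == 4 and *P_COMPLETE == true;
   store_constructor uses such counts (via mostly_zeros_p below) to
   decide whether clearing the whole object first is cheaper than
   storing every element.  */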
/* TYPE is initialized by a constructor with NUM_ELTS elements, the last
   of which had type LAST_TYPE.  Each element was itself a complete
   initializer, in the sense that every meaningful byte was explicitly
   given a value.  Return true if the same is true for the constructor
   as a whole.  */

bool
complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
			  const_tree last_type)
{
  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      if (num_elts == 0)
	return false;

      gcc_assert (num_elts == 1 && last_type);

      /* ??? We could look at each element of the union, and find the
	 largest element.  Which would avoid comparing the size of the
	 initialized element against any tail padding in the union.
	 Doesn't seem worth the effort...  */
      return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
    }

  return count_type_elements (type, true) == num_elts;
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return !complete_p || nz_elts < init_elts / 4;
    }

  return initializer_zerop (exp);
}
/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
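/* An illustrative sketch (hypothetical declarations): for
   "int a[8] = { 1 };" all_zeros_p is false while mostly_zeros_p is
   true, since the constructor is incomplete and the remaining elements
   are implicitly zero; for "int a[8] = { 0 };" both predicates hold.  */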
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.
   If REVERSE is true, the store is to be done in reverse order.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
			 HOST_WIDE_INT bitpos,
			 unsigned HOST_WIDE_INT bitregion_start,
			 unsigned HOST_WIDE_INT bitregion_end,
			 machine_mode mode,
			 tree exp, int cleared,
			 alias_set_type alias_set, bool reverse)
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
	 bit position are on a byte boundary.  */
      && bitpos % BITS_PER_UNIT == 0
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
      /* If we have a nonzero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || MEM_P (target)))
    {
      if (MEM_P (target))
	target
	  = adjust_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);

      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT,
			 reverse);
    }
  else
    store_field (target, bitsize, bitpos, bitregion_start, bitregion_end, mode,
		 exp, alias_set, false, reverse);
}
/* Returns the number of FIELD_DECLs in TYPE.  */

static int
fields_length (const_tree type)
{
  tree t = TYPE_FIELDS (type);
  int count = 0;

  for (; t; t = DECL_CHAIN (t))
    if (TREE_CODE (t) == FIELD_DECL)
      ++count;

  return count;
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.
   If REVERSE is true, the store is to be done in reverse order.  */
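/* For instance (a hypothetical case): when assigning to a 6-byte field
   of a packed structure, SIZE may be 6 even though TREE_TYPE (EXP) is
   laid out in 8 bytes elsewhere; only the 6 bytes the field owns may be
   written, since the padding belongs to the neighboring fields.  */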
static void
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size,
		   bool reverse)
{
  tree type = TREE_TYPE (exp);
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
  HOST_WIDE_INT bitregion_end = size > 0 ? size * BITS_PER_UNIT - 1 : 0;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	tree field, value;

	/* The storage order is specified for every aggregate type.  */
	reverse = TYPE_REVERSE_STORAGE_ORDER (type);

	/* If size is zero or the target is already cleared, do nothing.  */
	if (size == 0 || cleared)
	  cleared = 1;
	/* We either clear the aggregate or indicate the value is dead.  */
	else if ((TREE_CODE (type) == UNION_TYPE
		  || TREE_CODE (type) == QUAL_UNION_TYPE)
		 && ! CONSTRUCTOR_ELTS (exp))
	  /* If the constructor is empty, clear the union.  */
	  {
	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* If we are building a static constructor into a register,
	   set the initial value as zero so we can fold the value into
	   a constant.  But if more than one register is involved,
	   this probably loses.  */
	else if (REG_P (target) && TREE_STATIC (exp)
		 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	  {
	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    cleared = 1;
	  }

	/* If the constructor has fewer fields than the structure or
	   if we are initializing the structure to mostly zeros, clear
	   the whole structure first.  Don't do this if TARGET is a
	   register whose mode size isn't equal to SIZE since
	   clear_storage can't handle this case.  */
	else if (size > 0
		 && (((int) CONSTRUCTOR_NELTS (exp) != fields_length (type))
		     || mostly_zeros_p (exp))
		 && (!REG_P (target)
		     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
			 == size)))
	  {
	    clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (REG_P (target) && !cleared)
	  emit_clobber (target);

	/* Store each element of the constructor into the
	   corresponding field of TARGET.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
	  {
	    machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos = 0;
	    tree offset;
	    rtx to_rtx = target;

	    /* Just ignore missing fields.  We cleared the whole
	       structure, above, if any fields are missing.  */
	    if (field == 0)
	      continue;

	    if (cleared && initializer_zerop (value))
	      continue;

	    if (tree_fits_uhwi_p (DECL_SIZE (field)))
	      bitsize = tree_to_uhwi (DECL_SIZE (field));
	    else
	      gcc_unreachable ();

	    mode = DECL_MODE (field);
	    if (DECL_BIT_FIELD (field))
	      mode = VOIDmode;

	    offset = DECL_FIELD_OFFSET (field);
	    if (tree_fits_shwi_p (offset)
		&& tree_fits_shwi_p (bit_position (field)))
	      {
		bitpos = int_bit_position (field);
		offset = NULL_TREE;
	      }
	    else
	      gcc_unreachable ();

	    /* If this initializes a field that is smaller than a
	       word, at the start of a word, try to widen it to a full
	       word.  This special case allows us to output C++ member
	       function initializations in a form that the optimizers
	       can understand.  */
	    if (WORD_REGISTER_OPERATIONS
		&& REG_P (target)
		&& bitsize < BITS_PER_WORD
		&& bitpos % BITS_PER_WORD == 0
		&& GET_MODE_CLASS (mode) == MODE_INT
		&& TREE_CODE (value) == INTEGER_CST
		&& exp_size >= 0
		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	      {
		tree type = TREE_TYPE (value);

		if (TYPE_PRECISION (type) < BITS_PER_WORD)
		  {
		    type = lang_hooks.types.type_for_mode
		      (word_mode, TYPE_UNSIGNED (type));
		    value = fold_convert (type, value);
		    /* Make sure the bits beyond the original bitsize are zero
		       so that we can correctly avoid extra zeroing stores in
		       later constructor elements.  */
		    tree bitsize_mask
		      = wide_int_to_tree (type, wi::mask (bitsize, false,
							  BITS_PER_WORD));
		    value = fold_build2 (BIT_AND_EXPR, type, value,
					 bitsize_mask);
		  }

		if (BYTES_BIG_ENDIAN)
		  value
		    = fold_build2 (LSHIFT_EXPR, type, value,
				   build_int_cst (type,
						  BITS_PER_WORD - bitsize));
		bitsize = BITS_PER_WORD;
		mode = word_mode;
	      }

	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
		&& DECL_NONADDRESSABLE_P (field))
	      {
		to_rtx = copy_rtx (to_rtx);
		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	      }

	    store_constructor_field (to_rtx, bitsize, bitpos,
				     0, bitregion_end, mode,
				     value, cleared,
				     get_alias_set (TREE_TYPE (field)),
				     reverse);
	  }
	break;
      }
    case ARRAY_TYPE:
      {
	tree value, index;
	unsigned HOST_WIDE_INT i;
	int need_to_clear;
	tree domain;
	tree elttype = TREE_TYPE (type);
	int const_bounds_p;
	HOST_WIDE_INT minelt = 0;
	HOST_WIDE_INT maxelt = 0;

	/* The storage order is specified for every aggregate type.  */
	reverse = TYPE_REVERSE_STORAGE_ORDER (type);

	domain = TYPE_DOMAIN (type);
	const_bounds_p = (TYPE_MIN_VALUE (domain)
			  && TYPE_MAX_VALUE (domain)
			  && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
			  && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));

	/* If we have constant bounds for the range of the type, get them.  */
	if (const_bounds_p)
	  {
	    minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
	    maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
	  }

	/* If the constructor has fewer elements than the array, clear
	   the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT idx;
	    tree index, value;
	    HOST_WIDE_INT count = 0, zero_count = 0;
	    need_to_clear = ! const_bounds_p;

	    /* This loop is a more accurate version of the loop in
	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
	       is also needed to check for missing elements.  */
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
	      {
		HOST_WIDE_INT this_node_count;

		if (need_to_clear)
		  break;

		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		  {
		    tree lo_index = TREE_OPERAND (index, 0);
		    tree hi_index = TREE_OPERAND (index, 1);

		    if (! tree_fits_uhwi_p (lo_index)
			|| ! tree_fits_uhwi_p (hi_index))
		      {
			need_to_clear = 1;
			break;
		      }

		    this_node_count = (tree_to_uhwi (hi_index)
				       - tree_to_uhwi (lo_index) + 1);
		  }
		else
		  this_node_count = 1;

		count += this_node_count;
		if (mostly_zeros_p (value))
		  zero_count += this_node_count;
	      }

	    /* Clear the entire array first if there are any missing
	       elements, or if the incidence of zero elements is >=
	       75%.  */
	    if (! need_to_clear
		&& (count < maxelt - minelt + 1
		    || 4 * zero_count >= 3 * count))
	      need_to_clear = 1;
	  }

	if (need_to_clear && size > 0)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (!cleared && REG_P (target))
	  /* Inform later passes that the old value is dead.  */
	  emit_clobber (target);
	/* Store each element of the constructor into the
	   corresponding element of TARGET, determined by counting the
	   elements.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
	  {
	    machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos;
	    rtx xtarget = target;

	    if (cleared && initializer_zerop (value))
	      continue;

	    mode = TYPE_MODE (elttype);
	    if (mode == BLKmode)
	      bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
			 ? tree_to_uhwi (TYPE_SIZE (elttype))
			 : -1);
	    else
	      bitsize = GET_MODE_BITSIZE (mode);

	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	      {
		tree lo_index = TREE_OPERAND (index, 0);
		tree hi_index = TREE_OPERAND (index, 1);
		rtx index_r, pos_rtx;
		HOST_WIDE_INT lo, hi, count;
		tree position;

		/* If the range is constant and "small", unroll the loop.  */
		if (const_bounds_p
		    && tree_fits_shwi_p (lo_index)
		    && tree_fits_shwi_p (hi_index)
		    && (lo = tree_to_shwi (lo_index),
			hi = tree_to_shwi (hi_index),
			count = hi - lo + 1,
			(!MEM_P (target)
			 || count <= 2
			 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
			     && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
				 <= 40 * 8)))))
		  {
		    lo -= minelt;  hi -= minelt;
		    for (; lo <= hi; lo++)
		      {
			bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));

			if (MEM_P (target)
			    && !MEM_KEEP_ALIAS_SET_P (target)
			    && TREE_CODE (type) == ARRAY_TYPE
			    && TYPE_NONALIASED_COMPONENT (type))
			  {
			    target = copy_rtx (target);
			    MEM_KEEP_ALIAS_SET_P (target) = 1;
			  }

			store_constructor_field
			  (target, bitsize, bitpos, 0, bitregion_end,
			   mode, value, cleared,
			   get_alias_set (elttype), reverse);
		      }
		  }
		else
		  {
		    rtx_code_label *loop_start = gen_label_rtx ();
		    rtx_code_label *loop_end = gen_label_rtx ();
		    tree exit_cond;

		    expand_normal (hi_index);

		    index = build_decl (EXPR_LOCATION (exp),
					VAR_DECL, NULL_TREE, domain);
		    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
		    SET_DECL_RTL (index, index_r);
		    store_expr (lo_index, index_r, 0, false, reverse);

		    /* Build the head of the loop.  */
		    do_pending_stack_adjust ();
		    emit_label (loop_start);

		    /* Assign value to element index.  */
		    position =
		      fold_convert (ssizetype,
				    fold_build2 (MINUS_EXPR,
						 TREE_TYPE (index),
						 index,
						 TYPE_MIN_VALUE (domain)));

		    position =
		      size_binop (MULT_EXPR, position,
				  fold_convert (ssizetype,
						TYPE_SIZE_UNIT (elttype)));

		    pos_rtx = expand_normal (position);
		    xtarget = offset_address (target, pos_rtx,
					      highest_pow2_factor (position));
		    xtarget = adjust_address (xtarget, mode, 0);
		    if (TREE_CODE (value) == CONSTRUCTOR)
		      store_constructor (value, xtarget, cleared,
					 bitsize / BITS_PER_UNIT, reverse);
		    else
		      store_expr (value, xtarget, 0, false, reverse);

		    /* Generate a conditional jump to exit the loop.  */
		    exit_cond = build2 (LT_EXPR, integer_type_node,
					index, hi_index);
		    jumpif (exit_cond, loop_end,
			    profile_probability::uninitialized ());

		    /* Update the loop counter, and jump to the head of
		       the loop.  */
		    expand_assignment (index,
				       build2 (PLUS_EXPR, TREE_TYPE (index),
					       index, integer_one_node),
				       false);

		    emit_jump (loop_start);

		    /* Build the end of the loop.  */
		    emit_label (loop_end);
		  }
	      }
	    else if ((index != 0 && ! tree_fits_shwi_p (index))
		     || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
	      {
		tree position;

		if (index == 0)
		  index = ssize_int (1);

		if (minelt)
		  index = fold_convert (ssizetype,
					fold_build2 (MINUS_EXPR,
						     TREE_TYPE (index),
						     index,
						     TYPE_MIN_VALUE (domain)));

		position =
		  size_binop (MULT_EXPR, index,
			      fold_convert (ssizetype,
					    TYPE_SIZE_UNIT (elttype)));
		xtarget = offset_address (target,
					  expand_normal (position),
					  highest_pow2_factor (position));
		xtarget = adjust_address (xtarget, mode, 0);
		store_expr (value, xtarget, 0, false, reverse);
	      }
	    else
	      {
		if (index != 0)
		  bitpos = ((tree_to_shwi (index) - minelt)
			    * tree_to_uhwi (TYPE_SIZE (elttype)));
		else
		  bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));

		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
		    && TREE_CODE (type) == ARRAY_TYPE
		    && TYPE_NONALIASED_COMPONENT (type))
		  {
		    target = copy_rtx (target);
		    MEM_KEEP_ALIAS_SET_P (target) = 1;
		  }
		store_constructor_field (target, bitsize, bitpos, 0,
					 bitregion_end, mode, value,
					 cleared, get_alias_set (elttype),
					 reverse);
	      }
	  }
	break;
      }
    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;
	int i;
	int need_to_clear;
	int icode = CODE_FOR_nothing;
	tree elttype = TREE_TYPE (type);
	int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
	machine_mode eltmode = TYPE_MODE (elttype);
	HOST_WIDE_INT bitsize;
	HOST_WIDE_INT bitpos;
	rtvec vector = NULL;
	unsigned n_elts;
	alias_set_type alias;
	bool vec_vec_init_p = false;

	gcc_assert (eltmode != BLKmode);

	n_elts = TYPE_VECTOR_SUBPARTS (type);
	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
	  {
	    machine_mode mode = GET_MODE (target);
	    machine_mode emode = eltmode;

	    if (CONSTRUCTOR_NELTS (exp)
		&& (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value))
		    == VECTOR_TYPE))
	      {
		tree etype = TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value);
		gcc_assert (CONSTRUCTOR_NELTS (exp)
			    * TYPE_VECTOR_SUBPARTS (etype)
			    == n_elts);
		emode = TYPE_MODE (etype);
	      }
	    icode = (int) convert_optab_handler (vec_init_optab, mode, emode);
	    if (icode != CODE_FOR_nothing)
	      {
		unsigned int i, n = n_elts;

		if (emode != eltmode)
		  {
		    n = CONSTRUCTOR_NELTS (exp);
		    vec_vec_init_p = true;
		  }
		vector = rtvec_alloc (n);
		for (i = 0; i < n; i++)
		  RTVEC_ELT (vector, i) = CONST0_RTX (emode);
	      }
	  }

	/* If the constructor has fewer elements than the vector,
	   clear the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
	    tree value;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	      {
		tree sz = TYPE_SIZE (TREE_TYPE (value));
		int n_elts_here
		  = tree_to_uhwi (int_const_binop (TRUNC_DIV_EXPR, sz,
						   TYPE_SIZE (elttype)));

		count += n_elts_here;
		if (mostly_zeros_p (value))
		  zero_count += n_elts_here;
	      }

	    /* Clear the entire vector first if there are any missing elements,
	       or if the incidence of zero elements is >= 75%.  */
	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
	  }

	if (need_to_clear && size > 0 && !vector)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* Inform later passes that the old value is dead.  */
	if (!cleared && !vector && REG_P (target))
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));

	if (MEM_P (target))
	  alias = MEM_ALIAS_SET (target);
	else
	  alias = get_alias_set (elttype);

	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements.  */
	for (idx = 0, i = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
	     idx++, i += bitsize / elt_size)
	  {
	    HOST_WIDE_INT eltpos;
	    tree value = ce->value;

	    bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
	    if (cleared && initializer_zerop (value))
	      continue;

	    if (ce->index)
	      eltpos = tree_to_uhwi (ce->index);
	    else
	      eltpos = i;

	    if (vector)
	      {
		if (vec_vec_init_p)
		  {
		    gcc_assert (ce->index == NULL_TREE);
		    gcc_assert (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE);
		    eltpos = idx;
		  }
		else
		  gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
		RTVEC_ELT (vector, eltpos) = expand_normal (value);
	      }
	    else
	      {
		machine_mode value_mode
		  = (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
		     ? TYPE_MODE (TREE_TYPE (value)) : eltmode);
		bitpos = eltpos * elt_size;
		store_constructor_field (target, bitsize, bitpos, 0,
					 bitregion_end, value_mode,
					 value, cleared, alias, reverse);
	      }
	  }

	if (vector)
	  emit_insn (GEN_FCN (icode) (target,
				      gen_rtx_PARALLEL (GET_MODE (target),
							vector)));
	break;
      }

    default:
      gcc_unreachable ();
    }
}
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   BITREGION_START is bitpos of the first bitfield in this region.
   BITREGION_END is the bitpos of the ending bitfield in this region.
   These two fields are 0, if the C++ memory model does not apply,
   or we are not interested in keeping track of bitfield regions.

   Always return const0_rtx unless we have something particular to
   return.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.

   If NONTEMPORAL is true, try generating a nontemporal store.

   If REVERSE is true, the store is to be done in reverse order.  */
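/* A hypothetical illustration of the bit region: for

     struct { char a; int b : 7; int c : 9; } x;

   a store to x.c may not touch x.a under the C++11 memory model, so the
   caller describes the region covering only the B/C bitfields through
   BITREGION_START and BITREGION_END, and store_bit_field keeps its
   read-modify-write cycles inside that region.  */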
static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
	     unsigned HOST_WIDE_INT bitregion_start,
	     unsigned HOST_WIDE_INT bitregion_end,
	     machine_mode mode, tree exp,
	     alias_set_type alias_set, bool nontemporal, bool reverse)
{
  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */
      gcc_assert (!bitpos);
      return store_expr (exp, target, 0, nontemporal, reverse);
    }
  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode
	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
		|| bitpos % GET_MODE_ALIGNMENT (mode))
	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
	      || (bitpos % BITS_PER_UNIT != 0)))
      || (bitsize >= 0 && mode != BLKmode
	  && GET_MODE_BITSIZE (mode) > bitsize)
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0
	  /* Except for initialization of full bytes from a CONSTRUCTOR, which
	     we will handle specially below.  */
	  && !(TREE_CODE (exp) == CONSTRUCTOR
	       && bitsize % BITS_PER_UNIT == 0)
	  /* And except for bitwise copying of TREE_ADDRESSABLE types,
	     where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp)
	     includes some extra padding.  store_expr / expand_expr will in
	     that case call get_inner_reference that will have the bitsize
	     we check here and thus the block move will not clobber the
	     padding that shouldn't be clobbered.  In the future we could
	     replace the TREE_ADDRESSABLE check with a check that
	     get_base_address needs to live in memory.  */
	  && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
	      || TREE_CODE (exp) != COMPONENT_REF
	      || TREE_CODE (DECL_SIZE (TREE_OPERAND (exp, 1))) != INTEGER_CST
	      || (bitsize % BITS_PER_UNIT != 0)
	      || (bitpos % BITS_PER_UNIT != 0)
	      || (compare_tree_int (DECL_SIZE (TREE_OPERAND (exp, 1)), bitsize)
		  != 0)))
      /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
	 decl we must use bitfield operations.  */
      || (bitsize >= 0
	  && TREE_CODE (exp) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
    {
      rtx temp;
      gimple *nop_def;
      /* If EXP is a NOP_EXPR of precision less than its mode, then that
	 implies a mask operation.  If the precision is the same size as
	 the field we're storing into, that mask is redundant.  This is
	 particularly common with bit field assignments generated by the
	 C front end.  */
      nop_def = get_def_for_expr (exp, NOP_EXPR);
      if (nop_def)
	{
	  tree type = TREE_TYPE (exp);
	  if (INTEGRAL_TYPE_P (type)
	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
	      && bitsize == TYPE_PRECISION (type))
	    {
	      tree op = gimple_assign_rhs1 (nop_def);
	      type = TREE_TYPE (op);
	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
		exp = op;
	    }
	}

      temp = expand_normal (exp);
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (temp) == PARALLEL)
	{
	  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
	  scalar_int_mode temp_mode
	    = smallest_int_mode_for_size (size * BITS_PER_UNIT);
	  rtx temp_target = gen_reg_rtx (temp_mode);
	  emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
	  temp = temp_target;
	}

      /* Handle calls that return BLKmode values in registers.  */
      else if (mode == BLKmode && REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
	{
	  rtx temp_target = gen_reg_rtx (GET_MODE (temp));
	  copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
	  temp = temp_target;
	}

      /* If the value has aggregate type and an integral mode then, if BITSIZE
	 is narrower than this mode and this is for big-endian data, we first
	 need to put the value into the low-order bits for store_bit_field,
	 except when MODE is BLKmode and BITSIZE larger than the word size
	 (see the handling of fields larger than a word in store_bit_field).
	 Moreover, the field may be not aligned on a byte boundary; in this
	 case, if it has reverse storage order, it needs to be accessed as a
	 scalar field with reverse storage order and we must first put the
	 value into target order.  */
      scalar_int_mode temp_mode;
      if (AGGREGATE_TYPE_P (TREE_TYPE (exp))
	  && is_int_mode (GET_MODE (temp), &temp_mode))
	{
	  HOST_WIDE_INT size = GET_MODE_BITSIZE (temp_mode);

	  reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp));

	  if (reverse)
	    temp = flip_storage_order (temp_mode, temp);

	  if (bitsize < size
	      && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN
	      && !(mode == BLKmode && bitsize > BITS_PER_WORD))
	    temp = expand_shift (RSHIFT_EXPR, temp_mode, temp,
				 size - bitsize, NULL_RTX, 1);
	}

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the mode of TEMP and TARGET is BLKmode, both must be in memory
	 and BITPOS must be aligned on a byte boundary.  If so, we simply do
	 a block copy.  Likewise for a BLKmode-like TARGET.  */
      if (GET_MODE (temp) == BLKmode
	  && (GET_MODE (target) == BLKmode
	      || (MEM_P (target)
		  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
		  && (bitpos % BITS_PER_UNIT) == 0
		  && (bitsize % BITS_PER_UNIT) == 0)))
	{
	  gcc_assert (MEM_P (target) && MEM_P (temp)
		      && (bitpos % BITS_PER_UNIT) == 0);

	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   BLOCK_OP_NORMAL);

	  return const0_rtx;
	}

      /* If the mode of TEMP is still BLKmode and BITSIZE not larger than the
	 word size, we need to load the value (see again store_bit_field).  */
      if (GET_MODE (temp) == BLKmode && bitsize <= BITS_PER_WORD)
	{
	  scalar_int_mode temp_mode = smallest_int_mode_for_size (bitsize);
	  temp = extract_bit_field (temp, bitsize, 0, 1, NULL_RTX, temp_mode,
				    temp_mode, false, NULL);
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos,
		       bitregion_start, bitregion_end,
		       mode, temp, reverse);

      return const0_rtx;
    }
  else
    {
      /* Now build a reference to just the desired component.  */
      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
	 into a target smaller than its type; handle that case now.  */
      if (TREE_CODE (exp) == CONSTRUCTOR && bitsize >= 0)
	{
	  gcc_assert (bitsize % BITS_PER_UNIT == 0);
	  store_constructor (exp, to_rtx, 0, bitsize / BITS_PER_UNIT, reverse);
	  return const0_rtx;
	}

      return store_expr (exp, to_rtx, 0, nontemporal, reverse);
    }
}
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
   storage order of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
   Otherwise, it is a mode that can be used to access the field.

   If the field describes a variable-sized object, *PMODE is set to
   BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */
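/* A worked example (hypothetical types, not from this file): given

     struct S { int i; char c[8]; } *p;

   the reference p->c[2] decomposes into the containing object *p with
   *PBITSIZE == 8, *PBITPOS == 32 + 2 * 8 == 48 (assuming 32-bit int
   and 8-bit bytes), a null *POFFSET since the position is constant,
   *PMODE == QImode, and *PUNSIGNEDP reflecting the signedness of plain
   char on the target.  */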
tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
		     HOST_WIDE_INT *pbitpos, tree *poffset,
		     machine_mode *pmode, int *punsignedp,
		     int *preversep, int *pvolatilep)
{
  tree size_tree = 0;
  machine_mode mode = VOIDmode;
  bool blkmode_bitfield = false;
  tree offset = size_zero_node;
  offset_int bit_offset = 0;

  /* First get the mode, signedness, storage order and size.  We do this from
     just the outermost expression.  */
  *pbitsize = -1;
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (exp, 1);
      size_tree = DECL_SIZE (field);
      if (flag_strict_volatile_bitfields > 0
	  && TREE_THIS_VOLATILE (exp)
	  && DECL_BIT_FIELD_TYPE (field)
	  && DECL_MODE (field) != BLKmode)
	/* Volatile bitfields should be accessed in the mode of the
	   field's type, not the mode computed based on the bit
	   size.  */
	mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
      else if (!DECL_BIT_FIELD (field))
	mode = DECL_MODE (field);
      else if (DECL_MODE (field) == BLKmode)
	blkmode_bitfield = true;

      *punsignedp = DECL_UNSIGNED (field);
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
		     || TYPE_UNSIGNED (TREE_TYPE (exp)));

      /* For vector types, with the correct size of access, use the mode of
	 inner type.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
	  && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
	mode = TYPE_MODE (TREE_TYPE (exp));
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! tree_fits_uhwi_p (size_tree))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_to_uhwi (size_tree);
    }

  *preversep = reverse_storage_order_for_component_p (exp);

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    /* If this field hasn't been filled in yet, don't go past it.
	       This should only happen when folding expressions made during
	       type construction.  */
	    if (this_offset == 0)
	      break;

	    offset = size_binop (PLUS_EXPR, offset, this_offset);
	    bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));

	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound = array_ref_low_bound (exp);
	    tree unit_size = array_ref_element_size (exp);

	    /* We assume all arrays have sizes that are a multiple of a byte.
	       First subtract the lower bound, if any, in the type of the
	       index, then convert to sizetype and multiply by the size of
	       the array element.  */
	    if (! integer_zerop (low_bound))
	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				   index, low_bound);

	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (MULT_EXPR,
					     fold_convert (sizetype, index),
					     unit_size));
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  bit_offset += *pbitsize;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case MEM_REF:
	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      tree off = TREE_OPERAND (exp, 1);
	      if (!integer_zerop (off))
		{
		  offset_int boff, coff = mem_ref_offset (exp);
		  boff = coff << LOG2_BITS_PER_UNIT;
		  bit_offset += boff;
		}
	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	    }
	  goto done;

	default:
	  goto done;
	}

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Make sure to handle overflow during
     this conversion.  */
  if (TREE_CODE (offset) == INTEGER_CST)
    {
      offset_int tem = wi::sext (wi::to_offset (offset),
				 TYPE_PRECISION (sizetype));
      tem <<= LOG2_BITS_PER_UNIT;
      tem += bit_offset;
      if (wi::fits_shwi_p (tem))
	{
	  *pbitpos = tem.to_shwi ();
	  *poffset = offset = NULL_TREE;
	}
    }

  /* Otherwise, split it up.  */
  if (offset)
    {
      /* Avoid returning a negative bitpos as this may wreak havoc later.  */
      if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
	{
	  offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
	  offset_int tem = bit_offset.and_not (mask);
	  /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
	     Subtract it to BIT_OFFSET and add it (scaled) to OFFSET.  */
	  bit_offset -= tem;
	  tem >>= LOG2_BITS_PER_UNIT;
	  offset = size_binop (PLUS_EXPR, offset,
			       wide_int_to_tree (sizetype, tem));
	}

      *pbitpos = bit_offset.to_shwi ();
      *poffset = offset;
    }

  /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
  if (mode == VOIDmode
      && blkmode_bitfield
      && (*pbitpos % BITS_PER_UNIT) == 0
      && (*pbitsize % BITS_PER_UNIT) == 0)
    *pmode = BLKmode;
  else
    *pmode = mode;

  return exp;
}
/* Alignment in bits the TARGET of an assignment may be assumed to have.  */

static unsigned HOST_WIDE_INT
target_align (const_tree target)
{
  /* We might have a chain of nested references with intermediate misaligning
     bitfields components, so need to recurse to find out.  */

  unsigned HOST_WIDE_INT this_align, outer_align;

  switch (TREE_CODE (target))
    {
    case BIT_FIELD_REF:
      return 1;

    case COMPONENT_REF:
      this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MAX (this_align, outer_align);

    default:
      return TYPE_ALIGN (TREE_TYPE (target));
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */
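/* For example (a sketch of the intent, register numbers hypothetical):
   given VALUE == (plus:SI (mult:SI (reg:SI 60) (const_int 4))
   (reg:SI 61)), force_operand emits a multiply and an add and returns a
   pseudo register holding the sum, so the caller can use the result
   where only a register, memory or constant operand is allowed.  */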
rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value
	= simplify_gen_subreg (GET_MODE (value),
			       force_reg (GET_MODE (SUBREG_REG (value)),
					  force_operand (SUBREG_REG (value),
							 NULL_RTX)),
			       GET_MODE (SUBREG_REG (value)),
			       SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && CONST_INT_P (op2))
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (code == PLUS && CONST_INT_P (op2)
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && REG_P (XEXP (XEXP (value, 0), 0))
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1), 0),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);
	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }
  if (UNARY_P (value))
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      switch (code)
	{
	case ZERO_EXTEND:
	case SIGN_EXTEND:
	case TRUNCATE:
	case FLOAT_EXTEND:
	case FLOAT_TRUNCATE:
	  convert_move (target, op1, code == ZERO_EXTEND);
	  return target;

	case FIX:
	case UNSIGNED_FIX:
	  expand_fix (target, op1, code == UNSIGNED_FIX);
	  return target;

	case FLOAT:
	case UNSIGNED_FLOAT:
	  expand_float (target, op1, code == UNSIGNED_FLOAT);
	  return target;

	default:
	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
	}
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory reference to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */
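/* E.g. (a hypothetical situation): when expanding "x = x + foo ()", the
   call may clobber memory and hard registers, so safe_from_p returns 0
   if X is a MEM or a hard register, forcing the caller to evaluate the
   right-hand side into a temporary first.  Returning 0 conservatively
   only costs an extra temporary, never correctness.  */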
static int
safe_from_p (const_rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
	      != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (MEM_P (x)
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_declaration:
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case tcc_constant:
      return 1;

    case tcc_exceptional:
      if (TREE_CODE (exp) == TREE_LIST)
	{
	  while (1)
	    {
	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
		return 0;
	      exp = TREE_CHAIN (exp);
	      if (!exp)
		return 1;
	      if (TREE_CODE (exp) != TREE_LIST)
		return safe_from_p (x, exp, 0);
	    }
	}
      else if (TREE_CODE (exp) == CONSTRUCTOR)
	{
	  constructor_elt *ce;
	  unsigned HOST_WIDE_INT idx;

	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
		|| !safe_from_p (x, ce->value, 0))
	      return 0;
	  return 1;
	}
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case tcc_statement:
      /* The only case we look at here is the DECL_INITIAL inside a
	 DECL_EXPR.  */
      return (TREE_CODE (exp) != DECL_EXPR
	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));

    case tcc_binary:
    case tcc_comparison:
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
	return 0;
      /* Fall through.  */

    case tcc_unary:
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL whose address is part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || !MEM_P (DECL_RTL (exp)))
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case MEM_REF:
	  if (MEM_P (x)
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || MEM_P (x))
	    return 0;
	  break;

	case WITH_CLEANUP_EXPR:
	case CLEANUP_POINT_EXPR:
	  /* Lowered by gimplify.c.  */
	  gcc_unreachable ();

	case SAVE_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = TREE_OPERAND_LENGTH (exp);
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      break;

    case tcc_type:
      /* Should never get a type here.  */
      gcc_unreachable ();
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (REG_P (exp_rtl)
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (MEM_P (x) && MEM_P (exp_rtl)
		    && true_dependence (exp_rtl, VOIDmode, x)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (const_tree exp)
{
  unsigned HOST_WIDE_INT ret;
  int trailing_zeros = tree_ctz (exp);
  if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
    return BIGGEST_ALIGNMENT;
  ret = HOST_WIDE_INT_1U << trailing_zeros;
  if (ret > BIGGEST_ALIGNMENT)
    return BIGGEST_ALIGNMENT;
  return ret;
}
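/* For example (a hypothetical expression): if EXP is "i * 12", tree_ctz
   knows every value of the product has at least 2 trailing zero bits
   (12 == 4 * 3), so the result is 4, the largest power of two
   guaranteed to divide the value.  */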
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree target, const_tree exp)
{
  unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
  unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);

  return MAX (factor, talign);
}
/* Convert the tree comparison code TCODE to the rtl one where the
   signedness is UNSIGNEDP.  */

static enum rtx_code
convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
{
  enum rtx_code code;
  switch (tcode)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;
    default:
      gcc_unreachable ();
    }
  return code;
}
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

static void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
/* Return a MEM that contains constant EXP.  DEFER is as for
   output_constant_def and MODIFIER is as for expand_expr.  */

static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
  rtx mem;

  mem = output_constant_def (exp, defer);
  if (modifier != EXPAND_INITIALIZER)
    mem = use_anchored_address (mem);

  return mem;
}
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
			 enum expand_modifier modifier, addr_space_t as)
{
  rtx result, subtarget;
  tree inner, offset;
  HOST_WIDE_INT bitsize, bitpos;
  int unsignedp, reversep, volatilep = 0;
  machine_mode mode1;

  /* If we are taking the address of a constant and are at the top level,
     we have to use output_constant_def since we can't call force_const_mem
     yet.  */
  /* ??? This should be considered a front-end bug.  We should not be
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
     exception here is STRING_CST.  */
  if (CONSTANT_CLASS_P (exp))
    {
      result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
      return result;
    }

  /* Everything must be something allowed by is_gimple_addressable.  */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);

    case MEM_REF:
      {
	tree tem = TREE_OPERAND (exp, 0);
	if (!integer_zerop (TREE_OPERAND (exp, 1)))
	  tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
	return expand_expr (tem, target, tmode, modifier);
      }

    case CONST_DECL:
      /* Expand the initializer like constants above.  */
      result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
					   0, modifier), 0);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
      return result;

    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
	 the address is the same as the address of the parent object.  */
      offset = 0;
      bitpos = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
	 The expression is therefore always offset by the size of the
	 scalar type.  */
      offset = 0;
      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;

    case COMPOUND_LITERAL_EXPR:
      /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
	 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
	 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
	 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
	 the initializers aren't gimplified.  */
      if (COMPOUND_LITERAL_EXPR_DECL (exp)
	  && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
	return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
					target, tmode, modifier, as);
      /* FALLTHRU */
    default:
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
	 expand_expr, as that can have various side effects; LABEL_DECLs for
	 example, may not have their DECL_RTL set yet.  Expand the rtl of
	 CONSTRUCTORs too, which should yield a memory reference for the
	 constructor's contents.  Assume language specific tree nodes can
	 be expanded in some interesting way.  */
      gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
      if (DECL_P (exp)
	  || TREE_CODE (exp) == CONSTRUCTOR
	  || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
	{
	  result = expand_expr (exp, target, tmode,
				modifier == EXPAND_INITIALIZER
				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);

	  /* If the DECL isn't in memory, then the DECL wasn't properly
	     marked TREE_ADDRESSABLE, which will be either a front-end
	     or a tree optimizer bug.  */

	  gcc_assert (MEM_P (result));
	  result = XEXP (result, 0);

	  /* ??? Is this needed anymore?  */
	  if (DECL_P (exp))
	    TREE_USED (exp) = 1;

	  if (modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_SUM)
	    result = force_operand (result, target);
	  return result;
	}

      /* Pass FALSE as the last argument to get_inner_reference although
	 we are expanding to RTL.  The rationale is that we know how to
	 handle "aligning nodes" here: we can just bypass them because
	 they won't change the final object whose address will be returned
	 (they actually exist only for that purpose).  */
      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
				   &unsignedp, &reversep, &volatilep);
      break;
    }

  /* We must have made progress.  */
  gcc_assert (inner != exp);

  subtarget = offset || bitpos ? NULL_RTX : target;
  /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
     inner alignment, force the inner to be sufficiently aligned.  */
  if (CONSTANT_CLASS_P (inner)
      && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
    {
      inner = copy_node (inner);
      TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
      SET_TYPE_ALIGN (TREE_TYPE (inner), TYPE_ALIGN (TREE_TYPE (exp)));
      TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
    }
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);

  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
	result = force_operand (result, NULL);
      tmp = expand_expr (offset, NULL_RTX, tmode,
			 modifier == EXPAND_INITIALIZER
			 ? EXPAND_INITIALIZER : EXPAND_NORMAL);

      /* expand_expr is allowed to return an object in a mode other
	 than TMODE.  If it did, we need to convert.  */
      if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
	tmp = convert_modes (tmode, GET_MODE (tmp),
			     tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
      result = convert_memory_address_addr_space (tmode, result, as);
      tmp = convert_memory_address_addr_space (tmode, tmp, as);

      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	result = simplify_gen_binary (PLUS, tmode, result, tmp);
      else
	{
	  subtarget = bitpos ? NULL_RTX : target;
	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
					1, OPTAB_LIB_WIDEN);
	}
    }

  if (bitpos)
    {
      /* Someone beforehand should have rejected taking the address
	 of such an object.  */
      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);

      result = convert_memory_address_addr_space (tmode, result, as);
      result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
    }

  return result;
}
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
		       enum expand_modifier modifier)
{
  addr_space_t as = ADDR_SPACE_GENERIC;
  machine_mode address_mode = Pmode;
  machine_mode pointer_mode = ptr_mode;
  machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  if (POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      address_mode = targetm.addr_space.address_mode (as);
      pointer_mode = targetm.addr_space.pointer_mode (as);
    }

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  if (tmode != address_mode && tmode != pointer_mode)
    tmode = address_mode;

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
				    tmode, modifier, as);

  /* Despite expand_expr claims concerning ignoring TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = tmode;
  if (rmode != tmode)
    result = convert_memory_address_addr_space (tmode, result, as);

  return result;
}
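/* Usage sketch (a hypothetical configuration): on a target with a named
   address space whose pointers are narrower than Pmode, the AS-specific
   address_mode and pointer_mode retrieved above ensure the returned rtx
   is in a mode the backend accepts for that space, rather than blindly
   honoring the caller's TMODE.  */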
/* Generate code for computing CONSTRUCTOR EXP.
   An rtx for the computed value is returned.  If AVOID_TEMP_MEM
   is TRUE, instead of creating a temporary variable in memory
   NULL is returned and the caller needs to handle it differently.  */

static rtx
expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
		    bool avoid_temp_mem)
{
  tree type = TREE_TYPE (exp);
  machine_mode mode = TYPE_MODE (type);

  /* Try to avoid creating a temporary at all.  This is possible
     if all of the initializer is zero.
     FIXME: try to handle all [0..255] initializers we can handle
     with memset.  */
  if (TREE_STATIC (exp)
      && !TREE_ADDRESSABLE (exp)
      && target != 0 && mode == BLKmode
      && all_zeros_p (exp))
    {
      clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
      return target;
    }

  /* All elts simple constants => refer to a constant in memory.  But
     if this is a non-BLKmode mode, let it store a field at a time
     since that should make a CONST_INT, CONST_WIDE_INT or
     CONST_DOUBLE when we fold.  Likewise, if we have a target we can
     use, it is best to store directly into the target unless the type
     is large enough that memcpy will be used.  If we are making an
     initializer and all operands are constant, put it in memory as
     well.

     FIXME: Avoid trying to fill vector constructors piece-meal.
     Output them with output_constant_def below unless we're sure
     they're zeros.  This should go away when vector initializers
     are treated like VECTOR_CST instead of arrays.  */
  if ((TREE_STATIC (exp)
       && ((mode == BLKmode
	    && ! (target != 0 && safe_from_p (target, exp, 1)))
	   || TREE_ADDRESSABLE (exp)
	   || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
	       && (! can_move_by_pieces
		   (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
		    TYPE_ALIGN (type)))
	       && ! mostly_zeros_p (exp))))
      || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
	  && TREE_CONSTANT (exp)))
    {
      rtx constructor;

      if (avoid_temp_mem)
	return NULL_RTX;

      constructor = expand_expr_constant (exp, 1, modifier);

      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM)
	constructor = validize_mem (constructor);

      return constructor;
    }

  /* Handle calls that pass values in multiple non-contiguous
     locations.  The Irix 6 ABI has examples of this.  */
  if (target == 0 || ! safe_from_p (target, exp, 1)
      || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
    {
      if (avoid_temp_mem)
	return NULL_RTX;

      target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
    }

  store_constructor (exp, target, 0, int_expr_size (exp), false);
  return target;
}
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.

   If INNER_REFERENCE_P is true, we are expanding an inner reference.
   In this case, we don't adjust a returned MEM rtx that wouldn't be
   sufficiently aligned for its mode; instead, it's up to the caller
   to deal with it afterwards.  This is used to make sure that unaligned
   base objects for which out-of-bounds accesses are supported, for
   example record types with trailing arrays, aren't realigned behind
   the back of the caller.
   The normal operating mode is to pass FALSE for this parameter.  */
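/* For illustration: with MODIFIER == EXPAND_SUM, expanding a C expression
   like "a + b * 4" (with hypothetical pseudos A and B holding a and b) may
   return un-emitted address arithmetic such as
     (plus:SI (mult:SI (reg:SI B) (const_int 4)) (reg:SI A))
   rather than emitting mul/add insns and returning a pseudo, so that the
   caller can fold the whole sum into an addressing mode.  */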
rtx
expand_expr_real (tree exp, rtx target, machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl,
		  bool inner_reference_p)
{
  rtx ret;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
			    inner_reference_p);
  return ret;
}
/* Try to expand the conditional expression which is represented by
   TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds
   return the rtl reg which represents the result.  Otherwise return
   NULL_RTX.  */
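/* For illustration: this is the path that lets a source-level conditional
   like "x = a < b ? c : d" become a compare followed by a conditional move
   on targets that provide one, instead of a compare-and-branch diamond.  */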
static rtx
expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
			      tree treeop1 ATTRIBUTE_UNUSED,
			      tree treeop2 ATTRIBUTE_UNUSED)
{
  rtx insn;
  rtx op00, op01, op1, op2;
  enum rtx_code comparison_code;
  machine_mode comparison_mode;
  gimple *srcstmt;
  rtx temp;
  tree type = TREE_TYPE (treeop1);
  int unsignedp = TYPE_UNSIGNED (type);
  machine_mode mode = TYPE_MODE (type);
  machine_mode orig_mode = mode;
  static bool expanding_cond_expr_using_cmove = false;
  /* Conditional move expansion can end up TERing two operands which,
     when recursively hitting conditional expressions can result in
     exponential behavior if the cmove expansion ultimately fails.
     It's hardly profitable to TER a cmove into a cmove so avoid doing
     that by failing early if we end up recursing.  */
  if (expanding_cond_expr_using_cmove)
    return NULL_RTX;
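  /* For illustration: in a nest such as "a ? (b ? c : d) : (e ? f : g)",
     expanding the outer conditional as a cmove would recursively try to
     expand both inner conditionals; the static flag above cuts that
     recursion off after one level.  */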
  /* If we cannot do a conditional move on the mode, try doing it
     with the promoted mode.  */
  if (!can_conditionally_move_p (mode))
    {
      mode = promote_mode (type, mode, &unsignedp);
      if (!can_conditionally_move_p (mode))
	return NULL_RTX;
      temp = assign_temp (type, 0, 0); /* Use promoted mode for temp.  */
    }
  else
    temp = assign_temp (type, 0, 1);

  start_sequence ();
  expanding_cond_expr_using_cmove = true;

  expand_operands (treeop1, treeop2,
		   temp, &op1, &op2, EXPAND_NORMAL);

  if (TREE_CODE (treeop0) == SSA_NAME
      && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
      enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
      op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
      op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
      comparison_mode = TYPE_MODE (type);
      unsignedp = TYPE_UNSIGNED (type);
      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
    }
  else if (COMPARISON_CLASS_P (treeop0))
    {
      tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
      enum tree_code cmpcode = TREE_CODE (treeop0);
      op00 = expand_normal (TREE_OPERAND (treeop0, 0));
      op01 = expand_normal (TREE_OPERAND (treeop0, 1));
      unsignedp = TYPE_UNSIGNED (type);
      comparison_mode = TYPE_MODE (type);
      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
    }
  else
    {
      op00 = expand_normal (treeop0);
      op01 = const0_rtx;
      comparison_code = NE;
      comparison_mode = GET_MODE (op00);
      if (comparison_mode == VOIDmode)
	comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
    }
  expanding_cond_expr_using_cmove = false;

  if (GET_MODE (op1) != mode)
    op1 = gen_lowpart (mode, op1);

  if (GET_MODE (op2) != mode)
    op2 = gen_lowpart (mode, op2);

  /* Try to emit the conditional move.  */
  insn = emit_conditional_move (temp, comparison_code,
				op00, op01, comparison_mode,
				op1, op2, mode,
				unsignedp);

  /* If we could do the conditional move, emit the sequence,
     and return.  */
  if (insn)
    {
      rtx_insn *seq = get_insns ();
      end_sequence ();
      emit_insn (seq);
      return convert_modes (orig_mode, mode, temp, 0);
    }
  /* Otherwise discard the sequence and fall back to code with
     branches.  */
  end_sequence ();
  return NULL_RTX;
}

rtx
expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
		    enum expand_modifier modifier)
{
  rtx op0, op1, op2, temp;
  rtx_code_label *lab;
  tree type;
  int unsignedp;
  machine_mode mode;
  scalar_int_mode int_mode;
  enum tree_code code = ops->code;
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  bool reduce_bit_field;
  location_t loc = ops->location;
  tree treeop0, treeop1, treeop2;
#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field			  \
				 ? reduce_to_bit_field_precision ((expr), \
								  target, \
								  type)	  \
				 : (expr))

  type = ops->type;
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = ops->op0;
  treeop1 = ops->op1;
  treeop2 = ops->op2;

  /* We should be called only on simple (binary or unary) expressions,
     exactly those that are valid in gimple expressions that aren't
     GIMPLE_SINGLE_RHS (or invalid).  */
  gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
	      || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
	      || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);

  ignore = (target == const0_rtx
	    || ((CONVERT_EXPR_CODE_P (code)
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* We should be called only if we need the result.  */
  gcc_assert (!ignore);

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (INTEGRAL_TYPE_P (type)
		      && !type_has_mode_precision_p (type));

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;

  switch (code)
    {
    case NON_LVALUE_EXPR:
    case PAREN_EXPR:
    CASE_CONVERT:
      if (treeop0 == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (treeop0);

	  /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attribute.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (treeop0, target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, type, 0);
	      return result;
	    }

	  if (target == 0)
	    {
	      if (TYPE_MODE (type) != BLKmode)
		target = gen_reg_rtx (TYPE_MODE (type));
	      else
		target = assign_temp (type, 1, 1);
	    }

	  if (MEM_P (target))
	    /* Store data into beginning of memory target.  */
	    store_expr (treeop0,
			adjust_address (target, TYPE_MODE (valtype), 0),
			modifier == EXPAND_STACK_PARM,
			false, TYPE_REVERSE_STORAGE_ORDER (type));

	  else
	    {
	      gcc_assert (REG_P (target)
			  && !TYPE_REVERSE_STORAGE_ORDER (type));

	      /* Store this field into a union of the proper type.  */
	      store_field (target,
			   MIN ((int_size_in_bytes (TREE_TYPE (treeop0))
				 * BITS_PER_UNIT),
				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
			   0, 0, 0, TYPE_MODE (valtype), treeop0, 0,
			   false, false);
	    }

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
	{
	  op0 = expand_expr (treeop0, target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return REDUCE_BIT_FIELD (op0);
	}

      op0 = expand_expr (treeop0, NULL_RTX, mode,
			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
      if (GET_MODE (op0) == mode)
	;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      else if (CONSTANT_P (op0))
	{
	  tree inner_type = TREE_TYPE (treeop0);
	  machine_mode inner_mode = GET_MODE (op0);

	  if (inner_mode == VOIDmode)
	    inner_mode = TYPE_MODE (inner_type);

	  if (modifier == EXPAND_INITIALIZER)
	    op0 = lowpart_subreg (mode, op0, inner_mode);
	  else
	    op0 = convert_modes (mode, inner_mode, op0,
				 TYPE_UNSIGNED (inner_type));
	}

      else if (modifier == EXPAND_INITIALIZER)
	op0 = gen_rtx_fmt_e (TYPE_UNSIGNED (TREE_TYPE (treeop0))
			     ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      else if (target == 0)
	op0 = convert_to_mode (mode, op0,
			       TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      else
	{
	  convert_move (target, op0,
			TYPE_UNSIGNED (TREE_TYPE (treeop0)));
	  op0 = target;
	}

      return REDUCE_BIT_FIELD (op0);
    case ADDR_SPACE_CONVERT_EXPR:
      {
	tree treeop0_type = TREE_TYPE (treeop0);

	gcc_assert (POINTER_TYPE_P (type));
	gcc_assert (POINTER_TYPE_P (treeop0_type));

	addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
	addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));

	/* Conversions between pointers to the same address space should
	   have been implemented via CONVERT_EXPR / NOP_EXPR.  */
	gcc_assert (as_to != as_from);

	op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);

	/* Ask target code to handle conversion between pointers
	   to overlapping address spaces.  */
	if (targetm.addr_space.subset_p (as_to, as_from)
	    || targetm.addr_space.subset_p (as_from, as_to))
	  {
	    op0 = targetm.addr_space.convert (op0, treeop0_type, type);
	    gcc_assert (op0);
	    return op0;
	  }

	/* For disjoint address spaces, converting anything but a null
	   pointer invokes undefined behavior.  We truncate or extend the
	   value as if we'd converted via integers, which handles 0 as
	   required, and all others as the programmer likely expects.  */
#ifndef POINTERS_EXTEND_UNSIGNED
	const int POINTERS_EXTEND_UNSIGNED = 1;
#endif
	op0 = convert_modes (mode, TYPE_MODE (treeop0_type),
			     op0, POINTERS_EXTEND_UNSIGNED);
	return op0;
      }

    case POINTER_PLUS_EXPR:
      /* Even though the sizetype mode and the pointer's mode can be different
	 expand is able to handle this correctly and get the correct result out
	 of the PLUS_EXPR code.  */
      /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
	 if sizetype precision is smaller than pointer precision.  */
      if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
	treeop1 = fold_convert_loc (loc, type,
				    fold_convert_loc (loc, ssizetype,
						      treeop1));
      /* If sizetype precision is larger than pointer precision, truncate the
	 offset to have matching modes.  */
      else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
	treeop1 = fold_convert_loc (loc, type, treeop1);
      /* FALLTHRU */

    case PLUS_EXPR:
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */
      if (TREE_CODE (treeop0) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
	  && VAR_P (treeop1)
	  && (DECL_RTL (treeop1) == frame_pointer_rtx
	      || DECL_RTL (treeop1) == stack_pointer_rtx
	      || DECL_RTL (treeop1) == arg_pointer_rtx))
	{
	  gcc_unreachable ();
	}

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
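      /* For illustration: for "p = &arr[10]" with 4-byte elements,
	 plus_constant can fold the whole address into
	   (const (plus (symbol_ref "arr") (const_int 40)))
	 so that no add instruction needs to be emitted at all.  */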
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  if (TREE_CODE (treeop0) == INTEGER_CST
	      && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (treeop1))
	    {
	      rtx constant_part;
	      HOST_WIDE_INT wc;
	      machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));

	      op1 = expand_expr (treeop1, subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use wi::shwi to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
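	      /* For illustration: with a 64-bit HOST_WIDE_INT and an SImode
		 constant whose low bits are 0xffffffff, wi::shwi truncates
		 to SImode and sign-extends, yielding the canonical
		 (const_int -1) rather than (const_int 0xffffffff).  */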
	      wc = TREE_INT_CST_LOW (treeop0);
	      constant_part
		= immed_wide_int_const (wi::shwi (wc, wmode), wmode);
	      op1 = plus_constant (mode, op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return REDUCE_BIT_FIELD (op1);
	    }

	  else if (TREE_CODE (treeop1) == INTEGER_CST
		   && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
		   && TREE_CONSTANT (treeop0))
	    {
	      rtx constant_part;
	      HOST_WIDE_INT wc;
	      machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));

	      op0 = expand_expr (treeop0, subtarget, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (treeop1, NULL_RTX,
				     VOIDmode, modifier);
		  /* Return a PLUS if modifier says it's OK.  */
		  if (modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    return simplify_gen_binary (PLUS, mode, op0, op1);
		  goto binop2;
		}
	      /* Use wi::shwi to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      wc = TREE_INT_CST_LOW (treeop1);
	      constant_part
		= immed_wide_int_const (wi::shwi (wc, wmode), wmode);
	      op0 = plus_constant (mode, op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return REDUCE_BIT_FIELD (op0);
	    }
	}
      /* Use TER to expand pointer addition of a negated value
	 as pointer subtraction.  */
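      /* For illustration: given GIMPLE like "n_2 = -m_1; p_3 = p_0 + n_2"
	 (hypothetical SSA names), following the definition of the offset
	 lets this expand as "p_0 - m_1" instead of materializing the
	 negation first.  */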
      if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
	   || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
	       && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
	  && TREE_CODE (treeop1) == SSA_NAME
	  && TYPE_MODE (TREE_TYPE (treeop0))
	     == TYPE_MODE (TREE_TYPE (treeop1)))
	{
	  gimple *def = get_def_for_expr (treeop1, NEGATE_EXPR);
	  if (def)
	    {
	      treeop1 = gimple_assign_rhs1 (def);
	      code = MINUS_EXPR;
	      goto do_minus;
	    }
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if (modifier != EXPAND_INITIALIZER
	  && (modifier != EXPAND_SUM || mode != ptr_mode))
	{
	  expand_operands (treeop0, treeop1,
			   subtarget, &op0, &op1, modifier);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}

      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, modifier);
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));

    case MINUS_EXPR:
    do_minus:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (treeop0)
	  && really_constant_p (treeop1))
	{
	  expand_operands (treeop0, treeop1,
			   NULL_RTX, &op0, &op1, modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (CONST_INT_P (op1))
	    return REDUCE_BIT_FIELD (plus_constant (mode, op0,
						    -INTVAL (op1)));
	  else
	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if (modifier != EXPAND_INITIALIZER
	  && (modifier != EXPAND_SUM || mode != ptr_mode))
	goto binop;

      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (CONST_INT_P (op1))
	{
	  op1 = negate_rtx (mode, op1);
	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
	}

      goto binop2;
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
      op2 = expand_normal (treeop2);
      target = expand_widen_pattern_expr (ops, op0, op1, op2,
					  target, unsignedp);
      return target;
    case WIDEN_MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (treeop0) == INTEGER_CST)
	std::swap (treeop0, treeop1);

      /* First, check if we have a multiplication of one signed and one
	 unsigned operand.  */
      if (TREE_CODE (treeop1) != INTEGER_CST
	  && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
	      != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
	{
	  machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
	  this_optab = usmul_widen_optab;
	  if (find_widening_optab_handler (this_optab, mode, innermode, 0)
	      != CODE_FOR_nothing)
	    {
	      if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
		expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
				 EXPAND_NORMAL);
	      else
		expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
				 EXPAND_NORMAL);
	      /* op0 and op1 might still be constant, despite the above
		 != INTEGER_CST check.  Handle it.  */
	      if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
		{
		  op0 = convert_modes (innermode, mode, op0, true);
		  op1 = convert_modes (innermode, mode, op1, false);
		  return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
							target, unsignedp));
		}
	      goto binop3;
	    }
	}
      /* Check for a multiplication with matching signedness.  */
      else if ((TREE_CODE (treeop1) == INTEGER_CST
		&& int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
	       || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
		   == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
	{
	  tree op0type = TREE_TYPE (treeop0);
	  machine_mode innermode = TYPE_MODE (op0type);
	  bool zextend_p = TYPE_UNSIGNED (op0type);
	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

	  if (TREE_CODE (treeop0) != INTEGER_CST)
	    {
	      if (find_widening_optab_handler (this_optab, mode, innermode, 0)
		  != CODE_FOR_nothing)
		{
		  expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
				   EXPAND_NORMAL);
		  /* op0 and op1 might still be constant, despite the above
		     != INTEGER_CST check.  Handle it.  */
		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
		    {
		     widen_mult_const:
		      op0 = convert_modes (innermode, mode, op0, zextend_p);
		      op1
			= convert_modes (innermode, mode, op1,
					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
		      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
							    target,
							    unsignedp));
		    }
		  temp = expand_widening_mult (mode, op0, op1, target,
					       unsignedp, this_optab);
		  return REDUCE_BIT_FIELD (temp);
		}
	      if (find_widening_optab_handler (other_optab, mode, innermode, 0)
		  != CODE_FOR_nothing
		  && innermode == word_mode)
		{
		  rtx htem, hipart;
		  op0 = expand_normal (treeop0);
		  if (TREE_CODE (treeop1) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_normal (treeop1),
					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
		  else
		    op1 = expand_normal (treeop1);
		  /* op0 and op1 might still be constant, despite the above
		     != INTEGER_CST check.  Handle it.  */
		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
		    goto widen_mult_const;
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  hipart = gen_highpart (innermode, temp);
		  htem = expand_mult_highpart_adjust (innermode, hipart,
						      op0, op1, hipart,
						      zextend_p);
		  if (htem != hipart)
		    emit_move_insn (hipart, htem);
		  return REDUCE_BIT_FIELD (temp);
		}
	    }
	}
      treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
      treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
    case FMA_EXPR:
      {
	optab opt = fma_optab;
	gimple *def0, *def2;

	/* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
	   call.  */
	if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
	  {
	    tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
	    tree call_expr;

	    gcc_assert (fn != NULL_TREE);
	    call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
	    return expand_builtin (call_expr, target, subtarget, mode, false);
	  }

	def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
	/* The multiplication is commutative - look at its 2nd operand
	   if the first isn't fed by a negate.  */
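	/* For illustration: "-a * b + c" reaches this point as an FMA whose
	   first multiplication operand is fed by a negate, which selects the
	   fnma pattern below; a negated addend likewise selects fms, and
	   both together select fnms.  */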
	if (!def0)
	  {
	    def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
	    /* Swap operands if the 2nd operand is fed by a negate.  */
	    if (def0)
	      std::swap (treeop0, treeop1);
	  }
	def2 = get_def_for_expr (treeop2, NEGATE_EXPR);

	op0 = op2 = NULL;

	if (def0 && def2
	    && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
	  {
	    opt = fnms_optab;
	    op0 = expand_normal (gimple_assign_rhs1 (def0));
	    op2 = expand_normal (gimple_assign_rhs1 (def2));
	  }
	else if (def0
		 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
	  {
	    opt = fnma_optab;
	    op0 = expand_normal (gimple_assign_rhs1 (def0));
	  }
	else if (def2
		 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
	  {
	    opt = fms_optab;
	    op2 = expand_normal (gimple_assign_rhs1 (def2));
	  }

	if (op0 == NULL)
	  op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
	if (op2 == NULL)
	  op2 = expand_normal (treeop2);
	op1 = expand_normal (treeop1);

	return expand_ternary_op (TYPE_MODE (type), opt,
				  op0, op1, op2, target, 0);
      }

    case MULT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_mult" doesn't support sat/no-sat fixed-point
	 multiplications.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (treeop0) == INTEGER_CST)
	std::swap (treeop0, treeop1);

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && tree_fits_shwi_p (treeop1))
	{
	  tree exp1 = treeop1;

	  op0 = expand_expr (treeop0, subtarget, VOIDmode,
			     EXPAND_SUM);

	  if (!REG_P (op0))
	    op0 = force_operand (op0, NULL_RTX);
	  if (!REG_P (op0))
	    op0 = copy_to_mode_reg (mode, op0);

	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
			       gen_int_mode (tree_to_shwi (exp1),
					     TYPE_MODE (TREE_TYPE (exp1)))));
	}

      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      {
	/* If this is a fixed-point operation, then we cannot use the code
	   below because "expand_divmod" doesn't support sat/no-sat fixed-point
	   divisions.  */
	if (ALL_FIXED_POINT_MODE_P (mode))
	  goto binop;

	if (modifier == EXPAND_STACK_PARM)
	  target = 0;
	/* Possible optimization: compute the dividend with EXPAND_SUM
	   then if the divisor is constant can optimize the case
	   where some terms of the dividend have coeffs divisible by it.  */
	expand_operands (treeop0, treeop1,
			 subtarget, &op0, &op1, EXPAND_NORMAL);
	bool mod_p = code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR
		     || code == CEIL_MOD_EXPR || code == ROUND_MOD_EXPR;
	if (SCALAR_INT_MODE_P (mode)
	    && optimize >= 2
	    && get_range_pos_neg (treeop0) == 1
	    && get_range_pos_neg (treeop1) == 1)
	  {
	    /* If both arguments are known to be positive when interpreted
	       as signed, we can expand it as both signed and unsigned
	       division or modulo.  Choose the cheaper sequence in that
	       case.  */
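	    /* For illustration: for "x / 8" with x known non-negative, the
	       unsigned sequence is a single logical right shift, while the
	       signed sequence needs extra fixup insns to round toward zero
	       for negative dividends; the cost comparison below picks the
	       plain shift.  */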
	    bool speed_p = optimize_insn_for_speed_p ();
	    do_pending_stack_adjust ();
	    start_sequence ();
	    rtx uns_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 1);
	    rtx_insn *uns_insns = get_insns ();
	    end_sequence ();
	    start_sequence ();
	    rtx sgn_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 0);
	    rtx_insn *sgn_insns = get_insns ();
	    end_sequence ();
	    unsigned uns_cost = seq_cost (uns_insns, speed_p);
	    unsigned sgn_cost = seq_cost (sgn_insns, speed_p);

	    /* If costs are the same then use the other factor as a
	       tie breaker.  */
	    if (uns_cost == sgn_cost)
	      {
		uns_cost = seq_cost (uns_insns, !speed_p);
		sgn_cost = seq_cost (sgn_insns, !speed_p);
	      }

	    if (uns_cost < sgn_cost || (uns_cost == sgn_cost && unsignedp))
	      {
		emit_insn (uns_insns);
		return uns_ret;
	      }
	    emit_insn (sgn_insns);
	    return sgn_ret;
	  }
	return expand_divmod (mod_p, code, mode, op0, op1, target, unsignedp);
      }

    case RDIV_EXPR:
      goto binop;
    case MULT_HIGHPART_EXPR:
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
      gcc_assert (temp);
      return temp;

    case FIXED_CONVERT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);

      if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
	   && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
	  || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
      else
	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
      return target;

    case FIX_TRUNC_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
				op0);
      expand_float (target, op0,
		    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      return target;

    case NEGATE_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode,
			  optab_for_tree_code (NEGATE_EXPR, type,
					       optab_default),
			  op0, target, 0);
      gcc_assert (temp);
      return REDUCE_BIT_FIELD (temp);

    case ABS_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, treeop0, 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || (MEM_P (target) && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (REG_P (target)
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      expand_operands (treeop0, treeop1,
		       target, &op0, &op1, EXPAND_NORMAL);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = optab_for_tree_code (code, type, optab_default);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* For vector MIN <x, y>, expand it a VEC_COND_EXPR <x <= y, x, y>
	 and similarly for MAX <x, y>.  */
      if (VECTOR_TYPE_P (type))
	{
	  tree t0 = make_tree (type, op0);
	  tree t1 = make_tree (type, op1);
	  tree comparison = build2 (code == MIN_EXPR ? LE_EXPR : GE_EXPR,
				    type, t0, t1);
	  return expand_vec_cond_expr (type, comparison, t0, t1,
				       original_target);
	}

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (! REG_P (target))
	target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
	std::swap (op0, op1);

      /* We generate better code and avoid problems with op1 mentioning
	 target by forcing op1 into a pseudo if it isn't a constant.  */
      if (! CONSTANT_P (op1))
	op1 = force_reg (mode, op1);

      {
	enum rtx_code comparison_code;
	rtx cmpop1 = op1;

	if (code == MAX_EXPR)
	  comparison_code = unsignedp ? GEU : GE;
	else
	  comparison_code = unsignedp ? LEU : LE;

	/* Canonicalize to comparisons against 0.  */
	if (op1 == const1_rtx)
	  {
	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
	       or (a != 0 ? a : 1) for unsigned.
	       For MIN we are safe converting (a <= 1 ? a : 1)
	       into (a <= 0 ? a : 1)  */
	    cmpop1 = const0_rtx;
	    if (code == MAX_EXPR)
	      comparison_code = unsignedp ? NE : GT;
	  }
	if (op1 == constm1_rtx && !unsignedp)
	  {
	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
	    cmpop1 = const0_rtx;
	    if (code == MIN_EXPR)
	      comparison_code = LT;
	  }

	/* Use a conditional move if possible.  */
	if (can_conditionally_move_p (mode))
	  {
	    rtx insn;

	    start_sequence ();

	    /* Try to emit the conditional move.  */
	    insn = emit_conditional_move (target, comparison_code,
					  op0, cmpop1, mode,
					  op0, op1, mode,
					  unsignedp);

	    /* If we could do the conditional move, emit the sequence,
	       and return.  */
	    if (insn)
	      {
		rtx_insn *seq = get_insns ();
		end_sequence ();
		emit_insn (seq);
		return target;
	      }

	    /* Otherwise discard the sequence and fall back to code with
	       branches.  */
	    end_sequence ();
	  }

	if (target != op0)
	  emit_move_insn (target, op0);

	lab = gen_label_rtx ();
	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
				 unsignedp, mode, NULL_RTX, NULL, lab,
				 profile_probability::uninitialized ());
      }
      emit_move_insn (target, op1);
      emit_label (lab);
      return target;
    case BIT_NOT_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* In case we have to reduce the result to bitfield precision
	 for unsigned bitfield expand this as XOR with a proper constant
	 instead.  */
      if (reduce_bit_field && TYPE_UNSIGNED (type))
	{
	  wide_int mask = wi::mask (TYPE_PRECISION (type),
				    false, GET_MODE_PRECISION (mode));

	  temp = expand_binop (mode, xor_optab, op0,
			       immed_wide_int_const (mask, mode),
			       target, 1, OPTAB_LIB_WIDEN);
	}
      else
	temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      goto binop;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
		  || type_has_mode_precision_p (type));
      /* fall through */

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      {
	/* If this is a fixed-point operation, then we cannot use the code
	   below because "expand_shift" doesn't support sat/no-sat fixed-point
	   shifts.  */
	if (ALL_FIXED_POINT_MODE_P (mode))
	  goto binop;

	if (! safe_from_p (subtarget, treeop1, 1))
	  subtarget = 0;
	if (modifier == EXPAND_STACK_PARM)
	  target = 0;
	op0 = expand_expr (treeop0, subtarget,
			   VOIDmode, EXPAND_NORMAL);

	/* Left shift optimization when shifting across word_size boundary.

	   If mode == GET_MODE_WIDER_MODE (word_mode), then normally
	   there isn't a native instruction to support this wide-mode
	   left shift.  Given the scenario below:

	    Type A = (Type) B  << C

	    |<		 T	    >|
	    | dest_high  |  dest_low |

			 | word_size |

	   If the shift amount C causes us to shift B across the word
	   size boundary, i.e. part of B is shifted into the high half of
	   the destination register while part of B remains in the low
	   half, then GCC will use the following left shift expand
	   logic:

	   1. Initialize dest_low to B.
	   2. Initialize every bit of dest_high to the sign bit of B.
	   3. Logical left shift dest_low by C bits to finalize dest_low.
	      The value of dest_low before this shift is kept in a temp D.
	   4. Logical left shift dest_high by C.
	   5. Logical right shift D by (word_size - C).
	   6. Or the result of 4 and 5 to finalize dest_high.

	   However, by checking gimple statements, if operand B is
	   coming from a sign extension, then we can simplify the above
	   expand logic into:

	      1. dest_high = src_low >> (word_size - C).
	      2. dest_low = src_low << C.

	   We can use one arithmetic right shift to finish all the
	   purpose of steps 2, 4, 5, 6, thus we reduce the steps
	   needed from 6 to 2.

	   The case is similar for zero extension, except that we
	   initialize dest_high to zero rather than copies of the sign
	   bit from B.  Furthermore, we need to use a logical right shift
	   in this case.

	   The choice of sign-extension versus zero-extension is
	   determined entirely by whether or not B is signed and is
	   independent of the current setting of unsignedp.  */
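	/* Worked example, assuming 32-bit words with a two-word wide mode:
	   for "(long long) b << 5" where b is a signed 32-bit value held in
	   src_low, the simplified expansion is just
	     dest_high = src_low >> 27   (arithmetic right shift)
	     dest_low  = src_low << 5
	   since bits 27..31 of b are exactly the ones that cross into the
	   high word, and the arithmetic shift reproduces the sign copies
	   in the remaining high bits.  */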
	temp = NULL_RTX;
	if (code == LSHIFT_EXPR
	    && target
	    && REG_P (target)
	    && GET_MODE_2XWIDER_MODE (word_mode).exists (&int_mode)
	    && mode == int_mode
	    && TREE_CONSTANT (treeop1)
	    && TREE_CODE (treeop0) == SSA_NAME)
	  {
	    gimple *def = SSA_NAME_DEF_STMT (treeop0);
	    if (is_gimple_assign (def)
		&& gimple_assign_rhs_code (def) == NOP_EXPR)
	      {
		machine_mode rmode = TYPE_MODE
		  (TREE_TYPE (gimple_assign_rhs1 (def)));

		if (GET_MODE_SIZE (rmode) < GET_MODE_SIZE (int_mode)
		    && TREE_INT_CST_LOW (treeop1) < GET_MODE_BITSIZE (word_mode)
		    && ((TREE_INT_CST_LOW (treeop1) + GET_MODE_BITSIZE (rmode))
			>= GET_MODE_BITSIZE (word_mode)))
		  {
		    rtx_insn *seq, *seq_old;
		    unsigned int high_off = subreg_highpart_offset (word_mode,
								    int_mode);
		    bool extend_unsigned
		      = TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (def)));
		    rtx low = lowpart_subreg (word_mode, op0, int_mode);
		    rtx dest_low = lowpart_subreg (word_mode, target, int_mode);
		    rtx dest_high = simplify_gen_subreg (word_mode, target,
							 int_mode, high_off);
		    HOST_WIDE_INT ramount = (BITS_PER_WORD
					     - TREE_INT_CST_LOW (treeop1));
		    tree rshift = build_int_cst (TREE_TYPE (treeop1), ramount);

		    start_sequence ();
		    /* dest_high = src_low >> (word_size - C).  */
		    temp = expand_variable_shift (RSHIFT_EXPR, word_mode, low,
						  rshift, dest_high,
						  extend_unsigned);
		    if (temp != dest_high)
		      emit_move_insn (dest_high, temp);

		    /* dest_low = src_low << C.  */
		    temp = expand_variable_shift (LSHIFT_EXPR, word_mode, low,
						  treeop1, dest_low, unsignedp);
		    if (temp != dest_low)
		      emit_move_insn (dest_low, temp);

		    seq = get_insns ();
		    end_sequence ();
		    temp = target;

		    if (have_insn_for (ASHIFT, int_mode))
		      {
			bool speed_p = optimize_insn_for_speed_p ();
			start_sequence ();
			rtx ret_old = expand_variable_shift (code, int_mode,
							     op0, treeop1,
							     target,
							     unsignedp);

			seq_old = get_insns ();
			end_sequence ();
			if (seq_cost (seq, speed_p)
			    >= seq_cost (seq_old, speed_p))
			  {
			    seq = seq_old;
			    temp = ret_old;
			  }
		      }
		    emit_insn (seq);
		  }
	      }
	  }

	if (temp == NULL_RTX)
	  temp = expand_variable_shift (code, mode, op0, treeop1, target,
					unsignedp);
	if (code == LSHIFT_EXPR)
	  temp = REDUCE_BIT_FIELD (temp);
	return temp;
      }
      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      {
	temp = do_store_flag (ops,
			      modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			      tmode != VOIDmode ? tmode : mode);
	if (temp)
	  return temp;

	/* Use a compare and a jump for BLKmode comparisons, or for function
	   type comparisons if have_canonicalize_funcptr_for_compare.  */

	if ((target == 0
	     || modifier == EXPAND_STACK_PARM
	     || ! safe_from_p (target, treeop0, 1)
	     || ! safe_from_p (target, treeop1, 1)
	     /* Make sure we don't have a hard reg (such as function's return
		value) live across basic blocks, if not optimizing.  */
	     || (!optimize && REG_P (target)
		 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	emit_move_insn (target, const0_rtx);

	rtx_code_label *lab1 = gen_label_rtx ();
	jumpifnot_1 (code, treeop0, treeop1, lab1,
		     profile_probability::uninitialized ());

	if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
	  emit_move_insn (target, constm1_rtx);
	else
	  emit_move_insn (target, const1_rtx);

	emit_label (lab1);
	return target;
      }
    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
      op0 = expand_normal (treeop0);
      op1 = expand_normal (treeop1);

      if (!target)
	target = gen_reg_rtx (TYPE_MODE (type));
      else
	/* If target overlaps with op1, then either we need to force
	   op1 into a pseudo (if target also overlaps with op0),
	   or write the complex parts in reverse order.  */
	switch (GET_CODE (target))
	  {
	  case CONCAT:
	    if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
	      {
		if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
		  {
		  complex_expr_force_op1:
		    temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
		    emit_move_insn (temp, op1);
		    op1 = temp;
		    break;
		  }
	      complex_expr_swap_order:
		/* Move the imaginary (op1) and real (op0) parts to their
		   location.  */
		write_complex_part (target, op1, true);
		write_complex_part (target, op0, false);

		return target;
	      }
	    break;
	  case MEM:
	    temp = adjust_address_nv (target,
				      GET_MODE_INNER (GET_MODE (target)), 0);
	    if (reg_overlap_mentioned_p (temp, op1))
	      {
		machine_mode imode = GET_MODE_INNER (GET_MODE (target));
		temp = adjust_address_nv (target, imode,
					  GET_MODE_SIZE (imode));
		if (reg_overlap_mentioned_p (temp, op0))
		  goto complex_expr_force_op1;
		goto complex_expr_swap_order;
	      }
	    break;
	  default:
	    if (reg_overlap_mentioned_p (target, op1))
	      {
		if (reg_overlap_mentioned_p (target, op0))
		  goto complex_expr_force_op1;
		goto complex_expr_swap_order;
	      }
	    break;
	  }

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);

      return target;
    case WIDEN_SUM_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
					    target, unsignedp);
	return target;
      }

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
      {
	op0 = expand_normal (treeop0);
	this_optab = optab_for_tree_code (code, type, optab_default);
	machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));

	struct expand_operand ops[2];
	enum insn_code icode = optab_handler (this_optab, vec_mode);

	create_output_operand (&ops[0], target, mode);
	create_input_operand (&ops[1], op0, vec_mode);
	expand_insn (icode, 2, ops);
	target = ops[0].value;
	if (GET_MODE (target) != mode)
	  return gen_lowpart (tmode, target);
	return target;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
      {
	op0 = expand_normal (treeop0);
	temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
					  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      {
	op0 = expand_normal (treeop0);
	/* The signedness is determined from input operand.  */
	temp = expand_widen_pattern_expr
	  (ops, op0, NULL_RTX, NULL_RTX,
	   target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));

	gcc_assert (temp);
	return temp;
      }

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
      target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
					  target, unsignedp);
      gcc_assert (target);
      return target;
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      mode = TYPE_MODE (TREE_TYPE (treeop0));
      goto binop;

    case VEC_PERM_EXPR:
      expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
      op2 = expand_normal (treeop2);

      /* Careful here: if the target doesn't support integral vector modes,
	 a constant selection vector could wind up smooshed into a normal
	 integral constant.  */
      if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
	{
	  tree sel_type = TREE_TYPE (treeop2);
	  machine_mode vmode
	    = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
			       TYPE_VECTOR_SUBPARTS (sel_type));
	  gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
	  op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
	  gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
	}
      else
	gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);

      temp = expand_vec_perm (mode, op0, op1, op2, target);
      gcc_assert (temp);
      return temp;

    case DOT_PROD_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;
	tree oprnd2 = treeop2;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	target = expand_widen_pattern_expr (ops, op0, op1, op2,
					    target, unsignedp);
	return target;
      }

    case SAD_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;
	tree oprnd2 = treeop2;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	target = expand_widen_pattern_expr (ops, op0, op1, op2,
					    target, unsignedp);
	return target;
      }

    case REALIGN_LOAD_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;
	tree oprnd2 = treeop2;
	rtx op2;

	this_optab = optab_for_tree_code (code, type, optab_default);
	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
				  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }
    case COND_EXPR:
      {
	/* A COND_EXPR with its type being VOID_TYPE represents a
	   conditional jump and is handled in
	   expand_gimple_cond_expr.  */
	gcc_assert (!VOID_TYPE_P (type));

	/* Note that COND_EXPRs whose type is a structure or union
	   are required to be constructed to contain assignments of
	   a temporary variable, so that we can evaluate them here
	   for side effect only.  If type is void, we must do likewise.  */

	gcc_assert (!TREE_ADDRESSABLE (type)
		    && !ignore
		    && TREE_TYPE (treeop1) != void_type_node
		    && TREE_TYPE (treeop2) != void_type_node);

	temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
	if (temp)
	  return temp;

	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (modifier != EXPAND_STACK_PARM
	    && original_target
	    && safe_from_p (original_target, treeop0, 1)
	    && GET_MODE (original_target) == mode
	    && !MEM_P (original_target))
	  temp = original_target;
	else
	  temp = assign_temp (type, 0, 1);

	do_pending_stack_adjust ();
	NO_DEFER_POP;
	rtx_code_label *lab0 = gen_label_rtx ();
	rtx_code_label *lab1 = gen_label_rtx ();
	jumpifnot (treeop0, lab0,
		   profile_probability::uninitialized ());
	store_expr (treeop1, temp,
		    modifier == EXPAND_STACK_PARM,
		    false, false);

	emit_jump_insn (targetm.gen_jump (lab1));
	emit_barrier ();
	emit_label (lab0);
	store_expr (treeop2, temp,
		    modifier == EXPAND_STACK_PARM,
		    false, false);

	emit_label (lab1);
	OK_DEFER_POP;
	return temp;
      }

    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
      return target;

    case BIT_INSERT_EXPR:
      {
	unsigned bitpos = tree_to_uhwi (treeop2);
	unsigned bitsize;
	if (INTEGRAL_TYPE_P (TREE_TYPE (treeop1)))
	  bitsize = TYPE_PRECISION (TREE_TYPE (treeop1));
	else
	  bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (treeop1)));
	rtx op0 = expand_normal (treeop0);
	rtx op1 = expand_normal (treeop1);
	rtx dst = gen_reg_rtx (mode);
	emit_move_insn (dst, op0);
	store_bit_field (dst, bitsize, bitpos, 0, 0,
			 TYPE_MODE (TREE_TYPE (treeop1)), op1, false);
	return dst;
      }

    default:
      gcc_unreachable ();
    }

  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (treeop0, treeop1,
		   subtarget, &op0, &op1, EXPAND_NORMAL);
 binop2:
  this_optab = optab_for_tree_code (code, type, optab_default);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  /* Bitwise operations do not need bitfield reduction as we expect their
     operands being properly truncated.  */
  if (code == BIT_XOR_EXPR
      || code == BIT_AND_EXPR
      || code == BIT_IOR_EXPR)
    return temp;
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
/* Return TRUE if expression STMT is suitable for replacement.
   Never consider memory loads as replaceable, because those don't ever lead
   into constant expressions.  */

static bool
stmt_is_replaceable_p (gimple *stmt)
{
  if (ssa_is_replaceable_p (stmt))
    {
      /* Don't move around loads.  */
      if (!gimple_assign_single_p (stmt)
	  || is_gimple_val (gimple_assign_rhs1 (stmt)))
	return true;
    }

  return false;
}
rtx
expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl,
		    bool inner_reference_p)
{
  rtx op0, op1, temp, decl_rtl;
  tree type;
  int unsignedp;
  machine_mode mode, dmode;
  enum tree_code code = TREE_CODE (exp);
  rtx subtarget, original_target;
  int ignore;
  tree context;
  bool reduce_bit_field;
  location_t loc = EXPR_LOCATION (exp);
  struct separate_ops ops;
  tree treeop0, treeop1, treeop2;
  tree ssa_name = NULL_TREE;
  gimple *g;

  type = TREE_TYPE (exp);
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = treeop1 = treeop2 = NULL_TREE;
  if (!VL_EXP_CLASS_P (exp))
    switch (TREE_CODE_LENGTH (code))
      {
      default:
      case 3: treeop2 = TREE_OPERAND (exp, 2); /* FALLTHRU */
      case 2: treeop1 = TREE_OPERAND (exp, 1); /* FALLTHRU */
      case 1: treeop0 = TREE_OPERAND (exp, 0); /* FALLTHRU */
      case 0: break;
      }
  ops.code = code;
  ops.type = type;
  ops.op0 = treeop0;
  ops.op1 = treeop1;
  ops.op2 = treeop2;
  ops.location = loc;

  ignore = (target == const0_rtx
	    || ((CONVERT_EXPR_CODE_P (code)
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (!ignore
		      && INTEGRAL_TYPE_P (type)
		      && !type_has_mode_precision_p (type));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (MEM_P (temp))
	    copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == tcc_unary
	  || code == BIT_FIELD_REF
	  || code == COMPONENT_REF
	  || code == INDIRECT_REF)
	return expand_expr (treeop0, const0_rtx, VOIDmode,
			    modifier);

      else if (TREE_CODE_CLASS (code) == tcc_binary
	       || TREE_CODE_CLASS (code) == tcc_comparison
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);

	temp = label_rtx (exp);
	temp = gen_rtx_LABEL_REF (Pmode, temp);

	if (function != current_function_decl
	    && function != 0)
	  LABEL_REF_NONLOCAL_P (temp) = 1;

	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
	return temp;
      }

    case SSA_NAME:
      /* ??? ivopts calls expander, without any preparation from
	 out-of-ssa.  So fake instructions as if this was an access to the
	 base variable.  This unnecessarily allocates a pseudo, see how we can
	 reuse it, if partition base vars have it set already.  */
      if (!currently_expanding_to_rtl)
	{
	  tree var = SSA_NAME_VAR (exp);
	  if (var && DECL_RTL_SET_P (var))
	    return DECL_RTL (var);
	  return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
			      LAST_VIRTUAL_REGISTER + 1);
	}

      g = get_gimple_for_ssa_name (exp);
      /* For EXPAND_INITIALIZER try harder to get something simpler.  */
      if (g == NULL
	  && modifier == EXPAND_INITIALIZER
	  && !SSA_NAME_IS_DEFAULT_DEF (exp)
	  && (optimize || !SSA_NAME_VAR (exp)
	      || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
	  && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
	g = SSA_NAME_DEF_STMT (exp);
      if (g)
	{
	  rtx r;
	  location_t saved_loc = curr_insn_location ();
	  location_t loc = gimple_location (g);
	  if (loc != UNKNOWN_LOCATION)
	    set_curr_insn_location (loc);
	  ops.code = gimple_assign_rhs_code (g);
	  switch (get_gimple_rhs_class (ops.code))
	    {
	    case GIMPLE_TERNARY_RHS:
	      ops.op2 = gimple_assign_rhs3 (g);
	      /* FALLTHRU */
	    case GIMPLE_BINARY_RHS:
	      ops.op1 = gimple_assign_rhs2 (g);

	      /* Try to expand conditional compare.  */
	      if (targetm.gen_ccmp_first)
		{
		  gcc_checking_assert (targetm.gen_ccmp_next != NULL);
		  r = expand_ccmp_expr (g, mode);
		  if (r)
		    break;
		}
	      /* FALLTHRU */
	    case GIMPLE_UNARY_RHS:
	      ops.op0 = gimple_assign_rhs1 (g);
	      ops.type = TREE_TYPE (gimple_assign_lhs (g));
	      ops.location = loc;
	      r = expand_expr_real_2 (&ops, target, tmode, modifier);
	      break;
	    case GIMPLE_SINGLE_RHS:
	      {
		r = expand_expr_real (gimple_assign_rhs1 (g), target,
				      tmode, modifier, alt_rtl,
				      inner_reference_p);
		break;
	      }
	    default:
	      gcc_unreachable ();
	    }
	  set_curr_insn_location (saved_loc);
	  if (REG_P (r) && !REG_EXPR (r))
	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
	  return r;
	}

      ssa_name = exp;
      decl_rtl = get_rtx_for_ssa_name (ssa_name);
      exp = SSA_NAME_VAR (ssa_name);
      goto expand_decl_rtl;
    case PARM_DECL:
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	layout_decl (exp, 0);

      /* fall through */

    case FUNCTION_DECL:
    case RESULT_DECL:
      decl_rtl = DECL_RTL (exp);
    expand_decl_rtl:
      gcc_assert (decl_rtl);

      /* DECL_MODE might change when TYPE_MODE depends on attribute target
	 settings for VECTOR_TYPE_P that might switch for the function.  */
      if (currently_expanding_to_rtl
	  && code == VAR_DECL && MEM_P (decl_rtl)
	  && VECTOR_TYPE_P (type) && exp && DECL_MODE (exp) != mode)
	decl_rtl = change_address (decl_rtl, TYPE_MODE (type), 0);
      else
	decl_rtl = copy_rtx (decl_rtl);

      /* Record writes to register variables.  */
      if (modifier == EXPAND_WRITE
	  && REG_P (decl_rtl)
	  && HARD_REGISTER_P (decl_rtl))
	add_to_hard_reg_set (&crtl->asm_clobbers,
			     GET_MODE (decl_rtl), REGNO (decl_rtl));

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (exp)
	TREE_USED (exp) = 1;

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Variables inherited from containing functions should have
	 been lowered by this point.  */
      if (exp)
	context = decl_function_context (exp);
      gcc_assert (!exp
		  || SCOPE_FILE_SCOPE_P (context)
		  || context == current_function_decl
		  || TREE_STATIC (exp)
		  || DECL_EXTERNAL (exp)
		  /* ??? C++ creates functions that are not TREE_STATIC.  */
		  || TREE_CODE (exp) == FUNCTION_DECL);
      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 ??? We aren't parsing while expanding anymore.  */

      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
	temp = validize_mem (decl_rtl);

      /* If DECL_RTL is memory, we are in the normal case and the
	 address is not valid, get the address into a register.  */

      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
	{
	  if (alt_rtl)
	    *alt_rtl = decl_rtl;
	  decl_rtl = use_anchored_address (decl_rtl);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_SUM
	      && !memory_address_addr_space_p (exp ? DECL_MODE (exp)
					       : GET_MODE (decl_rtl),
					       XEXP (decl_rtl, 0),
					       MEM_ADDR_SPACE (decl_rtl)))
	    temp = replace_equiv_address (decl_rtl,
					  copy_rtx (XEXP (decl_rtl, 0)));
	}

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (exp && MEM_P (temp) && REG_P (XEXP (temp, 0)))
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}

      if (exp)
	dmode = DECL_MODE (exp);
      else
	dmode = TYPE_MODE (TREE_TYPE (ssa_name));

      /* If the mode of DECL_RTL does not match that of the decl,
	 there are two cases: we are dealing with a BLKmode value
	 that is returned in a register, or we are dealing with
	 a promoted value.  In the latter case, return a SUBREG
	 of the wanted mode, but mark it so that we know that it
	 was already extended.  */
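      /* For illustration: on a target that promotes subword variables, a
	 declared QImode variable may live in an SImode register; what gets
	 returned below is (subreg:QI (reg:SI N) 0) with
	 SUBREG_PROMOTED_VAR_P set, so later code can skip redundant
	 re-extensions.  */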
      if (REG_P (decl_rtl)
	  && dmode != BLKmode
	  && GET_MODE (decl_rtl) != dmode)
	{
	  machine_mode pmode;

	  /* Get the signedness to be used for this variable.  Ensure we get
	     the same mode we got when the variable was declared.  */
	  if (code != SSA_NAME)
	    pmode = promote_decl_mode (exp, &unsignedp);
	  else if ((g = SSA_NAME_DEF_STMT (ssa_name))
		   && gimple_code (g) == GIMPLE_CALL
		   && !gimple_call_internal_p (g))
	    pmode = promote_function_mode (type, mode, &unsignedp,
					   gimple_call_fntype (g),
					   2);
	  else
	    pmode = promote_ssa_mode (ssa_name, &unsignedp);
	  gcc_assert (GET_MODE (decl_rtl) == pmode);

	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_SET (temp, unsignedp);
	  return temp;
	}

      return decl_rtl;

    case INTEGER_CST:
      {
	/* Given that TYPE_PRECISION (type) is not always equal to
	   GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
	   the former to the latter according to the signedness of the
	   type.  */
	temp = immed_wide_int_const (wi::to_wide
				     (exp,
				      GET_MODE_PRECISION (TYPE_MODE (type))),
				     TYPE_MODE (type));
	return temp;
      }

    case VECTOR_CST:
      {
	tree tmp = NULL_TREE;
	if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
	  return const_vector_from_tree (exp);
	scalar_int_mode int_mode;
	if (is_int_mode (mode, &int_mode))
	  {
	    if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
	      return const_scalar_mask_from_tree (exp);
	    else
	      {
		tree type_for_mode
		  = lang_hooks.types.type_for_mode (int_mode, 1);
		if (type_for_mode)
		  tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR,
					type_for_mode, exp);
	      }
	  }
	if (!tmp)
	  {
	    vec<constructor_elt, va_gc> *v;
	    unsigned i;
	    vec_alloc (v, VECTOR_CST_NELTS (exp));
	    for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
	      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
	    tmp = build_constructor (type, v);
	  }
	return expand_expr (tmp, ignore ? const0_rtx : target,
			    tmode, modifier);
      }
10000 if (modifier
== EXPAND_WRITE
)
10002 /* Writing into CONST_DECL is always invalid, but handle it
10004 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (exp
));
10005 machine_mode address_mode
= targetm
.addr_space
.address_mode (as
);
10006 op0
= expand_expr_addr_expr_1 (exp
, NULL_RTX
, address_mode
,
10007 EXPAND_NORMAL
, as
);
10008 op0
= memory_address_addr_space (mode
, op0
, as
);
10009 temp
= gen_rtx_MEM (mode
, op0
);
10010 set_mem_addr_space (temp
, as
);
10013 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, modifier
);
10016 /* If optimized, generate immediate CONST_DOUBLE
10017 which will be turned into memory by reload if necessary.
10019 We used to force a register so that loop.c could see it. But
10020 this does not allow gen_* patterns to perform optimizations with
10021 the constants. It also produces two insns in cases like "x = 1.0;".
10022 On most machines, floating-point constants are not permitted in
10023 many insns, so we'd end up copying it to a register in any case.
10025 Now, we do the copying in expand_binop, if appropriate. */
10026 return const_double_from_real_value (TREE_REAL_CST (exp
),
10027 TYPE_MODE (TREE_TYPE (exp
)));
10030 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp
),
10031 TYPE_MODE (TREE_TYPE (exp
)));
10034 /* Handle evaluating a complex constant in a CONCAT target. */
10035 if (original_target
&& GET_CODE (original_target
) == CONCAT
)
10037 machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
10040 rtarg
= XEXP (original_target
, 0);
10041 itarg
= XEXP (original_target
, 1);
10043 /* Move the real and imaginary parts separately. */
10044 op0
= expand_expr (TREE_REALPART (exp
), rtarg
, mode
, EXPAND_NORMAL
);
10045 op1
= expand_expr (TREE_IMAGPART (exp
), itarg
, mode
, EXPAND_NORMAL
);
10048 emit_move_insn (rtarg
, op0
);
10050 emit_move_insn (itarg
, op1
);
10052 return original_target
;
    case STRING_CST:
      temp = expand_expr_constant (exp, 1, modifier);

      /* temp contains a constant address.
         On RISC machines where a constant address isn't valid,
         make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM
          && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
                                            MEM_ADDR_SPACE (temp)))
        return replace_equiv_address (temp,
                                      copy_rtx (XEXP (temp, 0)));
      return temp;
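    /* A SAVE_EXPR must be evaluated exactly once; after the first
       expansion the result is stashed in an artificial VAR_DECL whose
       DECL_RTL later references reuse instead of re-evaluating the
       operand.  */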
    case SAVE_EXPR:
      {
        tree val = treeop0;
        rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
                                      inner_reference_p);

        if (!SAVE_EXPR_RESOLVED_P (exp))
          {
            /* We can indeed still hit this case, typically via builtin
               expanders calling save_expr immediately before expanding
               something.  Assume this means that we only have to deal
               with non-BLKmode values.  */
            gcc_assert (GET_MODE (ret) != BLKmode);

            val = build_decl (curr_insn_location (),
                              VAR_DECL, NULL, TREE_TYPE (exp));
            DECL_ARTIFICIAL (val) = 1;
            DECL_IGNORED_P (val) = 1;
            treeop0 = val;
            TREE_OPERAND (exp, 0) = treeop0;
            SAVE_EXPR_RESOLVED_P (exp) = 1;

            if (!CONSTANT_P (ret))
              ret = copy_to_reg (ret);
            SET_DECL_RTL (val, ret);
          }

        return ret;
      }

    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
         subexpressions.  */
      if (ignore)
        {
          unsigned HOST_WIDE_INT idx;
          tree value;

          FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
            expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);

          return const0_rtx;
        }

      return expand_constructor (exp, target, modifier, false);
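    /* For both TARGET_MEM_REF and MEM_REF below, a load that is less
       aligned than its mode requires is done through movmisalign when
       the target provides it, so the MEM never reaches a regular move
       pattern with an invalid alignment.  */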
    case TARGET_MEM_REF:
      {
        addr_space_t as
          = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
        enum insn_code icode;
        unsigned int align;

        op0 = addr_for_mem_ref (exp, as, true);
        op0 = memory_address_addr_space (mode, op0, as);
        temp = gen_rtx_MEM (mode, op0);
        set_mem_attributes (temp, exp, 0);
        set_mem_addr_space (temp, as);
        align = get_object_alignment (exp);
        if (modifier != EXPAND_WRITE
            && modifier != EXPAND_MEMORY
            && mode != BLKmode
            && align < GET_MODE_ALIGNMENT (mode)
            /* If the target does not have special handling for unaligned
               loads of mode then it can use regular moves for them.  */
            && ((icode = optab_handler (movmisalign_optab, mode))
                != CODE_FOR_nothing))
          {
            struct expand_operand ops[2];

            /* We've already validated the memory, and we're creating a
               new pseudo destination.  The predicates really can't fail,
               nor can the generator.  */
            create_output_operand (&ops[0], NULL_RTX, mode);
            create_fixed_operand (&ops[1], temp);
            expand_insn (icode, 2, ops);
            temp = ops[0].value;
          }
        return temp;
      }
    case MEM_REF:
      {
        const bool reverse = REF_REVERSE_STORAGE_ORDER (exp);
        addr_space_t as
          = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
        machine_mode address_mode;
        tree base = TREE_OPERAND (exp, 0);
        gimple *def_stmt;
        enum insn_code icode;
        unsigned align;
        /* Handle expansion of non-aliased memory with non-BLKmode.  That
           might end up in a register.  */
        if (mem_ref_refers_to_non_mem_p (exp))
          {
            HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
            base = TREE_OPERAND (base, 0);
            if (offset == 0
                && !reverse
                && tree_fits_uhwi_p (TYPE_SIZE (type))
                && (GET_MODE_BITSIZE (DECL_MODE (base))
                    == tree_to_uhwi (TYPE_SIZE (type))))
              return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
                                  target, tmode, modifier);
            if (TYPE_MODE (type) == BLKmode)
              {
                temp = assign_stack_temp (DECL_MODE (base),
                                          GET_MODE_SIZE (DECL_MODE (base)));
                store_expr (base, temp, 0, false, false);
                temp = adjust_address (temp, BLKmode, offset);
                set_mem_size (temp, int_size_in_bytes (type));
                return temp;
              }
            exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
                          bitsize_int (offset * BITS_PER_UNIT));
            REF_REVERSE_STORAGE_ORDER (exp) = reverse;
            return expand_expr (exp, target, tmode, modifier);
          }
        address_mode = targetm.addr_space.address_mode (as);
        base = TREE_OPERAND (exp, 0);
        if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
          {
            tree mask = gimple_assign_rhs2 (def_stmt);
            base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
                           gimple_assign_rhs1 (def_stmt), mask);
            TREE_OPERAND (exp, 0) = base;
          }
        align = get_object_alignment (exp);
        op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
        op0 = memory_address_addr_space (mode, op0, as);
        if (!integer_zerop (TREE_OPERAND (exp, 1)))
          {
            rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
            op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
            op0 = memory_address_addr_space (mode, op0, as);
          }
        temp = gen_rtx_MEM (mode, op0);
        set_mem_attributes (temp, exp, 0);
        set_mem_addr_space (temp, as);
        if (TREE_THIS_VOLATILE (exp))
          MEM_VOLATILE_P (temp) = 1;
        if (modifier != EXPAND_WRITE
            && modifier != EXPAND_MEMORY
            && !inner_reference_p
            && mode != BLKmode
            && align < GET_MODE_ALIGNMENT (mode))
          {
            if ((icode = optab_handler (movmisalign_optab, mode))
                != CODE_FOR_nothing)
              {
                struct expand_operand ops[2];

                /* We've already validated the memory, and we're creating a
                   new pseudo destination.  The predicates really can't fail,
                   nor can the generator.  */
                create_output_operand (&ops[0], NULL_RTX, mode);
                create_fixed_operand (&ops[1], temp);
                expand_insn (icode, 2, ops);
                temp = ops[0].value;
              }
            else if (SLOW_UNALIGNED_ACCESS (mode, align))
              temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
                                        0, TYPE_UNSIGNED (TREE_TYPE (exp)),
                                        (modifier == EXPAND_STACK_PARM
                                         ? NULL_RTX : target),
                                        mode, mode, false, alt_rtl);
          }
        if (reverse
            && modifier != EXPAND_MEMORY
            && modifier != EXPAND_WRITE)
          temp = flip_storage_order (mode, temp);
        return temp;
      }
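    /* For instance, given "static const int a[] = { 1, 2, 3 };", the
       reference a[1] is satisfied below directly from the initializer,
       without materializing the array in memory.  */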
    case ARRAY_REF:

      {
        tree array = treeop0;
        tree index = treeop1;
        tree init;

        /* Fold an expression like: "foo"[2].
           This is not done in fold so it won't happen inside &.
           Don't fold if this is for wide characters since it's too
           difficult to do correctly and this is a very rare case.  */

        if (modifier != EXPAND_CONST_ADDRESS
            && modifier != EXPAND_INITIALIZER
            && modifier != EXPAND_MEMORY)
          {
            tree t = fold_read_from_constant_string (exp);

            if (t)
              return expand_expr (t, target, tmode, modifier);
          }

        /* If this is a constant index into a constant array,
           just get the value from the array.  Handle both the cases when
           we have an explicit constructor and when our operand is a variable
           that was declared const.  */

        if (modifier != EXPAND_CONST_ADDRESS
            && modifier != EXPAND_INITIALIZER
            && modifier != EXPAND_MEMORY
            && TREE_CODE (array) == CONSTRUCTOR
            && ! TREE_SIDE_EFFECTS (array)
            && TREE_CODE (index) == INTEGER_CST)
          {
            unsigned HOST_WIDE_INT ix;
            tree field, value;

            FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
                                      field, value)
              if (tree_int_cst_equal (field, index))
                {
                  if (!TREE_SIDE_EFFECTS (value))
                    return expand_expr (fold (value), target, tmode, modifier);
                  break;
                }
          }

        else if (optimize >= 1
                 && modifier != EXPAND_CONST_ADDRESS
                 && modifier != EXPAND_INITIALIZER
                 && modifier != EXPAND_MEMORY
                 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
                 && TREE_CODE (index) == INTEGER_CST
                 && (VAR_P (array) || TREE_CODE (array) == CONST_DECL)
                 && (init = ctor_for_folding (array)) != error_mark_node)
          {
            if (init == NULL_TREE)
              {
                tree value = build_zero_cst (type);
                if (TREE_CODE (value) == CONSTRUCTOR)
                  {
                    /* If VALUE is a CONSTRUCTOR, this optimization is only
                       useful if this doesn't store the CONSTRUCTOR into
                       memory.  If it does, it is more efficient to just
                       load the data from the array directly.  */
                    rtx ret = expand_constructor (value, target,
                                                  modifier, true);
                    if (ret == NULL_RTX)
                      value = NULL_TREE;
                  }

                if (value)
                  return expand_expr (value, target, tmode, modifier);
              }
            else if (TREE_CODE (init) == CONSTRUCTOR)
              {
                unsigned HOST_WIDE_INT ix;
                tree field, value;

                FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
                                          field, value)
                  if (tree_int_cst_equal (field, index))
                    {
                      if (TREE_SIDE_EFFECTS (value))
                        break;

                      if (TREE_CODE (value) == CONSTRUCTOR)
                        {
                          /* If VALUE is a CONSTRUCTOR, this
                             optimization is only useful if
                             this doesn't store the CONSTRUCTOR
                             into memory.  If it does, it is more
                             efficient to just load the data from
                             the array directly.  */
                          rtx ret = expand_constructor (value, target,
                                                        modifier, true);
                          if (ret == NULL_RTX)
                            break;
                        }

                      return
                        expand_expr (fold (value), target, tmode, modifier);
                    }
              }
            else if (TREE_CODE (init) == STRING_CST)
              {
                tree low_bound = array_ref_low_bound (exp);
                tree index1 = fold_convert_loc (loc, sizetype, treeop1);

                /* Optimize the special case of a zero lower bound.

                   We convert the lower bound to sizetype to avoid problems
                   with constant folding.  E.g. suppose the lower bound is
                   1 and its mode is QI.  Without the conversion
                      (ARRAY + (INDEX - (unsigned char)1))
                   becomes
                      (ARRAY + (-(unsigned char)1) + INDEX)
                   which becomes
                      (ARRAY + 255 + INDEX).  Oops!  */
                if (!integer_zerop (low_bound))
                  index1 = size_diffop_loc (loc, index1,
                                            fold_convert_loc (loc, sizetype,
                                                              low_bound));

                if (tree_fits_uhwi_p (index1)
                    && compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
                  {
                    tree type = TREE_TYPE (TREE_TYPE (init));
                    scalar_int_mode mode;

                    if (is_int_mode (TYPE_MODE (type), &mode)
                        && GET_MODE_SIZE (mode) == 1)
                      return gen_int_mode (TREE_STRING_POINTER (init)
                                           [TREE_INT_CST_LOW (index1)],
                                           mode);
                  }
              }
          }
      }
      goto normal_inner_ref;
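    /* E.g. for "struct { unsigned f : 3; } x = { 5 };", extracting x.f
       from the CONSTRUCTOR masks the value with (1 << 3) - 1; a signed
       bitfield is instead sign-extended with a shift pair.  */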
    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
         appropriate field if it is present.  */
      if (TREE_CODE (treeop0) == CONSTRUCTOR)
        {
          unsigned HOST_WIDE_INT idx;
          tree field, value;
          scalar_int_mode field_mode;

          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
                                    idx, field, value)
            if (field == treeop1
                /* We can normally use the value of the field in the
                   CONSTRUCTOR.  However, if this is a bitfield in
                   an integral mode that we can fit in a HOST_WIDE_INT,
                   we must mask only the number of bits in the bitfield,
                   since this is done implicitly by the constructor.  If
                   the bitfield does not meet either of those conditions,
                   we can't do this optimization.  */
                && (! DECL_BIT_FIELD (field)
                    || (is_int_mode (DECL_MODE (field), &field_mode)
                        && (GET_MODE_PRECISION (field_mode)
                            <= HOST_BITS_PER_WIDE_INT))))
              {
                if (DECL_BIT_FIELD (field)
                    && modifier == EXPAND_STACK_PARM)
                  target = 0;
                op0 = expand_expr (value, target, tmode, modifier);
                if (DECL_BIT_FIELD (field))
                  {
                    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
                    machine_mode imode = TYPE_MODE (TREE_TYPE (field));

                    if (TYPE_UNSIGNED (TREE_TYPE (field)))
                      {
                        op1 = gen_int_mode ((HOST_WIDE_INT_1 << bitsize) - 1,
                                            imode);
                        op0 = expand_and (imode, op0, op1, target);
                      }
                    else
                      {
                        int count = GET_MODE_PRECISION (imode) - bitsize;

                        op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                        op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                      }
                  }

                return op0;
              }
        }
      goto normal_inner_ref;
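    /* This is the workhorse for component references: compute the
       position of the piece with get_inner_reference, expand the
       containing object, and then either adjust the resulting MEM or
       fall back to an explicit bitfield extraction.  */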
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
    normal_inner_ref:
      {
        machine_mode mode1, mode2;
        HOST_WIDE_INT bitsize, bitpos;
        tree offset;
        int reversep, volatilep = 0, must_force_mem;
        tree tem
          = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
                                 &unsignedp, &reversep, &volatilep);
        rtx orig_op0, memloc;
        bool clear_mem_expr = false;

        /* If we got back the original object, something is wrong.  Perhaps
           we are evaluating an expression too early.  In any event, don't
           infinitely recurse.  */
        gcc_assert (tem != exp);

        /* If TEM's type is a union of variable size, pass TARGET to the inner
           computation, since it will need a temporary and TARGET is known
           to have to do.  This occurs in unchecked conversion in Ada.  */
        orig_op0 = op0
          = expand_expr_real (tem,
                              (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
                               && COMPLETE_TYPE_P (TREE_TYPE (tem))
                               && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
                                   != INTEGER_CST)
                               && modifier != EXPAND_STACK_PARM
                               ? target : NULL_RTX),
                              VOIDmode,
                              modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
                              NULL, true);

        /* If the field has a mode, we want to access it in the
           field's mode, not the computed mode.
           If a MEM has VOIDmode (external with incomplete type),
           use BLKmode for it instead.  */
        if (MEM_P (op0))
          {
            if (mode1 != VOIDmode)
              op0 = adjust_address (op0, mode1, 0);
            else if (GET_MODE (op0) == VOIDmode)
              op0 = adjust_address (op0, BLKmode, 0);
          }

        mode2
          = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
        /* If we have either an offset, a BLKmode result, or a reference
           outside the underlying object, we must force it to memory.
           Such a case can occur in Ada if we have unchecked conversion
           of an expression from a scalar type to an aggregate type or
           for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
           passed a partially uninitialized object or a view-conversion
           to a larger size.  */
        must_force_mem = (offset
                          || mode1 == BLKmode
                          || bitpos + bitsize > GET_MODE_BITSIZE (mode2));

        /* Handle CONCAT first.  */
        if (GET_CODE (op0) == CONCAT && !must_force_mem)
          {
            if (bitpos == 0
                && bitsize == GET_MODE_BITSIZE (GET_MODE (op0))
                && COMPLEX_MODE_P (mode1)
                && COMPLEX_MODE_P (GET_MODE (op0))
                && (GET_MODE_PRECISION (GET_MODE_INNER (mode1))
                    == GET_MODE_PRECISION (GET_MODE_INNER (GET_MODE (op0)))))
              {
                if (reversep)
                  op0 = flip_storage_order (GET_MODE (op0), op0);
                if (mode1 != GET_MODE (op0))
                  {
                    rtx parts[2];
                    for (int i = 0; i < 2; i++)
                      {
                        rtx op = read_complex_part (op0, i != 0);
                        if (GET_CODE (op) == SUBREG)
                          op = force_reg (GET_MODE (op), op);
                        rtx temp = gen_lowpart_common (GET_MODE_INNER (mode1),
                                                       op);
                        if (temp)
                          op = temp;
                        else
                          {
                            if (!REG_P (op) && !MEM_P (op))
                              op = force_reg (GET_MODE (op), op);
                            op = gen_lowpart (GET_MODE_INNER (mode1), op);
                          }
                        parts[i] = op;
                      }
                    op0 = gen_rtx_CONCAT (mode1, parts[0], parts[1]);
                  }
                return op0;
              }
            if (bitpos == 0
                && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
                && bitsize)
              {
                op0 = XEXP (op0, 0);
                mode2 = GET_MODE (op0);
              }
            else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
                     && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
                     && bitpos
                     && bitsize)
              {
                op0 = XEXP (op0, 1);
                bitpos = 0;
                mode2 = GET_MODE (op0);
              }
            else
              /* Otherwise force into memory.  */
              must_force_mem = 1;
          }
        /* If this is a constant, put it in a register if it is a legitimate
           constant and we don't need a memory reference.  */
        if (CONSTANT_P (op0)
            && mode2 != BLKmode
            && targetm.legitimate_constant_p (mode2, op0)
            && !must_force_mem)
          op0 = force_reg (mode2, op0);

        /* Otherwise, if this is a constant, try to force it to the constant
           pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
           is a legitimate constant.  */
        else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
          op0 = validize_mem (memloc);

        /* Otherwise, if this is a constant or the object is not in memory
           and need be, put it there.  */
        else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
          {
            memloc = assign_temp (TREE_TYPE (tem), 1, 1);
            emit_move_insn (memloc, op0);
            op0 = memloc;
            clear_mem_expr = true;
          }
        if (offset)
          {
            machine_mode address_mode;
            rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
                                          EXPAND_SUM);

            gcc_assert (MEM_P (op0));

            address_mode = get_address_mode (op0);
            if (GET_MODE (offset_rtx) != address_mode)
              {
                /* We cannot be sure that the RTL in offset_rtx is valid outside
                   of a memory address context, so force it into a register
                   before attempting to convert it to the desired mode.  */
                offset_rtx = force_operand (offset_rtx, NULL_RTX);
                offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
              }

            /* See the comment in expand_assignment for the rationale.  */
            if (mode1 != VOIDmode
                && bitpos != 0
                && bitsize > 0
                && (bitpos % bitsize) == 0
                && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
                && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
              {
                op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
                bitpos = 0;
              }

            op0 = offset_address (op0, offset_rtx,
                                  highest_pow2_factor (offset));
          }
        /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
           record its alignment as BIGGEST_ALIGNMENT.  */
        if (MEM_P (op0) && bitpos == 0 && offset != 0
            && is_aligning_offset (offset, tem))
          set_mem_align (op0, BIGGEST_ALIGNMENT);

        /* Don't forget about volatility even if this is a bitfield.  */
        if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
          {
            if (op0 == orig_op0)
              op0 = copy_rtx (op0);

            MEM_VOLATILE_P (op0) = 1;
          }
        /* In cases where an aligned union has an unaligned object
           as a field, we might be extracting a BLKmode value from
           an integer-mode (e.g., SImode) object.  Handle this case
           by doing the extract into an object as wide as the field
           (which we know to be the width of a basic mode), then
           storing into memory, and changing the mode to BLKmode.  */
        if (mode1 == VOIDmode
            || REG_P (op0) || GET_CODE (op0) == SUBREG
            || (mode1 != BLKmode && ! direct_load[(int) mode1]
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
                && modifier != EXPAND_CONST_ADDRESS
                && modifier != EXPAND_INITIALIZER
                && modifier != EXPAND_MEMORY)
            /* If the bitfield is volatile and the bitsize
               is narrower than the access size of the bitfield,
               we need to extract bitfields from the access.  */
            || (volatilep && TREE_CODE (exp) == COMPONENT_REF
                && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
                && mode1 != BLKmode
                && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
            /* If the field isn't aligned enough to fetch as a memref,
               fetch it as a bit field.  */
            || (mode1 != BLKmode
                && (((MEM_P (op0)
                      ? MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
                        || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0)
                      : TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
                        || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
                     && modifier != EXPAND_MEMORY
                     && ((modifier == EXPAND_CONST_ADDRESS
                          || modifier == EXPAND_INITIALIZER)
                         ? STRICT_ALIGNMENT
                         : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
                    || (bitpos % BITS_PER_UNIT != 0)))
            /* If the type and the field are a constant size and the
               size of the type isn't the same size as the bitfield,
               we must use bitfield operations.  */
            || (bitsize >= 0
                && TYPE_SIZE (TREE_TYPE (exp))
                && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
                && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
                                          bitsize)))
          {
            machine_mode ext_mode = mode;
            if (ext_mode == BLKmode
                && ! (target != 0 && MEM_P (op0)
                      && MEM_P (target)
                      && bitpos % BITS_PER_UNIT == 0))
              ext_mode = mode_for_size (bitsize, MODE_INT, 1);

            if (ext_mode == BLKmode)
              {
                if (target == 0)
                  target = assign_temp (type, 1, 1);

                /* ??? Unlike the similar test a few lines below, this one is
                   very likely obsolete.  */
                if (bitsize == 0)
                  return target;

                /* In this case, BITPOS must start at a byte boundary and
                   TARGET, if specified, must be a MEM.  */
                gcc_assert (MEM_P (op0)
                            && (!target || MEM_P (target))
                            && !(bitpos % BITS_PER_UNIT));

                emit_block_move (target,
                                 adjust_address (op0, VOIDmode,
                                                 bitpos / BITS_PER_UNIT),
                                 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                          / BITS_PER_UNIT),
                                 (modifier == EXPAND_STACK_PARM
                                  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

                return target;
              }
            /* If we have nothing to extract, the result will be 0 for targets
               with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise.  Always
               return 0 for the sake of consistency, as reading a zero-sized
               bitfield is valid in Ada and the value is fully specified.  */
            if (bitsize == 0)
              return const0_rtx;

            op0 = validize_mem (op0);

            if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
              mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

            /* If the result has a record type and the extraction is done in
               an integral mode, then the field may be not aligned on a byte
               boundary; in this case, if it has reverse storage order, it
               needs to be extracted as a scalar field with reverse storage
               order and put back into memory order afterwards.  */
            if (TREE_CODE (type) == RECORD_TYPE
                && GET_MODE_CLASS (ext_mode) == MODE_INT)
              reversep = TYPE_REVERSE_STORAGE_ORDER (type);

            op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
                                     (modifier == EXPAND_STACK_PARM
                                      ? NULL_RTX : target),
                                     ext_mode, ext_mode, reversep, alt_rtl);

            /* If the result has a record type and the mode of OP0 is an
               integral mode then, if BITSIZE is narrower than this mode
               and this is for big-endian data, we must put the field
               into the high-order bits.  And we must also put it back
               into memory order if it has been previously reversed.  */
            scalar_int_mode op0_mode;
            if (TREE_CODE (type) == RECORD_TYPE
                && is_int_mode (GET_MODE (op0), &op0_mode))
              {
                HOST_WIDE_INT size = GET_MODE_BITSIZE (op0_mode);

                if (bitsize < size
                    && reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
                  op0 = expand_shift (LSHIFT_EXPR, op0_mode, op0,
                                      size - bitsize, op0, 1);

                if (reversep)
                  op0 = flip_storage_order (op0_mode, op0);
              }

            /* If the result type is BLKmode, store the data into a temporary
               of the appropriate type, but with the mode corresponding to the
               mode for the data we have (op0's mode).  */
            if (mode == BLKmode)
              {
                rtx new_rtx
                  = assign_stack_temp_for_type (ext_mode,
                                                GET_MODE_BITSIZE (ext_mode),
                                                type);
                emit_move_insn (new_rtx, op0);
                op0 = copy_rtx (new_rtx);
                PUT_MODE (op0, BLKmode);
              }
            return op0;
          }

        /* If the result is BLKmode, use that to access the object
           now as well.  */
        if (mode == BLKmode)
          mode1 = BLKmode;

        /* Get a reference to just this component.  */
        if (modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
        else
          op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

        if (op0 == orig_op0)
          op0 = copy_rtx (op0);

        /* Don't set memory attributes if the base expression is
           SSA_NAME that got expanded as a MEM.  In that case, we should
           just honor its original memory attributes.  */
        if (TREE_CODE (tem) != SSA_NAME || !MEM_P (orig_op0))
          set_mem_attributes (op0, exp, 0);

        if (REG_P (XEXP (op0, 0)))
          mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

        /* If op0 is a temporary because the original expression was forced
           to memory, clear MEM_EXPR so that the original expression cannot
           be marked as addressable through MEM_EXPR of the temporary.  */
        if (clear_mem_expr)
          set_mem_expr (op0, NULL_TREE);

        MEM_VOLATILE_P (op0) |= volatilep;

        if (reversep
            && modifier != EXPAND_MEMORY
            && modifier != EXPAND_WRITE)
          op0 = flip_storage_order (mode1, op0);

        if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
            || modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_INITIALIZER)
          return op0;

        if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        convert_move (target, op0, unsignedp);
        return target;
      }
    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
    case CALL_EXPR:
      /* All valid uses of __builtin_va_arg_pack () are removed during
         inlining.  */
      if (CALL_EXPR_VA_ARG_PACK (exp))
        error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      {
        tree fndecl = get_callee_fndecl (exp), attr;

        if (fndecl
            && (attr = lookup_attribute ("error",
                                         DECL_ATTRIBUTES (fndecl))) != NULL)
          error ("%Kcall to %qs declared with attribute error: %s",
                 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
                 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
        if (fndecl
            && (attr = lookup_attribute ("warning",
                                         DECL_ATTRIBUTES (fndecl))) != NULL)
          warning_at (tree_nonartificial_location (exp),
                      0, "%Kcall to %qs declared with attribute warning: %s",
                      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
                      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

        /* Check for a built-in function.  */
        if (fndecl && DECL_BUILT_IN (fndecl))
          {
            gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
            if (CALL_WITH_BOUNDS_P (exp))
              return expand_builtin_with_bounds (exp, target, subtarget,
                                                 tmode, ignore);
            else
              return expand_builtin (exp, target, subtarget, tmode, ignore);
          }
      }
      return expand_call (exp, target, ignore);
    case VIEW_CONVERT_EXPR:
      op0 = NULL_RTX;

      /* If we are converting to BLKmode, try to avoid an intermediate
         temporary by fetching an inner memory reference.  */
      if (mode == BLKmode
          && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
          && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
          && handled_component_p (treeop0))
        {
          machine_mode mode1;
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          int unsignedp, reversep, volatilep = 0;
          tree tem
            = get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1,
                                   &unsignedp, &reversep, &volatilep);
          rtx orig_op0;

          /* ??? We should work harder and deal with non-zero offsets.  */
          if (!offset
              && (bitpos % BITS_PER_UNIT) == 0
              && !reversep
              && bitsize >= 0
              && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
            {
              /* See the normal_inner_ref case for the rationale.  */
              orig_op0
                = expand_expr_real (tem,
                                    (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
                                     && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
                                         != INTEGER_CST)
                                     && modifier != EXPAND_STACK_PARM
                                     ? target : NULL_RTX),
                                    VOIDmode,
                                    modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
                                    NULL, true);

              if (MEM_P (orig_op0))
                {
                  op0 = orig_op0;

                  /* Get a reference to just this component.  */
                  if (modifier == EXPAND_CONST_ADDRESS
                      || modifier == EXPAND_SUM
                      || modifier == EXPAND_INITIALIZER)
                    op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
                  else
                    op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);

                  if (op0 == orig_op0)
                    op0 = copy_rtx (op0);

                  set_mem_attributes (op0, treeop0, 0);
                  if (REG_P (XEXP (op0, 0)))
                    mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

                  MEM_VOLATILE_P (op0) |= volatilep;
                }
            }
        }

      if (!op0)
        op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
                                NULL, inner_reference_p);

      /* If the input and output modes are both the same, we are done.  */
      if (mode == GET_MODE (op0))
        ;
      /* If neither mode is BLKmode, and both modes are the same size
         then we can use gen_lowpart.  */
      else if (mode != BLKmode && GET_MODE (op0) != BLKmode
               && (GET_MODE_PRECISION (mode)
                   == GET_MODE_PRECISION (GET_MODE (op0)))
               && !COMPLEX_MODE_P (GET_MODE (op0)))
        {
          if (GET_CODE (op0) == SUBREG)
            op0 = force_reg (GET_MODE (op0), op0);
          temp = gen_lowpart_common (mode, op0);
          if (temp)
            op0 = temp;
          else
            {
              if (!REG_P (op0) && !MEM_P (op0))
                op0 = force_reg (GET_MODE (op0), op0);
              op0 = gen_lowpart (mode, op0);
            }
        }
      /* If both types are integral, convert from one mode to the other.  */
      else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
        op0 = convert_modes (mode, GET_MODE (op0), op0,
                             TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      /* If the output type is a bit-field type, do an extraction.  */
      else if (reduce_bit_field)
        return extract_bit_field (op0, TYPE_PRECISION (type), 0,
                                  TYPE_UNSIGNED (type), NULL_RTX,
                                  mode, mode, false, NULL);
      /* As a last resort, spill op0 to memory, and reload it in a
         different mode.  */
      else if (!MEM_P (op0))
        {
          /* If the operand is not a MEM, force it into memory.  Since we
             are going to be changing the mode of the MEM, don't call
             force_const_mem for constants because we don't allow pool
             constants to change mode.  */
          tree inner_type = TREE_TYPE (treeop0);

          gcc_assert (!TREE_ADDRESSABLE (exp));

          if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
            target
              = assign_stack_temp_for_type
                (TYPE_MODE (inner_type),
                 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);

          emit_move_insn (target, op0);
          op0 = target;
        }

      /* If OP0 is (now) a MEM, we need to deal with alignment issues.  If the
         output type is such that the operand is known to be aligned, indicate
         that it is.  Otherwise, we need only be concerned about alignment for
         non-BLKmode results.  */
      if (MEM_P (op0))
        {
          enum insn_code icode;

          if (modifier != EXPAND_WRITE
              && modifier != EXPAND_MEMORY
              && !inner_reference_p
              && mode != BLKmode
              && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
            {
              /* If the target does have special handling for unaligned
                 loads of mode then use them.  */
              if ((icode = optab_handler (movmisalign_optab, mode))
                  != CODE_FOR_nothing)
                {
                  rtx reg;

                  op0 = adjust_address (op0, mode, 0);
                  /* We've already validated the memory, and we're creating a
                     new pseudo destination.  The predicates really can't
                     fail.  */
                  reg = gen_reg_rtx (mode);

                  /* Nor can the insn generator.  */
                  rtx_insn *insn = GEN_FCN (icode) (reg, op0);
                  emit_insn (insn);
                  return reg;
                }
              else if (STRICT_ALIGNMENT)
                {
                  tree inner_type = TREE_TYPE (treeop0);
                  HOST_WIDE_INT temp_size
                    = MAX (int_size_in_bytes (inner_type),
                           (HOST_WIDE_INT) GET_MODE_SIZE (mode));
                  rtx new_rtx
                    = assign_stack_temp_for_type (mode, temp_size, type);
                  rtx new_with_op0_mode
                    = adjust_address (new_rtx, GET_MODE (op0), 0);

                  gcc_assert (!TREE_ADDRESSABLE (exp));

                  if (GET_MODE (op0) == BLKmode)
                    emit_block_move (new_with_op0_mode, op0,
                                     GEN_INT (GET_MODE_SIZE (mode)),
                                     (modifier == EXPAND_STACK_PARM
                                      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
                  else
                    emit_move_insn (new_with_op0_mode, op0);

                  op0 = new_rtx;
                }
            }

          op0 = adjust_address (op0, mode, 0);
        }

      return op0;
    case MODIFY_EXPR:
      {
        tree lhs = treeop0;
        tree rhs = treeop1;
        gcc_assert (ignore);

        /* Check for |= or &= of a bitfield of size one into another bitfield
           of size 1.  In this case, (unless we need the result of the
           assignment) we can do this more efficiently with a
           test followed by an assignment, if necessary.

           ??? At this point, we can't get a BIT_FIELD_REF here.  But if
           things change so we do, this code should be enhanced to
           support it.  */
        if (TREE_CODE (lhs) == COMPONENT_REF
            && (TREE_CODE (rhs) == BIT_IOR_EXPR
                || TREE_CODE (rhs) == BIT_AND_EXPR)
            && TREE_OPERAND (rhs, 0) == lhs
            && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
            && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
            && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
          {
            rtx_code_label *label = gen_label_rtx ();
            int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
            do_jump (TREE_OPERAND (rhs, 1),
                     value ? label : 0,
                     value ? 0 : label,
                     profile_probability::uninitialized ());
            expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
                               false);
            do_pending_stack_adjust ();
            emit_label (label);
            return const0_rtx;
          }

        expand_assignment (lhs, rhs, false);
        return const0_rtx;
      }
    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);
    case REALPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, true);
    case RETURN_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case SWITCH_EXPR:
    case ASM_EXPR:
      /* Expanded in cfgexpand.c.  */
      gcc_unreachable ();

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONJ_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case COMPOUND_LITERAL_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
         initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
         have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (treeop0, original_target, tmode,
                               modifier, alt_rtl, inner_reference_p);

    default:
      return expand_expr_real_2 (&ops, target, tmode, modifier);
    }
}
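/* Example of the reduction performed by reduce_to_bit_field_precision
   below: for a 3-bit unsigned type the value is masked with 0x7, while
   for a 3-bit signed type it is shifted left and then arithmetically
   right by GET_MODE_PRECISION (mode) - 3 so that bit 2 is
   sign-extended.  */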
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type.  */
  if (CONST_INT_P (exp))
    {
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      machine_mode mode = GET_MODE (exp);
      rtx mask = immed_wide_int_const
        (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
      return expand_and (mode, exp, mask, target);
    }
  else
    {
      int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
                          exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
                           exp, count, target, 0);
    }
}
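/* The offset shape recognized by is_aligning_offset below is the
   rounding term produced when dynamically aligning an object, e.g.
   (- (ADDR_EXPR of EXP)) & (ALIGN - 1) with ALIGN a power of 2 larger
   than BIGGEST_ALIGNMENT, possibly wrapped in conversions.  */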
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (const_tree offset, const_tree exp)
{
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
      || compare_tree_int (TREE_OPERAND (offset, 1),
                           BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || !pow2p_hwi (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1))
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
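/* string_constant below handles forms such as &"hello"[2], a VAR_DECL
   whose initializer is a string literal, and PLUS_EXPR or
   POINTER_PLUS_EXPR combining an ADDR_EXPR of either with an offset.  */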
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset, lower_bound;

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
        {
          *ptr_offset = size_zero_node;
          return TREE_OPERAND (arg, 0);
        }
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
        {
          array = TREE_OPERAND (arg, 0);
          offset = size_zero_node;
        }
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
        {
          array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
          if (TREE_CODE (array) != STRING_CST && !VAR_P (array))
            return 0;

          /* Check if the array has a nonzero lower bound.  */
          lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
          if (!integer_zerop (lower_bound))
            {
              /* If the offset and base aren't both constants, return 0.  */
              if (TREE_CODE (lower_bound) != INTEGER_CST)
                return 0;
              if (TREE_CODE (offset) != INTEGER_CST)
                return 0;
              /* Adjust offset by the lower bound.  */
              offset = size_diffop (fold_convert (sizetype, offset),
                                    fold_convert (sizetype, lower_bound));
            }
        }
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
        {
          array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
          if (TREE_CODE (array) != ADDR_EXPR)
            return 0;
          array = TREE_OPERAND (array, 0);
          if (TREE_CODE (array) != STRING_CST && !VAR_P (array))
            return 0;
        }
      else
        return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
              || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
        {
          array = TREE_OPERAND (arg0, 0);
          offset = arg1;
        }
      else if (TREE_CODE (arg1) == ADDR_EXPR
               && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
                   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
        {
          array = TREE_OPERAND (arg1, 0);
          offset = arg0;
        }
      else
        return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (VAR_P (array) || TREE_CODE (array) == CONST_DECL)
    {
      int length;
      tree init = ctor_for_folding (array);

      /* Variables initialized to string literals can be handled too.  */
      if (init == error_mark_node
          || !init
          || TREE_CODE (init) != STRING_CST)
        return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
          || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
          || (length = TREE_STRING_LENGTH (init)) <= 0
          || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
        return 0;

      /* If variable is bigger than the string literal, OFFSET must be constant
         and inside of the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
          && (! tree_fits_uhwi_p (offset)
              || compare_tree_int (offset, length) >= 0))
        return 0;

      *ptr_offset = offset;
      return init;
    }

  return 0;
}
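/* A single-bit test such as (x & 4) != 0 is routed by do_store_flag
   below through fold_single_bit_test, which turns it into (x >> 2) & 1
   and so avoids needing an scc instruction at all.  */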
/* Generate code to calculate the exploded expression OPS using a
   store-flag instruction, and return an rtx for the result.
   OPS reflects a comparison.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (sepops ops, rtx target, machine_mode mode)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  machine_mode operand_mode;
  int unsignedp;
  rtx op0, op1;
  rtx subtarget = target;
  location_t loc = ops->location;

  arg0 = ops->op0;
  arg1 = ops->op1;

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
  if (targetm.have_canonicalize_funcptr_for_compare ()
      && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
                  == FUNCTION_TYPE))))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* For vector typed comparisons emit code to generate the desired
     all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
     expander for this.  */
  if (TREE_CODE (ops->type) == VECTOR_TYPE)
    {
      tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
      if (VECTOR_BOOLEAN_TYPE_P (ops->type)
          && expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type, ops->code))
        return expand_vec_cmp_expr (ops->type, ifexp, target);
      else
        {
          tree if_true = constant_boolean_node (true, ops->type);
          tree if_false = constant_boolean_node (false, ops->type);
          return expand_vec_cond_expr (ops->type, ifexp, if_true,
                                       if_false, target);
        }
    }

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.   These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (ops->code)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      std::swap (arg0, arg1);
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && integer_zerop (arg1)
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
    {
      gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
      if (srcstmt
          && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
        {
          enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
          tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
          tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
                                       gimple_assign_rhs1 (srcstmt),
                                       gimple_assign_rhs2 (srcstmt));
          temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
          if (temp)
            return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
        }
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
                                operand_mode, unsignedp,
                                (TYPE_PRECISION (ops->type) == 1
                                 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
}
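/* The casesi pattern takes five operands: the index, the lower bound,
   the range (upper bound minus lower bound), the table label, and the
   out-of-range label; try_casesi below marshals them in that order.  */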
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   DEFAULT_PROBABILITY is the probability of jumping to the default
   label.  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label, rtx default_label, rtx fallback_label,
            profile_probability default_probability)
{
  struct expand_operand ops[5];
  machine_mode index_mode = SImode;
  rtx op1, op2, index;

  if (! targetm.have_casesi ())
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
                           index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
        emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                                 omode, 1, default_label,
                                 default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_type = lang_hooks.types.type_for_mode (index_mode, 0);
          index_expr = fold_convert (index_type, index_expr);
        }

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
                                  ? default_label
                                  : fallback_label));
  expand_jump_insn (targetm.code_for_casesi, 5, ops);
  return 1;
}
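/* In the table jump sequence below, the dispatch address is computed as
   TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE); the selected
   vector element is then loaded and jumped through.  */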
/* Attempt to generate a tablejump instruction; same concept.  */
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */

static void
do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
              rtx default_label, profile_probability default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                             default_label, default_probability);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = simplify_gen_binary (MULT, Pmode, index,
                               gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
                                             Pmode));
  index = simplify_gen_binary (PLUS, Pmode, index,
                               gen_rtx_LABEL_REF (Pmode, table_label));

#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (targetm.gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label,
               profile_probability default_probability)
{
  rtx index;

  if (! targetm.have_tablejump ())
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
                            fold_convert (index_type, index_expr),
                            fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_normal (range),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label, default_probability);
  return 1;
}
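/* In the two mask builders below, Boolean vector constants are encoded
   lane-wise as 0 for false and all-ones (-1) for true, matching what
   vector compare instructions produce.  */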
/* Return a CONST_VECTOR rtx representing vector mask for
   a VECTOR_CST of booleans.  */
static rtx
const_vector_mask_from_tree (tree exp)
{
  rtvec v;
  unsigned i, units;
  tree elt;
  machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));
  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      gcc_assert (TREE_CODE (elt) == INTEGER_CST);
      if (integer_zerop (elt))
        RTVEC_ELT (v, i) = CONST0_RTX (inner);
      else if (integer_onep (elt)
               || integer_minus_onep (elt))
        RTVEC_ELT (v, i) = CONSTM1_RTX (inner);
      else
        gcc_unreachable ();
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}
/* Return a CONST_INT rtx representing vector mask for
   a VECTOR_CST of booleans.  */
static rtx
const_scalar_mask_from_tree (tree exp)
{
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  wide_int res = wi::zero (GET_MODE_PRECISION (mode));
  tree elt;
  unsigned i;

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);
      gcc_assert (TREE_CODE (elt) == INTEGER_CST);
      if (integer_all_onesp (elt))
        res = wi::set_bit (res, i);
      else
        gcc_assert (integer_zerop (elt));
    }

  return immed_wide_int_const (res, mode);
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  unsigned i, units;
  tree elt;
  machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
    return const_vector_mask_from_tree (exp);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = const_double_from_real_value (TREE_REAL_CST (elt),
                                                         inner);
      else if (TREE_CODE (elt) == FIXED_CST)
        RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}
/* Build a decl for a personality function given a language prefix.  */

tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
                                   long_long_unsigned_type_node,
                                   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
                     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}
/* Returns a tree for the size of EXP in bytes.  */

static tree
tree_expr_size (const_tree exp)
{
  if (DECL_P (exp)
      && DECL_SIZE_UNIT (exp) != 0)
    return DECL_SIZE_UNIT (exp);
  else
    return size_in_bytes (TREE_TYPE (exp));
}
/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}
/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

static HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
    }

  if (size == 0 || !tree_fits_shwi_p (size))
    return -1;

  return tree_to_shwi (size);
}