1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
37 #include "diagnostic.h"
39 #include "fold-const.h"
40 #include "stor-layout.h"
44 #include "insn-attr.h"
49 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
51 #include "optabs-tree.h"
54 #include "langhooks.h"
55 #include "common/common-target.h"
56 #include "tree-ssa-live.h"
57 #include "tree-outof-ssa.h"
58 #include "tree-ssa-address.h"
60 #include "tree-chkp.h"
65 /* If this is nonzero, we do not bother generating VOLATILE
66 around volatile memory references, and we are willing to
67 output indirect addresses. If cse is to follow, we reject
68 indirect addresses so a useful potential cse is generated;
69 if it is used only once, instruction combination will produce
70 the same indirect address eventually. */
73 static bool block_move_libcall_safe_for_call_parm (void);
74 static bool emit_block_move_via_movmem (rtx
, rtx
, rtx
, unsigned, unsigned, HOST_WIDE_INT
,
75 unsigned HOST_WIDE_INT
, unsigned HOST_WIDE_INT
,
76 unsigned HOST_WIDE_INT
);
77 static void emit_block_move_via_loop (rtx
, rtx
, rtx
, unsigned);
78 static void clear_by_pieces (rtx
, unsigned HOST_WIDE_INT
, unsigned int);
79 static rtx_insn
*compress_float_constant (rtx
, rtx
);
80 static rtx
get_subtarget (rtx
);
81 static void store_constructor_field (rtx
, unsigned HOST_WIDE_INT
,
82 HOST_WIDE_INT
, machine_mode
,
83 tree
, int, alias_set_type
, bool);
84 static void store_constructor (tree
, rtx
, int, HOST_WIDE_INT
, bool);
85 static rtx
store_field (rtx
, HOST_WIDE_INT
, HOST_WIDE_INT
,
86 unsigned HOST_WIDE_INT
, unsigned HOST_WIDE_INT
,
87 machine_mode
, tree
, alias_set_type
, bool, bool);
89 static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree
, const_tree
);
91 static int is_aligning_offset (const_tree
, const_tree
);
92 static rtx
reduce_to_bit_field_precision (rtx
, rtx
, tree
);
93 static rtx
do_store_flag (sepops
, rtx
, machine_mode
);
95 static void emit_single_push_insn (machine_mode
, rtx
, tree
);
97 static void do_tablejump (rtx
, machine_mode
, rtx
, rtx
, rtx
, int);
98 static rtx
const_vector_from_tree (tree
);
99 static rtx
const_scalar_mask_from_tree (tree
);
100 static tree
tree_expr_size (const_tree
);
101 static HOST_WIDE_INT
int_expr_size (tree
);
104 /* This is run to set up which modes can be used
105 directly in memory and to initialize the block move optab. It is run
106 at the beginning of compilation and when the target is reinitialized. */
109 init_expr_target (void)
117 /* Try indexing by frame ptr and try by stack ptr.
118 It is known that on the Convex the stack ptr isn't a valid index.
119 With luck, one or the other is valid on any machine. */
120 mem
= gen_rtx_MEM (word_mode
, stack_pointer_rtx
);
121 mem1
= gen_rtx_MEM (word_mode
, frame_pointer_rtx
);
123 /* A scratch register we can modify in-place below to avoid
124 useless RTL allocations. */
125 reg
= gen_rtx_REG (word_mode
, LAST_VIRTUAL_REGISTER
+ 1);
127 insn
= rtx_alloc (INSN
);
128 pat
= gen_rtx_SET (NULL_RTX
, NULL_RTX
);
129 PATTERN (insn
) = pat
;
131 for (mode
= VOIDmode
; (int) mode
< NUM_MACHINE_MODES
;
132 mode
= (machine_mode
) ((int) mode
+ 1))
136 direct_load
[(int) mode
] = direct_store
[(int) mode
] = 0;
137 PUT_MODE (mem
, mode
);
138 PUT_MODE (mem1
, mode
);
140 /* See if there is some register that can be used in this mode and
141 directly loaded or stored from memory. */
143 if (mode
!= VOIDmode
&& mode
!= BLKmode
)
144 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
145 && (direct_load
[(int) mode
] == 0 || direct_store
[(int) mode
] == 0);
148 if (! HARD_REGNO_MODE_OK (regno
, mode
))
151 set_mode_and_regno (reg
, mode
, regno
);
154 SET_DEST (pat
) = reg
;
155 if (recog (pat
, insn
, &num_clobbers
) >= 0)
156 direct_load
[(int) mode
] = 1;
158 SET_SRC (pat
) = mem1
;
159 SET_DEST (pat
) = reg
;
160 if (recog (pat
, insn
, &num_clobbers
) >= 0)
161 direct_load
[(int) mode
] = 1;
164 SET_DEST (pat
) = mem
;
165 if (recog (pat
, insn
, &num_clobbers
) >= 0)
166 direct_store
[(int) mode
] = 1;
169 SET_DEST (pat
) = mem1
;
170 if (recog (pat
, insn
, &num_clobbers
) >= 0)
171 direct_store
[(int) mode
] = 1;
175 mem
= gen_rtx_MEM (VOIDmode
, gen_raw_REG (Pmode
, LAST_VIRTUAL_REGISTER
+ 1));
177 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); mode
!= VOIDmode
;
178 mode
= GET_MODE_WIDER_MODE (mode
))
180 machine_mode srcmode
;
181 for (srcmode
= GET_CLASS_NARROWEST_MODE (MODE_FLOAT
); srcmode
!= mode
;
182 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
186 ic
= can_extend_p (mode
, srcmode
, 0);
187 if (ic
== CODE_FOR_nothing
)
190 PUT_MODE (mem
, srcmode
);
192 if (insn_operand_matches (ic
, 1, mem
))
193 float_extend_from_mem
[mode
][srcmode
] = true;
198 /* This is run at the start of compiling a function. */
203 memset (&crtl
->expr
, 0, sizeof (crtl
->expr
));
206 /* Copy data from FROM to TO, where the machine modes are not the same.
207 Both modes may be integer, or both may be floating, or both may be
209 UNSIGNEDP should be nonzero if FROM is an unsigned type.
210 This causes zero-extension instead of sign-extension. */
213 convert_move (rtx to
, rtx from
, int unsignedp
)
215 machine_mode to_mode
= GET_MODE (to
);
216 machine_mode from_mode
= GET_MODE (from
);
217 int to_real
= SCALAR_FLOAT_MODE_P (to_mode
);
218 int from_real
= SCALAR_FLOAT_MODE_P (from_mode
);
222 /* rtx code for making an equivalent value. */
223 enum rtx_code equiv_code
= (unsignedp
< 0 ? UNKNOWN
224 : (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
));
227 gcc_assert (to_real
== from_real
);
228 gcc_assert (to_mode
!= BLKmode
);
229 gcc_assert (from_mode
!= BLKmode
);
231 /* If the source and destination are already the same, then there's
236 /* If FROM is a SUBREG that indicates that we have already done at least
237 the required extension, strip it. We don't handle such SUBREGs as
240 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
241 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from
)))
242 >= GET_MODE_PRECISION (to_mode
))
243 && SUBREG_CHECK_PROMOTED_SIGN (from
, unsignedp
))
244 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
246 gcc_assert (GET_CODE (to
) != SUBREG
|| !SUBREG_PROMOTED_VAR_P (to
));
248 if (to_mode
== from_mode
249 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
251 emit_move_insn (to
, from
);
255 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
257 gcc_assert (GET_MODE_BITSIZE (from_mode
) == GET_MODE_BITSIZE (to_mode
));
259 if (VECTOR_MODE_P (to_mode
))
260 from
= simplify_gen_subreg (to_mode
, from
, GET_MODE (from
), 0);
262 to
= simplify_gen_subreg (from_mode
, to
, GET_MODE (to
), 0);
264 emit_move_insn (to
, from
);
268 if (GET_CODE (to
) == CONCAT
&& GET_CODE (from
) == CONCAT
)
270 convert_move (XEXP (to
, 0), XEXP (from
, 0), unsignedp
);
271 convert_move (XEXP (to
, 1), XEXP (from
, 1), unsignedp
);
281 gcc_assert ((GET_MODE_PRECISION (from_mode
)
282 != GET_MODE_PRECISION (to_mode
))
283 || (DECIMAL_FLOAT_MODE_P (from_mode
)
284 != DECIMAL_FLOAT_MODE_P (to_mode
)));
286 if (GET_MODE_PRECISION (from_mode
) == GET_MODE_PRECISION (to_mode
))
287 /* Conversion between decimal float and binary float, same size. */
288 tab
= DECIMAL_FLOAT_MODE_P (from_mode
) ? trunc_optab
: sext_optab
;
289 else if (GET_MODE_PRECISION (from_mode
) < GET_MODE_PRECISION (to_mode
))
294 /* Try converting directly if the insn is supported. */
296 code
= convert_optab_handler (tab
, to_mode
, from_mode
);
297 if (code
!= CODE_FOR_nothing
)
299 emit_unop_insn (code
, to
, from
,
300 tab
== sext_optab
? FLOAT_EXTEND
: FLOAT_TRUNCATE
);
304 /* Otherwise use a libcall. */
305 libcall
= convert_optab_libfunc (tab
, to_mode
, from_mode
);
307 /* Is this conversion implemented yet? */
308 gcc_assert (libcall
);
311 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
313 insns
= get_insns ();
315 emit_libcall_block (insns
, to
, value
,
316 tab
== trunc_optab
? gen_rtx_FLOAT_TRUNCATE (to_mode
,
318 : gen_rtx_FLOAT_EXTEND (to_mode
, from
));
322 /* Handle pointer conversion. */ /* SPEE 900220. */
323 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
327 if (GET_MODE_PRECISION (from_mode
) > GET_MODE_PRECISION (to_mode
))
334 if (convert_optab_handler (ctab
, to_mode
, from_mode
)
337 emit_unop_insn (convert_optab_handler (ctab
, to_mode
, from_mode
),
343 /* Targets are expected to provide conversion insns between PxImode and
344 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
345 if (GET_MODE_CLASS (to_mode
) == MODE_PARTIAL_INT
)
347 machine_mode full_mode
348 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode
), MODE_INT
);
350 gcc_assert (convert_optab_handler (trunc_optab
, to_mode
, full_mode
)
351 != CODE_FOR_nothing
);
353 if (full_mode
!= from_mode
)
354 from
= convert_to_mode (full_mode
, from
, unsignedp
);
355 emit_unop_insn (convert_optab_handler (trunc_optab
, to_mode
, full_mode
),
359 if (GET_MODE_CLASS (from_mode
) == MODE_PARTIAL_INT
)
362 machine_mode full_mode
363 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode
), MODE_INT
);
364 convert_optab ctab
= unsignedp
? zext_optab
: sext_optab
;
365 enum insn_code icode
;
367 icode
= convert_optab_handler (ctab
, full_mode
, from_mode
);
368 gcc_assert (icode
!= CODE_FOR_nothing
);
370 if (to_mode
== full_mode
)
372 emit_unop_insn (icode
, to
, from
, UNKNOWN
);
376 new_from
= gen_reg_rtx (full_mode
);
377 emit_unop_insn (icode
, new_from
, from
, UNKNOWN
);
379 /* else proceed to integer conversions below. */
380 from_mode
= full_mode
;
384 /* Make sure both are fixed-point modes or both are not. */
385 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode
) ==
386 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode
));
387 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode
))
389 /* If we widen from_mode to to_mode and they are in the same class,
390 we won't saturate the result.
391 Otherwise, always saturate the result to play safe. */
392 if (GET_MODE_CLASS (from_mode
) == GET_MODE_CLASS (to_mode
)
393 && GET_MODE_SIZE (from_mode
) < GET_MODE_SIZE (to_mode
))
394 expand_fixed_convert (to
, from
, 0, 0);
396 expand_fixed_convert (to
, from
, 0, 1);
400 /* Now both modes are integers. */
402 /* Handle expanding beyond a word. */
403 if (GET_MODE_PRECISION (from_mode
) < GET_MODE_PRECISION (to_mode
)
404 && GET_MODE_PRECISION (to_mode
) > BITS_PER_WORD
)
411 machine_mode lowpart_mode
;
412 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
414 /* Try converting directly if the insn is supported. */
415 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
418 /* If FROM is a SUBREG, put it into a register. Do this
419 so that we always generate the same set of insns for
420 better cse'ing; if an intermediate assignment occurred,
421 we won't be doing the operation directly on the SUBREG. */
422 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
423 from
= force_reg (from_mode
, from
);
424 emit_unop_insn (code
, to
, from
, equiv_code
);
427 /* Next, try converting via full word. */
428 else if (GET_MODE_PRECISION (from_mode
) < BITS_PER_WORD
429 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
430 != CODE_FOR_nothing
))
432 rtx word_to
= gen_reg_rtx (word_mode
);
435 if (reg_overlap_mentioned_p (to
, from
))
436 from
= force_reg (from_mode
, from
);
439 convert_move (word_to
, from
, unsignedp
);
440 emit_unop_insn (code
, to
, word_to
, equiv_code
);
444 /* No special multiword conversion insn; do it by hand. */
447 /* Since we will turn this into a no conflict block, we must ensure
448 the source does not overlap the target so force it into an isolated
449 register when maybe so. Likewise for any MEM input, since the
450 conversion sequence might require several references to it and we
451 must ensure we're getting the same value every time. */
453 if (MEM_P (from
) || reg_overlap_mentioned_p (to
, from
))
454 from
= force_reg (from_mode
, from
);
456 /* Get a copy of FROM widened to a word, if necessary. */
457 if (GET_MODE_PRECISION (from_mode
) < BITS_PER_WORD
)
458 lowpart_mode
= word_mode
;
460 lowpart_mode
= from_mode
;
462 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
464 lowpart
= gen_lowpart (lowpart_mode
, to
);
465 emit_move_insn (lowpart
, lowfrom
);
467 /* Compute the value to put in each remaining word. */
469 fill_value
= const0_rtx
;
471 fill_value
= emit_store_flag_force (gen_reg_rtx (word_mode
),
472 LT
, lowfrom
, const0_rtx
,
473 lowpart_mode
, 0, -1);
475 /* Fill the remaining words. */
476 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
478 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
479 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
481 gcc_assert (subword
);
483 if (fill_value
!= subword
)
484 emit_move_insn (subword
, fill_value
);
487 insns
= get_insns ();
494 /* Truncating multi-word to a word or less. */
495 if (GET_MODE_PRECISION (from_mode
) > BITS_PER_WORD
496 && GET_MODE_PRECISION (to_mode
) <= BITS_PER_WORD
)
499 && ! MEM_VOLATILE_P (from
)
500 && direct_load
[(int) to_mode
]
501 && ! mode_dependent_address_p (XEXP (from
, 0),
502 MEM_ADDR_SPACE (from
)))
504 || GET_CODE (from
) == SUBREG
))
505 from
= force_reg (from_mode
, from
);
506 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
510 /* Now follow all the conversions between integers
511 no more than a word long. */
513 /* For truncation, usually we can just refer to FROM in a narrower mode. */
514 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
515 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode
, from_mode
))
518 && ! MEM_VOLATILE_P (from
)
519 && direct_load
[(int) to_mode
]
520 && ! mode_dependent_address_p (XEXP (from
, 0),
521 MEM_ADDR_SPACE (from
)))
523 || GET_CODE (from
) == SUBREG
))
524 from
= force_reg (from_mode
, from
);
525 if (REG_P (from
) && REGNO (from
) < FIRST_PSEUDO_REGISTER
526 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
527 from
= copy_to_reg (from
);
528 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
532 /* Handle extension. */
533 if (GET_MODE_PRECISION (to_mode
) > GET_MODE_PRECISION (from_mode
))
535 /* Convert directly if that works. */
536 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
539 emit_unop_insn (code
, to
, from
, equiv_code
);
544 machine_mode intermediate
;
548 /* Search for a mode to convert via. */
549 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
550 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
551 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
553 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
554 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode
, intermediate
)))
555 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
556 != CODE_FOR_nothing
))
558 convert_move (to
, convert_to_mode (intermediate
, from
,
559 unsignedp
), unsignedp
);
563 /* No suitable intermediate mode.
564 Generate what we need with shifts. */
565 shift_amount
= (GET_MODE_PRECISION (to_mode
)
566 - GET_MODE_PRECISION (from_mode
));
567 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
568 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
570 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
573 emit_move_insn (to
, tmp
);
578 /* Support special truncate insns for certain modes. */
579 if (convert_optab_handler (trunc_optab
, to_mode
,
580 from_mode
) != CODE_FOR_nothing
)
582 emit_unop_insn (convert_optab_handler (trunc_optab
, to_mode
, from_mode
),
587 /* Handle truncation of volatile memrefs, and so on;
588 the things that couldn't be truncated directly,
589 and for which there was no special instruction.
591 ??? Code above formerly short-circuited this, for most integer
592 mode pairs, with a force_reg in from_mode followed by a recursive
593 call to this routine. Appears always to have been wrong. */
594 if (GET_MODE_PRECISION (to_mode
) < GET_MODE_PRECISION (from_mode
))
596 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
597 emit_move_insn (to
, temp
);
601 /* Mode combination is not recognized. */
605 /* Return an rtx for a value that would result
606 from converting X to mode MODE.
607 Both X and MODE may be floating, or both integer.
608 UNSIGNEDP is nonzero if X is an unsigned value.
609 This can be done by referring to a part of X in place
610 or by copying to a new temporary with conversion. */
613 convert_to_mode (machine_mode mode
, rtx x
, int unsignedp
)
615 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
618 /* Return an rtx for a value that would result
619 from converting X from mode OLDMODE to mode MODE.
620 Both modes may be floating, or both integer.
621 UNSIGNEDP is nonzero if X is an unsigned value.
623 This can be done by referring to a part of X in place
624 or by copying to a new temporary with conversion.
626 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
629 convert_modes (machine_mode mode
, machine_mode oldmode
, rtx x
, int unsignedp
)
633 /* If FROM is a SUBREG that indicates that we have already done at least
634 the required extension, strip it. */
636 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
637 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
638 && SUBREG_CHECK_PROMOTED_SIGN (x
, unsignedp
))
639 x
= gen_lowpart (mode
, SUBREG_REG (x
));
641 if (GET_MODE (x
) != VOIDmode
)
642 oldmode
= GET_MODE (x
);
647 if (CONST_SCALAR_INT_P (x
) && GET_MODE_CLASS (mode
) == MODE_INT
)
649 /* If the caller did not tell us the old mode, then there is not
650 much to do with respect to canonicalization. We have to
651 assume that all the bits are significant. */
652 if (GET_MODE_CLASS (oldmode
) != MODE_INT
)
653 oldmode
= MAX_MODE_INT
;
654 wide_int w
= wide_int::from (std::make_pair (x
, oldmode
),
655 GET_MODE_PRECISION (mode
),
656 unsignedp
? UNSIGNED
: SIGNED
);
657 return immed_wide_int_const (w
, mode
);
660 /* We can do this with a gen_lowpart if both desired and current modes
661 are integer, and this is either a constant integer, a register, or a
663 if (GET_MODE_CLASS (mode
) == MODE_INT
664 && GET_MODE_CLASS (oldmode
) == MODE_INT
665 && GET_MODE_PRECISION (mode
) <= GET_MODE_PRECISION (oldmode
)
666 && ((MEM_P (x
) && !MEM_VOLATILE_P (x
) && direct_load
[(int) mode
])
668 && (!HARD_REGISTER_P (x
)
669 || HARD_REGNO_MODE_OK (REGNO (x
), mode
))
670 && TRULY_NOOP_TRUNCATION_MODES_P (mode
, GET_MODE (x
)))))
672 return gen_lowpart (mode
, x
);
674 /* Converting from integer constant into mode is always equivalent to an
676 if (VECTOR_MODE_P (mode
) && GET_MODE (x
) == VOIDmode
)
678 gcc_assert (GET_MODE_BITSIZE (mode
) == GET_MODE_BITSIZE (oldmode
));
679 return simplify_gen_subreg (mode
, x
, oldmode
, 0);
682 temp
= gen_reg_rtx (mode
);
683 convert_move (temp
, x
, unsignedp
);
687 /* Return the largest alignment we can use for doing a move (or store)
688 of MAX_PIECES. ALIGN is the largest alignment we could use. */
691 alignment_for_piecewise_move (unsigned int max_pieces
, unsigned int align
)
695 tmode
= mode_for_size (max_pieces
* BITS_PER_UNIT
, MODE_INT
, 1);
696 if (align
>= GET_MODE_ALIGNMENT (tmode
))
697 align
= GET_MODE_ALIGNMENT (tmode
);
700 machine_mode tmode
, xmode
;
702 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
), xmode
= tmode
;
704 xmode
= tmode
, tmode
= GET_MODE_WIDER_MODE (tmode
))
705 if (GET_MODE_SIZE (tmode
) > max_pieces
706 || SLOW_UNALIGNED_ACCESS (tmode
, align
))
709 align
= MAX (align
, GET_MODE_ALIGNMENT (xmode
));
715 /* Return the widest integer mode no wider than SIZE. If no such mode
716 can be found, return VOIDmode. */
719 widest_int_mode_for_size (unsigned int size
)
721 machine_mode tmode
, mode
= VOIDmode
;
723 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
724 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
725 if (GET_MODE_SIZE (tmode
) < size
)
731 /* Determine whether an operation OP on LEN bytes with alignment ALIGN can
732 and should be performed piecewise. */
735 can_do_by_pieces (unsigned HOST_WIDE_INT len
, unsigned int align
,
736 enum by_pieces_operation op
)
738 return targetm
.use_by_pieces_infrastructure_p (len
, align
, op
,
739 optimize_insn_for_speed_p ());
742 /* Determine whether the LEN bytes can be moved by using several move
743 instructions. Return nonzero if a call to move_by_pieces should
747 can_move_by_pieces (unsigned HOST_WIDE_INT len
, unsigned int align
)
749 return can_do_by_pieces (len
, align
, MOVE_BY_PIECES
);
752 /* Return number of insns required to perform operation OP by pieces
753 for L bytes. ALIGN (in bits) is maximum alignment we can assume. */
755 unsigned HOST_WIDE_INT
756 by_pieces_ninsns (unsigned HOST_WIDE_INT l
, unsigned int align
,
757 unsigned int max_size
, by_pieces_operation op
)
759 unsigned HOST_WIDE_INT n_insns
= 0;
761 align
= alignment_for_piecewise_move (MOVE_MAX_PIECES
, align
);
763 while (max_size
> 1 && l
> 0)
766 enum insn_code icode
;
768 mode
= widest_int_mode_for_size (max_size
);
770 if (mode
== VOIDmode
)
772 unsigned int modesize
= GET_MODE_SIZE (mode
);
774 icode
= optab_handler (mov_optab
, mode
);
775 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
777 unsigned HOST_WIDE_INT n_pieces
= l
/ modesize
;
785 case COMPARE_BY_PIECES
:
786 int batch
= targetm
.compare_by_pieces_branch_ratio (mode
);
787 int batch_ops
= 4 * batch
- 1;
788 int full
= n_pieces
/ batch
;
789 n_insns
+= full
* batch_ops
;
790 if (n_pieces
% batch
!= 0)
803 /* Used when performing piecewise block operations, holds information
804 about one of the memory objects involved. The member functions
805 can be used to generate code for loading from the object and
806 updating the address when iterating. */
810 /* The object being referenced, a MEM. Can be NULL_RTX to indicate
813 /* The address of the object. Can differ from that seen in the
814 MEM rtx if we copied the address to a register. */
816 /* Nonzero if the address on the object has an autoincrement already,
817 signifies whether that was an increment or decrement. */
818 signed char m_addr_inc
;
819 /* Nonzero if we intend to use autoinc without the address already
820 having autoinc form. We will insert add insns around each memory
821 reference, expecting later passes to form autoinc addressing modes.
822 The only supported options are predecrement and postincrement. */
823 signed char m_explicit_inc
;
824 /* True if we have either of the two possible cases of using
827 /* True if this is an address to be used for load operations rather
831 /* Optionally, a function to obtain constants for any given offset into
832 the objects, and data associated with it. */
833 by_pieces_constfn m_constfn
;
836 pieces_addr (rtx
, bool, by_pieces_constfn
, void *);
837 rtx
adjust (machine_mode
, HOST_WIDE_INT
);
838 void increment_address (HOST_WIDE_INT
);
839 void maybe_predec (HOST_WIDE_INT
);
840 void maybe_postinc (HOST_WIDE_INT
);
841 void decide_autoinc (machine_mode
, bool, HOST_WIDE_INT
);
848 /* Initialize a pieces_addr structure from an object OBJ. IS_LOAD is
849 true if the operation to be performed on this object is a load
850 rather than a store. For stores, OBJ can be NULL, in which case we
851 assume the operation is a stack push. For loads, the optional
852 CONSTFN and its associated CFNDATA can be used in place of the
855 pieces_addr::pieces_addr (rtx obj
, bool is_load
, by_pieces_constfn constfn
,
857 : m_obj (obj
), m_is_load (is_load
), m_constfn (constfn
), m_cfndata (cfndata
)
863 rtx addr
= XEXP (obj
, 0);
864 rtx_code code
= GET_CODE (addr
);
866 bool dec
= code
== PRE_DEC
|| code
== POST_DEC
;
867 bool inc
= code
== PRE_INC
|| code
== POST_INC
;
870 m_addr_inc
= dec
? -1 : 1;
872 /* While we have always looked for these codes here, the code
873 implementing the memory operation has never handled them.
874 Support could be added later if necessary or beneficial. */
875 gcc_assert (code
!= PRE_INC
&& code
!= POST_DEC
);
883 if (STACK_GROWS_DOWNWARD
)
889 gcc_assert (constfn
!= NULL
);
893 gcc_assert (is_load
);
896 /* Decide whether to use autoinc for an address involved in a memory op.
897 MODE is the mode of the accesses, REVERSE is true if we've decided to
898 perform the operation starting from the end, and LEN is the length of
899 the operation. Don't override an earlier decision to set m_auto. */
902 pieces_addr::decide_autoinc (machine_mode
ARG_UNUSED (mode
), bool reverse
,
905 if (m_auto
|| m_obj
== NULL_RTX
)
908 bool use_predec
= (m_is_load
909 ? USE_LOAD_PRE_DECREMENT (mode
)
910 : USE_STORE_PRE_DECREMENT (mode
));
911 bool use_postinc
= (m_is_load
912 ? USE_LOAD_POST_INCREMENT (mode
)
913 : USE_STORE_POST_INCREMENT (mode
));
914 machine_mode addr_mode
= get_address_mode (m_obj
);
916 if (use_predec
&& reverse
)
918 m_addr
= copy_to_mode_reg (addr_mode
,
919 plus_constant (addr_mode
,
924 else if (use_postinc
&& !reverse
)
926 m_addr
= copy_to_mode_reg (addr_mode
, m_addr
);
930 else if (CONSTANT_P (m_addr
))
931 m_addr
= copy_to_mode_reg (addr_mode
, m_addr
);
934 /* Adjust the address to refer to the data at OFFSET in MODE. If we
935 are using autoincrement for this address, we don't add the offset,
936 but we still modify the MEM's properties. */
939 pieces_addr::adjust (machine_mode mode
, HOST_WIDE_INT offset
)
942 return m_constfn (m_cfndata
, offset
, mode
);
943 if (m_obj
== NULL_RTX
)
946 return adjust_automodify_address (m_obj
, mode
, m_addr
, offset
);
948 return adjust_address (m_obj
, mode
, offset
);
951 /* Emit an add instruction to increment the address by SIZE. */
954 pieces_addr::increment_address (HOST_WIDE_INT size
)
956 rtx amount
= gen_int_mode (size
, GET_MODE (m_addr
));
957 emit_insn (gen_add2_insn (m_addr
, amount
));
960 /* If we are supposed to decrement the address after each access, emit code
961 to do so now. Increment by SIZE (which has should have the correct sign
965 pieces_addr::maybe_predec (HOST_WIDE_INT size
)
967 if (m_explicit_inc
>= 0)
969 gcc_assert (HAVE_PRE_DECREMENT
);
970 increment_address (size
);
973 /* If we are supposed to decrement the address after each access, emit code
974 to do so now. Increment by SIZE. */
977 pieces_addr::maybe_postinc (HOST_WIDE_INT size
)
979 if (m_explicit_inc
<= 0)
981 gcc_assert (HAVE_POST_INCREMENT
);
982 increment_address (size
);
985 /* This structure is used by do_op_by_pieces to describe the operation
991 pieces_addr m_to
, m_from
;
992 unsigned HOST_WIDE_INT m_len
;
993 HOST_WIDE_INT m_offset
;
994 unsigned int m_align
;
995 unsigned int m_max_size
;
998 /* Virtual functions, overriden by derived classes for the specific
1000 virtual void generate (rtx
, rtx
, machine_mode
) = 0;
1001 virtual bool prepare_mode (machine_mode
, unsigned int) = 0;
1002 virtual void finish_mode (machine_mode
)
1007 op_by_pieces_d (rtx
, bool, rtx
, bool, by_pieces_constfn
, void *,
1008 unsigned HOST_WIDE_INT
, unsigned int);
1012 /* The constructor for an op_by_pieces_d structure. We require two
1013 objects named TO and FROM, which are identified as loads or stores
1014 by TO_LOAD and FROM_LOAD. If FROM is a load, the optional FROM_CFN
1015 and its associated FROM_CFN_DATA can be used to replace loads with
1016 constant values. LEN describes the length of the operation. */
1018 op_by_pieces_d::op_by_pieces_d (rtx to
, bool to_load
,
1019 rtx from
, bool from_load
,
1020 by_pieces_constfn from_cfn
,
1021 void *from_cfn_data
,
1022 unsigned HOST_WIDE_INT len
,
1024 : m_to (to
, to_load
, NULL
, NULL
),
1025 m_from (from
, from_load
, from_cfn
, from_cfn_data
),
1026 m_len (len
), m_max_size (MOVE_MAX_PIECES
+ 1)
1028 int toi
= m_to
.get_addr_inc ();
1029 int fromi
= m_from
.get_addr_inc ();
1030 if (toi
>= 0 && fromi
>= 0)
1032 else if (toi
<= 0 && fromi
<= 0)
1037 m_offset
= m_reverse
? len
: 0;
1038 align
= MIN (to
? MEM_ALIGN (to
) : align
,
1039 from
? MEM_ALIGN (from
) : align
);
1041 /* If copying requires more than two move insns,
1042 copy addresses to registers (to make displacements shorter)
1043 and use post-increment if available. */
1044 if (by_pieces_ninsns (len
, align
, m_max_size
, MOVE_BY_PIECES
) > 2)
1046 /* Find the mode of the largest comparison. */
1047 machine_mode mode
= widest_int_mode_for_size (m_max_size
);
1049 m_from
.decide_autoinc (mode
, m_reverse
, len
);
1050 m_to
.decide_autoinc (mode
, m_reverse
, len
);
1053 align
= alignment_for_piecewise_move (MOVE_MAX_PIECES
, align
);
1057 /* This function contains the main loop used for expanding a block
1058 operation. First move what we can in the largest integer mode,
1059 then go to successively smaller modes. For every access, call
1060 GENFUN with the two operands and the EXTRA_DATA. */
1063 op_by_pieces_d::run ()
1065 while (m_max_size
> 1 && m_len
> 0)
1067 machine_mode mode
= widest_int_mode_for_size (m_max_size
);
1069 if (mode
== VOIDmode
)
1072 if (prepare_mode (mode
, m_align
))
1074 unsigned int size
= GET_MODE_SIZE (mode
);
1075 rtx to1
= NULL_RTX
, from1
;
1077 while (m_len
>= size
)
1082 to1
= m_to
.adjust (mode
, m_offset
);
1083 from1
= m_from
.adjust (mode
, m_offset
);
1085 m_to
.maybe_predec (-(HOST_WIDE_INT
)size
);
1086 m_from
.maybe_predec (-(HOST_WIDE_INT
)size
);
1088 generate (to1
, from1
, mode
);
1090 m_to
.maybe_postinc (size
);
1091 m_from
.maybe_postinc (size
);
1102 m_max_size
= GET_MODE_SIZE (mode
);
1105 /* The code above should have handled everything. */
1106 gcc_assert (!m_len
);
1109 /* Derived class from op_by_pieces_d, providing support for block move
1112 class move_by_pieces_d
: public op_by_pieces_d
1114 insn_gen_fn m_gen_fun
;
1115 void generate (rtx
, rtx
, machine_mode
);
1116 bool prepare_mode (machine_mode
, unsigned int);
1119 move_by_pieces_d (rtx to
, rtx from
, unsigned HOST_WIDE_INT len
,
1121 : op_by_pieces_d (to
, false, from
, true, NULL
, NULL
, len
, align
)
1124 rtx
finish_endp (int);
1127 /* Return true if MODE can be used for a set of copies, given an
1128 alignment ALIGN. Prepare whatever data is necessary for later
1129 calls to generate. */
1132 move_by_pieces_d::prepare_mode (machine_mode mode
, unsigned int align
)
1134 insn_code icode
= optab_handler (mov_optab
, mode
);
1135 m_gen_fun
= GEN_FCN (icode
);
1136 return icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
);
1139 /* A callback used when iterating for a compare_by_pieces_operation.
1140 OP0 and OP1 are the values that have been loaded and should be
1141 compared in MODE. If OP0 is NULL, this means we should generate a
1142 push; otherwise EXTRA_DATA holds a pointer to a pointer to the insn
1143 gen function that should be used to generate the mode. */
1146 move_by_pieces_d::generate (rtx op0
, rtx op1
,
1147 machine_mode mode ATTRIBUTE_UNUSED
)
1149 #ifdef PUSH_ROUNDING
1150 if (op0
== NULL_RTX
)
1152 emit_single_push_insn (mode
, op1
, NULL
);
1156 emit_insn (m_gen_fun (op0
, op1
));
1159 /* Perform the final adjustment at the end of a string to obtain the
1160 correct return value for the block operation. If ENDP is 1 return
1161 memory at the end ala mempcpy, and if ENDP is 2 return memory the
1162 end minus one byte ala stpcpy. */
1165 move_by_pieces_d::finish_endp (int endp
)
1167 gcc_assert (!m_reverse
);
1170 m_to
.maybe_postinc (-1);
1173 return m_to
.adjust (QImode
, m_offset
);
1176 /* Generate several move instructions to copy LEN bytes from block FROM to
1177 block TO. (These are MEM rtx's with BLKmode).
1179 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1180 used to push FROM to the stack.
1182 ALIGN is maximum stack alignment we can assume.
1184 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1185 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1189 move_by_pieces (rtx to
, rtx from
, unsigned HOST_WIDE_INT len
,
1190 unsigned int align
, int endp
)
1192 #ifndef PUSH_ROUNDING
1197 move_by_pieces_d
data (to
, from
, len
, align
);
1202 return data
.finish_endp (endp
);
1207 /* Derived class from op_by_pieces_d, providing support for block move
1210 class store_by_pieces_d
: public op_by_pieces_d
1212 insn_gen_fn m_gen_fun
;
1213 void generate (rtx
, rtx
, machine_mode
);
1214 bool prepare_mode (machine_mode
, unsigned int);
1217 store_by_pieces_d (rtx to
, by_pieces_constfn cfn
, void *cfn_data
,
1218 unsigned HOST_WIDE_INT len
, unsigned int align
)
1219 : op_by_pieces_d (to
, false, NULL_RTX
, true, cfn
, cfn_data
, len
, align
)
1222 rtx
finish_endp (int);
1225 /* Return true if MODE can be used for a set of stores, given an
1226 alignment ALIGN. Prepare whatever data is necessary for later
1227 calls to generate. */
1230 store_by_pieces_d::prepare_mode (machine_mode mode
, unsigned int align
)
1232 insn_code icode
= optab_handler (mov_optab
, mode
);
1233 m_gen_fun
= GEN_FCN (icode
);
1234 return icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
);
1237 /* A callback used when iterating for a store_by_pieces_operation.
1238 OP0 and OP1 are the values that have been loaded and should be
1239 compared in MODE. If OP0 is NULL, this means we should generate a
1240 push; otherwise EXTRA_DATA holds a pointer to a pointer to the insn
1241 gen function that should be used to generate the mode. */
1244 store_by_pieces_d::generate (rtx op0
, rtx op1
, machine_mode
)
1246 emit_insn (m_gen_fun (op0
, op1
));
1249 /* Perform the final adjustment at the end of a string to obtain the
1250 correct return value for the block operation. If ENDP is 1 return
1251 memory at the end ala mempcpy, and if ENDP is 2 return memory the
1252 end minus one byte ala stpcpy. */
1255 store_by_pieces_d::finish_endp (int endp
)
1257 gcc_assert (!m_reverse
);
1260 m_to
.maybe_postinc (-1);
1263 return m_to
.adjust (QImode
, m_offset
);
1266 /* Determine whether the LEN bytes generated by CONSTFUN can be
1267 stored to memory using several move instructions. CONSTFUNDATA is
1268 a pointer which will be passed as argument in every CONSTFUN call.
1269 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
1270 a memset operation and false if it's a copy of a constant string.
1271 Return nonzero if a call to store_by_pieces should succeed. */
1274 can_store_by_pieces (unsigned HOST_WIDE_INT len
,
1275 rtx (*constfun
) (void *, HOST_WIDE_INT
, machine_mode
),
1276 void *constfundata
, unsigned int align
, bool memsetp
)
1278 unsigned HOST_WIDE_INT l
;
1279 unsigned int max_size
;
1280 HOST_WIDE_INT offset
= 0;
1282 enum insn_code icode
;
1284 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
1285 rtx cst ATTRIBUTE_UNUSED
;
1290 if (!targetm
.use_by_pieces_infrastructure_p (len
, align
,
1294 optimize_insn_for_speed_p ()))
1297 align
= alignment_for_piecewise_move (STORE_MAX_PIECES
, align
);
1299 /* We would first store what we can in the largest integer mode, then go to
1300 successively smaller modes. */
1303 reverse
<= (HAVE_PRE_DECREMENT
|| HAVE_POST_DECREMENT
);
1307 max_size
= STORE_MAX_PIECES
+ 1;
1308 while (max_size
> 1 && l
> 0)
1310 mode
= widest_int_mode_for_size (max_size
);
1312 if (mode
== VOIDmode
)
1315 icode
= optab_handler (mov_optab
, mode
);
1316 if (icode
!= CODE_FOR_nothing
1317 && align
>= GET_MODE_ALIGNMENT (mode
))
1319 unsigned int size
= GET_MODE_SIZE (mode
);
1326 cst
= (*constfun
) (constfundata
, offset
, mode
);
1327 if (!targetm
.legitimate_constant_p (mode
, cst
))
1337 max_size
= GET_MODE_SIZE (mode
);
1340 /* The code above should have handled everything. */
1347 /* Generate several move instructions to store LEN bytes generated by
1348 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
1349 pointer which will be passed as argument in every CONSTFUN call.
1350 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
1351 a memset operation and false if it's a copy of a constant string.
1352 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1353 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1357 store_by_pieces (rtx to
, unsigned HOST_WIDE_INT len
,
1358 rtx (*constfun
) (void *, HOST_WIDE_INT
, machine_mode
),
1359 void *constfundata
, unsigned int align
, bool memsetp
, int endp
)
1363 gcc_assert (endp
!= 2);
1367 gcc_assert (targetm
.use_by_pieces_infrastructure_p
1369 memsetp
? SET_BY_PIECES
: STORE_BY_PIECES
,
1370 optimize_insn_for_speed_p ()));
1372 store_by_pieces_d
data (to
, constfun
, constfundata
, len
, align
);
1376 return data
.finish_endp (endp
);
1381 /* Callback routine for clear_by_pieces.
1382 Return const0_rtx unconditionally. */
1385 clear_by_pieces_1 (void *, HOST_WIDE_INT
, machine_mode
)
1390 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
1391 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
1394 clear_by_pieces (rtx to
, unsigned HOST_WIDE_INT len
, unsigned int align
)
1399 store_by_pieces_d
data (to
, clear_by_pieces_1
, NULL
, len
, align
);
1403 /* Context used by compare_by_pieces_genfn. It stores the fail label
1404 to jump to in case of miscomparison, and for branch ratios greater than 1,
1405 it stores an accumulator and the current and maximum counts before
1406 emitting another branch. */
1408 class compare_by_pieces_d
: public op_by_pieces_d
1410 rtx_code_label
*m_fail_label
;
1412 int m_count
, m_batch
;
1414 void generate (rtx
, rtx
, machine_mode
);
1415 bool prepare_mode (machine_mode
, unsigned int);
1416 void finish_mode (machine_mode
);
1418 compare_by_pieces_d (rtx op0
, rtx op1
, by_pieces_constfn op1_cfn
,
1419 void *op1_cfn_data
, HOST_WIDE_INT len
, int align
,
1420 rtx_code_label
*fail_label
)
1421 : op_by_pieces_d (op0
, true, op1
, true, op1_cfn
, op1_cfn_data
, len
, align
)
1423 m_fail_label
= fail_label
;
1427 /* A callback used when iterating for a compare_by_pieces_operation.
1428 OP0 and OP1 are the values that have been loaded and should be
1429 compared in MODE. DATA holds a pointer to the compare_by_pieces_data
1430 context structure. */
1433 compare_by_pieces_d::generate (rtx op0
, rtx op1
, machine_mode mode
)
1437 rtx temp
= expand_binop (mode
, sub_optab
, op0
, op1
, NULL_RTX
,
1438 true, OPTAB_LIB_WIDEN
);
1440 temp
= expand_binop (mode
, ior_optab
, m_accumulator
, temp
, temp
,
1441 true, OPTAB_LIB_WIDEN
);
1442 m_accumulator
= temp
;
1444 if (++m_count
< m_batch
)
1448 op0
= m_accumulator
;
1450 m_accumulator
= NULL_RTX
;
1452 do_compare_rtx_and_jump (op0
, op1
, NE
, true, mode
, NULL_RTX
, NULL
,
1456 /* Return true if MODE can be used for a set of moves and comparisons,
1457 given an alignment ALIGN. Prepare whatever data is necessary for
1458 later calls to generate. */
1461 compare_by_pieces_d::prepare_mode (machine_mode mode
, unsigned int align
)
1463 insn_code icode
= optab_handler (mov_optab
, mode
);
1464 if (icode
== CODE_FOR_nothing
1465 || align
< GET_MODE_ALIGNMENT (mode
)
1466 || !can_compare_p (EQ
, mode
, ccp_jump
))
1468 m_batch
= targetm
.compare_by_pieces_branch_ratio (mode
);
1471 m_accumulator
= NULL_RTX
;
1476 /* Called after expanding a series of comparisons in MODE. If we have
1477 accumulated results for which we haven't emitted a branch yet, do
1481 compare_by_pieces_d::finish_mode (machine_mode mode
)
1483 if (m_accumulator
!= NULL_RTX
)
1484 do_compare_rtx_and_jump (m_accumulator
, const0_rtx
, NE
, true, mode
,
1485 NULL_RTX
, NULL
, m_fail_label
, -1);
1488 /* Generate several move instructions to compare LEN bytes from blocks
1489 ARG0 and ARG1. (These are MEM rtx's with BLKmode).
1491 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1492 used to push FROM to the stack.
1494 ALIGN is maximum stack alignment we can assume.
1496 Optionally, the caller can pass a constfn and associated data in A1_CFN
1497 and A1_CFN_DATA. describing that the second operand being compared is a
1498 known constant and how to obtain its data. */
1501 compare_by_pieces (rtx arg0
, rtx arg1
, unsigned HOST_WIDE_INT len
,
1502 rtx target
, unsigned int align
,
1503 by_pieces_constfn a1_cfn
, void *a1_cfn_data
)
1505 rtx_code_label
*fail_label
= gen_label_rtx ();
1506 rtx_code_label
*end_label
= gen_label_rtx ();
1508 if (target
== NULL_RTX
1509 || !REG_P (target
) || REGNO (target
) < FIRST_PSEUDO_REGISTER
)
1510 target
= gen_reg_rtx (TYPE_MODE (integer_type_node
));
1512 compare_by_pieces_d
data (arg0
, arg1
, a1_cfn
, a1_cfn_data
, len
, align
,
1517 emit_move_insn (target
, const0_rtx
);
1518 emit_jump (end_label
);
1520 emit_label (fail_label
);
1521 emit_move_insn (target
, const1_rtx
);
1522 emit_label (end_label
);
1527 /* Emit code to move a block Y to a block X. This may be done with
1528 string-move instructions, with multiple scalar move instructions,
1529 or with a library call.
1531 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1532 SIZE is an rtx that says how long they are.
1533 ALIGN is the maximum alignment we can assume they have.
1534 METHOD describes what kind of copy this is, and what mechanisms may be used.
1535 MIN_SIZE is the minimal size of block to move
1536 MAX_SIZE is the maximal size of block to move, if it can not be represented
1537 in unsigned HOST_WIDE_INT, than it is mask of all ones.
1539 Return the address of the new block, if memcpy is called and returns it,
1543 emit_block_move_hints (rtx x
, rtx y
, rtx size
, enum block_op_methods method
,
1544 unsigned int expected_align
, HOST_WIDE_INT expected_size
,
1545 unsigned HOST_WIDE_INT min_size
,
1546 unsigned HOST_WIDE_INT max_size
,
1547 unsigned HOST_WIDE_INT probable_max_size
)
1554 if (CONST_INT_P (size
) && INTVAL (size
) == 0)
1559 case BLOCK_OP_NORMAL
:
1560 case BLOCK_OP_TAILCALL
:
1561 may_use_call
= true;
1564 case BLOCK_OP_CALL_PARM
:
1565 may_use_call
= block_move_libcall_safe_for_call_parm ();
1567 /* Make inhibit_defer_pop nonzero around the library call
1568 to force it to pop the arguments right away. */
1572 case BLOCK_OP_NO_LIBCALL
:
1573 may_use_call
= false;
1580 gcc_assert (MEM_P (x
) && MEM_P (y
));
1581 align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1582 gcc_assert (align
>= BITS_PER_UNIT
);
1584 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1585 block copy is more efficient for other large modes, e.g. DCmode. */
1586 x
= adjust_address (x
, BLKmode
, 0);
1587 y
= adjust_address (y
, BLKmode
, 0);
1589 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1590 can be incorrect is coming from __builtin_memcpy. */
1591 if (CONST_INT_P (size
))
1593 x
= shallow_copy_rtx (x
);
1594 y
= shallow_copy_rtx (y
);
1595 set_mem_size (x
, INTVAL (size
));
1596 set_mem_size (y
, INTVAL (size
));
1599 if (CONST_INT_P (size
) && can_move_by_pieces (INTVAL (size
), align
))
1600 move_by_pieces (x
, y
, INTVAL (size
), align
, 0);
1601 else if (emit_block_move_via_movmem (x
, y
, size
, align
,
1602 expected_align
, expected_size
,
1603 min_size
, max_size
, probable_max_size
))
1605 else if (may_use_call
1606 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
))
1607 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y
)))
1609 /* Since x and y are passed to a libcall, mark the corresponding
1610 tree EXPR as addressable. */
1611 tree y_expr
= MEM_EXPR (y
);
1612 tree x_expr
= MEM_EXPR (x
);
1614 mark_addressable (y_expr
);
1616 mark_addressable (x_expr
);
1617 retval
= emit_block_copy_via_libcall (x
, y
, size
,
1618 method
== BLOCK_OP_TAILCALL
);
1622 emit_block_move_via_loop (x
, y
, size
, align
);
1624 if (method
== BLOCK_OP_CALL_PARM
)
1631 emit_block_move (rtx x
, rtx y
, rtx size
, enum block_op_methods method
)
1633 unsigned HOST_WIDE_INT max
, min
= 0;
1634 if (GET_CODE (size
) == CONST_INT
)
1635 min
= max
= UINTVAL (size
);
1637 max
= GET_MODE_MASK (GET_MODE (size
));
1638 return emit_block_move_hints (x
, y
, size
, method
, 0, -1,
1642 /* A subroutine of emit_block_move. Returns true if calling the
1643 block move libcall will not clobber any parameters which may have
1644 already been placed on the stack. */
1647 block_move_libcall_safe_for_call_parm (void)
1649 #if defined (REG_PARM_STACK_SPACE)
1653 /* If arguments are pushed on the stack, then they're safe. */
1657 /* If registers go on the stack anyway, any argument is sure to clobber
1658 an outgoing argument. */
1659 #if defined (REG_PARM_STACK_SPACE)
1660 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1661 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1662 depend on its argument. */
1664 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn
? NULL_TREE
: TREE_TYPE (fn
)))
1665 && REG_PARM_STACK_SPACE (fn
) != 0)
1669 /* If any argument goes in memory, then it might clobber an outgoing
1672 CUMULATIVE_ARGS args_so_far_v
;
1673 cumulative_args_t args_so_far
;
1676 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1677 INIT_CUMULATIVE_ARGS (args_so_far_v
, TREE_TYPE (fn
), NULL_RTX
, 0, 3);
1678 args_so_far
= pack_cumulative_args (&args_so_far_v
);
1680 arg
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
1681 for ( ; arg
!= void_list_node
; arg
= TREE_CHAIN (arg
))
1683 machine_mode mode
= TYPE_MODE (TREE_VALUE (arg
));
1684 rtx tmp
= targetm
.calls
.function_arg (args_so_far
, mode
,
1686 if (!tmp
|| !REG_P (tmp
))
1688 if (targetm
.calls
.arg_partial_bytes (args_so_far
, mode
, NULL
, 1))
1690 targetm
.calls
.function_arg_advance (args_so_far
, mode
,
1697 /* A subroutine of emit_block_move. Expand a movmem pattern;
1698 return true if successful. */
1701 emit_block_move_via_movmem (rtx x
, rtx y
, rtx size
, unsigned int align
,
1702 unsigned int expected_align
, HOST_WIDE_INT expected_size
,
1703 unsigned HOST_WIDE_INT min_size
,
1704 unsigned HOST_WIDE_INT max_size
,
1705 unsigned HOST_WIDE_INT probable_max_size
)
1707 int save_volatile_ok
= volatile_ok
;
1710 if (expected_align
< align
)
1711 expected_align
= align
;
1712 if (expected_size
!= -1)
1714 if ((unsigned HOST_WIDE_INT
)expected_size
> probable_max_size
)
1715 expected_size
= probable_max_size
;
1716 if ((unsigned HOST_WIDE_INT
)expected_size
< min_size
)
1717 expected_size
= min_size
;
1720 /* Since this is a move insn, we don't care about volatility. */
1723 /* Try the most limited insn first, because there's no point
1724 including more than one in the machine description unless
1725 the more limited one has some advantage. */
1727 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1728 mode
= GET_MODE_WIDER_MODE (mode
))
1730 enum insn_code code
= direct_optab_handler (movmem_optab
, mode
);
1732 if (code
!= CODE_FOR_nothing
1733 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1734 here because if SIZE is less than the mode mask, as it is
1735 returned by the macro, it will definitely be less than the
1736 actual mode mask. Since SIZE is within the Pmode address
1737 space, we limit MODE to Pmode. */
1738 && ((CONST_INT_P (size
)
1739 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1740 <= (GET_MODE_MASK (mode
) >> 1)))
1741 || max_size
<= (GET_MODE_MASK (mode
) >> 1)
1742 || GET_MODE_BITSIZE (mode
) >= GET_MODE_BITSIZE (Pmode
)))
1744 struct expand_operand ops
[9];
1747 /* ??? When called via emit_block_move_for_call, it'd be
1748 nice if there were some way to inform the backend, so
1749 that it doesn't fail the expansion because it thinks
1750 emitting the libcall would be more efficient. */
1751 nops
= insn_data
[(int) code
].n_generator_args
;
1752 gcc_assert (nops
== 4 || nops
== 6 || nops
== 8 || nops
== 9);
1754 create_fixed_operand (&ops
[0], x
);
1755 create_fixed_operand (&ops
[1], y
);
1756 /* The check above guarantees that this size conversion is valid. */
1757 create_convert_operand_to (&ops
[2], size
, mode
, true);
1758 create_integer_operand (&ops
[3], align
/ BITS_PER_UNIT
);
1761 create_integer_operand (&ops
[4], expected_align
/ BITS_PER_UNIT
);
1762 create_integer_operand (&ops
[5], expected_size
);
1766 create_integer_operand (&ops
[6], min_size
);
1767 /* If we can not represent the maximal size,
1768 make parameter NULL. */
1769 if ((HOST_WIDE_INT
) max_size
!= -1)
1770 create_integer_operand (&ops
[7], max_size
);
1772 create_fixed_operand (&ops
[7], NULL
);
1776 /* If we can not represent the maximal size,
1777 make parameter NULL. */
1778 if ((HOST_WIDE_INT
) probable_max_size
!= -1)
1779 create_integer_operand (&ops
[8], probable_max_size
);
1781 create_fixed_operand (&ops
[8], NULL
);
1783 if (maybe_expand_insn (code
, nops
, ops
))
1785 volatile_ok
= save_volatile_ok
;
1791 volatile_ok
= save_volatile_ok
;
1795 /* A subroutine of emit_block_move. Copy the data via an explicit
1796 loop. This is used only when libcalls are forbidden. */
1797 /* ??? It'd be nice to copy in hunks larger than QImode. */
1800 emit_block_move_via_loop (rtx x
, rtx y
, rtx size
,
1801 unsigned int align ATTRIBUTE_UNUSED
)
1803 rtx_code_label
*cmp_label
, *top_label
;
1804 rtx iter
, x_addr
, y_addr
, tmp
;
1805 machine_mode x_addr_mode
= get_address_mode (x
);
1806 machine_mode y_addr_mode
= get_address_mode (y
);
1807 machine_mode iter_mode
;
1809 iter_mode
= GET_MODE (size
);
1810 if (iter_mode
== VOIDmode
)
1811 iter_mode
= word_mode
;
1813 top_label
= gen_label_rtx ();
1814 cmp_label
= gen_label_rtx ();
1815 iter
= gen_reg_rtx (iter_mode
);
1817 emit_move_insn (iter
, const0_rtx
);
1819 x_addr
= force_operand (XEXP (x
, 0), NULL_RTX
);
1820 y_addr
= force_operand (XEXP (y
, 0), NULL_RTX
);
1821 do_pending_stack_adjust ();
1823 emit_jump (cmp_label
);
1824 emit_label (top_label
);
1826 tmp
= convert_modes (x_addr_mode
, iter_mode
, iter
, true);
1827 x_addr
= simplify_gen_binary (PLUS
, x_addr_mode
, x_addr
, tmp
);
1829 if (x_addr_mode
!= y_addr_mode
)
1830 tmp
= convert_modes (y_addr_mode
, iter_mode
, iter
, true);
1831 y_addr
= simplify_gen_binary (PLUS
, y_addr_mode
, y_addr
, tmp
);
1833 x
= change_address (x
, QImode
, x_addr
);
1834 y
= change_address (y
, QImode
, y_addr
);
1836 emit_move_insn (x
, y
);
1838 tmp
= expand_simple_binop (iter_mode
, PLUS
, iter
, const1_rtx
, iter
,
1839 true, OPTAB_LIB_WIDEN
);
1841 emit_move_insn (iter
, tmp
);
1843 emit_label (cmp_label
);
1845 emit_cmp_and_jump_insns (iter
, size
, LT
, NULL_RTX
, iter_mode
,
1846 true, top_label
, REG_BR_PROB_BASE
* 90 / 100);
1849 /* Expand a call to memcpy or memmove or memcmp, and return the result.
1850 TAILCALL is true if this is a tail call. */
1853 emit_block_op_via_libcall (enum built_in_function fncode
, rtx dst
, rtx src
,
1854 rtx size
, bool tailcall
)
1856 rtx dst_addr
, src_addr
;
1857 tree call_expr
, dst_tree
, src_tree
, size_tree
;
1858 machine_mode size_mode
;
1860 dst_addr
= copy_addr_to_reg (XEXP (dst
, 0));
1861 dst_addr
= convert_memory_address (ptr_mode
, dst_addr
);
1862 dst_tree
= make_tree (ptr_type_node
, dst_addr
);
1864 src_addr
= copy_addr_to_reg (XEXP (src
, 0));
1865 src_addr
= convert_memory_address (ptr_mode
, src_addr
);
1866 src_tree
= make_tree (ptr_type_node
, src_addr
);
1868 size_mode
= TYPE_MODE (sizetype
);
1869 size
= convert_to_mode (size_mode
, size
, 1);
1870 size
= copy_to_mode_reg (size_mode
, size
);
1871 size_tree
= make_tree (sizetype
, size
);
1873 /* It is incorrect to use the libcall calling conventions for calls to
1874 memcpy/memmove/memcmp because they can be provided by the user. */
1875 tree fn
= builtin_decl_implicit (fncode
);
1876 call_expr
= build_call_expr (fn
, 3, dst_tree
, src_tree
, size_tree
);
1877 CALL_EXPR_TAILCALL (call_expr
) = tailcall
;
1879 return expand_call (call_expr
, NULL_RTX
, false);
1882 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
1883 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
1884 otherwise return null. */
1887 expand_cmpstrn_or_cmpmem (insn_code icode
, rtx target
, rtx arg1_rtx
,
1888 rtx arg2_rtx
, tree arg3_type
, rtx arg3_rtx
,
1889 HOST_WIDE_INT align
)
1891 machine_mode insn_mode
= insn_data
[icode
].operand
[0].mode
;
1893 if (target
&& (!REG_P (target
) || HARD_REGISTER_P (target
)))
1896 struct expand_operand ops
[5];
1897 create_output_operand (&ops
[0], target
, insn_mode
);
1898 create_fixed_operand (&ops
[1], arg1_rtx
);
1899 create_fixed_operand (&ops
[2], arg2_rtx
);
1900 create_convert_operand_from (&ops
[3], arg3_rtx
, TYPE_MODE (arg3_type
),
1901 TYPE_UNSIGNED (arg3_type
));
1902 create_integer_operand (&ops
[4], align
);
1903 if (maybe_expand_insn (icode
, 5, ops
))
1904 return ops
[0].value
;
1908 /* Expand a block compare between X and Y with length LEN using the
1909 cmpmem optab, placing the result in TARGET. LEN_TYPE is the type
1910 of the expression that was used to calculate the length. ALIGN
1911 gives the known minimum common alignment. */
1914 emit_block_cmp_via_cmpmem (rtx x
, rtx y
, rtx len
, tree len_type
, rtx target
,
1917 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
1918 implementing memcmp because it will stop if it encounters two
1920 insn_code icode
= direct_optab_handler (cmpmem_optab
, SImode
);
1922 if (icode
== CODE_FOR_nothing
)
1925 return expand_cmpstrn_or_cmpmem (icode
, target
, x
, y
, len_type
, len
, align
);
1928 /* Emit code to compare a block Y to a block X. This may be done with
1929 string-compare instructions, with multiple scalar instructions,
1930 or with a library call.
1932 Both X and Y must be MEM rtx's. LEN is an rtx that says how long
1933 they are. LEN_TYPE is the type of the expression that was used to
1936 If EQUALITY_ONLY is true, it means we don't have to return the tri-state
1937 value of a normal memcmp call, instead we can just compare for equality.
1938 If FORCE_LIBCALL is true, we should emit a call to memcmp rather than
1941 Optionally, the caller can pass a constfn and associated data in Y_CFN
1942 and Y_CFN_DATA. describing that the second operand being compared is a
1943 known constant and how to obtain its data.
1944 Return the result of the comparison, or NULL_RTX if we failed to
1945 perform the operation. */
1948 emit_block_cmp_hints (rtx x
, rtx y
, rtx len
, tree len_type
, rtx target
,
1949 bool equality_only
, by_pieces_constfn y_cfn
,
1954 if (CONST_INT_P (len
) && INTVAL (len
) == 0)
1957 gcc_assert (MEM_P (x
) && MEM_P (y
));
1958 unsigned int align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1959 gcc_assert (align
>= BITS_PER_UNIT
);
1961 x
= adjust_address (x
, BLKmode
, 0);
1962 y
= adjust_address (y
, BLKmode
, 0);
1965 && CONST_INT_P (len
)
1966 && can_do_by_pieces (INTVAL (len
), align
, COMPARE_BY_PIECES
))
1967 result
= compare_by_pieces (x
, y
, INTVAL (len
), target
, align
,
1970 result
= emit_block_cmp_via_cmpmem (x
, y
, len
, len_type
, target
, align
);
1975 /* Copy all or part of a value X into registers starting at REGNO.
1976 The number of registers to be filled is NREGS. */
1979 move_block_to_reg (int regno
, rtx x
, int nregs
, machine_mode mode
)
1984 if (CONSTANT_P (x
) && !targetm
.legitimate_constant_p (mode
, x
))
1985 x
= validize_mem (force_const_mem (mode
, x
));
1987 /* See if the machine can do this with a load multiple insn. */
1988 if (targetm
.have_load_multiple ())
1990 rtx_insn
*last
= get_last_insn ();
1991 rtx first
= gen_rtx_REG (word_mode
, regno
);
1992 if (rtx_insn
*pat
= targetm
.gen_load_multiple (first
, x
,
1999 delete_insns_since (last
);
2002 for (int i
= 0; i
< nregs
; i
++)
2003 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
2004 operand_subword_force (x
, i
, mode
));
2007 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2008 The number of registers to be filled is NREGS. */
2011 move_block_from_reg (int regno
, rtx x
, int nregs
)
2016 /* See if the machine can do this with a store multiple insn. */
2017 if (targetm
.have_store_multiple ())
2019 rtx_insn
*last
= get_last_insn ();
2020 rtx first
= gen_rtx_REG (word_mode
, regno
);
2021 if (rtx_insn
*pat
= targetm
.gen_store_multiple (x
, first
,
2028 delete_insns_since (last
);
2031 for (int i
= 0; i
< nregs
; i
++)
2033 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
2037 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i == 1)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
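
/* Illustrative sketch, not part of the original source: walk a PARALLEL
   group such as the one built above and compute how many bytes of the
   underlying value it covers.  The helper name is hypothetical.  */

static HOST_WIDE_INT
group_rtx_extent_sketch (rtx group)
{
  HOST_WIDE_INT extent = 0;

  gcc_assert (GET_CODE (group) == PARALLEL);

  /* Slot 0 may be NULL when part of the value lives on the stack.  */
  for (int i = XEXP (XVECEXP (group, 0, 0), 0) ? 0 : 1;
       i < XVECLEN (group, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (group, 0, i), 0);
      HOST_WIDE_INT offset = INTVAL (XEXP (XVECEXP (group, 0, i), 1));
      extent = MAX (extent,
                    offset + (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (reg)));
    }

  return extent;
}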
2075 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
2076 except that values are placed in TMPS[i], and must later be moved
2077 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
2080 emit_group_load_1 (rtx
*tmps
, rtx dst
, rtx orig_src
, tree type
, int ssize
)
2084 machine_mode m
= GET_MODE (orig_src
);
2086 gcc_assert (GET_CODE (dst
) == PARALLEL
);
2089 && !SCALAR_INT_MODE_P (m
)
2090 && !MEM_P (orig_src
)
2091 && GET_CODE (orig_src
) != CONCAT
)
2093 machine_mode imode
= int_mode_for_mode (GET_MODE (orig_src
));
2094 if (imode
== BLKmode
)
2095 src
= assign_stack_temp (GET_MODE (orig_src
), ssize
);
2097 src
= gen_reg_rtx (imode
);
2098 if (imode
!= BLKmode
)
2099 src
= gen_lowpart (GET_MODE (orig_src
), src
);
2100 emit_move_insn (src
, orig_src
);
2101 /* ...and back again. */
2102 if (imode
!= BLKmode
)
2103 src
= gen_lowpart (imode
, src
);
2104 emit_group_load_1 (tmps
, dst
, src
, type
, ssize
);
2108 /* Check for a NULL entry, used to indicate that the parameter goes
2109 both on the stack and in registers. */
2110 if (XEXP (XVECEXP (dst
, 0, 0), 0))
2115 /* Process the pieces. */
2116 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
2118 machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
2119 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
2120 unsigned int bytelen
= GET_MODE_SIZE (mode
);
2123 /* Handle trailing fragments that run over the size of the struct. */
2124 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
2126 /* Arrange to shift the fragment to where it belongs.
2127 extract_bit_field loads to the lsb of the reg. */
2129 #ifdef BLOCK_REG_PADDING
2130 BLOCK_REG_PADDING (GET_MODE (orig_src
), type
, i
== start
)
2131 == (BYTES_BIG_ENDIAN
? upward
: downward
)
2136 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2137 bytelen
= ssize
- bytepos
;
2138 gcc_assert (bytelen
> 0);
2141 /* If we won't be loading directly from memory, protect the real source
2142 from strange tricks we might play; but make sure that the source can
2143 be loaded directly into the destination. */
2145 if (!MEM_P (orig_src
)
2146 && (!CONSTANT_P (orig_src
)
2147 || (GET_MODE (orig_src
) != mode
2148 && GET_MODE (orig_src
) != VOIDmode
)))
2150 if (GET_MODE (orig_src
) == VOIDmode
)
2151 src
= gen_reg_rtx (mode
);
2153 src
= gen_reg_rtx (GET_MODE (orig_src
));
2155 emit_move_insn (src
, orig_src
);
2158 /* Optimize the access just a bit. */
2160 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (src
))
2161 || MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
))
2162 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2163 && bytelen
== GET_MODE_SIZE (mode
))
2165 tmps
[i
] = gen_reg_rtx (mode
);
2166 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
2168 else if (COMPLEX_MODE_P (mode
)
2169 && GET_MODE (src
) == mode
2170 && bytelen
== GET_MODE_SIZE (mode
))
2171 /* Let emit_move_complex do the bulk of the work. */
2173 else if (GET_CODE (src
) == CONCAT
)
2175 unsigned int slen
= GET_MODE_SIZE (GET_MODE (src
));
2176 unsigned int slen0
= GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)));
2178 if ((bytepos
== 0 && bytelen
== slen0
)
2179 || (bytepos
!= 0 && bytepos
+ bytelen
<= slen
))
2181 /* The following assumes that the concatenated objects all
2182 have the same size. In this case, a simple calculation
2183 can be used to determine the object and the bit field
2185 tmps
[i
] = XEXP (src
, bytepos
/ slen0
);
2186 if (! CONSTANT_P (tmps
[i
])
2187 && (!REG_P (tmps
[i
]) || GET_MODE (tmps
[i
]) != mode
))
2188 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
2189 (bytepos
% slen0
) * BITS_PER_UNIT
,
2190 1, NULL_RTX
, mode
, mode
, false);
2196 gcc_assert (!bytepos
);
2197 mem
= assign_stack_temp (GET_MODE (src
), slen
);
2198 emit_move_insn (mem
, src
);
2199 tmps
[i
] = extract_bit_field (mem
, bytelen
* BITS_PER_UNIT
,
2200 0, 1, NULL_RTX
, mode
, mode
, false);
2203 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2204 SIMD register, which is currently broken. While we get GCC
2205 to emit proper RTL for these cases, let's dump to memory. */
2206 else if (VECTOR_MODE_P (GET_MODE (dst
))
2209 int slen
= GET_MODE_SIZE (GET_MODE (src
));
2212 mem
= assign_stack_temp (GET_MODE (src
), slen
);
2213 emit_move_insn (mem
, src
);
2214 tmps
[i
] = adjust_address (mem
, mode
, (int) bytepos
);
2216 else if (CONSTANT_P (src
) && GET_MODE (dst
) != BLKmode
2217 && XVECLEN (dst
, 0) > 1)
2218 tmps
[i
] = simplify_gen_subreg (mode
, src
, GET_MODE (dst
), bytepos
);
2219 else if (CONSTANT_P (src
))
2221 HOST_WIDE_INT len
= (HOST_WIDE_INT
) bytelen
;
2229 /* TODO: const_wide_int can have sizes other than this... */
2230 gcc_assert (2 * len
== ssize
);
2231 split_double (src
, &first
, &second
);
2238 else if (REG_P (src
) && GET_MODE (src
) == mode
)
2241 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
2242 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
2246 tmps
[i
] = expand_shift (LSHIFT_EXPR
, mode
, tmps
[i
],
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
        continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        {
          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
        }
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
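
/* Illustrative sketch, not part of the original source: the usual idiom
   for rewriting a hard-register group is to clone it into pseudos with
   gen_group_rtx and then move the data across with emit_group_move.
   The helper name is hypothetical.  */

static rtx
copy_group_to_pseudos_sketch (rtx hard_group)
{
  rtx pseudo_group = gen_group_rtx (hard_group);
  emit_group_move (pseudo_group, hard_group);
  return pseudo_group;
}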
2345 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
2346 where SRC is non-consecutive registers represented by a PARALLEL.
2347 SSIZE represents the total size of block ORIG_DST, or -1 if not
2351 emit_group_store (rtx orig_dst
, rtx src
, tree type ATTRIBUTE_UNUSED
, int ssize
)
2354 int start
, finish
, i
;
2355 machine_mode m
= GET_MODE (orig_dst
);
2357 gcc_assert (GET_CODE (src
) == PARALLEL
);
2359 if (!SCALAR_INT_MODE_P (m
)
2360 && !MEM_P (orig_dst
) && GET_CODE (orig_dst
) != CONCAT
)
2362 machine_mode imode
= int_mode_for_mode (GET_MODE (orig_dst
));
2363 if (imode
== BLKmode
)
2364 dst
= assign_stack_temp (GET_MODE (orig_dst
), ssize
);
2366 dst
= gen_reg_rtx (imode
);
2367 emit_group_store (dst
, src
, type
, ssize
);
2368 if (imode
!= BLKmode
)
2369 dst
= gen_lowpart (GET_MODE (orig_dst
), dst
);
2370 emit_move_insn (orig_dst
, dst
);
2374 /* Check for a NULL entry, used to indicate that the parameter goes
2375 both on the stack and in registers. */
2376 if (XEXP (XVECEXP (src
, 0, 0), 0))
2380 finish
= XVECLEN (src
, 0);
2382 tmps
= XALLOCAVEC (rtx
, finish
);
2384 /* Copy the (probable) hard regs into pseudos. */
2385 for (i
= start
; i
< finish
; i
++)
2387 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
2388 if (!REG_P (reg
) || REGNO (reg
) < FIRST_PSEUDO_REGISTER
)
2390 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
2391 emit_move_insn (tmps
[i
], reg
);
2397 /* If we won't be storing directly into memory, protect the real destination
2398 from strange tricks we might play. */
2400 if (GET_CODE (dst
) == PARALLEL
)
2404 /* We can get a PARALLEL dst if there is a conditional expression in
2405 a return statement. In that case, the dst and src are the same,
2406 so no action is necessary. */
2407 if (rtx_equal_p (dst
, src
))
2410 /* It is unclear if we can ever reach here, but we may as well handle
2411 it. Allocate a temporary, and split this into a store/load to/from
2413 temp
= assign_stack_temp (GET_MODE (dst
), ssize
);
2414 emit_group_store (temp
, src
, type
, ssize
);
2415 emit_group_load (dst
, temp
, type
, ssize
);
2418 else if (!MEM_P (dst
) && GET_CODE (dst
) != CONCAT
)
2420 machine_mode outer
= GET_MODE (dst
);
2422 HOST_WIDE_INT bytepos
;
2426 if (!REG_P (dst
) || REGNO (dst
) < FIRST_PSEUDO_REGISTER
)
2427 dst
= gen_reg_rtx (outer
);
2429 /* Make life a bit easier for combine. */
2430 /* If the first element of the vector is the low part
2431 of the destination mode, use a paradoxical subreg to
2432 initialize the destination. */
2435 inner
= GET_MODE (tmps
[start
]);
2436 bytepos
= subreg_lowpart_offset (inner
, outer
);
2437 if (INTVAL (XEXP (XVECEXP (src
, 0, start
), 1)) == bytepos
)
2439 temp
= simplify_gen_subreg (outer
, tmps
[start
],
2443 emit_move_insn (dst
, temp
);
2450 /* If the first element wasn't the low part, try the last. */
2452 && start
< finish
- 1)
2454 inner
= GET_MODE (tmps
[finish
- 1]);
2455 bytepos
= subreg_lowpart_offset (inner
, outer
);
2456 if (INTVAL (XEXP (XVECEXP (src
, 0, finish
- 1), 1)) == bytepos
)
2458 temp
= simplify_gen_subreg (outer
, tmps
[finish
- 1],
2462 emit_move_insn (dst
, temp
);
2469 /* Otherwise, simply initialize the result to zero. */
2471 emit_move_insn (dst
, CONST0_RTX (outer
));
2474 /* Process the pieces. */
2475 for (i
= start
; i
< finish
; i
++)
2477 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (src
, 0, i
), 1));
2478 machine_mode mode
= GET_MODE (tmps
[i
]);
2479 unsigned int bytelen
= GET_MODE_SIZE (mode
);
2480 unsigned int adj_bytelen
;
2483 /* Handle trailing fragments that run over the size of the struct. */
2484 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
2485 adj_bytelen
= ssize
- bytepos
;
2487 adj_bytelen
= bytelen
;
2489 if (GET_CODE (dst
) == CONCAT
)
2491 if (bytepos
+ adj_bytelen
2492 <= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
2493 dest
= XEXP (dst
, 0);
2494 else if (bytepos
>= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
2496 bytepos
-= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0)));
2497 dest
= XEXP (dst
, 1);
2501 machine_mode dest_mode
= GET_MODE (dest
);
2502 machine_mode tmp_mode
= GET_MODE (tmps
[i
]);
2504 gcc_assert (bytepos
== 0 && XVECLEN (src
, 0));
2506 if (GET_MODE_ALIGNMENT (dest_mode
)
2507 >= GET_MODE_ALIGNMENT (tmp_mode
))
2509 dest
= assign_stack_temp (dest_mode
,
2510 GET_MODE_SIZE (dest_mode
));
2511 emit_move_insn (adjust_address (dest
,
2519 dest
= assign_stack_temp (tmp_mode
,
2520 GET_MODE_SIZE (tmp_mode
));
2521 emit_move_insn (dest
, tmps
[i
]);
2522 dst
= adjust_address (dest
, dest_mode
, bytepos
);
2528 /* Handle trailing fragments that run over the size of the struct. */
2529 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
2531 /* store_bit_field always takes its value from the lsb.
2532 Move the fragment to the lsb if it's not already there. */
2534 #ifdef BLOCK_REG_PADDING
2535 BLOCK_REG_PADDING (GET_MODE (orig_dst
), type
, i
== start
)
2536 == (BYTES_BIG_ENDIAN
? upward
: downward
)
2542 int shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2543 tmps
[i
] = expand_shift (RSHIFT_EXPR
, mode
, tmps
[i
],
2547 /* Make sure not to write past the end of the struct. */
2548 store_bit_field (dest
,
2549 adj_bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
2550 bytepos
* BITS_PER_UNIT
, ssize
* BITS_PER_UNIT
- 1,
2551 VOIDmode
, tmps
[i
], false);
2554 /* Optimize the access just a bit. */
2555 else if (MEM_P (dest
)
2556 && (!SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (dest
))
2557 || MEM_ALIGN (dest
) >= GET_MODE_ALIGNMENT (mode
))
2558 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2559 && bytelen
== GET_MODE_SIZE (mode
))
2560 emit_move_insn (adjust_address (dest
, mode
, bytepos
), tmps
[i
]);
2563 store_bit_field (dest
, bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
2564 0, 0, mode
, tmps
[i
], false);
2567 /* Copy from the pseudo into the (probable) hard reg. */
2568 if (orig_dst
!= dst
)
2569 emit_move_insn (orig_dst
, dst
);
/* Return a form of X that does not use a PARALLEL.  TYPE is the type
   of the value stored in X.  */

rtx
maybe_emit_group_store (rtx x, tree type)
{
  machine_mode mode = TYPE_MODE (type);
  gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
  if (GET_CODE (x) == PARALLEL)
    {
      rtx result = gen_reg_rtx (mode);
      emit_group_store (result, x, type, int_size_in_bytes (type));
      return result;
    }
  return x;
}
2589 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2591 This is used on targets that return BLKmode values in registers. */
2594 copy_blkmode_from_reg (rtx target
, rtx srcreg
, tree type
)
2596 unsigned HOST_WIDE_INT bytes
= int_size_in_bytes (type
);
2597 rtx src
= NULL
, dst
= NULL
;
2598 unsigned HOST_WIDE_INT bitsize
= MIN (TYPE_ALIGN (type
), BITS_PER_WORD
);
2599 unsigned HOST_WIDE_INT bitpos
, xbitpos
, padding_correction
= 0;
2600 machine_mode mode
= GET_MODE (srcreg
);
2601 machine_mode tmode
= GET_MODE (target
);
2602 machine_mode copy_mode
;
2604 /* BLKmode registers created in the back-end shouldn't have survived. */
2605 gcc_assert (mode
!= BLKmode
);
2607 /* If the structure doesn't take up a whole number of words, see whether
2608 SRCREG is padded on the left or on the right. If it's on the left,
2609 set PADDING_CORRECTION to the number of bits to skip.
2611 In most ABIs, the structure will be returned at the least end of
2612 the register, which translates to right padding on little-endian
2613 targets and left padding on big-endian targets. The opposite
2614 holds if the structure is returned at the most significant
2615 end of the register. */
2616 if (bytes
% UNITS_PER_WORD
!= 0
2617 && (targetm
.calls
.return_in_msb (type
)
2619 : BYTES_BIG_ENDIAN
))
2621 = (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
) * BITS_PER_UNIT
));
2623 /* We can use a single move if we have an exact mode for the size. */
2624 else if (MEM_P (target
)
2625 && (!SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
))
2626 || MEM_ALIGN (target
) >= GET_MODE_ALIGNMENT (mode
))
2627 && bytes
== GET_MODE_SIZE (mode
))
2629 emit_move_insn (adjust_address (target
, mode
, 0), srcreg
);
2633 /* And if we additionally have the same mode for a register. */
2634 else if (REG_P (target
)
2635 && GET_MODE (target
) == mode
2636 && bytes
== GET_MODE_SIZE (mode
))
2638 emit_move_insn (target
, srcreg
);
2642 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2643 into a new pseudo which is a full word. */
2644 if (GET_MODE_SIZE (mode
) < UNITS_PER_WORD
)
2646 srcreg
= convert_to_mode (word_mode
, srcreg
, TYPE_UNSIGNED (type
));
2650 /* Copy the structure BITSIZE bits at a time. If the target lives in
2651 memory, take care of not reading/writing past its end by selecting
2652 a copy mode suited to BITSIZE. This should always be possible given
2655 If the target lives in register, make sure not to select a copy mode
2656 larger than the mode of the register.
2658 We could probably emit more efficient code for machines which do not use
2659 strict alignment, but it doesn't seem worth the effort at the current
2662 copy_mode
= word_mode
;
2665 machine_mode mem_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
2666 if (mem_mode
!= BLKmode
)
2667 copy_mode
= mem_mode
;
2669 else if (REG_P (target
) && GET_MODE_BITSIZE (tmode
) < BITS_PER_WORD
)
2672 for (bitpos
= 0, xbitpos
= padding_correction
;
2673 bitpos
< bytes
* BITS_PER_UNIT
;
2674 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2676 /* We need a new source operand each time xbitpos is on a
2677 word boundary and when xbitpos == padding_correction
2678 (the first time through). */
2679 if (xbitpos
% BITS_PER_WORD
== 0 || xbitpos
== padding_correction
)
2680 src
= operand_subword_force (srcreg
, xbitpos
/ BITS_PER_WORD
, mode
);
2682 /* We need a new destination operand each time bitpos is on
2684 if (REG_P (target
) && GET_MODE_BITSIZE (tmode
) < BITS_PER_WORD
)
2686 else if (bitpos
% BITS_PER_WORD
== 0)
2687 dst
= operand_subword (target
, bitpos
/ BITS_PER_WORD
, 1, tmode
);
2689 /* Use xbitpos for the source extraction (right justified) and
2690 bitpos for the destination store (left justified). */
2691 store_bit_field (dst
, bitsize
, bitpos
% BITS_PER_WORD
, 0, 0, copy_mode
,
2692 extract_bit_field (src
, bitsize
,
2693 xbitpos
% BITS_PER_WORD
, 1,
2694 NULL_RTX
, copy_mode
, copy_mode
,
2700 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2701 register if it contains any data, otherwise return null.
2703 This is used on targets that return BLKmode values in registers. */
2706 copy_blkmode_to_reg (machine_mode mode
, tree src
)
2709 unsigned HOST_WIDE_INT bitpos
, xbitpos
, padding_correction
= 0, bytes
;
2710 unsigned int bitsize
;
2711 rtx
*dst_words
, dst
, x
, src_word
= NULL_RTX
, dst_word
= NULL_RTX
;
2712 machine_mode dst_mode
;
2714 gcc_assert (TYPE_MODE (TREE_TYPE (src
)) == BLKmode
);
2716 x
= expand_normal (src
);
2718 bytes
= int_size_in_bytes (TREE_TYPE (src
));
2722 /* If the structure doesn't take up a whole number of words, see
2723 whether the register value should be padded on the left or on
2724 the right. Set PADDING_CORRECTION to the number of padding
2725 bits needed on the left side.
2727 In most ABIs, the structure will be returned at the least end of
2728 the register, which translates to right padding on little-endian
2729 targets and left padding on big-endian targets. The opposite
2730 holds if the structure is returned at the most significant
2731 end of the register. */
2732 if (bytes
% UNITS_PER_WORD
!= 0
2733 && (targetm
.calls
.return_in_msb (TREE_TYPE (src
))
2735 : BYTES_BIG_ENDIAN
))
2736 padding_correction
= (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
)
2739 n_regs
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
2740 dst_words
= XALLOCAVEC (rtx
, n_regs
);
2741 bitsize
= MIN (TYPE_ALIGN (TREE_TYPE (src
)), BITS_PER_WORD
);
2743 /* Copy the structure BITSIZE bits at a time. */
2744 for (bitpos
= 0, xbitpos
= padding_correction
;
2745 bitpos
< bytes
* BITS_PER_UNIT
;
2746 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2748 /* We need a new destination pseudo each time xbitpos is
2749 on a word boundary and when xbitpos == padding_correction
2750 (the first time through). */
2751 if (xbitpos
% BITS_PER_WORD
== 0
2752 || xbitpos
== padding_correction
)
2754 /* Generate an appropriate register. */
2755 dst_word
= gen_reg_rtx (word_mode
);
2756 dst_words
[xbitpos
/ BITS_PER_WORD
] = dst_word
;
2758 /* Clear the destination before we move anything into it. */
2759 emit_move_insn (dst_word
, CONST0_RTX (word_mode
));
2762 /* We need a new source operand each time bitpos is on a word
2764 if (bitpos
% BITS_PER_WORD
== 0)
2765 src_word
= operand_subword_force (x
, bitpos
/ BITS_PER_WORD
, BLKmode
);
2767 /* Use bitpos for the source extraction (left justified) and
2768 xbitpos for the destination store (right justified). */
2769 store_bit_field (dst_word
, bitsize
, xbitpos
% BITS_PER_WORD
,
2771 extract_bit_field (src_word
, bitsize
,
2772 bitpos
% BITS_PER_WORD
, 1,
2773 NULL_RTX
, word_mode
, word_mode
,
2778 if (mode
== BLKmode
)
2780 /* Find the smallest integer mode large enough to hold the
2781 entire structure. */
2782 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2784 mode
= GET_MODE_WIDER_MODE (mode
))
2785 /* Have we found a large enough mode? */
2786 if (GET_MODE_SIZE (mode
) >= bytes
)
2789 /* A suitable mode should have been found. */
2790 gcc_assert (mode
!= VOIDmode
);
2793 if (GET_MODE_SIZE (mode
) < GET_MODE_SIZE (word_mode
))
2794 dst_mode
= word_mode
;
2797 dst
= gen_reg_rtx (dst_mode
);
2799 for (i
= 0; i
< n_regs
; i
++)
2800 emit_move_insn (operand_subword (dst
, i
, 0, dst_mode
), dst_words
[i
]);
2802 if (mode
!= dst_mode
)
2803 dst
= gen_lowpart (mode
, dst
);
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
  gcc_assert (REG_P (reg));

  if (!HARD_REGISTER_P (reg))
    return;

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add a CLOBBER expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
    }
}
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the code of the expression on the RHS is CODE.  Return
   NULL otherwise.  */

static gimple *
get_def_for_expr (tree name, enum tree_code code)
{
  gimple *def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || gimple_assign_rhs_code (def_stmt) != code)
    return NULL;

  return def_stmt;
}

/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the class of the expression on the RHS is CLASS.  Return
   NULL otherwise.  */

static gimple *
get_def_for_expr_class (tree name, enum tree_code_class tclass)
{
  gimple *def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
    return NULL;

  return def_stmt;
}
2910 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2911 its length in bytes. */
2914 clear_storage_hints (rtx object
, rtx size
, enum block_op_methods method
,
2915 unsigned int expected_align
, HOST_WIDE_INT expected_size
,
2916 unsigned HOST_WIDE_INT min_size
,
2917 unsigned HOST_WIDE_INT max_size
,
2918 unsigned HOST_WIDE_INT probable_max_size
)
2920 machine_mode mode
= GET_MODE (object
);
2923 gcc_assert (method
== BLOCK_OP_NORMAL
|| method
== BLOCK_OP_TAILCALL
);
2925 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2926 just move a zero. Otherwise, do this a piece at a time. */
2928 && CONST_INT_P (size
)
2929 && INTVAL (size
) == (HOST_WIDE_INT
) GET_MODE_SIZE (mode
))
2931 rtx zero
= CONST0_RTX (mode
);
2934 emit_move_insn (object
, zero
);
2938 if (COMPLEX_MODE_P (mode
))
2940 zero
= CONST0_RTX (GET_MODE_INNER (mode
));
2943 write_complex_part (object
, zero
, 0);
2944 write_complex_part (object
, zero
, 1);
2950 if (size
== const0_rtx
)
2953 align
= MEM_ALIGN (object
);
2955 if (CONST_INT_P (size
)
2956 && targetm
.use_by_pieces_infrastructure_p (INTVAL (size
), align
,
2958 optimize_insn_for_speed_p ()))
2959 clear_by_pieces (object
, INTVAL (size
), align
);
2960 else if (set_storage_via_setmem (object
, size
, const0_rtx
, align
,
2961 expected_align
, expected_size
,
2962 min_size
, max_size
, probable_max_size
))
2964 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object
)))
2965 return set_storage_via_libcall (object
, size
, const0_rtx
,
2966 method
== BLOCK_OP_TAILCALL
);
rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return clear_storage_hints (object, size, method, 0, -1, min, max, max);
}
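
/* Illustrative sketch, not part of the original source: allocate a
   BLKmode stack temporary of NBYTES bytes and zero it with the routine
   above.  The helper name is hypothetical.  */

static rtx
make_zeroed_temp_sketch (HOST_WIDE_INT nbytes)
{
  rtx temp = assign_stack_temp (BLKmode, nbytes);
  clear_storage (temp, gen_int_mode (nbytes, Pmode), BLOCK_OP_NORMAL);
  return temp;
}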
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
  tree call_expr, fn, object_tree, size_tree, val_tree;
  machine_mode size_mode;

  object = copy_addr_to_reg (XEXP (object, 0));
  object_tree = make_tree (ptr_type_node, object);

  if (!CONST_INT_P (val))
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
  val_tree = make_tree (integer_type_node, val);

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);
  size_tree = make_tree (sizetype, size);

  /* It is incorrect to use the libcall calling conventions for calls to
     memset because it can be provided by the user.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMSET);
  call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  return expand_call (call_expr, NULL_RTX, false);
}
3015 /* Expand a setmem pattern; return true if successful. */
3018 set_storage_via_setmem (rtx object
, rtx size
, rtx val
, unsigned int align
,
3019 unsigned int expected_align
, HOST_WIDE_INT expected_size
,
3020 unsigned HOST_WIDE_INT min_size
,
3021 unsigned HOST_WIDE_INT max_size
,
3022 unsigned HOST_WIDE_INT probable_max_size
)
3024 /* Try the most limited insn first, because there's no point
3025 including more than one in the machine description unless
3026 the more limited one has some advantage. */
3030 if (expected_align
< align
)
3031 expected_align
= align
;
3032 if (expected_size
!= -1)
3034 if ((unsigned HOST_WIDE_INT
)expected_size
> max_size
)
3035 expected_size
= max_size
;
3036 if ((unsigned HOST_WIDE_INT
)expected_size
< min_size
)
3037 expected_size
= min_size
;
3040 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
3041 mode
= GET_MODE_WIDER_MODE (mode
))
3043 enum insn_code code
= direct_optab_handler (setmem_optab
, mode
);
3045 if (code
!= CODE_FOR_nothing
3046 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
3047 here because if SIZE is less than the mode mask, as it is
3048 returned by the macro, it will definitely be less than the
3049 actual mode mask. Since SIZE is within the Pmode address
3050 space, we limit MODE to Pmode. */
3051 && ((CONST_INT_P (size
)
3052 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
3053 <= (GET_MODE_MASK (mode
) >> 1)))
3054 || max_size
<= (GET_MODE_MASK (mode
) >> 1)
3055 || GET_MODE_BITSIZE (mode
) >= GET_MODE_BITSIZE (Pmode
)))
3057 struct expand_operand ops
[9];
3060 nops
= insn_data
[(int) code
].n_generator_args
;
3061 gcc_assert (nops
== 4 || nops
== 6 || nops
== 8 || nops
== 9);
3063 create_fixed_operand (&ops
[0], object
);
3064 /* The check above guarantees that this size conversion is valid. */
3065 create_convert_operand_to (&ops
[1], size
, mode
, true);
3066 create_convert_operand_from (&ops
[2], val
, byte_mode
, true);
3067 create_integer_operand (&ops
[3], align
/ BITS_PER_UNIT
);
3070 create_integer_operand (&ops
[4], expected_align
/ BITS_PER_UNIT
);
3071 create_integer_operand (&ops
[5], expected_size
);
3075 create_integer_operand (&ops
[6], min_size
);
3076 /* If we can not represent the maximal size,
3077 make parameter NULL. */
3078 if ((HOST_WIDE_INT
) max_size
!= -1)
3079 create_integer_operand (&ops
[7], max_size
);
3081 create_fixed_operand (&ops
[7], NULL
);
3085 /* If we can not represent the maximal size,
3086 make parameter NULL. */
3087 if ((HOST_WIDE_INT
) probable_max_size
!= -1)
3088 create_integer_operand (&ops
[8], probable_max_size
);
3090 create_fixed_operand (&ops
[8], NULL
);
3092 if (maybe_expand_insn (code
, nops
, ops
))
3101 /* Write to one of the components of the complex value CPLX. Write VAL to
3102 the real part if IMAG_P is false, and the imaginary part if its true. */
3105 write_complex_part (rtx cplx
, rtx val
, bool imag_p
)
3111 if (GET_CODE (cplx
) == CONCAT
)
3113 emit_move_insn (XEXP (cplx
, imag_p
), val
);
3117 cmode
= GET_MODE (cplx
);
3118 imode
= GET_MODE_INNER (cmode
);
3119 ibitsize
= GET_MODE_BITSIZE (imode
);
3121 /* For MEMs simplify_gen_subreg may generate an invalid new address
3122 because, e.g., the original address is considered mode-dependent
3123 by the target, which restricts simplify_subreg from invoking
3124 adjust_address_nv. Instead of preparing fallback support for an
3125 invalid address, we call adjust_address_nv directly. */
3128 emit_move_insn (adjust_address_nv (cplx
, imode
,
3129 imag_p
? GET_MODE_SIZE (imode
) : 0),
3134 /* If the sub-object is at least word sized, then we know that subregging
3135 will work. This special case is important, since store_bit_field
3136 wants to operate on integer modes, and there's rarely an OImode to
3137 correspond to TCmode. */
3138 if (ibitsize
>= BITS_PER_WORD
3139 /* For hard regs we have exact predicates. Assume we can split
3140 the original object if it spans an even number of hard regs.
3141 This special case is important for SCmode on 64-bit platforms
3142 where the natural size of floating-point regs is 32-bit. */
3144 && REGNO (cplx
) < FIRST_PSEUDO_REGISTER
3145 && REG_NREGS (cplx
) % 2 == 0))
3147 rtx part
= simplify_gen_subreg (imode
, cplx
, cmode
,
3148 imag_p
? GET_MODE_SIZE (imode
) : 0);
3151 emit_move_insn (part
, val
);
3155 /* simplify_gen_subreg may fail for sub-word MEMs. */
3156 gcc_assert (MEM_P (cplx
) && ibitsize
< BITS_PER_WORD
);
3159 store_bit_field (cplx
, ibitsize
, imag_p
? ibitsize
: 0, 0, 0, imode
, val
,
/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

rtx
read_complex_part (rtx cplx, bool imag_p)
{
  machine_mode cmode, imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
        {
          tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
          if (CONSTANT_CLASS_P (part))
            return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
        }
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
                              imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && REG_NREGS (cplx) % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
                                     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
        return ret;
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
                            true, NULL_RTX, imode, imode, false);
}
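
/* Illustrative sketch, not part of the original source: swap the real
   and imaginary parts of the complex value SRC into DST using the two
   accessors above.  DST must not overlap SRC.  The helper name is
   hypothetical.  */

static void
emit_complex_swap_parts_sketch (rtx dst, rtx src)
{
  rtx re = read_complex_part (src, false);
  rtx im = read_complex_part (src, true);

  write_complex_part (dst, im, false);
  write_complex_part (dst, re, true);
}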
3226 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3227 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3228 represented in NEW_MODE. If FORCE is true, this will never happen, as
3229 we'll force-create a SUBREG if needed. */
3232 emit_move_change_mode (machine_mode new_mode
,
3233 machine_mode old_mode
, rtx x
, bool force
)
3237 if (push_operand (x
, GET_MODE (x
)))
3239 ret
= gen_rtx_MEM (new_mode
, XEXP (x
, 0));
3240 MEM_COPY_ATTRIBUTES (ret
, x
);
3244 /* We don't have to worry about changing the address since the
3245 size in bytes is supposed to be the same. */
3246 if (reload_in_progress
)
3248 /* Copy the MEM to change the mode and move any
3249 substitutions from the old MEM to the new one. */
3250 ret
= adjust_address_nv (x
, new_mode
, 0);
3251 copy_replacements (x
, ret
);
3254 ret
= adjust_address (x
, new_mode
, 0);
3258 /* Note that we do want simplify_subreg's behavior of validating
3259 that the new mode is ok for a hard register. If we were to use
3260 simplify_gen_subreg, we would create the subreg, but would
3261 probably run into the target not being able to implement it. */
3262 /* Except, of course, when FORCE is true, when this is exactly what
3263 we want. Which is needed for CCmodes on some targets. */
3265 ret
= simplify_gen_subreg (new_mode
, x
, old_mode
, 0);
3267 ret
= simplify_subreg (new_mode
, x
, old_mode
, 0);
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx_insn *
emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
{
  machine_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  imode = int_mode_for_mode (mode);
  if (imode == BLKmode)
    return NULL;

  /* The target must support moves in this mode.  */
  code = optab_handler (mov_optab, imode);
  if (code == CODE_FOR_nothing)
    return NULL;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL;
  return emit_insn (GEN_FCN (code) (x, y));
}
3302 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3303 Return an equivalent MEM that does not use an auto-increment. */
3306 emit_move_resolve_push (machine_mode mode
, rtx x
)
3308 enum rtx_code code
= GET_CODE (XEXP (x
, 0));
3309 HOST_WIDE_INT adjust
;
3312 adjust
= GET_MODE_SIZE (mode
);
3313 #ifdef PUSH_ROUNDING
3314 adjust
= PUSH_ROUNDING (adjust
);
3316 if (code
== PRE_DEC
|| code
== POST_DEC
)
3318 else if (code
== PRE_MODIFY
|| code
== POST_MODIFY
)
3320 rtx expr
= XEXP (XEXP (x
, 0), 1);
3323 gcc_assert (GET_CODE (expr
) == PLUS
|| GET_CODE (expr
) == MINUS
);
3324 gcc_assert (CONST_INT_P (XEXP (expr
, 1)));
3325 val
= INTVAL (XEXP (expr
, 1));
3326 if (GET_CODE (expr
) == MINUS
)
3328 gcc_assert (adjust
== val
|| adjust
== -val
);
3332 /* Do not use anti_adjust_stack, since we don't want to update
3333 stack_pointer_delta. */
3334 temp
= expand_simple_binop (Pmode
, PLUS
, stack_pointer_rtx
,
3335 gen_int_mode (adjust
, Pmode
), stack_pointer_rtx
,
3336 0, OPTAB_LIB_WIDEN
);
3337 if (temp
!= stack_pointer_rtx
)
3338 emit_move_insn (stack_pointer_rtx
, temp
);
3345 temp
= stack_pointer_rtx
;
3350 temp
= plus_constant (Pmode
, stack_pointer_rtx
, -adjust
);
3356 return replace_equiv_address (x
, temp
);
3359 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3360 X is known to satisfy push_operand, and MODE is known to be complex.
3361 Returns the last instruction emitted. */
3364 emit_move_complex_push (machine_mode mode
, rtx x
, rtx y
)
3366 machine_mode submode
= GET_MODE_INNER (mode
);
3369 #ifdef PUSH_ROUNDING
3370 unsigned int submodesize
= GET_MODE_SIZE (submode
);
3372 /* In case we output to the stack, but the size is smaller than the
3373 machine can push exactly, we need to use move instructions. */
3374 if (PUSH_ROUNDING (submodesize
) != submodesize
)
3376 x
= emit_move_resolve_push (mode
, x
);
3377 return emit_move_insn (x
, y
);
3381 /* Note that the real part always precedes the imag part in memory
3382 regardless of machine's endianness. */
3383 switch (GET_CODE (XEXP (x
, 0)))
3397 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3398 read_complex_part (y
, imag_first
));
3399 return emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3400 read_complex_part (y
, !imag_first
));
/* A subroutine of emit_move_complex.  Perform the move from Y to X
   via two moves of the parts.  Returns the last instruction emitted.  */

rtx_insn *
emit_move_complex_parts (rtx x, rtx y)
{
  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_clobber (x);

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);

  return get_last_insn ();
}
3422 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3423 MODE is known to be complex. Returns the last instruction emitted. */
3426 emit_move_complex (machine_mode mode
, rtx x
, rtx y
)
3430 /* Need to take special care for pushes, to maintain proper ordering
3431 of the data, and possibly extra padding. */
3432 if (push_operand (x
, mode
))
3433 return emit_move_complex_push (mode
, x
, y
);
3435 /* See if we can coerce the target into moving both values at once, except
3436 for floating point where we favor moving as parts if this is easy. */
3437 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
3438 && optab_handler (mov_optab
, GET_MODE_INNER (mode
)) != CODE_FOR_nothing
3440 && HARD_REGISTER_P (x
)
3441 && REG_NREGS (x
) == 1)
3443 && HARD_REGISTER_P (y
)
3444 && REG_NREGS (y
) == 1))
3446 /* Not possible if the values are inherently not adjacent. */
3447 else if (GET_CODE (x
) == CONCAT
|| GET_CODE (y
) == CONCAT
)
3449 /* Is possible if both are registers (or subregs of registers). */
3450 else if (register_operand (x
, mode
) && register_operand (y
, mode
))
3452 /* If one of the operands is a memory, and alignment constraints
3453 are friendly enough, we may be able to do combined memory operations.
3454 We do not attempt this if Y is a constant because that combination is
3455 usually better with the by-parts thing below. */
3456 else if ((MEM_P (x
) ? !CONSTANT_P (y
) : MEM_P (y
))
3457 && (!STRICT_ALIGNMENT
3458 || get_mode_alignment (mode
) == BIGGEST_ALIGNMENT
))
3467 /* For memory to memory moves, optimal behavior can be had with the
3468 existing block move logic. */
3469 if (MEM_P (x
) && MEM_P (y
))
3471 emit_block_move (x
, y
, GEN_INT (GET_MODE_SIZE (mode
)),
3472 BLOCK_OP_NO_LIBCALL
);
3473 return get_last_insn ();
3476 ret
= emit_move_via_integer (mode
, x
, y
, true);
3481 return emit_move_complex_parts (x
, y
);
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx_insn *
emit_move_ccmode (machine_mode mode, rtx x, rtx y)
{
  rtx_insn *ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = optab_handler (mov_optab, CCmode);
      if (code != CODE_FOR_nothing)
        {
          x = emit_move_change_mode (CCmode, mode, x, true);
          y = emit_move_change_mode (CCmode, mode, y, true);
          return emit_insn (GEN_FCN (code) (x, y));
        }
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}
3510 /* Return true if word I of OP lies entirely in the
3511 undefined bits of a paradoxical subreg. */
3514 undefined_operand_subword_p (const_rtx op
, int i
)
3516 machine_mode innermode
, innermostmode
;
3518 if (GET_CODE (op
) != SUBREG
)
3520 innermode
= GET_MODE (op
);
3521 innermostmode
= GET_MODE (SUBREG_REG (op
));
3522 offset
= i
* UNITS_PER_WORD
+ SUBREG_BYTE (op
);
3523 /* The SUBREG_BYTE represents offset, as if the value were stored in
3524 memory, except for a paradoxical subreg where we define
3525 SUBREG_BYTE to be 0; undo this exception as in
3527 if (SUBREG_BYTE (op
) == 0
3528 && GET_MODE_SIZE (innermostmode
) < GET_MODE_SIZE (innermode
))
3530 int difference
= (GET_MODE_SIZE (innermostmode
) - GET_MODE_SIZE (innermode
));
3531 if (WORDS_BIG_ENDIAN
)
3532 offset
+= (difference
/ UNITS_PER_WORD
) * UNITS_PER_WORD
;
3533 if (BYTES_BIG_ENDIAN
)
3534 offset
+= difference
% UNITS_PER_WORD
;
3536 if (offset
>= GET_MODE_SIZE (innermostmode
)
3537 || offset
<= -GET_MODE_SIZE (word_mode
))
3542 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3543 MODE is any multi-word or full-word mode that lacks a move_insn
3544 pattern. Note that you will get better code if you define such
3545 patterns, even if they must turn into multiple assembler instructions. */
3548 emit_move_multi_word (machine_mode mode
, rtx x
, rtx y
)
3550 rtx_insn
*last_insn
= 0;
3556 gcc_assert (GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
);
3558 /* If X is a push on the stack, do the push now and replace
3559 X with a reference to the stack pointer. */
3560 if (push_operand (x
, mode
))
3561 x
= emit_move_resolve_push (mode
, x
);
3563 /* If we are in reload, see if either operand is a MEM whose address
3564 is scheduled for replacement. */
3565 if (reload_in_progress
&& MEM_P (x
)
3566 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
3567 x
= replace_equiv_address_nv (x
, inner
);
3568 if (reload_in_progress
&& MEM_P (y
)
3569 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
3570 y
= replace_equiv_address_nv (y
, inner
);
3574 need_clobber
= false;
3576 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
3579 rtx xpart
= operand_subword (x
, i
, 1, mode
);
3582 /* Do not generate code for a move if it would come entirely
3583 from the undefined bits of a paradoxical subreg. */
3584 if (undefined_operand_subword_p (y
, i
))
3587 ypart
= operand_subword (y
, i
, 1, mode
);
3589 /* If we can't get a part of Y, put Y into memory if it is a
3590 constant. Otherwise, force it into a register. Then we must
3591 be able to get a part of Y. */
3592 if (ypart
== 0 && CONSTANT_P (y
))
3594 y
= use_anchored_address (force_const_mem (mode
, y
));
3595 ypart
= operand_subword (y
, i
, 1, mode
);
3597 else if (ypart
== 0)
3598 ypart
= operand_subword_force (y
, i
, mode
);
3600 gcc_assert (xpart
&& ypart
);
3602 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
3604 last_insn
= emit_move_insn (xpart
, ypart
);
3610 /* Show the output dies here. This is necessary for SUBREGs
3611 of pseudos since we cannot track their lifetimes correctly;
3612 hard regs shouldn't appear here except as return values.
3613 We never want to emit such a clobber after reload. */
3615 && ! (reload_in_progress
|| reload_completed
)
3616 && need_clobber
!= 0)
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx_insn *
emit_move_insn_1 (rtx x, rtx y)
{
  machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = optab_handler (mov_optab, mode);
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
      || ALL_FIXED_POINT_MODE_P (mode))
    {
      rtx_insn *result = emit_move_via_integer (mode, x, y, true);

      /* If we can't find an integer mode, use multi words.  */
      if (result)
        return result;
      else
        return emit_move_multi_word (mode, x, y);
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);

      if (ret)
        {
          if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
            return ret;
        }
    }

  return emit_move_multi_word (mode, x, y);
}
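
/* Illustrative sketch, not part of the original source: spill a pseudo
   REG to a fresh stack slot of the same mode using emit_move_insn
   (defined just below).  The helper name is hypothetical.  */

static rtx
spill_to_stack_slot_sketch (rtx reg)
{
  machine_mode mode = GET_MODE (reg);
  rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode));

  emit_move_insn (slot, reg);
  return slot;
}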
3677 /* Generate code to copy Y into X.
3678 Both Y and X must have the same mode, except that
3679 Y can be a constant with VOIDmode.
3680 This mode cannot be BLKmode; use emit_block_move for that.
3682 Return the last instruction emitted. */
3685 emit_move_insn (rtx x
, rtx y
)
3687 machine_mode mode
= GET_MODE (x
);
3688 rtx y_cst
= NULL_RTX
;
3689 rtx_insn
*last_insn
;
3692 gcc_assert (mode
!= BLKmode
3693 && (GET_MODE (y
) == mode
|| GET_MODE (y
) == VOIDmode
));
3698 && SCALAR_FLOAT_MODE_P (GET_MODE (x
))
3699 && (last_insn
= compress_float_constant (x
, y
)))
3704 if (!targetm
.legitimate_constant_p (mode
, y
))
3706 y
= force_const_mem (mode
, y
);
3708 /* If the target's cannot_force_const_mem prevented the spill,
3709 assume that the target's move expanders will also take care
3710 of the non-legitimate constant. */
3714 y
= use_anchored_address (y
);
3718 /* If X or Y are memory references, verify that their addresses are valid
3721 && (! memory_address_addr_space_p (GET_MODE (x
), XEXP (x
, 0),
3723 && ! push_operand (x
, GET_MODE (x
))))
3724 x
= validize_mem (x
);
3727 && ! memory_address_addr_space_p (GET_MODE (y
), XEXP (y
, 0),
3728 MEM_ADDR_SPACE (y
)))
3729 y
= validize_mem (y
);
3731 gcc_assert (mode
!= BLKmode
);
3733 last_insn
= emit_move_insn_1 (x
, y
);
3735 if (y_cst
&& REG_P (x
)
3736 && (set
= single_set (last_insn
)) != NULL_RTX
3737 && SET_DEST (set
) == x
3738 && ! rtx_equal_p (y_cst
, SET_SRC (set
)))
3739 set_unique_reg_note (last_insn
, REG_EQUAL
, copy_rtx (y_cst
));
3744 /* Generate the body of an instruction to copy Y into X.
3745 It may be a list of insns, if one insn isn't enough. */
3748 gen_move_insn (rtx x
, rtx y
)
3753 emit_move_insn_1 (x
, y
);
3759 /* If Y is representable exactly in a narrower mode, and the target can
3760 perform the extension directly from constant or memory, then emit the
3761 move as an extension. */
3764 compress_float_constant (rtx x
, rtx y
)
3766 machine_mode dstmode
= GET_MODE (x
);
3767 machine_mode orig_srcmode
= GET_MODE (y
);
3768 machine_mode srcmode
;
3769 const REAL_VALUE_TYPE
*r
;
3770 int oldcost
, newcost
;
3771 bool speed
= optimize_insn_for_speed_p ();
3773 r
= CONST_DOUBLE_REAL_VALUE (y
);
3775 if (targetm
.legitimate_constant_p (dstmode
, y
))
3776 oldcost
= set_src_cost (y
, orig_srcmode
, speed
);
3778 oldcost
= set_src_cost (force_const_mem (dstmode
, y
), dstmode
, speed
);
3780 for (srcmode
= GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode
));
3781 srcmode
!= orig_srcmode
;
3782 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
3786 rtx_insn
*last_insn
;
3788 /* Skip if the target can't extend this way. */
3789 ic
= can_extend_p (dstmode
, srcmode
, 0);
3790 if (ic
== CODE_FOR_nothing
)
3793 /* Skip if the narrowed value isn't exact. */
3794 if (! exact_real_truncate (srcmode
, r
))
3797 trunc_y
= const_double_from_real_value (*r
, srcmode
);
3799 if (targetm
.legitimate_constant_p (srcmode
, trunc_y
))
3801 /* Skip if the target needs extra instructions to perform
3803 if (!insn_operand_matches (ic
, 1, trunc_y
))
3805 /* This is valid, but may not be cheaper than the original. */
3806 newcost
= set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode
, trunc_y
),
3808 if (oldcost
< newcost
)
3811 else if (float_extend_from_mem
[dstmode
][srcmode
])
3813 trunc_y
= force_const_mem (srcmode
, trunc_y
);
3814 /* This is valid, but may not be cheaper than the original. */
3815 newcost
= set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode
, trunc_y
),
3817 if (oldcost
< newcost
)
3819 trunc_y
= validize_mem (trunc_y
);
3824 /* For CSE's benefit, force the compressed constant pool entry
3825 into a new pseudo. This constant may be used in different modes,
3826 and if not, combine will put things back together for us. */
3827 trunc_y
= force_reg (srcmode
, trunc_y
);
3829 /* If x is a hard register, perform the extension into a pseudo,
3830 so that e.g. stack realignment code is aware of it. */
3832 if (REG_P (x
) && HARD_REGISTER_P (x
))
3833 target
= gen_reg_rtx (dstmode
);
3835 emit_unop_insn (ic
, target
, trunc_y
, UNKNOWN
);
3836 last_insn
= get_last_insn ();
3839 set_unique_reg_note (last_insn
, REG_EQUAL
, y
);
3842 return emit_move_insn (x
, target
);
3849 /* Pushing data onto the stack. */
3851 /* Push a block of length SIZE (perhaps variable)
3852 and return an rtx to address the beginning of the block.
3853 The value may be virtual_outgoing_args_rtx.
3855 EXTRA is the number of bytes of padding to push in addition to SIZE.
3856 BELOW nonzero means this padding comes at low addresses;
3857 otherwise, the padding comes at high addresses. */
3860 push_block (rtx size
, int extra
, int below
)
3864 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
3865 if (CONSTANT_P (size
))
3866 anti_adjust_stack (plus_constant (Pmode
, size
, extra
));
3867 else if (REG_P (size
) && extra
== 0)
3868 anti_adjust_stack (size
);
3871 temp
= copy_to_mode_reg (Pmode
, size
);
3873 temp
= expand_binop (Pmode
, add_optab
, temp
,
3874 gen_int_mode (extra
, Pmode
),
3875 temp
, 0, OPTAB_LIB_WIDEN
);
3876 anti_adjust_stack (temp
);
3879 if (STACK_GROWS_DOWNWARD
)
3881 temp
= virtual_outgoing_args_rtx
;
3882 if (extra
!= 0 && below
)
3883 temp
= plus_constant (Pmode
, temp
, extra
);
3887 if (CONST_INT_P (size
))
3888 temp
= plus_constant (Pmode
, virtual_outgoing_args_rtx
,
3889 -INTVAL (size
) - (below
? 0 : extra
));
3890 else if (extra
!= 0 && !below
)
3891 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3892 negate_rtx (Pmode
, plus_constant (Pmode
, size
,
3895 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3896 negate_rtx (Pmode
, size
));
3899 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
/* A utility routine that returns the base of an auto-inc memory, or NULL.  */

static rtx
mem_autoinc_base (rtx mem)
{
  if (MEM_P (mem))
    {
      rtx addr = XEXP (mem, 0);
      if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
        return XEXP (addr, 0);
    }
  return NULL;
}
3916 /* A utility routine used here, in reload, and in try_split. The insns
3917 after PREV up to and including LAST are known to adjust the stack,
3918 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3919 placing notes as appropriate. PREV may be NULL, indicating the
3920 entire insn sequence prior to LAST should be scanned.
3922 The set of allowed stack pointer modifications is small:
3923 (1) One or more auto-inc style memory references (aka pushes),
3924 (2) One or more addition/subtraction with the SP as destination,
3925 (3) A single move insn with the SP as destination,
3926 (4) A call_pop insn,
3927 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3929 Insns in the sequence that do not modify the SP are ignored,
3930 except for noreturn calls.
3932 The return value is the amount of adjustment that can be trivially
3933 verified, via immediate operand or auto-inc. If the adjustment
3934 cannot be trivially extracted, the return value is INT_MIN. */
3937 find_args_size_adjust (rtx_insn
*insn
)
3942 pat
= PATTERN (insn
);
3945 /* Look for a call_pop pattern. */
3948 /* We have to allow non-call_pop patterns for the case
3949 of emit_single_push_insn of a TLS address. */
3950 if (GET_CODE (pat
) != PARALLEL
)
3953 /* All call_pop have a stack pointer adjust in the parallel.
3954 The call itself is always first, and the stack adjust is
3955 usually last, so search from the end. */
3956 for (i
= XVECLEN (pat
, 0) - 1; i
> 0; --i
)
3958 set
= XVECEXP (pat
, 0, i
);
3959 if (GET_CODE (set
) != SET
)
3961 dest
= SET_DEST (set
);
3962 if (dest
== stack_pointer_rtx
)
3965 /* We'd better have found the stack pointer adjust. */
3968 /* Fall through to process the extracted SET and DEST
3969 as if it was a standalone insn. */
3971 else if (GET_CODE (pat
) == SET
)
3973 else if ((set
= single_set (insn
)) != NULL
)
3975 else if (GET_CODE (pat
) == PARALLEL
)
3977 /* ??? Some older ports use a parallel with a stack adjust
3978 and a store for a PUSH_ROUNDING pattern, rather than a
3979 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3980 /* ??? See h8300 and m68k, pushqi1. */
3981 for (i
= XVECLEN (pat
, 0) - 1; i
>= 0; --i
)
3983 set
= XVECEXP (pat
, 0, i
);
3984 if (GET_CODE (set
) != SET
)
3986 dest
= SET_DEST (set
);
3987 if (dest
== stack_pointer_rtx
)
3990 /* We do not expect an auto-inc of the sp in the parallel. */
3991 gcc_checking_assert (mem_autoinc_base (dest
) != stack_pointer_rtx
);
3992 gcc_checking_assert (mem_autoinc_base (SET_SRC (set
))
3993 != stack_pointer_rtx
);
4001 dest
= SET_DEST (set
);
4003 /* Look for direct modifications of the stack pointer. */
4004 if (REG_P (dest
) && REGNO (dest
) == STACK_POINTER_REGNUM
)
4006 /* Look for a trivial adjustment, otherwise assume nothing. */
4007 /* Note that the SPU restore_stack_block pattern refers to
4008 the stack pointer in V4SImode. Consider that non-trivial. */
4009 if (SCALAR_INT_MODE_P (GET_MODE (dest
))
4010 && GET_CODE (SET_SRC (set
)) == PLUS
4011 && XEXP (SET_SRC (set
), 0) == stack_pointer_rtx
4012 && CONST_INT_P (XEXP (SET_SRC (set
), 1)))
4013 return INTVAL (XEXP (SET_SRC (set
), 1));
4014 /* ??? Reload can generate no-op moves, which will be cleaned
4015 up later. Recognize it and continue searching. */
4016 else if (rtx_equal_p (dest
, SET_SRC (set
)))
4019 return HOST_WIDE_INT_MIN
;
4025 /* Otherwise only think about autoinc patterns. */
4026 if (mem_autoinc_base (dest
) == stack_pointer_rtx
)
4029 gcc_checking_assert (mem_autoinc_base (SET_SRC (set
))
4030 != stack_pointer_rtx
);
4032 else if (mem_autoinc_base (SET_SRC (set
)) == stack_pointer_rtx
)
4033 mem
= SET_SRC (set
);
4037 addr
= XEXP (mem
, 0);
4038 switch (GET_CODE (addr
))
4042 return GET_MODE_SIZE (GET_MODE (mem
));
4045 return -GET_MODE_SIZE (GET_MODE (mem
));
4048 addr
= XEXP (addr
, 1);
4049 gcc_assert (GET_CODE (addr
) == PLUS
);
4050 gcc_assert (XEXP (addr
, 0) == stack_pointer_rtx
);
4051 gcc_assert (CONST_INT_P (XEXP (addr
, 1)));
4052 return INTVAL (XEXP (addr
, 1));
4060 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
4062 int args_size = end_args_size;
4063 bool saw_unknown = false;
4066 for (insn = last; insn != prev; insn = PREV_INSN (insn))
4068 HOST_WIDE_INT this_delta;
4070 if (!NONDEBUG_INSN_P (insn))
4073 this_delta = find_args_size_adjust (insn);
4074 if (this_delta == 0)
4077 || ACCUMULATE_OUTGOING_ARGS
4078 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
4082 gcc_assert (!saw_unknown);
4083 if (this_delta == HOST_WIDE_INT_MIN)
4086 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
4087 if (STACK_GROWS_DOWNWARD)
4088 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
4090 args_size -= this_delta;
4093 return saw_unknown ? INT_MIN : args_size;
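
/* Editor's sketch (hypothetical names, not GCC code): how REG_ARGS_SIZE
   values are derived while walking insns from LAST back to PREV above.
   deltas[] stands in for the per-insn results of find_args_size_adjust.  */
static long long
sketch_fixup_args_size (const long long *deltas, int n_insns,
                        long long end_args_size, int stack_grows_downward)
{
  long long args_size = end_args_size;
  /* Walk backward: each insn is annotated with the args_size in effect
     after it, then its own delta is removed.  */
  for (int i = n_insns - 1; i >= 0; i--)
    {
      long long this_delta = deltas[i];
      if (this_delta == 0)
        continue;
      /* The real code would add_reg_note (insn, REG_ARGS_SIZE, args_size)
         at this point.  */
      if (stack_grows_downward)
        this_delta = -this_delta;
      args_size -= this_delta;
    }
  return args_size;   /* args_size in effect before the first insn.  */
}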
4096 #ifdef PUSH_ROUNDING
4097 /* Emit single push insn. */
4100 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
4103 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
4105 enum insn_code icode;
4107 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4108 /* If there is a push pattern, use it. Otherwise try the old way of
4109 throwing a MEM representing the push operation to the move expander. */
4110 icode = optab_handler (push_optab, mode);
4111 if (icode != CODE_FOR_nothing)
4113 struct expand_operand ops[1];
4115 create_input_operand (&ops[0], x, mode);
4116 if (maybe_expand_insn (icode, 1, ops))
4119 if (GET_MODE_SIZE (mode) == rounded_size)
4120 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4121 /* If we are to pad downward, adjust the stack pointer first and
4122 then store X into the stack location using an offset. This is
4123 because emit_move_insn does not know how to pad; it does not have
4125 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
4127 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4128 HOST_WIDE_INT offset;
4130 emit_move_insn (stack_pointer_rtx,
4131 expand_binop (Pmode,
4132 STACK_GROWS_DOWNWARD ? sub_optab
4135 gen_int_mode (rounded_size, Pmode),
4136 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4138 offset = (HOST_WIDE_INT) padding_size;
4139 if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
4140 /* We have already decremented the stack pointer, so get the
4142 offset += (HOST_WIDE_INT) rounded_size;
4144 if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
4145 /* We have already incremented the stack pointer, so get the
4147 offset -= (HOST_WIDE_INT) rounded_size;
4149 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4150 gen_int_mode (offset, Pmode));
4154 if (STACK_GROWS_DOWNWARD)
4155 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4156 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4157 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4160 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4161 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4162 gen_int_mode (rounded_size, Pmode));
4164 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4167 dest = gen_rtx_MEM (mode, dest_addr);
4171 set_mem_attributes (dest, type, 1);
4173 if (cfun->tail_call_marked)
4174 /* Function incoming arguments may overlap with sibling call
4175 outgoing arguments and we cannot allow reordering of reads
4176 from function arguments with stores to outgoing arguments
4177 of sibling calls. */
4178 set_mem_alias_set (dest, 0);
4180 emit_move_insn (dest, x);
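
/* Editor's sketch (hypothetical, not GCC code): the destination offset used
   to store X after the stack pointer has already been moved by ROUNDED_SIZE
   when the argument is padded downward, mirroring the arithmetic above.  */
static long long
sketch_pad_down_offset (long long mode_size, long long rounded_size,
                        int stack_grows_downward, int push_is_post)
{
  long long offset = rounded_size - mode_size;   /* padding below the value  */
  if (stack_grows_downward && push_is_post)
    /* The stack pointer was already decremented; address the slot above it.  */
    offset += rounded_size;
  if (!stack_grows_downward && push_is_post)
    /* The stack pointer was already incremented; address the slot below it.  */
    offset -= rounded_size;
  return offset;   /* destination address is sp + offset  */
}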
4183 /* Emit and annotate a single push insn. */
4186 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4188 int delta, old_delta = stack_pointer_delta;
4189 rtx_insn *prev = get_last_insn ();
4192 emit_single_push_insn_1 (mode, x, type);
4194 last = get_last_insn ();
4196 /* Notice the common case where we emitted exactly one insn. */
4197 if (PREV_INSN (last) == prev)
4199 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4203 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4204 gcc_assert (delta == INT_MIN || delta == old_delta);
4208 /* If reading SIZE bytes from X will end up reading from Y,
4209 return the number of bytes that overlap. Return -1
4210 if there is no overlap, or -2 if we cannot determine the answer
4211 (for example when X and Y have different base registers). */
4214 memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
4216 rtx tmp = plus_constant (Pmode, x, size);
4217 rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);
4219 if (!CONST_INT_P (sub))
4222 HOST_WIDE_INT val = INTVAL (sub);
4224 return IN_RANGE (val, 1, size) ? val : -1;
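
/* Editor's sketch (hypothetical names): the overlap test above with plain
   integers standing in for the symbolic addresses.  Reading SIZE bytes
   starting at X overlaps Y iff 1 <= (X + SIZE) - Y <= SIZE.  The -2
   "cannot tell" case arises only when the difference is not a constant,
   which plain integers cannot model.  */
static long long
sketch_memory_load_overlap (long long x, long long y, long long size)
{
  long long val = (x + size) - y;
  return (val >= 1 && val <= size) ? val : -1;
}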
4227 /* Generate code to push X onto the stack, assuming it has mode MODE and
4229 MODE is redundant except when X is a CONST_INT (since they don't
4231 SIZE is an rtx for the size of data to be copied (in bytes),
4232 needed only if X is BLKmode.
4233 Return true if successful. May return false if asked to push a
4234 partial argument during a sibcall optimization (as specified by
4235 SIBCALL_P) and the incoming and outgoing pointers cannot be shown
4238 ALIGN (in bits) is the maximum alignment we can assume.
4240 If PARTIAL and REG are both nonzero, then copy that many of the first
4241 bytes of X into registers starting with REG, and push the rest of X.
4242 The amount of space pushed is decreased by PARTIAL bytes.
4243 REG must be a hard register in this case.
4244 If REG is zero but PARTIAL is not, take all other actions for an
4245 argument partially in registers, but do not actually load any
4248 EXTRA is the amount in bytes of extra space to leave next to this arg.
4249 This is ignored if an argument block has already been allocated.
4251 On a machine that lacks real push insns, ARGS_ADDR is the address of
4252 the bottom of the argument block for this call. We use indexing off there
4253 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4254 argument block has not been preallocated.
4256 ARGS_SO_FAR is the size of args previously pushed for this call.
4258 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4259 for arguments passed in registers. If nonzero, it will be the number
4260 of bytes required. */
4263 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4264 unsigned int align, int partial, rtx reg, int extra,
4265 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4266 rtx alignment_pad, bool sibcall_p)
4269 enum direction stack_direction = STACK_GROWS_DOWNWARD ? downward : upward;
4271 /* Decide where to pad the argument: `downward' for below,
4272 `upward' for above, or `none' for don't pad it.
4273 Default is below for small data on big-endian machines; else above. */
4274 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4276 /* Invert direction if stack is post-decrement.
4278 if (STACK_PUSH_CODE == POST_DEC)
4279 if (where_pad != none)
4280 where_pad = (where_pad == downward ? upward : downward);
4284 int nregs = partial / UNITS_PER_WORD;
4285 rtx *tmp_regs = NULL;
4286 int overlapping = 0;
4289 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4291 /* Copy a block into the stack, entirely or partially. */
4298 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4299 used = partial - offset;
4301 if (mode != BLKmode)
4303 /* A value is to be stored in an insufficiently aligned
4304 stack slot; copy via a suitably aligned slot if
4306 size = GEN_INT (GET_MODE_SIZE (mode));
4307 if (!MEM_P (xinner))
4309 temp = assign_temp (type, 1, 1);
4310 emit_move_insn (temp, xinner);
4317 /* USED is now the # of bytes we need not copy to the stack
4318 because registers will take care of them. */
4321 xinner = adjust_address (xinner, BLKmode, used);
4323 /* If the partial register-part of the arg counts in its stack size,
4324 skip the part of stack space corresponding to the registers.
4325 Otherwise, start copying to the beginning of the stack space,
4326 by setting SKIP to 0. */
4327 skip = (reg_parm_stack_space == 0) ? 0 : used;
4329 #ifdef PUSH_ROUNDING
4330 /* Do it with several push insns if that doesn't take lots of insns
4331 and if there is no difficulty with push insns that skip bytes
4332 on the stack for alignment purposes. */
4335 && CONST_INT_P (size)
4337 && MEM_ALIGN (xinner) >= align
4338 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4339 /* Here we avoid the case of a structure whose weak alignment
4340 forces many pushes of a small amount of data,
4341 and such small pushes do rounding that causes trouble. */
4342 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4343 || align >= BIGGEST_ALIGNMENT
4344 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4345 == (align / BITS_PER_UNIT)))
4346 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4348 /* Push padding now if padding above and stack grows down,
4349 or if padding below and stack grows up.
4350 But if space already allocated, this has already been done. */
4351 if (extra && args_addr == 0
4352 && where_pad != none && where_pad != stack_direction)
4353 anti_adjust_stack (GEN_INT (extra));
4355 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4358 #endif /* PUSH_ROUNDING */
4362 /* Otherwise make space on the stack and copy the data
4363 to the address of that space. */
4365 /* Deduct words put into registers from the size we must copy. */
4368 if (CONST_INT_P (size))
4369 size = GEN_INT (INTVAL (size) - used);
4371 size = expand_binop (GET_MODE (size), sub_optab, size,
4372 gen_int_mode (used, GET_MODE (size)),
4373 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4376 /* Get the address of the stack space.
4377 In this case, we do not deal with EXTRA separately.
4378 A single stack adjust will do. */
4381 temp = push_block (size, extra, where_pad == downward);
4384 else if (CONST_INT_P (args_so_far))
4385 temp = memory_address (BLKmode,
4386 plus_constant (Pmode, args_addr,
4387 skip + INTVAL (args_so_far)));
4389 temp = memory_address (BLKmode,
4390 plus_constant (Pmode,
4391 gen_rtx_PLUS (Pmode,
4396 if (!ACCUMULATE_OUTGOING_ARGS)
4398 /* If the source is referenced relative to the stack pointer,
4399 copy it to another register to stabilize it. We do not need
4400 to do this if we know that we won't be changing sp. */
4402 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4403 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4404 temp = copy_to_reg (temp);
4407 target = gen_rtx_MEM (BLKmode, temp);
4409 /* We do *not* set_mem_attributes here, because incoming arguments
4410 may overlap with sibling call outgoing arguments and we cannot
4411 allow reordering of reads from function arguments with stores
4412 to outgoing arguments of sibling calls. We do, however, want
4413 to record the alignment of the stack slot. */
4414 /* ALIGN may well be better aligned than TYPE, e.g. due to
4415 PARM_BOUNDARY. Assume the caller isn't lying. */
4416 set_mem_align (target, align);
4418 /* If part should go in registers and pushing to that part would
4419 overwrite some of the values that need to go into regs, load the
4420 overlapping values into temporary pseudos to be moved into the hard
4421 regs at the end after the stack pushing has completed.
4422 We cannot load them directly into the hard regs here because
4423 they can be clobbered by the block move expansions.
4426 if (partial > 0 && reg != 0 && mode == BLKmode
4427 && GET_CODE (reg) != PARALLEL)
4429 overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
4430 if (overlapping > 0)
4432 gcc_assert (overlapping % UNITS_PER_WORD == 0);
4433 overlapping /= UNITS_PER_WORD;
4435 tmp_regs = XALLOCAVEC (rtx, overlapping);
4437 for (int i = 0; i < overlapping; i++)
4438 tmp_regs[i] = gen_reg_rtx (word_mode);
4440 for (int i = 0; i < overlapping; i++)
4441 emit_move_insn (tmp_regs[i],
4442 operand_subword_force (target, i, mode));
4444 else if (overlapping == -1)
4446 /* Could not determine whether there is overlap.
4447 Fail the sibcall. */
4455 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4458 else if (partial > 0)
4460 /* Scalar partly in registers. */
4462 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4465 /* # bytes of start of argument
4466 that we must make space for but need not store. */
4467 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4468 int args_offset = INTVAL (args_so_far);
4471 /* Push padding now if padding above and stack grows down,
4472 or if padding below and stack grows up.
4473 But if space already allocated, this has already been done. */
4474 if (extra && args_addr == 0
4475 && where_pad != none && where_pad != stack_direction)
4476 anti_adjust_stack (GEN_INT (extra));
4478 /* If we make space by pushing it, we might as well push
4479 the real data. Otherwise, we can leave OFFSET nonzero
4480 and leave the space uninitialized. */
4484 /* Now NOT_STACK gets the number of words that we don't need to
4485 allocate on the stack. Convert OFFSET to words too. */
4486 not_stack = (partial - offset) / UNITS_PER_WORD;
4487 offset /= UNITS_PER_WORD;
4489 /* If the partial register-part of the arg counts in its stack size,
4490 skip the part of stack space corresponding to the registers.
4491 Otherwise, start copying to the beginning of the stack space,
4492 by setting SKIP to 0. */
4493 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4495 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4496 x = validize_mem (force_const_mem (mode, x));
4498 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4499 SUBREGs of such registers are not allowed. */
4500 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4501 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4502 x = copy_to_reg (x);
4504 /* Loop over all the words allocated on the stack for this arg. */
4505 /* We can do it by words, because any scalar bigger than a word
4506 has a size a multiple of a word. */
4507 for (i = size - 1; i >= not_stack; i--)
4508 if (i >= not_stack + offset)
4509 if (!emit_push_insn (operand_subword_force (x, i, mode),
4510 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4512 GEN_INT (args_offset + ((i - not_stack + skip)
4514 reg_parm_stack_space, alignment_pad, sibcall_p))
4522 /* Push padding now if padding above and stack grows down,
4523 or if padding below and stack grows up.
4524 But if space already allocated, this has already been done. */
4525 if (extra && args_addr == 0
4526 && where_pad != none && where_pad != stack_direction)
4527 anti_adjust_stack (GEN_INT (extra));
4529 #ifdef PUSH_ROUNDING
4530 if (args_addr == 0 && PUSH_ARGS)
4531 emit_single_push_insn (mode, x, type);
4535 if (CONST_INT_P (args_so_far))
4537 = memory_address (mode,
4538 plus_constant (Pmode, args_addr,
4539 INTVAL (args_so_far)));
4541 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4543 dest = gen_rtx_MEM (mode, addr);
4545 /* We do *not* set_mem_attributes here, because incoming arguments
4546 may overlap with sibling call outgoing arguments and we cannot
4547 allow reordering of reads from function arguments with stores
4548 to outgoing arguments of sibling calls. We do, however, want
4549 to record the alignment of the stack slot. */
4550 /* ALIGN may well be better aligned than TYPE, e.g. due to
4551 PARM_BOUNDARY. Assume the caller isn't lying. */
4552 set_mem_align (dest, align);
4554 emit_move_insn (dest, x);
4558 /* Move the partial arguments into the registers and any overlapping
4559 values that we moved into the pseudos in tmp_regs. */
4560 if (partial > 0 && reg != 0)
4562 /* Handle calls that pass values in multiple non-contiguous locations.
4563 The Irix 6 ABI has examples of this. */
4564 if (GET_CODE (reg) == PARALLEL)
4565 emit_group_load (reg, x, type, -1);
4568 gcc_assert (partial % UNITS_PER_WORD == 0);
4569 move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);
4571 for (int i = 0; i < overlapping; i++)
4572 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
4573 + nregs - overlapping + i),
4579 if (extra && args_addr == 0 && where_pad == stack_direction)
4580 anti_adjust_stack (GEN_INT (extra));
4582 if (alignment_pad && args_addr == 0)
4583 anti_adjust_stack (alignment_pad);
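
/* Editor's sketch (hypothetical, not GCC code): for a scalar argument of
   SIZE_WORDS words with PARTIAL_BYTES passed in registers, compute which
   words get pushed on the stack and how many stack words are skipped,
   mirroring the NOT_STACK/OFFSET/SKIP arithmetic in the loop above.  */
static void
sketch_partial_split (int size_words, int partial_bytes, int units_per_word,
                      int parm_boundary_bytes, int reg_parm_stack_space,
                      int *first_pushed_word, int *skip_words)
{
  /* Bytes at the start of the argument that need space but no store.  */
  int offset = partial_bytes % parm_boundary_bytes;
  int not_stack = (partial_bytes - offset) / units_per_word;
  offset /= units_per_word;
  /* Words below not_stack live entirely in registers; words at or above
     not_stack + offset are actually pushed.  */
  *first_pushed_word = not_stack + offset;
  /* SKIP is nonzero only when the register part also counts in the
     argument's stack size.  */
  *skip_words = (reg_parm_stack_space == 0) ? 0 : not_stack;
}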
4588 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4592 get_subtarget (rtx x)
4596 /* Only registers can be subtargets. */
4598 /* Don't use hard regs to avoid extending their life. */
4599 || REGNO (x) < FIRST_PSEUDO_REGISTER
4603 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4604 FIELD is a bitfield. Returns true if the optimization was successful,
4605 and there's nothing else to do. */
4608 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4609 unsigned HOST_WIDE_INT bitpos,
4610 unsigned HOST_WIDE_INT bitregion_start,
4611 unsigned HOST_WIDE_INT bitregion_end,
4612 machine_mode mode1, rtx str_rtx,
4613 tree to, tree src, bool reverse)
4615 machine_mode str_mode = GET_MODE (str_rtx);
4616 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4621 enum tree_code code;
4623 if (mode1 != VOIDmode
4624 || bitsize >= BITS_PER_WORD
4625 || str_bitsize > BITS_PER_WORD
4626 || TREE_SIDE_EFFECTS (to)
4627 || TREE_THIS_VOLATILE (to))
4631 if (TREE_CODE (src) != SSA_NAME)
4633 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4636 srcstmt = get_gimple_for_ssa_name (src);
4638 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4641 code = gimple_assign_rhs_code (srcstmt);
4643 op0 = gimple_assign_rhs1 (srcstmt);
4645 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4646 to find its initialization. Hopefully the initialization will
4647 be from a bitfield load. */
4648 if (TREE_CODE (op0) == SSA_NAME)
4650 gimple *op0stmt = get_gimple_for_ssa_name (op0);
4652 /* We want to eventually have OP0 be the same as TO, which
4653 should be a bitfield. */
4655 || !is_gimple_assign (op0stmt)
4656 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4658 op0 = gimple_assign_rhs1 (op0stmt);
4661 op1 = gimple_assign_rhs2 (srcstmt);
4663 if (!operand_equal_p (to, op0, 0))
4666 if (MEM_P (str_rtx))
4668 unsigned HOST_WIDE_INT offset1;
4670 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4671 str_mode = word_mode;
4672 str_mode = get_best_mode (bitsize, bitpos,
4673 bitregion_start, bitregion_end,
4674 MEM_ALIGN (str_rtx), str_mode, 0);
4675 if (str_mode == VOIDmode)
4677 str_bitsize = GET_MODE_BITSIZE (str_mode);
4680 bitpos %= str_bitsize;
4681 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4682 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4684 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4687 gcc_assert (!reverse);
4689 /* If the bit field covers the whole REG/MEM, store_field
4690 will likely generate better code. */
4691 if (bitsize >= str_bitsize)
4694 /* We can't handle fields split across multiple entities. */
4695 if (bitpos + bitsize > str_bitsize)
4698 if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4699 bitpos = str_bitsize - bitpos - bitsize;
4705 /* For now, just optimize the case of the topmost bitfield
4706 where we don't need to do any masking and also
4707 1 bit bitfields where xor can be used.
4708 We might win by one instruction for the other bitfields
4709 too if insv/extv instructions aren't used, so that
4710 can be added later. */
4711 if ((reverse || bitpos + bitsize != str_bitsize)
4712 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4715 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4716 value = convert_modes (str_mode,
4717 TYPE_MODE (TREE_TYPE (op1)), value,
4718 TYPE_UNSIGNED (TREE_TYPE (op1)));
4720 /* We may be accessing data outside the field, which means
4721 we can alias adjacent data. */
4722 if (MEM_P (str_rtx))
4724 str_rtx = shallow_copy_rtx (str_rtx);
4725 set_mem_alias_set (str_rtx, 0);
4726 set_mem_expr (str_rtx, 0);
4729 if (bitsize == 1 && (reverse || bitpos + bitsize != str_bitsize))
4731 value = expand_and (str_mode, value, const1_rtx, NULL);
4735 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4737 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4739 value = flip_storage_order (str_mode, value);
4740 result = expand_binop (str_mode, binop, str_rtx,
4741 value, str_rtx, 1, OPTAB_WIDEN);
4742 if (result != str_rtx)
4743 emit_move_insn (str_rtx, result);
4748 if (TREE_CODE (op1) != INTEGER_CST)
4750 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4751 value = convert_modes (str_mode,
4752 TYPE_MODE (TREE_TYPE (op1)), value,
4753 TYPE_UNSIGNED (TREE_TYPE (op1)));
4755 /* We may be accessing data outside the field, which means
4756 we can alias adjacent data. */
4757 if (MEM_P (str_rtx))
4759 str_rtx = shallow_copy_rtx (str_rtx);
4760 set_mem_alias_set (str_rtx, 0);
4761 set_mem_expr (str_rtx, 0);
4764 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4765 if (bitpos + bitsize != str_bitsize)
4767 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4769 value = expand_and (str_mode, value, mask, NULL_RTX);
4771 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4773 value = flip_storage_order (str_mode, value);
4774 result = expand_binop (str_mode, binop, str_rtx,
4775 value, str_rtx, 1, OPTAB_WIDEN);
4776 if (result != str_rtx)
4777 emit_move_insn (str_rtx, result);
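
/* Editor's sketch (hypothetical, plain integers): the masked OR/XOR update
   that the function above emits for "field |= val" / "field ^= val" on a
   word-sized container, assuming a little-endian bit layout.  */
static unsigned long long
sketch_bitfield_or_xor (unsigned long long word, unsigned long long value,
                        unsigned bitpos, unsigned bitsize, int use_xor)
{
  unsigned long long mask = (bitsize >= 64
                             ? ~0ULL
                             : ((unsigned long long) 1 << bitsize) - 1);
  value &= mask;                 /* keep only the bits of the field  */
  value <<= bitpos;              /* move them into place within the word  */
  return use_xor ? (word ^ value) : (word | value);
}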
4787 /* In the C++ memory model, consecutive bit fields in a structure are
4788 considered one memory location.
4790 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4791 returns the bit range of consecutive bits in which this COMPONENT_REF
4792 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4793 and *OFFSET may be adjusted in the process.
4795 If the access does not need to be restricted, 0 is returned in both
4796 *BITSTART and *BITEND. */
4799 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4800 unsigned HOST_WIDE_INT *bitend,
4802 HOST_WIDE_INT *bitpos,
4805 HOST_WIDE_INT bitoffset;
4808 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4810 field = TREE_OPERAND (exp, 1);
4811 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4812 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4813 need to limit the range we can access. */
4816 *bitstart = *bitend = 0;
4820 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4821 part of a larger bit field, then the representative does not serve any
4822 useful purpose. This can occur in Ada. */
4823 if (handled_component_p (TREE_OPERAND (exp, 0)))
4826 HOST_WIDE_INT rbitsize, rbitpos;
4828 int unsignedp, reversep, volatilep = 0;
4829 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4830 &roffset, &rmode, &unsignedp, &reversep,
4832 if ((rbitpos % BITS_PER_UNIT) != 0)
4834 *bitstart = *bitend = 0;
4839 /* Compute the adjustment to bitpos from the offset of the field
4840 relative to the representative. DECL_FIELD_OFFSET of field and
4841 repr are the same by construction if they are not constants,
4842 see finish_bitfield_layout. */
4843 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4844 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4845 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4846 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4849 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4850 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4852 /* If the adjustment is larger than bitpos, we would have a negative bit
4853 position for the lower bound and this may wreak havoc later. Adjust
4854 offset and bitpos to make the lower bound non-negative in that case. */
4855 if (bitoffset > *bitpos)
4857 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4858 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4861 if (*offset == NULL_TREE)
4862 *offset = size_int (-adjust / BITS_PER_UNIT);
4865 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4869 *bitstart = *bitpos - bitoffset;
4871 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
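
/* Editor's sketch (hypothetical names, plain integers, 8-bit units assumed):
   how the accessible bit range is derived from a bit field and its
   representative, as in get_bit_range above.  */
static void
sketch_bit_range (unsigned long long field_byte_off, unsigned long long field_bit_off,
                  unsigned long long repr_byte_off, unsigned long long repr_bit_off,
                  unsigned long long repr_size_bits, unsigned long long bitpos,
                  unsigned long long *bitstart, unsigned long long *bitend)
{
  /* Offset of the field from the start of its representative, in bits.  */
  unsigned long long bitoffset = (field_byte_off - repr_byte_off) * 8
                                 + (field_bit_off - repr_bit_off);
  *bitstart = bitpos - bitoffset;
  *bitend = *bitstart + repr_size_bits - 1;
}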
4874 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4875 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4876 DECL_RTL was not set yet, return NORTL. */
4879 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4881 if (TREE_CODE (addr) != ADDR_EXPR)
4884 tree base = TREE_OPERAND (addr, 0);
4887 || TREE_ADDRESSABLE (base)
4888 || DECL_MODE (base) == BLKmode)
4891 if (!DECL_RTL_SET_P (base))
4894 return (!MEM_P (DECL_RTL (base)));
4897 /* Returns true if the MEM_REF REF refers to an object that does not
4898 reside in memory and has non-BLKmode. */
4901 mem_ref_refers_to_non_mem_p (tree ref)
4903 tree base = TREE_OPERAND (ref, 0);
4904 return addr_expr_of_non_mem_decl_p_1 (base, false);
4907 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4908 is true, try generating a nontemporal store. */
4911 expand_assignment (tree to, tree from, bool nontemporal)
4917 enum insn_code icode;
4919 /* Don't crash if the lhs of the assignment was erroneous. */
4920 if (TREE_CODE (to) == ERROR_MARK)
4922 expand_normal (from);
4926 /* Optimize away no-op moves without side-effects. */
4927 if (operand_equal_p (to, from, 0))
4930 /* Handle misaligned stores. */
4931 mode = TYPE_MODE (TREE_TYPE (to));
4932 if ((TREE_CODE (to) == MEM_REF
4933 || TREE_CODE (to) == TARGET_MEM_REF)
4935 && !mem_ref_refers_to_non_mem_p (to)
4936 && ((align = get_object_alignment (to))
4937 < GET_MODE_ALIGNMENT (mode))
4938 && (((icode = optab_handler (movmisalign_optab, mode))
4939 != CODE_FOR_nothing)
4940 || SLOW_UNALIGNED_ACCESS (mode, align)))
4944 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4945 reg = force_not_mem (reg);
4946 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4947 if (TREE_CODE (to) == MEM_REF && REF_REVERSE_STORAGE_ORDER (to))
4948 reg = flip_storage_order (mode, reg);
4950 if (icode != CODE_FOR_nothing)
4952 struct expand_operand ops[2];
4954 create_fixed_operand (&ops[0], mem);
4955 create_input_operand (&ops[1], reg, mode);
4956 /* The movmisalign<mode> pattern cannot fail, else the assignment
4957 would silently be omitted. */
4958 expand_insn (icode, 2, ops);
4961 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg,
4966 /* Assignment of a structure component needs special treatment
4967 if the structure component's rtx is not simply a MEM.
4968 Assignment of an array element at a constant index, and assignment of
4969 an array element in an unaligned packed structure field, has the same
4970 problem. Same for (partially) storing into a non-memory object. */
4971 if (handled_component_p (to)
4972 || (TREE_CODE (to) == MEM_REF
4973 && (REF_REVERSE_STORAGE_ORDER (to)
4974 || mem_ref_refers_to_non_mem_p (to)))
4975 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4978 HOST_WIDE_INT bitsize, bitpos;
4979 unsigned HOST_WIDE_INT bitregion_start = 0;
4980 unsigned HOST_WIDE_INT bitregion_end = 0;
4982 int unsignedp, reversep, volatilep = 0;
4986 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4987 &unsignedp, &reversep, &volatilep, true);
4989 /* Make sure bitpos is not negative, it can wreak havoc later. */
4992 gcc_assert (offset == NULL_TREE);
4993 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4994 ? 3 : exact_log2 (BITS_PER_UNIT)));
4995 bitpos &= BITS_PER_UNIT - 1;
4998 if (TREE_CODE (to) == COMPONENT_REF
4999 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
5000 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
5001 /* The C++ memory model naturally applies to byte-aligned fields.
5002 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
5003 BITSIZE are not byte-aligned, there is no need to limit the range
5004 we can access. This can occur with packed structures in Ada. */
5005 else if (bitsize > 0
5006 && bitsize % BITS_PER_UNIT == 0
5007 && bitpos % BITS_PER_UNIT == 0)
5009 bitregion_start = bitpos;
5010 bitregion_end = bitpos + bitsize - 1;
5013 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
5015 /* If the field has a mode, we want to access it in the
5016 field's mode, not the computed mode.
5017 If a MEM has VOIDmode (external with incomplete type),
5018 use BLKmode for it instead. */
5021 if (mode1 != VOIDmode)
5022 to_rtx = adjust_address (to_rtx, mode1, 0);
5023 else if (GET_MODE (to_rtx) == VOIDmode)
5024 to_rtx = adjust_address (to_rtx, BLKmode, 0);
5029 machine_mode address_mode;
5032 if (!MEM_P (to_rtx))
5034 /* We can get constant negative offsets into arrays with broken
5035 user code. Translate this to a trap instead of ICEing. */
5036 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
5037 expand_builtin_trap ();
5038 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
5041 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
5042 address_mode = get_address_mode (to_rtx);
5043 if (GET_MODE (offset_rtx) != address_mode)
5045 /* We cannot be sure that the RTL in offset_rtx is valid outside
5046 of a memory address context, so force it into a register
5047 before attempting to convert it to the desired mode. */
5048 offset_rtx = force_operand (offset_rtx, NULL_RTX);
5049 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5052 /* If we have an expression in OFFSET_RTX and a non-zero
5053 byte offset in BITPOS, adding the byte offset before the
5054 OFFSET_RTX results in better intermediate code, which makes
5055 later rtl optimization passes perform better.
5057 We prefer intermediate code like this:
5059 r124:DI=r123:DI+0x18
5064 r124:DI=r123:DI+0x10
5065 [r124:DI+0x8]=r121:DI
5067 This is only done for aligned data values, as these can
5068 be expected to result in single move instructions. */
5069 if (mode1 != VOIDmode
5072 && (bitpos % bitsize) == 0
5073 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
5074 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
5076 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
5077 bitregion_start = 0;
5078 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
5079 bitregion_end -= bitpos;
5083 to_rtx = offset_address (to_rtx, offset_rtx,
5084 highest_pow2_factor_for_target (to,
5088 /* No action is needed if the target is not a memory and the field
5089 lies completely outside that target. This can occur if the source
5090 code contains an out-of-bounds access to a small array. */
5092 && GET_MODE (to_rtx) != BLKmode
5093 && (unsigned HOST_WIDE_INT) bitpos
5094 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
5096 expand_normal (from);
5099 /* Handle expand_expr of a complex value returning a CONCAT. */
5100 else if (GET_CODE (to_rtx) == CONCAT)
5102 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
5103 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
5105 && bitsize == mode_bitsize)
5106 result = store_expr (from, to_rtx, false, nontemporal, reversep);
5107 else if (bitsize == mode_bitsize / 2
5108 && (bitpos == 0 || bitpos == mode_bitsize / 2))
5109 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
5110 nontemporal, reversep);
5111 else if (bitpos + bitsize <= mode_bitsize / 2)
5112 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
5113 bitregion_start, bitregion_end,
5114 mode1, from, get_alias_set (to),
5115 nontemporal, reversep);
5116 else if (bitpos >= mode_bitsize / 2)
5117 result = store_field (XEXP (to_rtx, 1), bitsize,
5118 bitpos - mode_bitsize / 2,
5119 bitregion_start, bitregion_end,
5120 mode1, from, get_alias_set (to),
5121 nontemporal, reversep);
5122 else if (bitpos == 0 && bitsize == mode_bitsize)
5125 result = expand_normal (from);
5126 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
5127 TYPE_MODE (TREE_TYPE (from)), 0);
5128 emit_move_insn (XEXP (to_rtx, 0),
5129 read_complex_part (from_rtx, false));
5130 emit_move_insn (XEXP (to_rtx, 1),
5131 read_complex_part (from_rtx, true));
5135 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
5136 GET_MODE_SIZE (GET_MODE (to_rtx)));
5137 write_complex_part (temp, XEXP (to_rtx, 0), false);
5138 write_complex_part (temp, XEXP (to_rtx, 1), true);
5139 result = store_field (temp, bitsize, bitpos,
5140 bitregion_start, bitregion_end,
5141 mode1, from, get_alias_set (to),
5142 nontemporal, reversep);
5143 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
5144 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
5151 /* If the field is at offset zero, we could have been given the
5152 DECL_RTX of the parent struct. Don't munge it. */
5153 to_rtx = shallow_copy_rtx (to_rtx);
5154 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
5156 MEM_VOLATILE_P (to_rtx) = 1;
5159 if (optimize_bitfield_assignment_op (bitsize, bitpos,
5160 bitregion_start, bitregion_end,
5161 mode1, to_rtx, to, from,
5165 result = store_field (to_rtx, bitsize, bitpos,
5166 bitregion_start, bitregion_end,
5167 mode1, from, get_alias_set (to),
5168 nontemporal, reversep);
5172 preserve_temp_slots (result);
5177 /* If the rhs is a function call and its value is not an aggregate,
5178 call the function before we start to compute the lhs.
5179 This is needed for correct code for cases such as
5180 val = setjmp (buf) on machines where reference to val
5181 requires loading up part of an address in a separate insn.
5183 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5184 since it might be a promoted variable where the zero- or sign- extension
5185 needs to be done. Handling this in the normal way is safe because no
5186 computation is done before the call. The same is true for SSA names. */
5187 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5188 && COMPLETE_TYPE_P (TREE_TYPE (from))
5189 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5190 && ! (((TREE_CODE (to) == VAR_DECL
5191 || TREE_CODE (to) == PARM_DECL
5192 || TREE_CODE (to) == RESULT_DECL)
5193 && REG_P (DECL_RTL (to)))
5194 || TREE_CODE (to) == SSA_NAME))
5200 value = expand_normal (from);
5202 /* Split value and bounds to store them separately. */
5203 chkp_split_slot (value, &value, &bounds);
5206 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5208 /* Handle calls that return values in multiple non-contiguous locations.
5209 The Irix 6 ABI has examples of this. */
5210 if (GET_CODE (to_rtx) == PARALLEL)
5212 if (GET_CODE (value) == PARALLEL)
5213 emit_group_move (to_rtx, value);
5215 emit_group_load (to_rtx, value, TREE_TYPE (from),
5216 int_size_in_bytes (TREE_TYPE (from)));
5218 else if (GET_CODE (value) == PARALLEL)
5219 emit_group_store (to_rtx, value, TREE_TYPE (from),
5220 int_size_in_bytes (TREE_TYPE (from)));
5221 else if (GET_MODE (to_rtx) == BLKmode)
5223 /* Handle calls that return BLKmode values in registers. */
5225 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5227 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5231 if (POINTER_TYPE_P (TREE_TYPE (to)))
5232 value = convert_memory_address_addr_space
5233 (GET_MODE (to_rtx), value,
5234 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5236 emit_move_insn (to_rtx, value);
5239 /* Store bounds if required. */
5241 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5243 gcc_assert (MEM_P (to_rtx));
5244 chkp_emit_bounds_store (bounds, value, to_rtx);
5247 preserve_temp_slots (to_rtx);
5252 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5253 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5255 /* Don't move directly into a return register. */
5256 if (TREE_CODE (to) == RESULT_DECL
5257 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5263 /* If the source is itself a return value, it still is in a pseudo at
5264 this point so we can move it back to the return register directly. */
5266 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5267 && TREE_CODE (from) != CALL_EXPR)
5268 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5270 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5272 /* Handle calls that return values in multiple non-contiguous locations.
5273 The Irix 6 ABI has examples of this. */
5274 if (GET_CODE (to_rtx) == PARALLEL)
5276 if (GET_CODE (temp) == PARALLEL)
5277 emit_group_move (to_rtx, temp);
5279 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5280 int_size_in_bytes (TREE_TYPE (from)));
5283 emit_move_insn (to_rtx, temp);
5285 preserve_temp_slots (to_rtx);
5290 /* In case we are returning the contents of an object which overlaps
5291 the place the value is being stored, use a safe function when copying
5292 a value through a pointer into a structure value return block. */
5293 if (TREE_CODE (to) == RESULT_DECL
5294 && TREE_CODE (from) == INDIRECT_REF
5295 && ADDR_SPACE_GENERIC_P
5296 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5297 && refs_may_alias_p (to, from)
5298 && cfun->returns_struct
5299 && !cfun->returns_pcc_struct)
5304 size = expr_size (from);
5305 from_rtx = expand_normal (from);
5307 emit_block_move_via_libcall (XEXP (to_rtx, 0), XEXP (from_rtx, 0), size);
5309 preserve_temp_slots (to_rtx);
5314 /* Compute FROM and store the value in the rtx we got. */
5317 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, false, to);
5318 preserve_temp_slots (result);
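
/* Editor's sketch (hypothetical, not GCC code): given a store of BITSIZE
   bits at BITPOS into a complex value represented as a CONCAT of two
   MODE_BITSIZE/2 halves, decide which part is written, mirroring the
   CONCAT cases handled in expand_assignment above.  Returns 0 for the
   real part, 1 for the imaginary part, and -1 when the store straddles
   both halves and must go through a stack temporary.  */
static int
sketch_concat_half (unsigned mode_bitsize, unsigned bitpos, unsigned bitsize)
{
  if (bitpos + bitsize <= mode_bitsize / 2)
    return 0;                        /* entirely within the real part  */
  if (bitpos >= mode_bitsize / 2)
    return 1;                        /* entirely within the imaginary part  */
  return -1;                         /* spans both halves  */
}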
5323 /* Emit a nontemporal store insn that moves FROM to TO. Return true if
5324 this succeeded, false otherwise. */
5327 emit_storent_insn (rtx to, rtx from)
5329 struct expand_operand ops[2];
5330 machine_mode mode = GET_MODE (to);
5331 enum insn_code code = optab_handler (storent_optab, mode);
5333 if (code == CODE_FOR_nothing)
5336 create_fixed_operand (&ops[0], to);
5337 create_input_operand (&ops[1], from, mode);
5338 return maybe_expand_insn (code, 2, ops);
5341 /* Generate code for computing expression EXP,
5342 and storing the value into TARGET.
5344 If the mode is BLKmode then we may return TARGET itself.
5345 It turns out that in BLKmode it doesn't cause a problem,
5346 because C has no operators that could combine two different
5347 assignments into the same BLKmode object with different values
5348 with no sequence point. Will other languages need this to
5351 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5352 stack, and block moves may need to be treated specially.
5354 If NONTEMPORAL is true, try using a nontemporal store instruction.
5356 If REVERSE is true, the store is to be done in reverse order.
5358 If BTARGET is not NULL then computed bounds of EXP are
5359 associated with BTARGET. */
5362 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5363 bool nontemporal, bool reverse, tree btarget)
5366 rtx alt_rtl = NULL_RTX;
5367 location_t loc = curr_insn_location ();
5369 if (VOID_TYPE_P (TREE_TYPE (exp)))
5371 /* C++ can generate ?: expressions with a throw expression in one
5372 branch and an rvalue in the other. Here, we resolve attempts to
5373 store the throw expression's nonexistent result. */
5374 gcc_assert (!call_param_p);
5375 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5378 if (TREE_CODE (exp) == COMPOUND_EXPR)
5380 /* Perform first part of compound expression, then assign from second
5382 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5383 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5384 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5385 call_param_p, nontemporal, reverse,
5388 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5390 /* For conditional expression, get safe form of the target. Then
5391 test the condition, doing the appropriate assignment on either
5392 side. This avoids the creation of unnecessary temporaries.
5393 For non-BLKmode, it is more efficient not to do this. */
5395 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5397 do_pending_stack_adjust ();
5399 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5400 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5401 nontemporal, reverse, btarget);
5402 emit_jump_insn (targetm.gen_jump (lab2));
5405 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5406 nontemporal, reverse, btarget);
5412 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5413 /* If this is a scalar in a register that is stored in a wider mode
5414 than the declared mode, compute the result into its declared mode
5415 and then convert to the wider mode. Our value is the computed
5418 rtx inner_target = 0;
5420 /* We can do the conversion inside EXP, which will often result
5421 in some optimizations. Do the conversion in two steps: first
5422 change the signedness, if needed, then the extend. But don't
5423 do this if the type of EXP is a subtype of something else
5424 since then the conversion might involve more than just
5425 converting modes. */
5426 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5427 && TREE_TYPE (TREE_TYPE (exp)) == 0
5428 && GET_MODE_PRECISION (GET_MODE (target))
5429 == TYPE_PRECISION (TREE_TYPE (exp)))
5431 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5432 TYPE_UNSIGNED (TREE_TYPE (exp))))
5434 /* Some types, e.g. Fortran's logical*4, won't have a signed
5435 version, so use the mode instead. */
5437 = (signed_or_unsigned_type_for
5438 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5440 ntype = lang_hooks.types.type_for_mode
5441 (TYPE_MODE (TREE_TYPE (exp)),
5442 SUBREG_PROMOTED_SIGN (target));
5444 exp = fold_convert_loc (loc, ntype, exp);
5447 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5448 (GET_MODE (SUBREG_REG (target)),
5449 SUBREG_PROMOTED_SIGN (target)),
5452 inner_target = SUBREG_REG (target);
5455 temp = expand_expr (exp, inner_target, VOIDmode,
5456 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5458 /* Handle bounds returned by call. */
5459 if (TREE_CODE (exp) == CALL_EXPR)
5462 chkp_split_slot (temp, &temp, &bounds);
5463 if (bounds && btarget)
5465 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5466 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5467 chkp_set_rtl_bounds (btarget, tmp);
5471 /* If TEMP is a VOIDmode constant, use convert_modes to make
5472 sure that we properly convert it. */
5473 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5475 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5476 temp, SUBREG_PROMOTED_SIGN (target));
5477 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5478 GET_MODE (target), temp,
5479 SUBREG_PROMOTED_SIGN (target));
5482 convert_move (SUBREG_REG (target), temp,
5483 SUBREG_PROMOTED_SIGN (target));
5487 else if ((TREE_CODE (exp) == STRING_CST
5488 || (TREE_CODE (exp) == MEM_REF
5489 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5490 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5492 && integer_zerop (TREE_OPERAND (exp, 1))))
5493 && !nontemporal && !call_param_p
5496 /* Optimize initialization of an array with a STRING_CST. */
5497 HOST_WIDE_INT exp_len, str_copy_len;
5499 tree str = TREE_CODE (exp) == STRING_CST
5500 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5502 exp_len = int_expr_size (exp);
5506 if (TREE_STRING_LENGTH (str) <= 0)
5509 str_copy_len = strlen (TREE_STRING_POINTER (str));
5510 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5513 str_copy_len = TREE_STRING_LENGTH (str);
5514 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5515 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5517 str_copy_len += STORE_MAX_PIECES - 1;
5518 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5520 str_copy_len = MIN (str_copy_len, exp_len);
5521 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5522 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5523 MEM_ALIGN (target), false))
5528 dest_mem = store_by_pieces (dest_mem,
5529 str_copy_len, builtin_strncpy_read_str,
5531 TREE_STRING_POINTER (str)),
5532 MEM_ALIGN (target), false,
5533 exp_len > str_copy_len ? 1 : 0);
5534 if (exp_len > str_copy_len)
5535 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5536 GEN_INT (exp_len - str_copy_len),
5545 /* If we want to use a nontemporal or a reverse order store, force the
5546 value into a register first. */
5547 tmp_target = nontemporal || reverse ? NULL_RTX : target;
5548 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5550 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5553 /* Handle bounds returned by call. */
5554 if (TREE_CODE (exp) == CALL_EXPR)
5557 chkp_split_slot (temp, &temp, &bounds);
5558 if (bounds && btarget)
5560 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5561 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5562 chkp_set_rtl_bounds (btarget, tmp);
5567 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5568 the same as that of TARGET, adjust the constant. This is needed, for
5569 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5570 only a word-sized value. */
5571 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5572 && TREE_CODE (exp) != ERROR_MARK
5573 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5574 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5575 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5577 /* If value was not generated in the target, store it there.
5578 Convert the value to TARGET's type first if necessary and emit the
5579 pending incrementations that have been queued when expanding EXP.
5580 Note that we cannot emit the whole queue blindly because this will
5581 effectively disable the POST_INC optimization later.
5583 If TEMP and TARGET compare equal according to rtx_equal_p, but
5584 one or both of them are volatile memory refs, we have to distinguish
5586 - expand_expr has used TARGET. In this case, we must not generate
5587 another copy. This can be detected by TARGET being equal according
5589 - expand_expr has not used TARGET - that means that the source just
5590 happens to have the same RTX form. Since temp will have been created
5591 by expand_expr, it will compare unequal according to == .
5592 We must generate a copy in this case, to reach the correct number
5593 of volatile memory references. */
5595 if ((! rtx_equal_p (temp, target)
5596 || (temp != target && (side_effects_p (temp)
5597 || side_effects_p (target))))
5598 && TREE_CODE (exp) != ERROR_MARK
5599 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5600 but TARGET is not valid memory reference, TEMP will differ
5601 from TARGET although it is really the same location. */
5603 && rtx_equal_p (alt_rtl, target)
5604 && !side_effects_p (alt_rtl)
5605 && !side_effects_p (target))
5606 /* If there's nothing to copy, don't bother. Don't call
5607 expr_size unless necessary, because some front-ends (C++)
5608 expr_size-hook must not be given objects that are not
5609 supposed to be bit-copied or bit-initialized. */
5610 && expr_size (exp) != const0_rtx)
5612 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5614 if (GET_MODE (target) == BLKmode)
5616 /* Handle calls that return BLKmode values in registers. */
5617 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5618 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5620 store_bit_field (target,
5621 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5622 0, 0, 0, GET_MODE (temp), temp, reverse);
5625 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5628 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5630 /* Handle copying a string constant into an array. The string
5631 constant may be shorter than the array. So copy just the string's
5632 actual length, and clear the rest. First get the size of the data
5633 type of the string, which is actually the size of the target. */
5634 rtx size = expr_size (exp);
5636 if (CONST_INT_P (size)
5637 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5638 emit_block_move (target, temp, size,
5640 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5643 machine_mode pointer_mode
5644 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5645 machine_mode address_mode = get_address_mode (target);
5647 /* Compute the size of the data to copy from the string. */
5649 = size_binop_loc (loc, MIN_EXPR,
5650 make_tree (sizetype, size),
5651 size_int (TREE_STRING_LENGTH (exp)));
5653 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5655 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5656 rtx_code_label *label = 0;
5658 /* Copy that much. */
5659 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5660 TYPE_UNSIGNED (sizetype));
5661 emit_block_move (target, temp, copy_size_rtx,
5663 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5665 /* Figure out how much is left in TARGET that we have to clear.
5666 Do all calculations in pointer_mode. */
5667 if (CONST_INT_P (copy_size_rtx))
5669 size = plus_constant (address_mode, size,
5670 -INTVAL (copy_size_rtx));
5671 target = adjust_address (target, BLKmode,
5672 INTVAL (copy_size_rtx));
5676 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5677 copy_size_rtx, NULL_RTX, 0,
5680 if (GET_MODE (copy_size_rtx) != address_mode)
5681 copy_size_rtx = convert_to_mode (address_mode,
5683 TYPE_UNSIGNED (sizetype));
5685 target = offset_address (target, copy_size_rtx,
5686 highest_pow2_factor (copy_size));
5687 label = gen_label_rtx ();
5688 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5689 GET_MODE (size), 0, label);
5692 if (size != const0_rtx)
5693 clear_storage (target, size, BLOCK_OP_NORMAL);
5699 /* Handle calls that return values in multiple non-contiguous locations.
5700 The Irix 6 ABI has examples of this. */
5701 else if (GET_CODE (target) == PARALLEL)
5703 if (GET_CODE (temp) == PARALLEL)
5704 emit_group_move (target, temp);
5706 emit_group_load (target, temp, TREE_TYPE (exp),
5707 int_size_in_bytes (TREE_TYPE (exp)));
5709 else if (GET_CODE (temp) == PARALLEL)
5710 emit_group_store (target, temp, TREE_TYPE (exp),
5711 int_size_in_bytes (TREE_TYPE (exp)));
5712 else if (GET_MODE (temp) == BLKmode)
5713 emit_block_move (target, temp, expr_size (exp),
5715 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5716 /* If we emit a nontemporal store, there is nothing else to do. */
5717 else if (nontemporal && emit_storent_insn (target, temp))
5722 temp = flip_storage_order (GET_MODE (target), temp);
5723 temp = force_operand (temp, target);
5725 emit_move_insn (target, temp);
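
/* Editor's sketch (hypothetical, not GCC code): the rounding applied above
   to the length of a STRING_CST copy when the trailing NUL makes a small
   over-read harmless, assuming MAX_PIECES is a power of two.  */
static long long
sketch_round_copy_len (long long str_copy_len, long long exp_len,
                       long long max_pieces)
{
  str_copy_len += max_pieces - 1;
  str_copy_len &= ~(max_pieces - 1);   /* round up to a multiple of max_pieces  */
  /* Never copy more than the destination object holds.  */
  return str_copy_len < exp_len ? str_copy_len : exp_len;
}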
5732 /* Same as store_expr_with_bounds but ignoring bounds of EXP. */
5734 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal,
5737 return store_expr_with_bounds (exp, target, call_param_p, nontemporal,
5741 /* Return true if field F of structure TYPE is a flexible array. */
5744 flexible_array_member_p (const_tree f, const_tree type)
5749 return (DECL_CHAIN (f) == NULL
5750 && TREE_CODE (tf) == ARRAY_TYPE
5752 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5753 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5754 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5755 && int_size_in_bytes (type) >= 0);
5758 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5759 must have in order for it to completely initialize a value of type TYPE.
5760 Return -1 if the number isn't known.
5762 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5764 static HOST_WIDE_INT
5765 count_type_elements (const_tree type, bool for_ctor_p)
5767 switch (TREE_CODE (type))
5773 nelts = array_type_nelts (type);
5774 if (nelts && tree_fits_uhwi_p (nelts))
5776 unsigned HOST_WIDE_INT n;
5778 n = tree_to_uhwi (nelts) + 1;
5779 if (n == 0 || for_ctor_p)
5782 return n * count_type_elements (TREE_TYPE (type), false);
5784 return for_ctor_p ? -1 : 1;
5789 unsigned HOST_WIDE_INT n;
5793 for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
5794 if (TREE_CODE (f) == FIELD_DECL)
5797 n += count_type_elements (TREE_TYPE (f), false);
5798 else if (!flexible_array_member_p (f, type))
5799 /* Don't count flexible arrays, which are not supposed
5800 to be initialized. */
5808 case QUAL_UNION_TYPE:
5813 gcc_assert (!for_ctor_p);
5814 /* Estimate the number of scalars in each field and pick the
5815 maximum. Other estimates would do instead; the idea is simply
5816 to make sure that the estimate is not sensitive to the ordering
5819 for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
5820 if (TREE_CODE (f) == FIELD_DECL)
5822 m = count_type_elements (TREE_TYPE (f), false);
5823 /* If the field doesn't span the whole union, add an extra
5824 scalar for the rest. */
5825 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5826 TYPE_SIZE (type)) != 1)
5838 return TYPE_VECTOR_SUBPARTS (type);
5842 case FIXED_POINT_TYPE:
5847 case REFERENCE_TYPE:
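
/* Editor's sketch (hypothetical, not GCC code): the array case above.  An
   array of N known elements contributes N top-level elements when counting
   for a constructor, or N times the scalar count of its element type when
   estimating scalars; an unknown bound yields -1 and 1 respectively.  */
static long long
sketch_count_array_elements (long long nelts_or_minus1, long long elt_scalars,
                             int for_ctor_p)
{
  if (nelts_or_minus1 < 0)
    return for_ctor_p ? -1 : 1;
  return for_ctor_p ? nelts_or_minus1 : nelts_or_minus1 * elt_scalars;
}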
5863 /* Helper for categorize_ctor_elements. Identical interface. */
5866 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5867 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5869 unsigned HOST_WIDE_INT idx;
5870 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5871 tree value, purpose, elt_type;
5873 /* Whether CTOR is a valid constant initializer, in accordance with what
5874 initializer_constant_valid_p does. If inferred from the constructor
5875 elements, true until proven otherwise. */
5876 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5877 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5882 elt_type = NULL_TREE;
5884 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5886 HOST_WIDE_INT mult = 1;
5888 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5890 tree lo_index = TREE_OPERAND (purpose, 0);
5891 tree hi_index = TREE_OPERAND (purpose, 1);
5893 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5894 mult = (tree_to_uhwi (hi_index)
5895 - tree_to_uhwi (lo_index) + 1);
5898 elt_type = TREE_TYPE (value);
5900 switch (TREE_CODE (value))
5904 HOST_WIDE_INT nz = 0, ic = 0;
5906 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5909 nz_elts += mult * nz;
5910 init_elts += mult * ic;
5912 if (const_from_elts_p && const_p)
5913 const_p = const_elt_p;
5920 if (!initializer_zerop (value))
5926 nz_elts += mult * TREE_STRING_LENGTH (value);
5927 init_elts += mult * TREE_STRING_LENGTH (value);
5931 if (!initializer_zerop (TREE_REALPART (value)))
5933 if (!initializer_zerop (TREE_IMAGPART (value)))
5941 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5943 tree v = VECTOR_CST_ELT (value, i);
5944 if (!initializer_zerop (v))
5953 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5954 nz_elts += mult * tc;
5955 init_elts += mult * tc;
5957 if (const_from_elts_p && const_p)
5959 = initializer_constant_valid_p (value,
5961 TYPE_REVERSE_STORAGE_ORDER
5969 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5970 num_fields, elt_type))
5971 *p_complete = false;
5973 *p_nz_elts += nz_elts;
5974 *p_init_elts += init_elts;
5979 /* Examine CTOR to discover:
5980 * how many scalar fields are set to nonzero values,
5981 and place it in *P_NZ_ELTS;
5982 * how many scalar fields in total are in CTOR,
5983 and place it in *P_ELT_COUNT.
5984 * whether the constructor is complete -- in the sense that every
5985 meaningful byte is explicitly given a value --
5986 and place it in *P_COMPLETE.
5988 Return whether or not CTOR is a valid static constant initializer, the same
5989 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5992 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5993 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5999 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
6002 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
6003 of which had type LAST_TYPE. Each element was itself a complete
6004 initializer, in the sense that every meaningful byte was explicitly
6005 given a value. Return true if the same is true for the constructor
6009 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
6010 const_tree last_type)
6012 if (TREE_CODE (type) == UNION_TYPE
6013 || TREE_CODE (type) == QUAL_UNION_TYPE)
6018 gcc_assert (num_elts == 1 && last_type);
6020 /* ??? We could look at each element of the union, and find the
6021 largest element. Which would avoid comparing the size of the
6022 initialized element against any tail padding in the union.
6023 Doesn't seem worth the effort... */
6024 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
6027 return count_type_elements (type, true) == num_elts;
6030 /* Return 1 if EXP contains mostly (3/4) zeros. */
6033 mostly_zeros_p (const_tree exp)
6035   if (TREE_CODE (exp) == CONSTRUCTOR)
6037       HOST_WIDE_INT nz_elts, init_elts;
6040       categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
6041       return !complete_p || nz_elts < init_elts / 4;
6044   return initializer_zerop (exp);
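/* Editor's note: the 3/4 heuristic above, restated as a stand-alone
   sketch.  An incomplete constructor leaves gaps that read as zero, so
   it is treated as mostly zero as well.  Illustrative only; not used by
   GCC.  */

static int
example_mostly_zeros_p (long nz_elts, long init_elts, int complete_p)
{
  return !complete_p || nz_elts < init_elts / 4;
}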
6047 /* Return 1 if EXP contains all zeros. */
6050 all_zeros_p (const_tree exp)
6052   if (TREE_CODE (exp) == CONSTRUCTOR)
6054       HOST_WIDE_INT nz_elts, init_elts;
6057       categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
6058       return nz_elts == 0;
6061   return initializer_zerop (exp);
6064 /* Helper function for store_constructor.
6065 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
6066 CLEARED is as for store_constructor.
6067 ALIAS_SET is the alias set to use for any stores.
6068 If REVERSE is true, the store is to be done in reverse order.
6070 This provides a recursive shortcut back to store_constructor when it isn't
6071 necessary to go through store_field. This is so that we can pass through
6072 the cleared field to let store_constructor know that we may not have to
6073 clear a substructure if the outer structure has already been cleared. */
6076 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
6077                          HOST_WIDE_INT bitpos, machine_mode mode,
6078                          tree exp, int cleared,
6079                          alias_set_type alias_set, bool reverse)
6081   if (TREE_CODE (exp) == CONSTRUCTOR
6082 /* We can only call store_constructor recursively if the size and
6083 bit position are on a byte boundary. */
6084       && bitpos % BITS_PER_UNIT == 0
6085       && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
6086 /* If we have a nonzero bitpos for a register target, then we just
6087 let store_field do the bitfield handling. This is unlikely to
6088 generate unnecessary clear instructions anyways. */
6089       && (bitpos == 0 || MEM_P (target)))
6093         = adjust_address (target,
6094                           GET_MODE (target) == BLKmode
6096                                    % GET_MODE_ALIGNMENT (GET_MODE (target)))
6097                           ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
6100 /* Update the alias set, if required. */
6101       if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
6102           && MEM_ALIAS_SET (target) != 0)
6104           target = copy_rtx (target);
6105           set_mem_alias_set (target, alias_set);
6108       store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT,
6112     store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false,
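/* Editor's note: a stand-alone restatement of the guard above that
   decides between the store_constructor shortcut and store_field.
   BITS_PER_UNIT is assumed to be 8 here purely for illustration.  */

static int
example_can_recurse_into_ctor (long bitpos, long bitsize, int target_is_mem)
{
  return bitpos % 8 == 0                        /* byte-aligned position */
         && bitsize > 0 && bitsize % 8 == 0     /* whole number of bytes */
         && (bitpos == 0 || target_is_mem);     /* registers only at offset 0 */
}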
6117 /* Returns the number of FIELD_DECLs in TYPE. */
6120 fields_length (const_tree type)
6122   tree t = TYPE_FIELDS (type);
6125   for (; t; t = DECL_CHAIN (t))
6126     if (TREE_CODE (t) == FIELD_DECL)
6133 /* Store the value of constructor EXP into the rtx TARGET.
6134 TARGET is either a REG or a MEM; we know it cannot conflict, since
6135 safe_from_p has been called.
6136 CLEARED is true if TARGET is known to have been zero'd.
6137 SIZE is the number of bytes of TARGET we are allowed to modify: this
6138 may not be the same as the size of EXP if we are assigning to a field
6139 which has been packed to exclude padding bits.
6140 If REVERSE is true, the store is to be done in reverse order. */
6143 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size,
6146   tree type = TREE_TYPE (exp);
6147   HOST_WIDE_INT exp_size = int_size_in_bytes (type);
6149   switch (TREE_CODE (type))
6153     case QUAL_UNION_TYPE:
6155         unsigned HOST_WIDE_INT idx;
6158 /* The storage order is specified for every aggregate type. */
6159 reverse
= TYPE_REVERSE_STORAGE_ORDER (type
);
6161 /* If size is zero or the target is already cleared, do nothing. */
6162 if (size
== 0 || cleared
)
6164 /* We either clear the aggregate or indicate the value is dead. */
6165 else if ((TREE_CODE (type
) == UNION_TYPE
6166 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
6167 && ! CONSTRUCTOR_ELTS (exp
))
6168 /* If the constructor is empty, clear the union. */
6170 clear_storage (target
, expr_size (exp
), BLOCK_OP_NORMAL
);
6174 /* If we are building a static constructor into a register,
6175 set the initial value as zero so we can fold the value into
6176 a constant. But if more than one register is involved,
6177 this probably loses. */
6178 else if (REG_P (target
) && TREE_STATIC (exp
)
6179 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
6181 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
6185 /* If the constructor has fewer fields than the structure or
6186 if we are initializing the structure to mostly zeros, clear
6187 the whole structure first. Don't do this if TARGET is a
6188 register whose mode size isn't equal to SIZE since
6189 clear_storage can't handle this case. */
6191 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp
))
6192 != fields_length (type
))
6193 || mostly_zeros_p (exp
))
6195 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
6198 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
6202 if (REG_P (target
) && !cleared
)
6203 emit_clobber (target
);
6205 /* Store each element of the constructor into the
6206 corresponding field of TARGET. */
6207 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), idx
, field
, value
)
6210 HOST_WIDE_INT bitsize
;
6211 HOST_WIDE_INT bitpos
= 0;
6213 rtx to_rtx
= target
;
6215 /* Just ignore missing fields. We cleared the whole
6216 structure, above, if any fields are missing. */
6220 if (cleared
&& initializer_zerop (value
))
6223 if (tree_fits_uhwi_p (DECL_SIZE (field
)))
6224 bitsize
= tree_to_uhwi (DECL_SIZE (field
));
6228 mode
= DECL_MODE (field
);
6229 if (DECL_BIT_FIELD (field
))
6232 offset
= DECL_FIELD_OFFSET (field
);
6233 if (tree_fits_shwi_p (offset
)
6234 && tree_fits_shwi_p (bit_position (field
)))
6236 bitpos
= int_bit_position (field
);
6240 bitpos
= tree_to_shwi (DECL_FIELD_BIT_OFFSET (field
));
6244 machine_mode address_mode
;
6248 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset
,
6249 make_tree (TREE_TYPE (exp
),
6252 offset_rtx
= expand_normal (offset
);
6253 gcc_assert (MEM_P (to_rtx
));
6255 address_mode
= get_address_mode (to_rtx
);
6256 if (GET_MODE (offset_rtx
) != address_mode
)
6257 offset_rtx
= convert_to_mode (address_mode
, offset_rtx
, 0);
6259 to_rtx
= offset_address (to_rtx
, offset_rtx
,
6260 highest_pow2_factor (offset
));
6263 /* If this initializes a field that is smaller than a
6264 word, at the start of a word, try to widen it to a full
6265 word. This special case allows us to output C++ member
6266 function initializations in a form that the optimizers
6268 if (WORD_REGISTER_OPERATIONS
6270 && bitsize
< BITS_PER_WORD
6271 && bitpos
% BITS_PER_WORD
== 0
6272 && GET_MODE_CLASS (mode
) == MODE_INT
6273 && TREE_CODE (value
) == INTEGER_CST
6275 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
6277 tree type
= TREE_TYPE (value
);
6279 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
6281 type
= lang_hooks
.types
.type_for_mode
6282 (word_mode
, TYPE_UNSIGNED (type
));
6283 value
= fold_convert (type
, value
);
6286 if (BYTES_BIG_ENDIAN
)
6288 = fold_build2 (LSHIFT_EXPR
, type
, value
,
6289 build_int_cst (type
,
6290 BITS_PER_WORD
- bitsize
));
6291 bitsize
= BITS_PER_WORD
;
6295 if (MEM_P (to_rtx
) && !MEM_KEEP_ALIAS_SET_P (to_rtx
)
6296 && DECL_NONADDRESSABLE_P (field
))
6298 to_rtx
= copy_rtx (to_rtx
);
6299 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
6302 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
6304 get_alias_set (TREE_TYPE (field
)),
6312 unsigned HOST_WIDE_INT i
;
6315 tree elttype
= TREE_TYPE (type
);
6317 HOST_WIDE_INT minelt
= 0;
6318 HOST_WIDE_INT maxelt
= 0;
6320 /* The storage order is specified for every aggregate type. */
6321 reverse
= TYPE_REVERSE_STORAGE_ORDER (type
);
6323 domain
= TYPE_DOMAIN (type
);
6324 const_bounds_p
= (TYPE_MIN_VALUE (domain
)
6325 && TYPE_MAX_VALUE (domain
)
6326 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain
))
6327 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain
)));
6329 /* If we have constant bounds for the range of the type, get them. */
6332 minelt
= tree_to_shwi (TYPE_MIN_VALUE (domain
));
6333 maxelt
= tree_to_shwi (TYPE_MAX_VALUE (domain
));
6336 /* If the constructor has fewer elements than the array, clear
6337 the whole array first. Similarly if this is static
6338 constructor of a non-BLKmode object. */
6341 else if (REG_P (target
) && TREE_STATIC (exp
))
6345 unsigned HOST_WIDE_INT idx
;
6347 HOST_WIDE_INT count
= 0, zero_count
= 0;
6348 need_to_clear
= ! const_bounds_p
;
6350 /* This loop is a more accurate version of the loop in
6351 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6352 is also needed to check for missing elements. */
6353 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), idx
, index
, value
)
6355 HOST_WIDE_INT this_node_count
;
6360 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
6362 tree lo_index
= TREE_OPERAND (index
, 0);
6363 tree hi_index
= TREE_OPERAND (index
, 1);
6365 if (! tree_fits_uhwi_p (lo_index
)
6366 || ! tree_fits_uhwi_p (hi_index
))
6372 this_node_count
= (tree_to_uhwi (hi_index
)
6373 - tree_to_uhwi (lo_index
) + 1);
6376 this_node_count
= 1;
6378 count
+= this_node_count
;
6379 if (mostly_zeros_p (value
))
6380 zero_count
+= this_node_count
;
6383 /* Clear the entire array first if there are any missing
6384 elements, or if the incidence of zero elements is >=
6387 && (count
< maxelt
- minelt
+ 1
6388 || 4 * zero_count
>= 3 * count
))
6392 if (need_to_clear
&& size
> 0)
6395 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
6397 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
6401 if (!cleared
&& REG_P (target
))
6402 /* Inform later passes that the old value is dead. */
6403 emit_clobber (target
);
6405 /* Store each element of the constructor into the
6406 corresponding element of TARGET, determined by counting the
6408 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), i
, index
, value
)
6411 HOST_WIDE_INT bitsize
;
6412 HOST_WIDE_INT bitpos
;
6413 rtx xtarget
= target
;
6415 if (cleared
&& initializer_zerop (value
))
6418 mode
= TYPE_MODE (elttype
);
6419 if (mode
== BLKmode
)
6420 bitsize
= (tree_fits_uhwi_p (TYPE_SIZE (elttype
))
6421 ? tree_to_uhwi (TYPE_SIZE (elttype
))
6424 bitsize
= GET_MODE_BITSIZE (mode
);
6426 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
6428 tree lo_index
= TREE_OPERAND (index
, 0);
6429 tree hi_index
= TREE_OPERAND (index
, 1);
6430 rtx index_r
, pos_rtx
;
6431 HOST_WIDE_INT lo
, hi
, count
;
6434 /* If the range is constant and "small", unroll the loop. */
6436 && tree_fits_shwi_p (lo_index
)
6437 && tree_fits_shwi_p (hi_index
)
6438 && (lo
= tree_to_shwi (lo_index
),
6439 hi
= tree_to_shwi (hi_index
),
6440 count
= hi
- lo
+ 1,
6443 || (tree_fits_uhwi_p (TYPE_SIZE (elttype
))
6444 && (tree_to_uhwi (TYPE_SIZE (elttype
)) * count
6447 lo
-= minelt
; hi
-= minelt
;
6448 for (; lo
<= hi
; lo
++)
6450 bitpos
= lo
* tree_to_shwi (TYPE_SIZE (elttype
));
6453 && !MEM_KEEP_ALIAS_SET_P (target
)
6454 && TREE_CODE (type
) == ARRAY_TYPE
6455 && TYPE_NONALIASED_COMPONENT (type
))
6457 target
= copy_rtx (target
);
6458 MEM_KEEP_ALIAS_SET_P (target
) = 1;
6461 store_constructor_field
6462 (target
, bitsize
, bitpos
, mode
, value
, cleared
,
6463 get_alias_set (elttype
), reverse
);
6468 rtx_code_label
*loop_start
= gen_label_rtx ();
6469 rtx_code_label
*loop_end
= gen_label_rtx ();
6472 expand_normal (hi_index
);
6474 index
= build_decl (EXPR_LOCATION (exp
),
6475 VAR_DECL
, NULL_TREE
, domain
);
6476 index_r
= gen_reg_rtx (promote_decl_mode (index
, NULL
));
6477 SET_DECL_RTL (index
, index_r
);
6478 store_expr (lo_index
, index_r
, 0, false, reverse
);
6480 /* Build the head of the loop. */
6481 do_pending_stack_adjust ();
6482 emit_label (loop_start
);
6484 /* Assign value to element index. */
6486 fold_convert (ssizetype
,
6487 fold_build2 (MINUS_EXPR
,
6490 TYPE_MIN_VALUE (domain
)));
6493 size_binop (MULT_EXPR
, position
,
6494 fold_convert (ssizetype
,
6495 TYPE_SIZE_UNIT (elttype
)));
6497 pos_rtx
= expand_normal (position
);
6498 xtarget
= offset_address (target
, pos_rtx
,
6499 highest_pow2_factor (position
));
6500 xtarget
= adjust_address (xtarget
, mode
, 0);
6501 if (TREE_CODE (value
) == CONSTRUCTOR
)
6502 store_constructor (value
, xtarget
, cleared
,
6503 bitsize
/ BITS_PER_UNIT
, reverse
);
6505 store_expr (value
, xtarget
, 0, false, reverse
);
6507 /* Generate a conditional jump to exit the loop. */
6508 exit_cond
= build2 (LT_EXPR
, integer_type_node
,
6510 jumpif (exit_cond
, loop_end
, -1);
6512 /* Update the loop counter, and jump to the head of
6514 expand_assignment (index
,
6515 build2 (PLUS_EXPR
, TREE_TYPE (index
),
6516 index
, integer_one_node
),
6519 emit_jump (loop_start
);
6521 /* Build the end of the loop. */
6522 emit_label (loop_end
);
6525 else if ((index
!= 0 && ! tree_fits_shwi_p (index
))
6526 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype
)))
6531 index
= ssize_int (1);
6534 index
= fold_convert (ssizetype
,
6535 fold_build2 (MINUS_EXPR
,
6538 TYPE_MIN_VALUE (domain
)));
6541 size_binop (MULT_EXPR
, index
,
6542 fold_convert (ssizetype
,
6543 TYPE_SIZE_UNIT (elttype
)));
6544 xtarget
= offset_address (target
,
6545 expand_normal (position
),
6546 highest_pow2_factor (position
));
6547 xtarget
= adjust_address (xtarget
, mode
, 0);
6548 store_expr (value
, xtarget
, 0, false, reverse
);
6553 bitpos
= ((tree_to_shwi (index
) - minelt
)
6554 * tree_to_uhwi (TYPE_SIZE (elttype
)));
6556 bitpos
= (i
* tree_to_uhwi (TYPE_SIZE (elttype
)));
6558 if (MEM_P (target
) && !MEM_KEEP_ALIAS_SET_P (target
)
6559 && TREE_CODE (type
) == ARRAY_TYPE
6560 && TYPE_NONALIASED_COMPONENT (type
))
6562 target
= copy_rtx (target
);
6563 MEM_KEEP_ALIAS_SET_P (target
) = 1;
6565 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
6566 cleared
, get_alias_set (elttype
),
6575 unsigned HOST_WIDE_INT idx
;
6576 constructor_elt
*ce
;
6579 int icode
= CODE_FOR_nothing
;
6580 tree elttype
= TREE_TYPE (type
);
6581 int elt_size
= tree_to_uhwi (TYPE_SIZE (elttype
));
6582 machine_mode eltmode
= TYPE_MODE (elttype
);
6583 HOST_WIDE_INT bitsize
;
6584 HOST_WIDE_INT bitpos
;
6585 rtvec vector
= NULL
;
6587 alias_set_type alias
;
6589 gcc_assert (eltmode
!= BLKmode
);
6591 n_elts
= TYPE_VECTOR_SUBPARTS (type
);
6592 if (REG_P (target
) && VECTOR_MODE_P (GET_MODE (target
)))
6594 machine_mode mode
= GET_MODE (target
);
6596 icode
= (int) optab_handler (vec_init_optab
, mode
);
6597 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6598 if (icode
!= CODE_FOR_nothing
)
6602 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), idx
, value
)
6603 if (TREE_CODE (TREE_TYPE (value
)) == VECTOR_TYPE
)
6605 icode
= CODE_FOR_nothing
;
6609 if (icode
!= CODE_FOR_nothing
)
6613 vector
= rtvec_alloc (n_elts
);
6614 for (i
= 0; i
< n_elts
; i
++)
6615 RTVEC_ELT (vector
, i
) = CONST0_RTX (GET_MODE_INNER (mode
));
6619 /* If the constructor has fewer elements than the vector,
6620 clear the whole array first. Similarly if this is static
6621 constructor of a non-BLKmode object. */
6624 else if (REG_P (target
) && TREE_STATIC (exp
))
6628 unsigned HOST_WIDE_INT count
= 0, zero_count
= 0;
6631 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), idx
, value
)
6633 int n_elts_here
= tree_to_uhwi
6634 (int_const_binop (TRUNC_DIV_EXPR
,
6635 TYPE_SIZE (TREE_TYPE (value
)),
6636 TYPE_SIZE (elttype
)));
6638 count
+= n_elts_here
;
6639 if (mostly_zeros_p (value
))
6640 zero_count
+= n_elts_here
;
6643 /* Clear the entire vector first if there are any missing elements,
6644 or if the incidence of zero elements is >= 75%. */
6645 need_to_clear
= (count
< n_elts
|| 4 * zero_count
>= 3 * count
);
6648 if (need_to_clear
&& size
> 0 && !vector
)
6651 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
6653 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
6657 /* Inform later passes that the old value is dead. */
6658 if (!cleared
&& !vector
&& REG_P (target
))
6659 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
6662 alias
= MEM_ALIAS_SET (target
);
6664 alias
= get_alias_set (elttype
);
6666 /* Store each element of the constructor into the corresponding
6667 element of TARGET, determined by counting the elements. */
6668 for (idx
= 0, i
= 0;
6669 vec_safe_iterate (CONSTRUCTOR_ELTS (exp
), idx
, &ce
);
6670 idx
++, i
+= bitsize
/ elt_size
)
6672 HOST_WIDE_INT eltpos
;
6673 tree value
= ce
->value
;
6675 bitsize
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value
)));
6676 if (cleared
&& initializer_zerop (value
))
6680 eltpos
= tree_to_uhwi (ce
->index
);
6686 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6688 gcc_assert (TREE_CODE (TREE_TYPE (value
)) != VECTOR_TYPE
);
6689 RTVEC_ELT (vector
, eltpos
)
6690 = expand_normal (value
);
6694 machine_mode value_mode
=
6695 TREE_CODE (TREE_TYPE (value
)) == VECTOR_TYPE
6696 ? TYPE_MODE (TREE_TYPE (value
))
6698 bitpos
= eltpos
* elt_size
;
6699 store_constructor_field (target
, bitsize
, bitpos
, value_mode
,
6700 value
, cleared
, alias
, reverse
);
6705 emit_insn (GEN_FCN (icode
)
6707 gen_rtx_PARALLEL (GET_MODE (target
), vector
)));
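/* Editor's note: the "clear the whole object first" decision used for
   both the array and vector cases above, as a stand-alone sketch.
   COUNT is how many elements the constructor initializes, ZERO_COUNT how
   many of those are (mostly) zero, N_ELTS the total number of elements
   in the object.  Illustrative only.  */

static int
example_need_to_clear_first (long count, long zero_count, long n_elts)
{
  /* Missing elements must read as zero, and >= 75% zeros makes a bulk
     clear followed by sparse stores cheaper than storing every element.  */
  return count < n_elts || 4 * zero_count >= 3 * count;
}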
6716 /* Store the value of EXP (an expression tree)
6717 into a subfield of TARGET which has mode MODE and occupies
6718 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6719 If MODE is VOIDmode, it means that we are storing into a bit-field.
6721 BITREGION_START is bitpos of the first bitfield in this region.
6722 BITREGION_END is the bitpos of the ending bitfield in this region.
6723 These two fields are 0, if the C++ memory model does not apply,
6724 or we are not interested in keeping track of bitfield regions.
6726 Always return const0_rtx unless we have something particular to
6729 ALIAS_SET is the alias set for the destination. This value will
6730 (in general) be different from that for TARGET, since TARGET is a
6731 reference to the containing structure.
6733 If NONTEMPORAL is true, try generating a nontemporal store.
6735 If REVERSE is true, the store is to be done in reverse order. */
6738 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6739              unsigned HOST_WIDE_INT bitregion_start,
6740              unsigned HOST_WIDE_INT bitregion_end,
6741              machine_mode mode, tree exp,
6742              alias_set_type alias_set, bool nontemporal, bool reverse)
6744   if (TREE_CODE (exp) == ERROR_MARK)
6747 /* If we have nothing to store, do nothing unless the expression has
6750     return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6752   if (GET_CODE (target) == CONCAT)
6754 /* We're storing into a struct containing a single __complex. */
6756       gcc_assert (!bitpos);
6757       return store_expr (exp, target, 0, nontemporal, reverse);
6760 /* If the structure is in a register or if the component
6761 is a bit field, we cannot use addressing to access it.
6762 Use bit-field techniques or SUBREG to store in it. */
6764 if (mode
== VOIDmode
6765 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
6766 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
6767 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
6769 || GET_CODE (target
) == SUBREG
6770 /* If the field isn't aligned enough to store as an ordinary memref,
6771 store it as a bit field. */
6773 && ((((MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
))
6774 || bitpos
% GET_MODE_ALIGNMENT (mode
))
6775 && SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
)))
6776 || (bitpos
% BITS_PER_UNIT
!= 0)))
6777 || (bitsize
>= 0 && mode
!= BLKmode
6778 && GET_MODE_BITSIZE (mode
) > bitsize
)
6779 /* If the RHS and field are a constant size and the size of the
6780 RHS isn't the same size as the bitfield, we must use bitfield
6783 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
6784 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0
6785 /* Except for initialization of full bytes from a CONSTRUCTOR, which
6786 we will handle specially below. */
6787 && !(TREE_CODE (exp
) == CONSTRUCTOR
6788 && bitsize
% BITS_PER_UNIT
== 0)
6789 /* And except for bitwise copying of TREE_ADDRESSABLE types,
6790 where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp)
6791 includes some extra padding. store_expr / expand_expr will in
6792 that case call get_inner_reference that will have the bitsize
6793 we check here and thus the block move will not clobber the
6794 padding that shouldn't be clobbered. In the future we could
6795 replace the TREE_ADDRESSABLE check with a check that
6796 get_base_address needs to live in memory. */
6797 && (!TREE_ADDRESSABLE (TREE_TYPE (exp
))
6798 || TREE_CODE (exp
) != COMPONENT_REF
6799 || TREE_CODE (DECL_SIZE (TREE_OPERAND (exp
, 1))) != INTEGER_CST
6800 || (bitsize
% BITS_PER_UNIT
!= 0)
6801 || (bitpos
% BITS_PER_UNIT
!= 0)
6802 || (compare_tree_int (DECL_SIZE (TREE_OPERAND (exp
, 1)), bitsize
)
6804 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6805 decl we must use bitfield operations. */
6807 && TREE_CODE (exp
) == MEM_REF
6808 && TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
6809 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
6810 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
6811 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)) != BLKmode
))
6816 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6817 implies a mask operation. If the precision is the same size as
6818 the field we're storing into, that mask is redundant. This is
6819 particularly common with bit field assignments generated by the
6821 nop_def
= get_def_for_expr (exp
, NOP_EXPR
);
6824 tree type
= TREE_TYPE (exp
);
6825 if (INTEGRAL_TYPE_P (type
)
6826 && TYPE_PRECISION (type
) < GET_MODE_BITSIZE (TYPE_MODE (type
))
6827 && bitsize
== TYPE_PRECISION (type
))
6829 tree op
= gimple_assign_rhs1 (nop_def
);
6830 type
= TREE_TYPE (op
);
6831 if (INTEGRAL_TYPE_P (type
) && TYPE_PRECISION (type
) >= bitsize
)
6836 temp
= expand_normal (exp
);
6838 /* If the value has a record type and an integral mode then, if BITSIZE
6839 is narrower than this mode and this is for big-endian data, we must
6840 first put the value into the low-order bits. Moreover, the field may
6841 be not aligned on a byte boundary; in this case, if it has reverse
6842 storage order, it needs to be accessed as a scalar field with reverse
6843 storage order and we must first put the value into target order. */
6844 if (TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
6845 && GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
)
6847 HOST_WIDE_INT size
= GET_MODE_BITSIZE (GET_MODE (temp
));
6849 reverse
= TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp
));
6852 temp
= flip_storage_order (GET_MODE (temp
), temp
);
6855 && reverse
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
6856 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
6857 size
- bitsize
, NULL_RTX
, 1);
6860 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6861 if (mode
!= VOIDmode
&& mode
!= BLKmode
6862 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
6863 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
6865 /* If TEMP is not a PARALLEL (see below) and its mode and that of TARGET
6866 are both BLKmode, both must be in memory and BITPOS must be aligned
6867 on a byte boundary. If so, we simply do a block copy. Likewise for
6868 a BLKmode-like TARGET. */
6869 if (GET_CODE (temp
) != PARALLEL
6870 && GET_MODE (temp
) == BLKmode
6871 && (GET_MODE (target
) == BLKmode
6873 && GET_MODE_CLASS (GET_MODE (target
)) == MODE_INT
6874 && (bitpos
% BITS_PER_UNIT
) == 0
6875 && (bitsize
% BITS_PER_UNIT
) == 0)))
6877 gcc_assert (MEM_P (target
) && MEM_P (temp
)
6878 && (bitpos
% BITS_PER_UNIT
) == 0);
6880 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
6881 emit_block_move (target
, temp
,
6882 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
6889 /* Handle calls that return values in multiple non-contiguous locations.
6890 The Irix 6 ABI has examples of this. */
6891 if (GET_CODE (temp
) == PARALLEL
)
6893 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
6895 if (mode
== BLKmode
|| mode
== VOIDmode
)
6896 mode
= smallest_mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
);
6897 temp_target
= gen_reg_rtx (mode
);
6898 emit_group_store (temp_target
, temp
, TREE_TYPE (exp
), size
);
6901 else if (mode
== BLKmode
)
6903 /* Handle calls that return BLKmode values in registers. */
6904 if (REG_P (temp
) && TREE_CODE (exp
) == CALL_EXPR
)
6906 rtx temp_target
= gen_reg_rtx (GET_MODE (temp
));
6907 copy_blkmode_from_reg (temp_target
, temp
, TREE_TYPE (exp
));
6912 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
6914 mode
= smallest_mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
);
6915 temp_target
= gen_reg_rtx (mode
);
6917 = extract_bit_field (temp
, size
* BITS_PER_UNIT
, 0, 1,
6918 temp_target
, mode
, mode
, false);
6923 /* Store the value in the bitfield. */
6924 store_bit_field (target
, bitsize
, bitpos
,
6925 bitregion_start
, bitregion_end
,
6926 mode
, temp
, reverse
);
6932 /* Now build a reference to just the desired component. */
6933 rtx to_rtx
= adjust_address (target
, mode
, bitpos
/ BITS_PER_UNIT
);
6935 if (to_rtx
== target
)
6936 to_rtx
= copy_rtx (to_rtx
);
6938 if (!MEM_KEEP_ALIAS_SET_P (to_rtx
) && MEM_ALIAS_SET (to_rtx
) != 0)
6939 set_mem_alias_set (to_rtx
, alias_set
);
6941 /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
6942 into a target smaller than its type; handle that case now. */
6943 if (TREE_CODE (exp
) == CONSTRUCTOR
&& bitsize
>= 0)
6945 gcc_assert (bitsize
% BITS_PER_UNIT
== 0);
6946 store_constructor (exp
, to_rtx
, 0, bitsize
/ BITS_PER_UNIT
, reverse
);
6950 return store_expr (exp
, to_rtx
, 0, nontemporal
, reverse
);
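/* Editor's note: what store_bit_field conceptually does for the narrow
   cases handled above, sketched on a 32-bit word with little-endian bit
   numbering.  Assumes 0 < bitsize < 32 and bitpos + bitsize <= 32;
   illustrative only, not GCC's implementation.  */

static unsigned int
example_store_bit_field (unsigned int word, unsigned int value,
                         int bitpos, int bitsize)
{
  unsigned int mask = ((1u << bitsize) - 1u) << bitpos;

  /* Clear the destination bits, then merge in the (masked) new value.  */
  return (word & ~mask) | ((value << bitpos) & mask);
}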
6954 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6955 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6956 codes and find the ultimate containing object, which we return.
6958 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6959 bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
6960 storage order of the field.
6961 If the position of the field is variable, we store a tree
6962 giving the variable offset (in units) in *POFFSET.
6963 This offset is in addition to the bit position.
6964 If the position is not variable, we store 0 in *POFFSET.
6966 If any of the extraction expressions is volatile,
6967 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6969 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6970 Otherwise, it is a mode that can be used to access the field.
6972 If the field describes a variable-sized object, *PMODE is set to
6973 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6974 this case, but the address of the object can be found.
6976 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6977 look through nodes that serve as markers of a greater alignment than
6978 the one that can be deduced from the expression. These nodes make it
6979 possible for front-ends to prevent temporaries from being created by
6980 the middle-end on alignment considerations. For that purpose, the
6981 normal operating mode at high-level is to always pass FALSE so that
6982 the ultimate containing object is really returned; moreover, the
6983 associated predicate handled_component_p will always return TRUE
6984 on these nodes, thus indicating that they are essentially handled
6985 by get_inner_reference. TRUE should only be passed when the caller
6986 is scanning the expression in order to build another representation
6987 and specifically knows how to handle these nodes; as such, this is
6988 the normal operating mode in the RTL expanders. */
6991 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6992                      HOST_WIDE_INT *pbitpos, tree *poffset,
6993                      machine_mode *pmode, int *punsignedp,
6994                      int *preversep, int *pvolatilep, bool keep_aligning)
6997   machine_mode mode = VOIDmode;
6998   bool blkmode_bitfield = false;
6999   tree offset = size_zero_node;
7000   offset_int bit_offset = 0;
7002 /* First get the mode, signedness, storage order and size. We do this from
7003 just the outermost expression. */
7005 if (TREE_CODE (exp
) == COMPONENT_REF
)
7007 tree field
= TREE_OPERAND (exp
, 1);
7008 size_tree
= DECL_SIZE (field
);
7009 if (flag_strict_volatile_bitfields
> 0
7010 && TREE_THIS_VOLATILE (exp
)
7011 && DECL_BIT_FIELD_TYPE (field
)
7012 && DECL_MODE (field
) != BLKmode
)
7013 /* Volatile bitfields should be accessed in the mode of the
7014 field's type, not the mode computed based on the bit
7016 mode
= TYPE_MODE (DECL_BIT_FIELD_TYPE (field
));
7017 else if (!DECL_BIT_FIELD (field
))
7018 mode
= DECL_MODE (field
);
7019 else if (DECL_MODE (field
) == BLKmode
)
7020 blkmode_bitfield
= true;
7022 *punsignedp
= DECL_UNSIGNED (field
);
7024 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
7026 size_tree
= TREE_OPERAND (exp
, 1);
7027 *punsignedp
= (! INTEGRAL_TYPE_P (TREE_TYPE (exp
))
7028 || TYPE_UNSIGNED (TREE_TYPE (exp
)));
7030 /* For vector types, with the correct size of access, use the mode of
7032 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == VECTOR_TYPE
7033 && TREE_TYPE (exp
) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0)))
7034 && tree_int_cst_equal (size_tree
, TYPE_SIZE (TREE_TYPE (exp
))))
7035 mode
= TYPE_MODE (TREE_TYPE (exp
));
7039 mode
= TYPE_MODE (TREE_TYPE (exp
));
7040 *punsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
7042 if (mode
== BLKmode
)
7043 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
7045 *pbitsize
= GET_MODE_BITSIZE (mode
);
7050 if (! tree_fits_uhwi_p (size_tree
))
7051 mode
= BLKmode
, *pbitsize
= -1;
7053 *pbitsize
= tree_to_uhwi (size_tree
);
7056 *preversep
= reverse_storage_order_for_component_p (exp
);
7058 /* Compute cumulative bit-offset for nested component-refs and array-refs,
7059 and find the ultimate containing object. */
7062 switch (TREE_CODE (exp
))
7065 bit_offset
+= wi::to_offset (TREE_OPERAND (exp
, 2));
7070 tree field
= TREE_OPERAND (exp
, 1);
7071 tree this_offset
= component_ref_field_offset (exp
);
7073 /* If this field hasn't been filled in yet, don't go past it.
7074 This should only happen when folding expressions made during
7075 type construction. */
7076 if (this_offset
== 0)
7079 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
7080 bit_offset
+= wi::to_offset (DECL_FIELD_BIT_OFFSET (field
));
7082 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
7087 case ARRAY_RANGE_REF
:
7089 tree index
= TREE_OPERAND (exp
, 1);
7090 tree low_bound
= array_ref_low_bound (exp
);
7091 tree unit_size
= array_ref_element_size (exp
);
7093 /* We assume all arrays have sizes that are a multiple of a byte.
7094 First subtract the lower bound, if any, in the type of the
7095 index, then convert to sizetype and multiply by the size of
7096 the array element. */
7097 if (! integer_zerop (low_bound
))
7098 index
= fold_build2 (MINUS_EXPR
, TREE_TYPE (index
),
7101 offset
= size_binop (PLUS_EXPR
, offset
,
7102 size_binop (MULT_EXPR
,
7103 fold_convert (sizetype
, index
),
7112 bit_offset
+= *pbitsize
;
7115 case VIEW_CONVERT_EXPR
:
7116 if (keep_aligning
&& STRICT_ALIGNMENT
7117 && (TYPE_ALIGN (TREE_TYPE (exp
))
7118 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0))))
7119 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
7120 < BIGGEST_ALIGNMENT
)
7121 && (TYPE_ALIGN_OK (TREE_TYPE (exp
))
7122 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
7127 /* Hand back the decl for MEM[&decl, off]. */
7128 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
)
7130 tree off
= TREE_OPERAND (exp
, 1);
7131 if (!integer_zerop (off
))
7133 offset_int boff
, coff
= mem_ref_offset (exp
);
7134 boff
= coff
<< LOG2_BITS_PER_UNIT
;
7137 exp
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
7145 /* If any reference in the chain is volatile, the effect is volatile. */
7146 if (TREE_THIS_VOLATILE (exp
))
7149 exp
= TREE_OPERAND (exp
, 0);
7153 /* If OFFSET is constant, see if we can return the whole thing as a
7154 constant bit position. Make sure to handle overflow during
7156 if (TREE_CODE (offset
) == INTEGER_CST
)
7158 offset_int tem
= wi::sext (wi::to_offset (offset
),
7159 TYPE_PRECISION (sizetype
));
7160 tem
<<= LOG2_BITS_PER_UNIT
;
7162 if (wi::fits_shwi_p (tem
))
7164 *pbitpos
= tem
.to_shwi ();
7165 *poffset
= offset
= NULL_TREE
;
7169 /* Otherwise, split it up. */
7172 /* Avoid returning a negative bitpos as this may wreak havoc later. */
7173 if (wi::neg_p (bit_offset
) || !wi::fits_shwi_p (bit_offset
))
7175 offset_int mask
= wi::mask
<offset_int
> (LOG2_BITS_PER_UNIT
, false);
7176 offset_int tem
= bit_offset
.and_not (mask
);
7177 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
7178 Subtract it to BIT_OFFSET and add it (scaled) to OFFSET. */
7180 tem
>>= LOG2_BITS_PER_UNIT
;
7181 offset
= size_binop (PLUS_EXPR
, offset
,
7182 wide_int_to_tree (sizetype
, tem
));
7185 *pbitpos
= bit_offset
.to_shwi ();
7189 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
7190 if (mode
== VOIDmode
7192 && (*pbitpos
% BITS_PER_UNIT
) == 0
7193 && (*pbitsize
% BITS_PER_UNIT
) == 0)
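/* Editor's note: the constant-offset bookkeeping above reduces to
   splitting a total bit offset into a byte offset plus a residual bit
   position.  A stand-alone sketch, restricted to non-negative offsets
   and 8-bit units; the real code also handles negative bit offsets by
   rounding towards -Inf with an offset_int mask.  */

static void
example_split_bit_offset (long long total_bits,
                          long long *byte_offset, long long *bitpos)
{
  *byte_offset = total_bits / 8;        /* whole bytes */
  *bitpos = total_bits % 8;             /* remaining bits within the byte */
}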
7201 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7203 static unsigned HOST_WIDE_INT
7204 target_align (const_tree target)
7206 /* We might have a chain of nested references with intermediate misaligning
7207 bitfields components, so need to recurse to find out. */
7209   unsigned HOST_WIDE_INT this_align, outer_align;
7211   switch (TREE_CODE (target))
7217       this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7218       outer_align = target_align (TREE_OPERAND (target, 0));
7219       return MIN (this_align, outer_align);
7222     case ARRAY_RANGE_REF:
7223       this_align = TYPE_ALIGN (TREE_TYPE (target));
7224       outer_align = target_align (TREE_OPERAND (target, 0));
7225       return MIN (this_align, outer_align);
7228     case NON_LVALUE_EXPR:
7229     case VIEW_CONVERT_EXPR:
7230       this_align = TYPE_ALIGN (TREE_TYPE (target));
7231       outer_align = target_align (TREE_OPERAND (target, 0));
7232       return MAX (this_align, outer_align);
7235       return TYPE_ALIGN (TREE_TYPE (target));
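/* Editor's note: the recursion above boils down to combining the
   alignment contributed by each level of the reference chain.  A
   stand-alone sketch: a component or array reference can only weaken
   the guarantee (MIN), while a view-convert may strengthen it (MAX).
   Illustrative only.  */

static unsigned int
example_combine_align (unsigned int this_align, unsigned int outer_align,
                       int strengthens)
{
  if (strengthens)
    return this_align > outer_align ? this_align : outer_align;
  return this_align < outer_align ? this_align : outer_align;
}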
7240 /* Given an rtx VALUE that may contain additions and multiplications, return
7241 an equivalent value that just refers to a register, memory, or constant.
7242 This is done by generating instructions to perform the arithmetic and
7243 returning a pseudo-register containing the value.
7245 The returned value may be a REG, SUBREG, MEM or constant. */
7248 force_operand (rtx value, rtx target)
7251 /* Use subtarget as the target for operand 0 of a binary operation. */
7252   rtx subtarget = get_subtarget (target);
7253   enum rtx_code code = GET_CODE (value);
7255 /* Check for subreg applied to an expression produced by loop optimizer. */
7257       && !REG_P (SUBREG_REG (value))
7258       && !MEM_P (SUBREG_REG (value)))
7261         = simplify_gen_subreg (GET_MODE (value),
7262                                force_reg (GET_MODE (SUBREG_REG (value)),
7263                                           force_operand (SUBREG_REG (value),
7265                                GET_MODE (SUBREG_REG (value)),
7266                                SUBREG_BYTE (value));
7267       code = GET_CODE (value);
7270 /* Check for a PIC address load. */
7271 if ((code
== PLUS
|| code
== MINUS
)
7272 && XEXP (value
, 0) == pic_offset_table_rtx
7273 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
7274 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
7275 || GET_CODE (XEXP (value
, 1)) == CONST
))
7278 subtarget
= gen_reg_rtx (GET_MODE (value
));
7279 emit_move_insn (subtarget
, value
);
7283 if (ARITHMETIC_P (value
))
7285 op2
= XEXP (value
, 1);
7286 if (!CONSTANT_P (op2
) && !(REG_P (op2
) && op2
!= subtarget
))
7288 if (code
== MINUS
&& CONST_INT_P (op2
))
7291 op2
= negate_rtx (GET_MODE (value
), op2
);
7294 /* Check for an addition with OP2 a constant integer and our first
7295 operand a PLUS of a virtual register and something else. In that
7296 case, we want to emit the sum of the virtual register and the
7297 constant first and then add the other value. This allows virtual
7298 register instantiation to simply modify the constant rather than
7299 creating another one around this addition. */
7300 if (code
== PLUS
&& CONST_INT_P (op2
)
7301 && GET_CODE (XEXP (value
, 0)) == PLUS
7302 && REG_P (XEXP (XEXP (value
, 0), 0))
7303 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7304 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
7306 rtx temp
= expand_simple_binop (GET_MODE (value
), code
,
7307 XEXP (XEXP (value
, 0), 0), op2
,
7308 subtarget
, 0, OPTAB_LIB_WIDEN
);
7309 return expand_simple_binop (GET_MODE (value
), code
, temp
,
7310 force_operand (XEXP (XEXP (value
,
7312 target
, 0, OPTAB_LIB_WIDEN
);
7315 op1
= force_operand (XEXP (value
, 0), subtarget
);
7316 op2
= force_operand (op2
, NULL_RTX
);
7320 return expand_mult (GET_MODE (value
), op1
, op2
, target
, 1);
7322 if (!INTEGRAL_MODE_P (GET_MODE (value
)))
7323 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
7324 target
, 1, OPTAB_LIB_WIDEN
);
7326 return expand_divmod (0,
7327 FLOAT_MODE_P (GET_MODE (value
))
7328 ? RDIV_EXPR
: TRUNC_DIV_EXPR
,
7329 GET_MODE (value
), op1
, op2
, target
, 0);
7331 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
7334 return expand_divmod (0, TRUNC_DIV_EXPR
, GET_MODE (value
), op1
, op2
,
7337 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
7340 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
7341 target
, 0, OPTAB_LIB_WIDEN
);
7343 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
7344 target
, 1, OPTAB_LIB_WIDEN
);
7347 if (UNARY_P (value
))
7350 target
= gen_reg_rtx (GET_MODE (value
));
7351 op1
= force_operand (XEXP (value
, 0), NULL_RTX
);
7358 case FLOAT_TRUNCATE
:
7359 convert_move (target
, op1
, code
== ZERO_EXTEND
);
7364 expand_fix (target
, op1
, code
== UNSIGNED_FIX
);
7368 case UNSIGNED_FLOAT
:
7369 expand_float (target
, op1
, code
== UNSIGNED_FLOAT
);
7373 return expand_simple_unop (GET_MODE (value
), code
, op1
, target
, 0);
7377 #ifdef INSN_SCHEDULING
7378 /* On machines that have insn scheduling, we want all memory reference to be
7379 explicit, so we need to deal with such paradoxical SUBREGs. */
7380 if (paradoxical_subreg_p (value
) && MEM_P (SUBREG_REG (value
)))
7382 = simplify_gen_subreg (GET_MODE (value
),
7383 force_reg (GET_MODE (SUBREG_REG (value
)),
7384 force_operand (SUBREG_REG (value
),
7386 GET_MODE (SUBREG_REG (value
)),
7387 SUBREG_BYTE (value
));
7393 /* Subroutine of expand_expr: return nonzero iff there is no way that
7394 EXP can reference X, which is being modified. TOP_P is nonzero if this
7395 call is going to be used to determine whether we need a temporary
7396 for EXP, as opposed to a recursive call to this function.
7398 It is always safe for this routine to return zero since it merely
7399 searches for optimization opportunities. */
7402 safe_from_p (const_rtx x
, tree exp
, int top_p
)
7408 /* If EXP has varying size, we MUST use a target since we currently
7409 have no way of allocating temporaries of variable size
7410 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7411 So we assume here that something at a higher level has prevented a
7412 clash. This is somewhat bogus, but the best we can do. Only
7413 do this when X is BLKmode and when we are at the top level. */
7414 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
7415 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
7416 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
7417 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
7418 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
7420 && GET_MODE (x
) == BLKmode
)
7421 /* If X is in the outgoing argument area, it is always safe. */
7423 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
7424 || (GET_CODE (XEXP (x
, 0)) == PLUS
7425 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
7428 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7429 find the underlying pseudo. */
7430 if (GET_CODE (x
) == SUBREG
)
7433 if (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
7437 /* Now look at our tree code and possibly recurse. */
7438 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
7440 case tcc_declaration
:
7441 exp_rtl
= DECL_RTL_IF_SET (exp
);
7447 case tcc_exceptional
:
7448 if (TREE_CODE (exp
) == TREE_LIST
)
7452 if (TREE_VALUE (exp
) && !safe_from_p (x
, TREE_VALUE (exp
), 0))
7454 exp
= TREE_CHAIN (exp
);
7457 if (TREE_CODE (exp
) != TREE_LIST
)
7458 return safe_from_p (x
, exp
, 0);
7461 else if (TREE_CODE (exp
) == CONSTRUCTOR
)
7463 constructor_elt
*ce
;
7464 unsigned HOST_WIDE_INT idx
;
7466 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp
), idx
, ce
)
7467 if ((ce
->index
!= NULL_TREE
&& !safe_from_p (x
, ce
->index
, 0))
7468 || !safe_from_p (x
, ce
->value
, 0))
7472 else if (TREE_CODE (exp
) == ERROR_MARK
)
7473 return 1; /* An already-visited SAVE_EXPR? */
7478 /* The only case we look at here is the DECL_INITIAL inside a
7480 return (TREE_CODE (exp
) != DECL_EXPR
7481 || TREE_CODE (DECL_EXPR_DECL (exp
)) != VAR_DECL
7482 || !DECL_INITIAL (DECL_EXPR_DECL (exp
))
7483 || safe_from_p (x
, DECL_INITIAL (DECL_EXPR_DECL (exp
)), 0));
7486 case tcc_comparison
:
7487 if (!safe_from_p (x
, TREE_OPERAND (exp
, 1), 0))
7492 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
7494 case tcc_expression
:
7497 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7498 the expression. If it is set, we conflict iff we are that rtx or
7499 both are in memory. Otherwise, we check all operands of the
7500 expression recursively. */
7502 switch (TREE_CODE (exp
))
7505 /* If the operand is static or we are static, we can't conflict.
7506 Likewise if we don't conflict with the operand at all. */
7507 if (staticp (TREE_OPERAND (exp
, 0))
7508 || TREE_STATIC (exp
)
7509 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
7512 /* Otherwise, the only way this can conflict is if we are taking
7513 the address of a DECL whose address is part of X, which is
7515 exp
= TREE_OPERAND (exp
, 0);
7518 if (!DECL_RTL_SET_P (exp
)
7519 || !MEM_P (DECL_RTL (exp
)))
7522 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
7528 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
7529 get_alias_set (exp
)))
7534 /* Assume that the call will clobber all hard registers and
7536 if ((REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
7541 case WITH_CLEANUP_EXPR
:
7542 case CLEANUP_POINT_EXPR
:
7543 /* Lowered by gimplify.c. */
7547 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
7553 /* If we have an rtx, we do not need to scan our operands. */
7557 nops
= TREE_OPERAND_LENGTH (exp
);
7558 for (i
= 0; i
< nops
; i
++)
7559 if (TREE_OPERAND (exp
, i
) != 0
7560 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
7566 /* Should never get a type here. */
7570 /* If we have an rtl, find any enclosed object. Then see if we conflict
7574 if (GET_CODE (exp_rtl
) == SUBREG
)
7576 exp_rtl
= SUBREG_REG (exp_rtl
);
7578 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
7582 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7583 are memory and they conflict. */
7584 return ! (rtx_equal_p (x
, exp_rtl
)
7585 || (MEM_P (x
) && MEM_P (exp_rtl
)
7586 && true_dependence (exp_rtl
, VOIDmode
, x
)));
7589 /* If we reach here, it is safe. */
7594 /* Return the highest power of two that EXP is known to be a multiple of.
7595 This is used in updating alignment of MEMs in array references. */
7597 unsigned HOST_WIDE_INT
7598 highest_pow2_factor (const_tree exp)
7600   unsigned HOST_WIDE_INT ret;
7601   int trailing_zeros = tree_ctz (exp);
7602   if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7603     return BIGGEST_ALIGNMENT;
7604   ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7605   if (ret > BIGGEST_ALIGNMENT)
7606     return BIGGEST_ALIGNMENT;
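/* Editor's note: for a compile-time constant the computation above is
   just "the lowest set bit", capped at BIGGEST_ALIGNMENT.  A stand-alone
   sketch using the x & -x identity; CAP stands in for BIGGEST_ALIGNMENT
   and is an assumption of this example.  */

static unsigned long long
example_highest_pow2_factor (unsigned long long x, unsigned long long cap)
{
  unsigned long long ret;

  if (x == 0)
    return cap;         /* every power of two divides zero */
  ret = x & -x;         /* largest power of two dividing X */
  return ret > cap ? cap : ret;
}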
7610 /* Similar, except that the alignment requirements of TARGET are
7611 taken into account. Assume it is at least as aligned as its
7612 type, unless it is a COMPONENT_REF in which case the layout of
7613 the structure gives the alignment. */
7615 static unsigned HOST_WIDE_INT
7616 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7618   unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7619   unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7621   return MAX (factor, talign);
7624 /* Convert the tree comparison code TCODE to the rtl one where the
7625 signedness is UNSIGNEDP. */
7627 static enum rtx_code
7628 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7640       code = unsignedp ? LTU : LT;
7643       code = unsignedp ? LEU : LE;
7646       code = unsignedp ? GTU : GT;
7649       code = unsignedp ? GEU : GE;
7651     case UNORDERED_EXPR:
7682 /* Subroutine of expand_expr. Expand the two operands of a binary
7683 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7684 The value may be stored in TARGET if TARGET is nonzero. The
7685 MODIFIER argument is as documented by expand_expr. */
7688 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7689                  enum expand_modifier modifier)
7691   if (! safe_from_p (target, exp1, 1))
7693       if (operand_equal_p (exp0, exp1, 0))
7695           *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7696           *op1 = copy_rtx (*op0);
7700 /* If we need to preserve evaluation order, copy exp0 into its own
7701 temporary variable so that it can't be clobbered by exp1. */
7702       if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7703         exp0 = save_expr (exp0);
7704       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7705       *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
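/* Editor's note: the evaluation-order concern above, restated as a
   stand-alone sketch.  When the second operand may have side effects,
   the first is forced into a temporary before the second runs, so the
   observable order matches the source.  The callback types are an
   assumption of this example, not GCC's interface.  */

static void
example_expand_operands (int (*eval0) (void), int (*eval1) (void),
                         int *op0, int *op1)
{
  int t0 = eval0 ();    /* evaluate the first operand first ...  */
  int t1 = eval1 ();    /* ... so eval1 cannot clobber its value */

  *op0 = t0;
  *op1 = t1;
}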
7710 /* Return a MEM that contains constant EXP. DEFER is as for
7711 output_constant_def and MODIFIER is as for expand_expr. */
7714 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7718   mem = output_constant_def (exp, defer);
7719   if (modifier != EXPAND_INITIALIZER)
7720     mem = use_anchored_address (mem);
7724 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7725 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7728 expand_expr_addr_expr_1 (tree exp
, rtx target
, machine_mode tmode
,
7729 enum expand_modifier modifier
, addr_space_t as
)
7731 rtx result
, subtarget
;
7733 HOST_WIDE_INT bitsize
, bitpos
;
7734 int unsignedp
, reversep
, volatilep
= 0;
7737 /* If we are taking the address of a constant and are at the top level,
7738 we have to use output_constant_def since we can't call force_const_mem
7740 /* ??? This should be considered a front-end bug. We should not be
7741 generating ADDR_EXPR of something that isn't an LVALUE. The only
7742 exception here is STRING_CST. */
7743 if (CONSTANT_CLASS_P (exp
))
7745 result
= XEXP (expand_expr_constant (exp
, 0, modifier
), 0);
7746 if (modifier
< EXPAND_SUM
)
7747 result
= force_operand (result
, target
);
7751 /* Everything must be something allowed by is_gimple_addressable. */
7752 switch (TREE_CODE (exp
))
7755 /* This case will happen via recursion for &a->b. */
7756 return expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7760 tree tem
= TREE_OPERAND (exp
, 0);
7761 if (!integer_zerop (TREE_OPERAND (exp
, 1)))
7762 tem
= fold_build_pointer_plus (tem
, TREE_OPERAND (exp
, 1));
7763 return expand_expr (tem
, target
, tmode
, modifier
);
7767 /* Expand the initializer like constants above. */
7768 result
= XEXP (expand_expr_constant (DECL_INITIAL (exp
),
7770 if (modifier
< EXPAND_SUM
)
7771 result
= force_operand (result
, target
);
7775 /* The real part of the complex number is always first, therefore
7776 the address is the same as the address of the parent object. */
7779 inner
= TREE_OPERAND (exp
, 0);
7783 /* The imaginary part of the complex number is always second.
7784 The expression is therefore always offset by the size of the
7787 bitpos
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp
)));
7788 inner
= TREE_OPERAND (exp
, 0);
7791 case COMPOUND_LITERAL_EXPR
:
7792 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7793 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7794 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7795 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7796 the initializers aren't gimplified. */
7797 if (COMPOUND_LITERAL_EXPR_DECL (exp
)
7798 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp
)))
7799 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp
),
7800 target
, tmode
, modifier
, as
);
7803 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7804 expand_expr, as that can have various side effects; LABEL_DECLs for
7805 example, may not have their DECL_RTL set yet. Expand the rtl of
7806 CONSTRUCTORs too, which should yield a memory reference for the
7807 constructor's contents. Assume language specific tree nodes can
7808 be expanded in some interesting way. */
7809 gcc_assert (TREE_CODE (exp
) < LAST_AND_UNUSED_TREE_CODE
);
7811 || TREE_CODE (exp
) == CONSTRUCTOR
7812 || TREE_CODE (exp
) == COMPOUND_LITERAL_EXPR
)
7814 result
= expand_expr (exp
, target
, tmode
,
7815 modifier
== EXPAND_INITIALIZER
7816 ? EXPAND_INITIALIZER
: EXPAND_CONST_ADDRESS
);
7818 /* If the DECL isn't in memory, then the DECL wasn't properly
7819 marked TREE_ADDRESSABLE, which will be either a front-end
7820 or a tree optimizer bug. */
7822 gcc_assert (MEM_P (result
));
7823 result
= XEXP (result
, 0);
7825 /* ??? Is this needed anymore? */
7827 TREE_USED (exp
) = 1;
7829 if (modifier
!= EXPAND_INITIALIZER
7830 && modifier
!= EXPAND_CONST_ADDRESS
7831 && modifier
!= EXPAND_SUM
)
7832 result
= force_operand (result
, target
);
7836 /* Pass FALSE as the last argument to get_inner_reference although
7837 we are expanding to RTL. The rationale is that we know how to
7838 handle "aligning nodes" here: we can just bypass them because
7839 they won't change the final object whose address will be returned
7840 (they actually exist only for that purpose). */
7841 inner
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode1
,
7842 &unsignedp
, &reversep
, &volatilep
, false);
7846 /* We must have made progress. */
7847 gcc_assert (inner
!= exp
);
7849 subtarget
= offset
|| bitpos
? NULL_RTX
: target
;
7850 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7851 inner alignment, force the inner to be sufficiently aligned. */
7852 if (CONSTANT_CLASS_P (inner
)
7853 && TYPE_ALIGN (TREE_TYPE (inner
)) < TYPE_ALIGN (TREE_TYPE (exp
)))
7855 inner
= copy_node (inner
);
7856 TREE_TYPE (inner
) = copy_node (TREE_TYPE (inner
));
7857 SET_TYPE_ALIGN (TREE_TYPE (inner
), TYPE_ALIGN (TREE_TYPE (exp
)));
7858 TYPE_USER_ALIGN (TREE_TYPE (inner
)) = 1;
7860 result
= expand_expr_addr_expr_1 (inner
, subtarget
, tmode
, modifier
, as
);
7866 if (modifier
!= EXPAND_NORMAL
)
7867 result
= force_operand (result
, NULL
);
7868 tmp
= expand_expr (offset
, NULL_RTX
, tmode
,
7869 modifier
== EXPAND_INITIALIZER
7870 ? EXPAND_INITIALIZER
: EXPAND_NORMAL
);
7872 /* expand_expr is allowed to return an object in a mode other
7873 than TMODE. If it did, we need to convert. */
7874 if (GET_MODE (tmp
) != VOIDmode
&& tmode
!= GET_MODE (tmp
))
7875 tmp
= convert_modes (tmode
, GET_MODE (tmp
),
7876 tmp
, TYPE_UNSIGNED (TREE_TYPE (offset
)));
7877 result
= convert_memory_address_addr_space (tmode
, result
, as
);
7878 tmp
= convert_memory_address_addr_space (tmode
, tmp
, as
);
7880 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7881 result
= simplify_gen_binary (PLUS
, tmode
, result
, tmp
);
7884 subtarget
= bitpos
? NULL_RTX
: target
;
7885 result
= expand_simple_binop (tmode
, PLUS
, result
, tmp
, subtarget
,
7886 1, OPTAB_LIB_WIDEN
);
7892 /* Someone beforehand should have rejected taking the address
7893 of such an object. */
7894 gcc_assert ((bitpos
% BITS_PER_UNIT
) == 0);
7896 result
= convert_memory_address_addr_space (tmode
, result
, as
);
7897 result
= plus_constant (tmode
, result
, bitpos
/ BITS_PER_UNIT
);
7898 if (modifier
< EXPAND_SUM
)
7899 result
= force_operand (result
, target
);
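/* Editor's note: the tail of the function above amounts to plain
   address arithmetic once the offset parts are known.  A stand-alone
   sketch on raw bytes; taking the address is only meaningful when the
   bit position is a whole number of bytes, which the real code asserts.
   Assumes 8-bit units; illustrative only.  */

static unsigned char *
example_component_address (unsigned char *base, long byte_offset,
                           long bitpos)
{
  if (bitpos % 8 != 0)
    return 0;                   /* not addressable at bit granularity */
  return base + byte_offset + bitpos / 8;
}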
7905 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7906 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7909 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7910                        enum expand_modifier modifier)
7912   addr_space_t as = ADDR_SPACE_GENERIC;
7913   machine_mode address_mode = Pmode;
7914   machine_mode pointer_mode = ptr_mode;
7918 /* Target mode of VOIDmode says "whatever's natural". */
7919   if (tmode == VOIDmode)
7920     tmode = TYPE_MODE (TREE_TYPE (exp));
7922   if (POINTER_TYPE_P (TREE_TYPE (exp)))
7924       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7925       address_mode = targetm.addr_space.address_mode (as);
7926       pointer_mode = targetm.addr_space.pointer_mode (as);
7929 /* We can get called with some Weird Things if the user does silliness
7930 like "(short) &a". In that case, convert_memory_address won't do
7931 the right thing, so ignore the given target mode. */
7932   if (tmode != address_mode && tmode != pointer_mode)
7933     tmode = address_mode;
7935   result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7936                                     tmode, modifier, as);
7938 /* Despite expand_expr claims concerning ignoring TMODE when not
7939 strictly convenient, stuff breaks if we don't honor it. Note
7940 that combined with the above, we only do this for pointer modes. */
7941   rmode = GET_MODE (result);
7942   if (rmode == VOIDmode)
7945     result = convert_memory_address_addr_space (tmode, result, as);
7950 /* Generate code for computing CONSTRUCTOR EXP.
7951 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7952 is TRUE, instead of creating a temporary variable in memory
7953 NULL is returned and the caller needs to handle it differently. */
7956 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7957                     bool avoid_temp_mem)
7959   tree type = TREE_TYPE (exp);
7960   machine_mode mode = TYPE_MODE (type);
7962 /* Try to avoid creating a temporary at all. This is possible
7963 if all of the initializer is zero.
7964 FIXME: try to handle all [0..255] initializers we can handle
7966   if (TREE_STATIC (exp)
7967       && !TREE_ADDRESSABLE (exp)
7968       && target != 0 && mode == BLKmode
7969       && all_zeros_p (exp))
7971       clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7975 /* All elts simple constants => refer to a constant in memory. But
7976 if this is a non-BLKmode mode, let it store a field at a time
7977 since that should make a CONST_INT, CONST_WIDE_INT or
7978 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7979 use, it is best to store directly into the target unless the type
7980 is large enough that memcpy will be used. If we are making an
7981 initializer and all operands are constant, put it in memory as
7984 FIXME: Avoid trying to fill vector constructors piece-meal.
7985 Output them with output_constant_def below unless we're sure
7986 they're zeros. This should go away when vector initializers
7987 are treated like VECTOR_CST instead of arrays. */
7988 if ((TREE_STATIC (exp
)
7989 && ((mode
== BLKmode
7990 && ! (target
!= 0 && safe_from_p (target
, exp
, 1)))
7991 || TREE_ADDRESSABLE (exp
)
7992 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type
))
7993 && (! can_move_by_pieces
7994 (tree_to_uhwi (TYPE_SIZE_UNIT (type
)),
7996 && ! mostly_zeros_p (exp
))))
7997 || ((modifier
== EXPAND_INITIALIZER
|| modifier
== EXPAND_CONST_ADDRESS
)
7998 && TREE_CONSTANT (exp
)))
8005 constructor
= expand_expr_constant (exp
, 1, modifier
);
8007 if (modifier
!= EXPAND_CONST_ADDRESS
8008 && modifier
!= EXPAND_INITIALIZER
8009 && modifier
!= EXPAND_SUM
)
8010 constructor
= validize_mem (constructor
);
8015 /* Handle calls that pass values in multiple non-contiguous
8016 locations. The Irix 6 ABI has examples of this. */
8017 if (target
== 0 || ! safe_from_p (target
, exp
, 1)
8018 || GET_CODE (target
) == PARALLEL
|| modifier
== EXPAND_STACK_PARM
)
8023 target
= assign_temp (type
, TREE_ADDRESSABLE (exp
), 1);
8026 store_constructor (exp
, target
, 0, int_expr_size (exp
), false);
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.

   If INNER_REFERENCE_P is true, we are expanding an inner reference.
   In this case, we don't adjust a returned MEM rtx that wouldn't be
   sufficiently aligned for its mode; instead, it's up to the caller
   to deal with it afterwards.  This is used to make sure that unaligned
   base objects for which out-of-bounds accesses are supported, for
   example record types with trailing arrays, aren't realigned behind
   the back of the caller.
   The normal operating mode is to pass FALSE for this parameter.  */
expand_expr_real (tree exp, rtx target, machine_mode tmode,
                  enum expand_modifier modifier, rtx *alt_rtl,
                  bool inner_reference_p)
  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }
  ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
                            inner_reference_p);
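
/* Minimal usage sketch (illustration only; no helper of this name exists
   in this file).  It shows the contract documented above: TARGET and
   TMODE are only suggestions, so a caller that insists on a particular
   register must be prepared to copy the result.  */

static rtx
expand_into_suggested_reg_sketch (tree exp)
{
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  rtx target = gen_reg_rtx (mode);
  rtx val = expand_expr (exp, target, mode, EXPAND_NORMAL);

  /* The expander may have produced the value somewhere else.  */
  if (val != target)
    emit_move_insn (target, val);
  return target;
}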
/* Try to expand the conditional expression which is represented by
   TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds
   return the rtl reg which represents the result.  Otherwise return
   NULL_RTX.  */

expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
                              tree treeop1 ATTRIBUTE_UNUSED,
                              tree treeop2 ATTRIBUTE_UNUSED)
8119 rtx op00
, op01
, op1
, op2
;
8120 enum rtx_code comparison_code
;
8121 machine_mode comparison_mode
;
8124 tree type
= TREE_TYPE (treeop1
);
8125 int unsignedp
= TYPE_UNSIGNED (type
);
8126 machine_mode mode
= TYPE_MODE (type
);
8127 machine_mode orig_mode
= mode
;
8129 /* If we cannot do a conditional move on the mode, try doing it
8130 with the promoted mode. */
8131 if (!can_conditionally_move_p (mode
))
8133 mode
= promote_mode (type
, mode
, &unsignedp
);
8134 if (!can_conditionally_move_p (mode
))
8136 temp
= assign_temp (type
, 0, 0); /* Use promoted mode for temp. */
8139 temp
= assign_temp (type
, 0, 1);
8142 expand_operands (treeop1
, treeop2
,
8143 temp
, &op1
, &op2
, EXPAND_NORMAL
);
8145 if (TREE_CODE (treeop0
) == SSA_NAME
8146 && (srcstmt
= get_def_for_expr_class (treeop0
, tcc_comparison
)))
8148 tree type
= TREE_TYPE (gimple_assign_rhs1 (srcstmt
));
8149 enum tree_code cmpcode
= gimple_assign_rhs_code (srcstmt
);
8150 op00
= expand_normal (gimple_assign_rhs1 (srcstmt
));
8151 op01
= expand_normal (gimple_assign_rhs2 (srcstmt
));
8152 comparison_mode
= TYPE_MODE (type
);
8153 unsignedp
= TYPE_UNSIGNED (type
);
8154 comparison_code
= convert_tree_comp_to_rtx (cmpcode
, unsignedp
);
8156 else if (COMPARISON_CLASS_P (treeop0
))
8158 tree type
= TREE_TYPE (TREE_OPERAND (treeop0
, 0));
8159 enum tree_code cmpcode
= TREE_CODE (treeop0
);
8160 op00
= expand_normal (TREE_OPERAND (treeop0
, 0));
8161 op01
= expand_normal (TREE_OPERAND (treeop0
, 1));
8162 unsignedp
= TYPE_UNSIGNED (type
);
8163 comparison_mode
= TYPE_MODE (type
);
8164 comparison_code
= convert_tree_comp_to_rtx (cmpcode
, unsignedp
);
8168 op00
= expand_normal (treeop0
);
8170 comparison_code
= NE
;
8171 comparison_mode
= GET_MODE (op00
);
8172 if (comparison_mode
== VOIDmode
)
8173 comparison_mode
= TYPE_MODE (TREE_TYPE (treeop0
));
8176 if (GET_MODE (op1
) != mode
)
8177 op1
= gen_lowpart (mode
, op1
);
8179 if (GET_MODE (op2
) != mode
)
8180 op2
= gen_lowpart (mode
, op2
);
8182 /* Try to emit the conditional move. */
8183 insn
= emit_conditional_move (temp
, comparison_code
,
8184 op00
, op01
, comparison_mode
,
  /* If we could do the conditional move, emit the sequence,
     and return.  */
  rtx_insn *seq = get_insns ();
  return convert_modes (orig_mode, mode, temp, 0);

  /* Otherwise discard the sequence and fall back to code with
     branches.  */
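
/* Illustrative sketch of how a caller hands a simple binary operation to
   expand_expr_real_2 below (an assumption for illustration; the real
   callers build the separate_ops from a GIMPLE assignment, as
   expand_expr_real_1 does later in this file).  */

static rtx
expand_binary_op_sketch (enum tree_code code, tree type, tree op0, tree op1)
{
  struct separate_ops ops;
  ops.code = code;
  ops.location = UNKNOWN_LOCATION;
  ops.type = type;
  ops.op0 = op0;
  ops.op1 = op1;
  ops.op2 = NULL_TREE;
  return expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (type), EXPAND_NORMAL);
}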
expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
                    enum expand_modifier modifier)
8208 rtx op0
, op1
, op2
, temp
;
8209 rtx_code_label
*lab
;
8213 enum tree_code code
= ops
->code
;
8215 rtx subtarget
, original_target
;
8217 bool reduce_bit_field
;
8218 location_t loc
= ops
->location
;
8219 tree treeop0
, treeop1
, treeop2
;
#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field			\
				 ? reduce_to_bit_field_precision ((expr), \
								  target, type) \
				 : (expr))
8227 mode
= TYPE_MODE (type
);
8228 unsignedp
= TYPE_UNSIGNED (type
);
8234 /* We should be called only on simple (binary or unary) expressions,
8235 exactly those that are valid in gimple expressions that aren't
8236 GIMPLE_SINGLE_RHS (or invalid). */
8237 gcc_assert (get_gimple_rhs_class (code
) == GIMPLE_UNARY_RHS
8238 || get_gimple_rhs_class (code
) == GIMPLE_BINARY_RHS
8239 || get_gimple_rhs_class (code
) == GIMPLE_TERNARY_RHS
);
8241 ignore
= (target
== const0_rtx
8242 || ((CONVERT_EXPR_CODE_P (code
)
8243 || code
== COND_EXPR
|| code
== VIEW_CONVERT_EXPR
)
8244 && TREE_CODE (type
) == VOID_TYPE
));
  /* We should be called only if we need the result.  */
  gcc_assert (!ignore);

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (INTEGRAL_TYPE_P (type)
                      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
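
  /* Worked example of the reduction (illustration only): an unsigned
     3-bit bit-field value is carried in SImode, so after an addition the
     result is masked back to 3 bits, e.g. 6 + 3 = 9 -> 9 & 7 = 1; for a
     signed 3-bit field the value is shifted left and then
     arithmetic-shifted right by GET_MODE_PRECISION (mode) - 3 bits so the
     sign bit is replicated again, e.g. 3 + 1 = 4 -> -4.  */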
8255 if (reduce_bit_field
&& modifier
== EXPAND_STACK_PARM
)
8258 /* Use subtarget as the target for operand 0 of a binary operation. */
8259 subtarget
= get_subtarget (target
);
8260 original_target
= target
;
8264 case NON_LVALUE_EXPR
:
8267 if (treeop0
== error_mark_node
)
8270 if (TREE_CODE (type
) == UNION_TYPE
)
8272 tree valtype
= TREE_TYPE (treeop0
);
8274 /* If both input and output are BLKmode, this conversion isn't doing
8275 anything except possibly changing memory attribute. */
8276 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
8278 rtx result
= expand_expr (treeop0
, target
, tmode
,
8281 result
= copy_rtx (result
);
8282 set_mem_attributes (result
, type
, 0);
8288 if (TYPE_MODE (type
) != BLKmode
)
8289 target
= gen_reg_rtx (TYPE_MODE (type
));
8291 target
= assign_temp (type
, 1, 1);
8295 /* Store data into beginning of memory target. */
8296 store_expr (treeop0
,
8297 adjust_address (target
, TYPE_MODE (valtype
), 0),
8298 modifier
== EXPAND_STACK_PARM
,
8299 false, TYPE_REVERSE_STORAGE_ORDER (type
));
8303 gcc_assert (REG_P (target
)
8304 && !TYPE_REVERSE_STORAGE_ORDER (type
));
8306 /* Store this field into a union of the proper type. */
8307 store_field (target
,
8308 MIN ((int_size_in_bytes (TREE_TYPE
8311 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
8312 0, 0, 0, TYPE_MODE (valtype
), treeop0
, 0,
8316 /* Return the entire union. */
8320 if (mode
== TYPE_MODE (TREE_TYPE (treeop0
)))
8322 op0
= expand_expr (treeop0
, target
, VOIDmode
,
8325 /* If the signedness of the conversion differs and OP0 is
8326 a promoted SUBREG, clear that indication since we now
8327 have to do the proper extension. */
8328 if (TYPE_UNSIGNED (TREE_TYPE (treeop0
)) != unsignedp
8329 && GET_CODE (op0
) == SUBREG
)
8330 SUBREG_PROMOTED_VAR_P (op0
) = 0;
8332 return REDUCE_BIT_FIELD (op0
);
8335 op0
= expand_expr (treeop0
, NULL_RTX
, mode
,
8336 modifier
== EXPAND_SUM
? EXPAND_NORMAL
: modifier
);
8337 if (GET_MODE (op0
) == mode
)
8340 /* If OP0 is a constant, just convert it into the proper mode. */
8341 else if (CONSTANT_P (op0
))
8343 tree inner_type
= TREE_TYPE (treeop0
);
8344 machine_mode inner_mode
= GET_MODE (op0
);
8346 if (inner_mode
== VOIDmode
)
8347 inner_mode
= TYPE_MODE (inner_type
);
8349 if (modifier
== EXPAND_INITIALIZER
)
8350 op0
= lowpart_subreg (mode
, op0
, inner_mode
);
8352 op0
= convert_modes (mode
, inner_mode
, op0
,
8353 TYPE_UNSIGNED (inner_type
));
8356 else if (modifier
== EXPAND_INITIALIZER
)
8357 op0
= gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
8359 else if (target
== 0)
8360 op0
= convert_to_mode (mode
, op0
,
8361 TYPE_UNSIGNED (TREE_TYPE
8365 convert_move (target
, op0
,
8366 TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
8370 return REDUCE_BIT_FIELD (op0
);
8372 case ADDR_SPACE_CONVERT_EXPR
:
8374 tree treeop0_type
= TREE_TYPE (treeop0
);
8376 gcc_assert (POINTER_TYPE_P (type
));
8377 gcc_assert (POINTER_TYPE_P (treeop0_type
));
8379 addr_space_t as_to
= TYPE_ADDR_SPACE (TREE_TYPE (type
));
8380 addr_space_t as_from
= TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type
));
8382 /* Conversions between pointers to the same address space should
8383 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8384 gcc_assert (as_to
!= as_from
);
8386 op0
= expand_expr (treeop0
, NULL_RTX
, VOIDmode
, modifier
);
8388 /* Ask target code to handle conversion between pointers
8389 to overlapping address spaces. */
8390 if (targetm
.addr_space
.subset_p (as_to
, as_from
)
8391 || targetm
.addr_space
.subset_p (as_from
, as_to
))
8393 op0
= targetm
.addr_space
.convert (op0
, treeop0_type
, type
);
8397 /* For disjoint address spaces, converting anything but a null
8398 pointer invokes undefined behavior. We truncate or extend the
8399 value as if we'd converted via integers, which handles 0 as
8400 required, and all others as the programmer likely expects. */
8401 #ifndef POINTERS_EXTEND_UNSIGNED
8402 const int POINTERS_EXTEND_UNSIGNED
= 1;
8404 op0
= convert_modes (mode
, TYPE_MODE (treeop0_type
),
8405 op0
, POINTERS_EXTEND_UNSIGNED
);
    case POINTER_PLUS_EXPR:
      /* Even though the sizetype mode and the pointer's mode can be
         different, expand is able to handle this correctly and get the
         correct result out of the PLUS_EXPR code.  */
      /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
         if sizetype precision is smaller than pointer precision.  */
8417 if (TYPE_PRECISION (sizetype
) < TYPE_PRECISION (type
))
8418 treeop1
= fold_convert_loc (loc
, type
,
8419 fold_convert_loc (loc
, ssizetype
,
8421 /* If sizetype precision is larger than pointer precision, truncate the
8422 offset to have matching modes. */
8423 else if (TYPE_PRECISION (sizetype
) > TYPE_PRECISION (type
))
8424 treeop1
= fold_convert_loc (loc
, type
, treeop1
);
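
      /* Worked example (hypothetical configuration for illustration:
         64-bit pointers with a 32-bit sizetype).  A POINTER_PLUS_EXPR
         offset of (sizetype) -4, i.e. 0xfffffffc, must reach the
         PLUS_EXPR as the 64-bit value -4; converting through ssizetype
         first yields 0xfffffffffffffffc rather than the incorrect
         zero-extended 0x00000000fffffffc.  */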
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */
8438 if (TREE_CODE (treeop0
) == PLUS_EXPR
8439 && TREE_CODE (TREE_OPERAND (treeop0
, 1)) == INTEGER_CST
8440 && TREE_CODE (treeop1
) == VAR_DECL
8441 && (DECL_RTL (treeop1
) == frame_pointer_rtx
8442 || DECL_RTL (treeop1
) == stack_pointer_rtx
8443 || DECL_RTL (treeop1
) == arg_pointer_rtx
))
      /* If the result is to be ptr_mode and we are adding an integer to
         something, we might be forming a constant.  So try to use
         plus_constant.  If it produces a sum and we can't accept it,
         use force_operand.  This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid

         If this is an EXPAND_SUM call, always return the sum.  */
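
      /* Worked example of the plus_constant path (illustration only):
         for P = &ARR[3] with 4-byte elements, EXPAND_SUM lets this return
         the bare sum (plus (symbol_ref "ARR") (const_int 12)) so the
         caller can fold it into an addressing mode, instead of emitting
         an add into a fresh pseudo.  */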
8456 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
8457 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
8459 if (modifier
== EXPAND_STACK_PARM
)
8461 if (TREE_CODE (treeop0
) == INTEGER_CST
8462 && GET_MODE_PRECISION (mode
) <= HOST_BITS_PER_WIDE_INT
8463 && TREE_CONSTANT (treeop1
))
8467 machine_mode wmode
= TYPE_MODE (TREE_TYPE (treeop1
));
8469 op1
= expand_expr (treeop1
, subtarget
, VOIDmode
,
8471 /* Use wi::shwi to ensure that the constant is
8472 truncated according to the mode of OP1, then sign extended
8473 to a HOST_WIDE_INT. Using the constant directly can result
8474 in non-canonical RTL in a 64x32 cross compile. */
8475 wc
= TREE_INT_CST_LOW (treeop0
);
8477 immed_wide_int_const (wi::shwi (wc
, wmode
), wmode
);
8478 op1
= plus_constant (mode
, op1
, INTVAL (constant_part
));
8479 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8480 op1
= force_operand (op1
, target
);
8481 return REDUCE_BIT_FIELD (op1
);
8484 else if (TREE_CODE (treeop1
) == INTEGER_CST
8485 && GET_MODE_PRECISION (mode
) <= HOST_BITS_PER_WIDE_INT
8486 && TREE_CONSTANT (treeop0
))
8490 machine_mode wmode
= TYPE_MODE (TREE_TYPE (treeop0
));
8492 op0
= expand_expr (treeop0
, subtarget
, VOIDmode
,
8493 (modifier
== EXPAND_INITIALIZER
8494 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
8495 if (! CONSTANT_P (op0
))
8497 op1
= expand_expr (treeop1
, NULL_RTX
,
8498 VOIDmode
, modifier
);
8499 /* Return a PLUS if modifier says it's OK. */
8500 if (modifier
== EXPAND_SUM
8501 || modifier
== EXPAND_INITIALIZER
)
8502 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
8505 /* Use wi::shwi to ensure that the constant is
8506 truncated according to the mode of OP1, then sign extended
8507 to a HOST_WIDE_INT. Using the constant directly can result
8508 in non-canonical RTL in a 64x32 cross compile. */
8509 wc
= TREE_INT_CST_LOW (treeop1
);
8511 = immed_wide_int_const (wi::shwi (wc
, wmode
), wmode
);
8512 op0
= plus_constant (mode
, op0
, INTVAL (constant_part
));
8513 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8514 op0
= force_operand (op0
, target
);
8515 return REDUCE_BIT_FIELD (op0
);
8519 /* Use TER to expand pointer addition of a negated value
8520 as pointer subtraction. */
8521 if ((POINTER_TYPE_P (TREE_TYPE (treeop0
))
8522 || (TREE_CODE (TREE_TYPE (treeop0
)) == VECTOR_TYPE
8523 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0
)))))
8524 && TREE_CODE (treeop1
) == SSA_NAME
8525 && TYPE_MODE (TREE_TYPE (treeop0
))
8526 == TYPE_MODE (TREE_TYPE (treeop1
)))
8528 gimple
*def
= get_def_for_expr (treeop1
, NEGATE_EXPR
);
8531 treeop1
= gimple_assign_rhs1 (def
);
      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
8541 if (modifier
!= EXPAND_INITIALIZER
8542 && (modifier
!= EXPAND_SUM
|| mode
!= ptr_mode
))
8544 expand_operands (treeop0
, treeop1
,
8545 subtarget
, &op0
, &op1
, modifier
);
8546 if (op0
== const0_rtx
)
8548 if (op1
== const0_rtx
)
8553 expand_operands (treeop0
, treeop1
,
8554 subtarget
, &op0
, &op1
, modifier
);
8555 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS
, mode
, op0
, op1
));
      /* For initializers, we are allowed to return a MINUS of two
         symbolic constants.  Here we handle all cases when both operands
         are constant.  */
      /* Handle difference of two symbolic constants,
         for the sake of an initializer.  */
8564 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8565 && really_constant_p (treeop0
)
8566 && really_constant_p (treeop1
))
8568 expand_operands (treeop0
, treeop1
,
8569 NULL_RTX
, &op0
, &op1
, modifier
);
8571 /* If the last operand is a CONST_INT, use plus_constant of
8572 the negated constant. Else make the MINUS. */
8573 if (CONST_INT_P (op1
))
8574 return REDUCE_BIT_FIELD (plus_constant (mode
, op0
,
8577 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode
, op0
, op1
));
      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
8584 if (modifier
!= EXPAND_INITIALIZER
8585 && (modifier
!= EXPAND_SUM
|| mode
!= ptr_mode
))
8588 expand_operands (treeop0
, treeop1
,
8589 subtarget
, &op0
, &op1
, modifier
);
8591 /* Convert A - const to A + (-const). */
8592 if (CONST_INT_P (op1
))
8594 op1
= negate_rtx (mode
, op1
);
8595 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS
, mode
, op0
, op1
));
8600 case WIDEN_MULT_PLUS_EXPR
:
8601 case WIDEN_MULT_MINUS_EXPR
:
8602 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
8603 op2
= expand_normal (treeop2
);
8604 target
= expand_widen_pattern_expr (ops
, op0
, op1
, op2
,
8608 case WIDEN_MULT_EXPR
:
8609 /* If first operand is constant, swap them.
8610 Thus the following special case checks need only
8611 check the second operand. */
8612 if (TREE_CODE (treeop0
) == INTEGER_CST
)
8613 std::swap (treeop0
, treeop1
);
8615 /* First, check if we have a multiplication of one signed and one
8616 unsigned operand. */
8617 if (TREE_CODE (treeop1
) != INTEGER_CST
8618 && (TYPE_UNSIGNED (TREE_TYPE (treeop0
))
8619 != TYPE_UNSIGNED (TREE_TYPE (treeop1
))))
8621 machine_mode innermode
= TYPE_MODE (TREE_TYPE (treeop0
));
8622 this_optab
= usmul_widen_optab
;
8623 if (find_widening_optab_handler (this_optab
, mode
, innermode
, 0)
8624 != CODE_FOR_nothing
)
8626 if (TYPE_UNSIGNED (TREE_TYPE (treeop0
)))
8627 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
,
8630 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op1
, &op0
,
8632 /* op0 and op1 might still be constant, despite the above
8633 != INTEGER_CST check. Handle it. */
8634 if (GET_MODE (op0
) == VOIDmode
&& GET_MODE (op1
) == VOIDmode
)
8636 op0
= convert_modes (innermode
, mode
, op0
, true);
8637 op1
= convert_modes (innermode
, mode
, op1
, false);
8638 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
,
8639 target
, unsignedp
));
8644 /* Check for a multiplication with matching signedness. */
8645 else if ((TREE_CODE (treeop1
) == INTEGER_CST
8646 && int_fits_type_p (treeop1
, TREE_TYPE (treeop0
)))
8647 || (TYPE_UNSIGNED (TREE_TYPE (treeop1
))
8648 == TYPE_UNSIGNED (TREE_TYPE (treeop0
))))
8650 tree op0type
= TREE_TYPE (treeop0
);
8651 machine_mode innermode
= TYPE_MODE (op0type
);
8652 bool zextend_p
= TYPE_UNSIGNED (op0type
);
8653 optab other_optab
= zextend_p
? smul_widen_optab
: umul_widen_optab
;
8654 this_optab
= zextend_p
? umul_widen_optab
: smul_widen_optab
;
8656 if (TREE_CODE (treeop0
) != INTEGER_CST
)
8658 if (find_widening_optab_handler (this_optab
, mode
, innermode
, 0)
8659 != CODE_FOR_nothing
)
8661 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
,
8663 /* op0 and op1 might still be constant, despite the above
8664 != INTEGER_CST check. Handle it. */
8665 if (GET_MODE (op0
) == VOIDmode
&& GET_MODE (op1
) == VOIDmode
)
8668 op0
= convert_modes (innermode
, mode
, op0
, zextend_p
);
8670 = convert_modes (innermode
, mode
, op1
,
8671 TYPE_UNSIGNED (TREE_TYPE (treeop1
)));
8672 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
,
8676 temp
= expand_widening_mult (mode
, op0
, op1
, target
,
8677 unsignedp
, this_optab
);
8678 return REDUCE_BIT_FIELD (temp
);
8680 if (find_widening_optab_handler (other_optab
, mode
, innermode
, 0)
8682 && innermode
== word_mode
)
8685 op0
= expand_normal (treeop0
);
8686 if (TREE_CODE (treeop1
) == INTEGER_CST
)
8687 op1
= convert_modes (innermode
, mode
,
8688 expand_normal (treeop1
),
8689 TYPE_UNSIGNED (TREE_TYPE (treeop1
)));
8691 op1
= expand_normal (treeop1
);
8692 /* op0 and op1 might still be constant, despite the above
8693 != INTEGER_CST check. Handle it. */
8694 if (GET_MODE (op0
) == VOIDmode
&& GET_MODE (op1
) == VOIDmode
)
8695 goto widen_mult_const
;
8696 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
8697 unsignedp
, OPTAB_LIB_WIDEN
);
8698 hipart
= gen_highpart (innermode
, temp
);
8699 htem
= expand_mult_highpart_adjust (innermode
, hipart
,
8703 emit_move_insn (hipart
, htem
);
8704 return REDUCE_BIT_FIELD (temp
);
8708 treeop0
= fold_build1 (CONVERT_EXPR
, type
, treeop0
);
8709 treeop1
= fold_build1 (CONVERT_EXPR
, type
, treeop1
);
8710 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8711 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
, target
, unsignedp
));
8715 optab opt
= fma_optab
;
8716 gimple
*def0
, *def2
;
8718 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8720 if (optab_handler (fma_optab
, mode
) == CODE_FOR_nothing
)
8722 tree fn
= mathfn_built_in (TREE_TYPE (treeop0
), BUILT_IN_FMA
);
8725 gcc_assert (fn
!= NULL_TREE
);
8726 call_expr
= build_call_expr (fn
, 3, treeop0
, treeop1
, treeop2
);
8727 return expand_builtin (call_expr
, target
, subtarget
, mode
, false);
8730 def0
= get_def_for_expr (treeop0
, NEGATE_EXPR
);
8731 /* The multiplication is commutative - look at its 2nd operand
8732 if the first isn't fed by a negate. */
8735 def0
= get_def_for_expr (treeop1
, NEGATE_EXPR
);
8736 /* Swap operands if the 2nd operand is fed by a negate. */
8738 std::swap (treeop0
, treeop1
);
8740 def2
= get_def_for_expr (treeop2
, NEGATE_EXPR
);
8745 && optab_handler (fnms_optab
, mode
) != CODE_FOR_nothing
)
8748 op0
= expand_normal (gimple_assign_rhs1 (def0
));
8749 op2
= expand_normal (gimple_assign_rhs1 (def2
));
8752 && optab_handler (fnma_optab
, mode
) != CODE_FOR_nothing
)
8755 op0
= expand_normal (gimple_assign_rhs1 (def0
));
8758 && optab_handler (fms_optab
, mode
) != CODE_FOR_nothing
)
8761 op2
= expand_normal (gimple_assign_rhs1 (def2
));
8765 op0
= expand_expr (treeop0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
8767 op2
= expand_normal (treeop2
);
8768 op1
= expand_normal (treeop1
);
8770 return expand_ternary_op (TYPE_MODE (type
), opt
,
8771 op0
, op1
, op2
, target
, 0);
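
      /* Mapping used above (illustration): with a negate feeding the
         first multiplication operand the expansion uses FNMA
         (-a*b + c); with a negate feeding the addend it uses FMS
         (a*b - c); with both it uses FNMS (-a*b - c), provided the
         corresponding optab handler exists for MODE.  */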
8775 /* If this is a fixed-point operation, then we cannot use the code
8776 below because "expand_mult" doesn't support sat/no-sat fixed-point
8778 if (ALL_FIXED_POINT_MODE_P (mode
))
8781 /* If first operand is constant, swap them.
8782 Thus the following special case checks need only
8783 check the second operand. */
8784 if (TREE_CODE (treeop0
) == INTEGER_CST
)
8785 std::swap (treeop0
, treeop1
);
8787 /* Attempt to return something suitable for generating an
8788 indexed address, for machines that support that. */
8790 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
8791 && tree_fits_shwi_p (treeop1
))
8793 tree exp1
= treeop1
;
8795 op0
= expand_expr (treeop0
, subtarget
, VOIDmode
,
8799 op0
= force_operand (op0
, NULL_RTX
);
8801 op0
= copy_to_mode_reg (mode
, op0
);
8803 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode
, op0
,
8804 gen_int_mode (tree_to_shwi (exp1
),
8805 TYPE_MODE (TREE_TYPE (exp1
)))));
8808 if (modifier
== EXPAND_STACK_PARM
)
8811 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8812 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
, target
, unsignedp
));
8814 case TRUNC_DIV_EXPR
:
8815 case FLOOR_DIV_EXPR
:
8817 case ROUND_DIV_EXPR
:
8818 case EXACT_DIV_EXPR
:
8819 /* If this is a fixed-point operation, then we cannot use the code
8820 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8822 if (ALL_FIXED_POINT_MODE_P (mode
))
8825 if (modifier
== EXPAND_STACK_PARM
)
      /* Possible optimization: compute the dividend with EXPAND_SUM,
         then if the divisor is constant we can optimize the case
         where some terms of the dividend have coefficients divisible
         by it.  */
8830 expand_operands (treeop0
, treeop1
,
8831 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8832 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
8837 case MULT_HIGHPART_EXPR
:
8838 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8839 temp
= expand_mult_highpart (mode
, op0
, op1
, target
, unsignedp
);
8843 case TRUNC_MOD_EXPR
:
8844 case FLOOR_MOD_EXPR
:
8846 case ROUND_MOD_EXPR
:
8847 if (modifier
== EXPAND_STACK_PARM
)
8849 expand_operands (treeop0
, treeop1
,
8850 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8851 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
8853 case FIXED_CONVERT_EXPR
:
8854 op0
= expand_normal (treeop0
);
8855 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8856 target
= gen_reg_rtx (mode
);
8858 if ((TREE_CODE (TREE_TYPE (treeop0
)) == INTEGER_TYPE
8859 && TYPE_UNSIGNED (TREE_TYPE (treeop0
)))
8860 || (TREE_CODE (type
) == INTEGER_TYPE
&& TYPE_UNSIGNED (type
)))
8861 expand_fixed_convert (target
, op0
, 1, TYPE_SATURATING (type
));
8863 expand_fixed_convert (target
, op0
, 0, TYPE_SATURATING (type
));
8866 case FIX_TRUNC_EXPR
:
8867 op0
= expand_normal (treeop0
);
8868 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8869 target
= gen_reg_rtx (mode
);
8870 expand_fix (target
, op0
, unsignedp
);
8874 op0
= expand_normal (treeop0
);
8875 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8876 target
= gen_reg_rtx (mode
);
8877 /* expand_float can't figure out what to do if FROM has VOIDmode.
8878 So give it the correct mode. With -O, cse will optimize this. */
8879 if (GET_MODE (op0
) == VOIDmode
)
8880 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0
)),
8882 expand_float (target
, op0
,
8883 TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
8887 op0
= expand_expr (treeop0
, subtarget
,
8888 VOIDmode
, EXPAND_NORMAL
);
8889 if (modifier
== EXPAND_STACK_PARM
)
8891 temp
= expand_unop (mode
,
8892 optab_for_tree_code (NEGATE_EXPR
, type
,
8896 return REDUCE_BIT_FIELD (temp
);
8899 op0
= expand_expr (treeop0
, subtarget
,
8900 VOIDmode
, EXPAND_NORMAL
);
8901 if (modifier
== EXPAND_STACK_PARM
)
8904 /* ABS_EXPR is not valid for complex arguments. */
8905 gcc_assert (GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
8906 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
);
8908 /* Unsigned abs is simply the operand. Testing here means we don't
8909 risk generating incorrect code below. */
8910 if (TYPE_UNSIGNED (type
))
8913 return expand_abs (mode
, op0
, target
, unsignedp
,
8914 safe_from_p (target
, treeop0
, 1));
8918 target
= original_target
;
8920 || modifier
== EXPAND_STACK_PARM
8921 || (MEM_P (target
) && MEM_VOLATILE_P (target
))
8922 || GET_MODE (target
) != mode
8924 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
8925 target
= gen_reg_rtx (mode
);
8926 expand_operands (treeop0
, treeop1
,
8927 target
, &op0
, &op1
, EXPAND_NORMAL
);
      /* First try to do it with a special MIN or MAX instruction.
         If that does not win, use a conditional jump to select the proper
         value.  */
8932 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
8933 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
      /* At this point, a MEM target is no longer useful; we will get better
         code without it.  */
8941 if (! REG_P (target
))
8942 target
= gen_reg_rtx (mode
);
8944 /* If op1 was placed in target, swap op0 and op1. */
8945 if (target
!= op0
&& target
== op1
)
8946 std::swap (op0
, op1
);
8948 /* We generate better code and avoid problems with op1 mentioning
8949 target by forcing op1 into a pseudo if it isn't a constant. */
8950 if (! CONSTANT_P (op1
))
8951 op1
= force_reg (mode
, op1
);
8954 enum rtx_code comparison_code
;
8957 if (code
== MAX_EXPR
)
8958 comparison_code
= unsignedp
? GEU
: GE
;
8960 comparison_code
= unsignedp
? LEU
: LE
;
      /* Canonicalize to comparisons against 0.  */
      if (op1 == const1_rtx)
        /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
           or (a != 0 ? a : 1) for unsigned.
           For MIN we are safe converting (a <= 1 ? a : 1)
           into (a <= 0 ? a : 1)  */
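
        /* Worked check of the unsigned MAX case (illustration only): with
           unsigned A, MAX (A, 1) is A >= 1 ? A : 1.  For A == 0 both that
           and the canonicalized A != 0 ? A : 1 yield 1, and for any
           A >= 1 both yield A, so the NE-against-zero form is equivalent
           and the comparison against 0 is usually cheaper.  */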
8969 cmpop1
= const0_rtx
;
8970 if (code
== MAX_EXPR
)
8971 comparison_code
= unsignedp
? NE
: GT
;
8973 if (op1
== constm1_rtx
&& !unsignedp
)
8975 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8976 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8977 cmpop1
= const0_rtx
;
8978 if (code
== MIN_EXPR
)
8979 comparison_code
= LT
;
8982 /* Use a conditional move if possible. */
8983 if (can_conditionally_move_p (mode
))
8989 /* Try to emit the conditional move. */
8990 insn
= emit_conditional_move (target
, comparison_code
,
          /* If we could do the conditional move, emit the sequence,
             and return.  */
          rtx_insn *seq = get_insns ();

          /* Otherwise discard the sequence and fall back to code with
             branches.  */
9011 emit_move_insn (target
, op0
);
9013 lab
= gen_label_rtx ();
9014 do_compare_rtx_and_jump (target
, cmpop1
, comparison_code
,
9015 unsignedp
, mode
, NULL_RTX
, NULL
, lab
,
9018 emit_move_insn (target
, op1
);
9023 op0
= expand_expr (treeop0
, subtarget
,
9024 VOIDmode
, EXPAND_NORMAL
);
9025 if (modifier
== EXPAND_STACK_PARM
)
      /* In case we have to reduce the result to bitfield precision
         for unsigned bitfield expand this as XOR with a proper constant
         instead.  */
      if (reduce_bit_field && TYPE_UNSIGNED (type))
9032 wide_int mask
= wi::mask (TYPE_PRECISION (type
),
9033 false, GET_MODE_PRECISION (mode
));
9035 temp
= expand_binop (mode
, xor_optab
, op0
,
9036 immed_wide_int_const (mask
, mode
),
9037 target
, 1, OPTAB_LIB_WIDEN
);
9040 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
9044 /* ??? Can optimize bitwise operations with one arg constant.
9045 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
9046 and (a bitwise1 b) bitwise2 b (etc)
9047 but that is probably not worth while. */
9056 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type
))
9057 || (GET_MODE_PRECISION (TYPE_MODE (type
))
9058 == TYPE_PRECISION (type
)));
9064 /* If this is a fixed-point operation, then we cannot use the code
9065 below because "expand_shift" doesn't support sat/no-sat fixed-point
9067 if (ALL_FIXED_POINT_MODE_P (mode
))
9070 if (! safe_from_p (subtarget
, treeop1
, 1))
9072 if (modifier
== EXPAND_STACK_PARM
)
9074 op0
= expand_expr (treeop0
, subtarget
,
9075 VOIDmode
, EXPAND_NORMAL
);
      /* Left shift optimization when shifting across word_size boundary.

         If mode == GET_MODE_WIDER_MODE (word_mode), then normally there
         isn't a native instruction to support this wide-mode left shift.
         Given the scenario below:

           Type A = (Type) B << C

           | dest_high | dest_low |

         if the shift amount C causes B to be shifted across the word-size
         boundary, i.e. part of B is shifted into the high half of the
         destination register while part of B remains in the low half,
         then GCC uses the following left-shift expansion logic:

         1. Initialize dest_low to B.
         2. Initialize every bit of dest_high to the sign bit of B.
         3. Logically shift dest_low left by C bits to finalize dest_low.
            The value of dest_low before this shift is kept in a temp D.
         4. Logically shift dest_high left by C bits.
         5. Logically shift D right by (word_size - C) bits.
         6. Or the results of 4 and 5 to finalize dest_high.

         However, by checking the gimple statements, if operand B comes
         from a sign extension, then we can simplify the above expansion
         logic into:

         1. dest_high = src_low >> (word_size - C).
         2. dest_low = src_low << C.

         A single arithmetic right shift covers steps 2, 4, 5 and 6, so
         the number of steps needed drops from 6 to 2.  */
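
      /* Numeric illustration (32-bit word, shift count 20; not code the
         expander runs): for long long a = (long long) b << 20 with
         int b = 0x87654321,

           generic:     dest_low  = 0x87654321 << 20               = 0x32100000
                        dest_high = (0xffffffff << 20)
                                    | (0x87654321 >> 12)            = 0xfff87654
           simplified:  dest_low  = 0x87654321 << 20                = 0x32100000
                        dest_high = (int) 0x87654321 >> 12 (arith.) = 0xfff87654

         so the single arithmetic right shift reproduces steps 2, 4, 5
         and 6 above.  */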
9113 if (code
== LSHIFT_EXPR
9117 && mode
== GET_MODE_WIDER_MODE (word_mode
)
9118 && GET_MODE_SIZE (mode
) == 2 * GET_MODE_SIZE (word_mode
)
9119 && TREE_CONSTANT (treeop1
)
9120 && TREE_CODE (treeop0
) == SSA_NAME
)
9122 gimple
*def
= SSA_NAME_DEF_STMT (treeop0
);
9123 if (is_gimple_assign (def
)
9124 && gimple_assign_rhs_code (def
) == NOP_EXPR
)
9126 machine_mode rmode
= TYPE_MODE
9127 (TREE_TYPE (gimple_assign_rhs1 (def
)));
9129 if (GET_MODE_SIZE (rmode
) < GET_MODE_SIZE (mode
)
9130 && TREE_INT_CST_LOW (treeop1
) < GET_MODE_BITSIZE (word_mode
)
9131 && ((TREE_INT_CST_LOW (treeop1
) + GET_MODE_BITSIZE (rmode
))
9132 >= GET_MODE_BITSIZE (word_mode
)))
9134 rtx_insn
*seq
, *seq_old
;
9135 unsigned int high_off
= subreg_highpart_offset (word_mode
,
9137 rtx low
= lowpart_subreg (word_mode
, op0
, mode
);
9138 rtx dest_low
= lowpart_subreg (word_mode
, target
, mode
);
9139 rtx dest_high
= simplify_gen_subreg (word_mode
, target
,
9141 HOST_WIDE_INT ramount
= (BITS_PER_WORD
9142 - TREE_INT_CST_LOW (treeop1
));
9143 tree rshift
= build_int_cst (TREE_TYPE (treeop1
), ramount
);
9146 /* dest_high = src_low >> (word_size - C). */
9147 temp
= expand_variable_shift (RSHIFT_EXPR
, word_mode
, low
,
9148 rshift
, dest_high
, unsignedp
);
9149 if (temp
!= dest_high
)
9150 emit_move_insn (dest_high
, temp
);
9152 /* dest_low = src_low << C. */
9153 temp
= expand_variable_shift (LSHIFT_EXPR
, word_mode
, low
,
9154 treeop1
, dest_low
, unsignedp
);
9155 if (temp
!= dest_low
)
9156 emit_move_insn (dest_low
, temp
);
9162 if (have_insn_for (ASHIFT
, mode
))
9164 bool speed_p
= optimize_insn_for_speed_p ();
9166 rtx ret_old
= expand_variable_shift (code
, mode
, op0
,
9170 seq_old
= get_insns ();
9172 if (seq_cost (seq
, speed_p
)
9173 >= seq_cost (seq_old
, speed_p
))
9184 if (temp
== NULL_RTX
)
9185 temp
= expand_variable_shift (code
, mode
, op0
, treeop1
, target
,
9187 if (code
== LSHIFT_EXPR
)
9188 temp
= REDUCE_BIT_FIELD (temp
);
9192 /* Could determine the answer when only additive constants differ. Also,
9193 the addition of one can be handled by changing the condition. */
9200 case UNORDERED_EXPR
:
9209 temp
= do_store_flag (ops
,
9210 modifier
!= EXPAND_STACK_PARM
? target
: NULL_RTX
,
9211 tmode
!= VOIDmode
? tmode
: mode
);
      /* Use a compare and a jump for BLKmode comparisons, or for function
         type comparisons if have_canonicalize_funcptr_for_compare.  */
9219 || modifier
== EXPAND_STACK_PARM
9220 || ! safe_from_p (target
, treeop0
, 1)
9221 || ! safe_from_p (target
, treeop1
, 1)
9222 /* Make sure we don't have a hard reg (such as function's return
9223 value) live across basic blocks, if not optimizing. */
9224 || (!optimize
&& REG_P (target
)
9225 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
9226 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
9228 emit_move_insn (target
, const0_rtx
);
9230 rtx_code_label
*lab1
= gen_label_rtx ();
9231 jumpifnot_1 (code
, treeop0
, treeop1
, lab1
, -1);
9233 if (TYPE_PRECISION (type
) == 1 && !TYPE_UNSIGNED (type
))
9234 emit_move_insn (target
, constm1_rtx
);
9236 emit_move_insn (target
, const1_rtx
);
9242 /* Get the rtx code of the operands. */
9243 op0
= expand_normal (treeop0
);
9244 op1
= expand_normal (treeop1
);
9247 target
= gen_reg_rtx (TYPE_MODE (type
));
9249 /* If target overlaps with op1, then either we need to force
9250 op1 into a pseudo (if target also overlaps with op0),
9251 or write the complex parts in reverse order. */
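
      /* Example of the hazard (illustration only): for target
         (concat:SC (reg:SF 100) (reg:SF 101)) with op1 currently living
         in (reg:SF 100), writing the real part first would clobber op1
         before it is read, so the parts are written imaginary-first, or
         op1 is copied to a fresh pseudo when op0 overlaps as well.  */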
9252 switch (GET_CODE (target
))
9255 if (reg_overlap_mentioned_p (XEXP (target
, 0), op1
))
9257 if (reg_overlap_mentioned_p (XEXP (target
, 1), op0
))
9259 complex_expr_force_op1
:
9260 temp
= gen_reg_rtx (GET_MODE_INNER (GET_MODE (target
)));
9261 emit_move_insn (temp
, op1
);
9265 complex_expr_swap_order
:
9266 /* Move the imaginary (op1) and real (op0) parts to their
9268 write_complex_part (target
, op1
, true);
9269 write_complex_part (target
, op0
, false);
9275 temp
= adjust_address_nv (target
,
9276 GET_MODE_INNER (GET_MODE (target
)), 0);
9277 if (reg_overlap_mentioned_p (temp
, op1
))
9279 machine_mode imode
= GET_MODE_INNER (GET_MODE (target
));
9280 temp
= adjust_address_nv (target
, imode
,
9281 GET_MODE_SIZE (imode
));
9282 if (reg_overlap_mentioned_p (temp
, op0
))
9283 goto complex_expr_force_op1
;
9284 goto complex_expr_swap_order
;
9288 if (reg_overlap_mentioned_p (target
, op1
))
9290 if (reg_overlap_mentioned_p (target
, op0
))
9291 goto complex_expr_force_op1
;
9292 goto complex_expr_swap_order
;
9297 /* Move the real (op0) and imaginary (op1) parts to their location. */
9298 write_complex_part (target
, op0
, false);
9299 write_complex_part (target
, op1
, true);
9303 case WIDEN_SUM_EXPR
:
9305 tree oprnd0
= treeop0
;
9306 tree oprnd1
= treeop1
;
9308 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9309 target
= expand_widen_pattern_expr (ops
, op0
, NULL_RTX
, op1
,
9314 case REDUC_MAX_EXPR
:
9315 case REDUC_MIN_EXPR
:
9316 case REDUC_PLUS_EXPR
:
9318 op0
= expand_normal (treeop0
);
9319 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
9320 machine_mode vec_mode
= TYPE_MODE (TREE_TYPE (treeop0
));
9322 struct expand_operand ops
[2];
9323 enum insn_code icode
= optab_handler (this_optab
, vec_mode
);
9325 create_output_operand (&ops
[0], target
, mode
);
9326 create_input_operand (&ops
[1], op0
, vec_mode
);
9327 expand_insn (icode
, 2, ops
);
9328 target
= ops
[0].value
;
9329 if (GET_MODE (target
) != mode
)
9330 return gen_lowpart (tmode
, target
);
9334 case VEC_UNPACK_HI_EXPR
:
9335 case VEC_UNPACK_LO_EXPR
:
9337 op0
= expand_normal (treeop0
);
9338 temp
= expand_widen_pattern_expr (ops
, op0
, NULL_RTX
, NULL_RTX
,
9344 case VEC_UNPACK_FLOAT_HI_EXPR
:
9345 case VEC_UNPACK_FLOAT_LO_EXPR
:
9347 op0
= expand_normal (treeop0
);
9348 /* The signedness is determined from input operand. */
9349 temp
= expand_widen_pattern_expr
9350 (ops
, op0
, NULL_RTX
, NULL_RTX
,
9351 target
, TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
9357 case VEC_WIDEN_MULT_HI_EXPR
:
9358 case VEC_WIDEN_MULT_LO_EXPR
:
9359 case VEC_WIDEN_MULT_EVEN_EXPR
:
9360 case VEC_WIDEN_MULT_ODD_EXPR
:
9361 case VEC_WIDEN_LSHIFT_HI_EXPR
:
9362 case VEC_WIDEN_LSHIFT_LO_EXPR
:
9363 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9364 target
= expand_widen_pattern_expr (ops
, op0
, op1
, NULL_RTX
,
9366 gcc_assert (target
);
9369 case VEC_PACK_TRUNC_EXPR
:
9370 case VEC_PACK_SAT_EXPR
:
9371 case VEC_PACK_FIX_TRUNC_EXPR
:
9372 mode
= TYPE_MODE (TREE_TYPE (treeop0
));
9376 expand_operands (treeop0
, treeop1
, target
, &op0
, &op1
, EXPAND_NORMAL
);
9377 op2
= expand_normal (treeop2
);
9379 /* Careful here: if the target doesn't support integral vector modes,
9380 a constant selection vector could wind up smooshed into a normal
9381 integral constant. */
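
      /* Example (illustration only): on a little-endian target without
         integral vector modes, the V4QI selector { 0, 1, 2, 3 } can reach
         this point folded into the SImode constant 0x03020100, so it is
         converted back to a CONST_VECTOR before expand_vec_perm sees
         it.  */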
9382 if (CONSTANT_P (op2
) && GET_CODE (op2
) != CONST_VECTOR
)
9384 tree sel_type
= TREE_TYPE (treeop2
);
9386 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type
)),
9387 TYPE_VECTOR_SUBPARTS (sel_type
));
9388 gcc_assert (GET_MODE_CLASS (vmode
) == MODE_VECTOR_INT
);
9389 op2
= simplify_subreg (vmode
, op2
, TYPE_MODE (sel_type
), 0);
9390 gcc_assert (op2
&& GET_CODE (op2
) == CONST_VECTOR
);
9393 gcc_assert (GET_MODE_CLASS (GET_MODE (op2
)) == MODE_VECTOR_INT
);
9395 temp
= expand_vec_perm (mode
, op0
, op1
, op2
, target
);
9401 tree oprnd0
= treeop0
;
9402 tree oprnd1
= treeop1
;
9403 tree oprnd2
= treeop2
;
9406 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9407 op2
= expand_normal (oprnd2
);
9408 target
= expand_widen_pattern_expr (ops
, op0
, op1
, op2
,
9415 tree oprnd0
= treeop0
;
9416 tree oprnd1
= treeop1
;
9417 tree oprnd2
= treeop2
;
9420 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9421 op2
= expand_normal (oprnd2
);
9422 target
= expand_widen_pattern_expr (ops
, op0
, op1
, op2
,
9427 case REALIGN_LOAD_EXPR
:
9429 tree oprnd0
= treeop0
;
9430 tree oprnd1
= treeop1
;
9431 tree oprnd2
= treeop2
;
9434 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
9435 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9436 op2
= expand_normal (oprnd2
);
9437 temp
= expand_ternary_op (mode
, this_optab
, op0
, op1
, op2
,
9445 /* A COND_EXPR with its type being VOID_TYPE represents a
9446 conditional jump and is handled in
9447 expand_gimple_cond_expr. */
9448 gcc_assert (!VOID_TYPE_P (type
));
9450 /* Note that COND_EXPRs whose type is a structure or union
9451 are required to be constructed to contain assignments of
9452 a temporary variable, so that we can evaluate them here
9453 for side effect only. If type is void, we must do likewise. */
9455 gcc_assert (!TREE_ADDRESSABLE (type
)
9457 && TREE_TYPE (treeop1
) != void_type_node
9458 && TREE_TYPE (treeop2
) != void_type_node
);
9460 temp
= expand_cond_expr_using_cmove (treeop0
, treeop1
, treeop2
);
      /* If we are not to produce a result, we have no target.  Otherwise,
         if a target was specified use it; it will not be used as an
         intermediate target unless it is safe.  If no target, use a
         temporary.  */
9469 if (modifier
!= EXPAND_STACK_PARM
9471 && safe_from_p (original_target
, treeop0
, 1)
9472 && GET_MODE (original_target
) == mode
9473 && !MEM_P (original_target
))
9474 temp
= original_target
;
9476 temp
= assign_temp (type
, 0, 1);
9478 do_pending_stack_adjust ();
9480 rtx_code_label
*lab0
= gen_label_rtx ();
9481 rtx_code_label
*lab1
= gen_label_rtx ();
9482 jumpifnot (treeop0
, lab0
, -1);
9483 store_expr (treeop1
, temp
,
9484 modifier
== EXPAND_STACK_PARM
,
9487 emit_jump_insn (targetm
.gen_jump (lab1
));
9490 store_expr (treeop2
, temp
,
9491 modifier
== EXPAND_STACK_PARM
,
9500 target
= expand_vec_cond_expr (type
, treeop0
, treeop1
, treeop2
, target
);
9503 case BIT_INSERT_EXPR
:
9505 unsigned bitpos
= tree_to_uhwi (treeop2
);
9507 if (INTEGRAL_TYPE_P (TREE_TYPE (treeop1
)))
9508 bitsize
= TYPE_PRECISION (TREE_TYPE (treeop1
));
9510 bitsize
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (treeop1
)));
9511 rtx op0
= expand_normal (treeop0
);
9512 rtx op1
= expand_normal (treeop1
);
9513 rtx dst
= gen_reg_rtx (mode
);
9514 emit_move_insn (dst
, op0
);
9515 store_bit_field (dst
, bitsize
, bitpos
, 0, 0,
9516 TYPE_MODE (TREE_TYPE (treeop1
)), op1
, false);
9524 /* Here to do an ordinary binary operator. */
9526 expand_operands (treeop0
, treeop1
,
9527 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
9529 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
9531 if (modifier
== EXPAND_STACK_PARM
)
9533 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
9534 unsignedp
, OPTAB_LIB_WIDEN
);
  /* Bitwise operations do not need bitfield reduction as we expect their
     operands to be properly truncated.  */
9538 if (code
== BIT_XOR_EXPR
9539 || code
== BIT_AND_EXPR
9540 || code
== BIT_IOR_EXPR
)
9542 return REDUCE_BIT_FIELD (temp
);
#undef REDUCE_BIT_FIELD


/* Return TRUE if expression STMT is suitable for replacement.
   Never consider memory loads as replaceable, because those don't ever lead
   into constant expressions.  */
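
/* Example (illustration only): a use of _1 defined by _1 = x_2 + 3 can
   have its definition expanded at the point of use (TER), whereas
   _1 = *p_4 is a memory load and is never treated as replaceable here.  */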
stmt_is_replaceable_p (gimple *stmt)
  if (ssa_is_replaceable_p (stmt))
    /* Don't move around loads.  */
    if (!gimple_assign_single_p (stmt)
        || is_gimple_val (gimple_assign_rhs1 (stmt)))
expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
                    enum expand_modifier modifier, rtx *alt_rtl,
                    bool inner_reference_p)
9569 rtx op0
, op1
, temp
, decl_rtl
;
9572 machine_mode mode
, dmode
;
9573 enum tree_code code
= TREE_CODE (exp
);
9574 rtx subtarget
, original_target
;
9577 bool reduce_bit_field
;
9578 location_t loc
= EXPR_LOCATION (exp
);
9579 struct separate_ops ops
;
9580 tree treeop0
, treeop1
, treeop2
;
9581 tree ssa_name
= NULL_TREE
;
9584 type
= TREE_TYPE (exp
);
9585 mode
= TYPE_MODE (type
);
9586 unsignedp
= TYPE_UNSIGNED (type
);
9588 treeop0
= treeop1
= treeop2
= NULL_TREE
;
9589 if (!VL_EXP_CLASS_P (exp
))
9590 switch (TREE_CODE_LENGTH (code
))
9593 case 3: treeop2
= TREE_OPERAND (exp
, 2);
9594 case 2: treeop1
= TREE_OPERAND (exp
, 1);
9595 case 1: treeop0
= TREE_OPERAND (exp
, 0);
9605 ignore
= (target
== const0_rtx
9606 || ((CONVERT_EXPR_CODE_P (code
)
9607 || code
== COND_EXPR
|| code
== VIEW_CONVERT_EXPR
)
9608 && TREE_CODE (type
) == VOID_TYPE
));
9610 /* An operation in what may be a bit-field type needs the
9611 result to be reduced to the precision of the bit-field type,
9612 which is narrower than that of the type's mode. */
9613 reduce_bit_field
= (!ignore
9614 && INTEGRAL_TYPE_P (type
)
9615 && GET_MODE_PRECISION (mode
) > TYPE_PRECISION (type
));
9617 /* If we are going to ignore this result, we need only do something
9618 if there is a side-effect somewhere in the expression. If there
9619 is, short-circuit the most common cases here. Note that we must
9620 not call expand_expr with anything but const0_rtx in case this
9621 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9625 if (! TREE_SIDE_EFFECTS (exp
))
9628 /* Ensure we reference a volatile object even if value is ignored, but
9629 don't do this if all we are doing is taking its address. */
9630 if (TREE_THIS_VOLATILE (exp
)
9631 && TREE_CODE (exp
) != FUNCTION_DECL
9632 && mode
!= VOIDmode
&& mode
!= BLKmode
9633 && modifier
!= EXPAND_CONST_ADDRESS
)
9635 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, modifier
);
9641 if (TREE_CODE_CLASS (code
) == tcc_unary
9642 || code
== BIT_FIELD_REF
9643 || code
== COMPONENT_REF
9644 || code
== INDIRECT_REF
)
9645 return expand_expr (treeop0
, const0_rtx
, VOIDmode
,
9648 else if (TREE_CODE_CLASS (code
) == tcc_binary
9649 || TREE_CODE_CLASS (code
) == tcc_comparison
9650 || code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
9652 expand_expr (treeop0
, const0_rtx
, VOIDmode
, modifier
);
9653 expand_expr (treeop1
, const0_rtx
, VOIDmode
, modifier
);
9660 if (reduce_bit_field
&& modifier
== EXPAND_STACK_PARM
)
9663 /* Use subtarget as the target for operand 0 of a binary operation. */
9664 subtarget
= get_subtarget (target
);
9665 original_target
= target
;
9671 tree function
= decl_function_context (exp
);
9673 temp
= label_rtx (exp
);
9674 temp
= gen_rtx_LABEL_REF (Pmode
, temp
);
9676 if (function
!= current_function_decl
9678 LABEL_REF_NONLOCAL_P (temp
) = 1;
9680 temp
= gen_rtx_MEM (FUNCTION_MODE
, temp
);
9685 /* ??? ivopts calls expander, without any preparation from
9686 out-of-ssa. So fake instructions as if this was an access to the
9687 base variable. This unnecessarily allocates a pseudo, see how we can
9688 reuse it, if partition base vars have it set already. */
9689 if (!currently_expanding_to_rtl
)
9691 tree var
= SSA_NAME_VAR (exp
);
9692 if (var
&& DECL_RTL_SET_P (var
))
9693 return DECL_RTL (var
);
9694 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp
)),
9695 LAST_VIRTUAL_REGISTER
+ 1);
9698 g
= get_gimple_for_ssa_name (exp
);
9699 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9701 && modifier
== EXPAND_INITIALIZER
9702 && !SSA_NAME_IS_DEFAULT_DEF (exp
)
9703 && (optimize
|| !SSA_NAME_VAR (exp
)
9704 || DECL_IGNORED_P (SSA_NAME_VAR (exp
)))
9705 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp
)))
9706 g
= SSA_NAME_DEF_STMT (exp
);
9710 location_t saved_loc
= curr_insn_location ();
9711 location_t loc
= gimple_location (g
);
9712 if (loc
!= UNKNOWN_LOCATION
)
9713 set_curr_insn_location (loc
);
9714 ops
.code
= gimple_assign_rhs_code (g
);
9715 switch (get_gimple_rhs_class (ops
.code
))
9717 case GIMPLE_TERNARY_RHS
:
9718 ops
.op2
= gimple_assign_rhs3 (g
);
9720 case GIMPLE_BINARY_RHS
:
9721 ops
.op1
= gimple_assign_rhs2 (g
);
              /* Try to expand conditional compare.  */
9724 if (targetm
.gen_ccmp_first
)
9726 gcc_checking_assert (targetm
.gen_ccmp_next
!= NULL
);
9727 r
= expand_ccmp_expr (g
);
9732 case GIMPLE_UNARY_RHS
:
9733 ops
.op0
= gimple_assign_rhs1 (g
);
9734 ops
.type
= TREE_TYPE (gimple_assign_lhs (g
));
9736 r
= expand_expr_real_2 (&ops
, target
, tmode
, modifier
);
9738 case GIMPLE_SINGLE_RHS
:
9740 r
= expand_expr_real (gimple_assign_rhs1 (g
), target
,
9741 tmode
, modifier
, NULL
, inner_reference_p
);
9747 set_curr_insn_location (saved_loc
);
9748 if (REG_P (r
) && !REG_EXPR (r
))
9749 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp
), r
);
9754 decl_rtl
= get_rtx_for_ssa_name (ssa_name
);
9755 exp
= SSA_NAME_VAR (ssa_name
);
9756 goto expand_decl_rtl
;
9760 /* If a static var's type was incomplete when the decl was written,
9761 but the type is complete now, lay out the decl now. */
9762 if (DECL_SIZE (exp
) == 0
9763 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp
))
9764 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
9765 layout_decl (exp
, 0);
9767 /* ... fall through ... */
9771 decl_rtl
= DECL_RTL (exp
);
9773 gcc_assert (decl_rtl
);
9775 /* DECL_MODE might change when TYPE_MODE depends on attribute target
9776 settings for VECTOR_TYPE_P that might switch for the function. */
9777 if (currently_expanding_to_rtl
9778 && code
== VAR_DECL
&& MEM_P (decl_rtl
)
9779 && VECTOR_TYPE_P (type
) && exp
&& DECL_MODE (exp
) != mode
)
9780 decl_rtl
= change_address (decl_rtl
, TYPE_MODE (type
), 0);
9782 decl_rtl
= copy_rtx (decl_rtl
);
9784 /* Record writes to register variables. */
9785 if (modifier
== EXPAND_WRITE
9787 && HARD_REGISTER_P (decl_rtl
))
9788 add_to_hard_reg_set (&crtl
->asm_clobbers
,
9789 GET_MODE (decl_rtl
), REGNO (decl_rtl
));
      /* Ensure variable marked as used even if it doesn't go through
         a parser.  If it hasn't been used yet, write out an external
         definition.  */
      TREE_USED (exp) = 1;
9797 /* Show we haven't gotten RTL for this yet. */
9800 /* Variables inherited from containing functions should have
9801 been lowered by this point. */
9803 context
= decl_function_context (exp
);
9805 || SCOPE_FILE_SCOPE_P (context
)
9806 || context
== current_function_decl
9807 || TREE_STATIC (exp
)
9808 || DECL_EXTERNAL (exp
)
9809 /* ??? C++ creates functions that are not TREE_STATIC. */
9810 || TREE_CODE (exp
) == FUNCTION_DECL
);
9812 /* This is the case of an array whose size is to be determined
9813 from its initializer, while the initializer is still being parsed.
9814 ??? We aren't parsing while expanding anymore. */
9816 if (MEM_P (decl_rtl
) && REG_P (XEXP (decl_rtl
, 0)))
9817 temp
= validize_mem (decl_rtl
);
9819 /* If DECL_RTL is memory, we are in the normal case and the
9820 address is not valid, get the address into a register. */
9822 else if (MEM_P (decl_rtl
) && modifier
!= EXPAND_INITIALIZER
)
9825 *alt_rtl
= decl_rtl
;
9826 decl_rtl
= use_anchored_address (decl_rtl
);
9827 if (modifier
!= EXPAND_CONST_ADDRESS
9828 && modifier
!= EXPAND_SUM
9829 && !memory_address_addr_space_p (exp
? DECL_MODE (exp
)
9830 : GET_MODE (decl_rtl
),
9832 MEM_ADDR_SPACE (decl_rtl
)))
9833 temp
= replace_equiv_address (decl_rtl
,
9834 copy_rtx (XEXP (decl_rtl
, 0)));
9837 /* If we got something, return it. But first, set the alignment
9838 if the address is a register. */
9841 if (exp
&& MEM_P (temp
) && REG_P (XEXP (temp
, 0)))
9842 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
9848 dmode
= DECL_MODE (exp
);
9850 dmode
= TYPE_MODE (TREE_TYPE (ssa_name
));
9852 /* If the mode of DECL_RTL does not match that of the decl,
9853 there are two cases: we are dealing with a BLKmode value
9854 that is returned in a register, or we are dealing with
9855 a promoted value. In the latter case, return a SUBREG
9856 of the wanted mode, but mark it so that we know that it
9857 was already extended. */
9858 if (REG_P (decl_rtl
)
9860 && GET_MODE (decl_rtl
) != dmode
)
9864 /* Get the signedness to be used for this variable. Ensure we get
9865 the same mode we got when the variable was declared. */
9866 if (code
!= SSA_NAME
)
9867 pmode
= promote_decl_mode (exp
, &unsignedp
);
9868 else if ((g
= SSA_NAME_DEF_STMT (ssa_name
))
9869 && gimple_code (g
) == GIMPLE_CALL
9870 && !gimple_call_internal_p (g
))
9871 pmode
= promote_function_mode (type
, mode
, &unsignedp
,
9872 gimple_call_fntype (g
),
9875 pmode
= promote_ssa_mode (ssa_name
, &unsignedp
);
9876 gcc_assert (GET_MODE (decl_rtl
) == pmode
);
9878 temp
= gen_lowpart_SUBREG (mode
, decl_rtl
);
9879 SUBREG_PROMOTED_VAR_P (temp
) = 1;
9880 SUBREG_PROMOTED_SET (temp
, unsignedp
);
9887 /* Given that TYPE_PRECISION (type) is not always equal to
9888 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9889 the former to the latter according to the signedness of the
9891 temp
= immed_wide_int_const (wi::to_wide
9893 GET_MODE_PRECISION (TYPE_MODE (type
))),
9899 tree tmp
= NULL_TREE
;
9900 if (GET_MODE_CLASS (mode
) == MODE_VECTOR_INT
9901 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FLOAT
9902 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FRACT
9903 || GET_MODE_CLASS (mode
) == MODE_VECTOR_UFRACT
9904 || GET_MODE_CLASS (mode
) == MODE_VECTOR_ACCUM
9905 || GET_MODE_CLASS (mode
) == MODE_VECTOR_UACCUM
)
9906 return const_vector_from_tree (exp
);
9907 if (GET_MODE_CLASS (mode
) == MODE_INT
)
9909 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp
)))
9910 return const_scalar_mask_from_tree (exp
);
9913 tree type_for_mode
= lang_hooks
.types
.type_for_mode (mode
, 1);
9915 tmp
= fold_unary_loc (loc
, VIEW_CONVERT_EXPR
,
9916 type_for_mode
, exp
);
9921 vec
<constructor_elt
, va_gc
> *v
;
9923 vec_alloc (v
, VECTOR_CST_NELTS (exp
));
9924 for (i
= 0; i
< VECTOR_CST_NELTS (exp
); ++i
)
9925 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, VECTOR_CST_ELT (exp
, i
));
9926 tmp
= build_constructor (type
, v
);
9928 return expand_expr (tmp
, ignore
? const0_rtx
: target
,
9933 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, modifier
);
9936 /* If optimized, generate immediate CONST_DOUBLE
9937 which will be turned into memory by reload if necessary.
9939 We used to force a register so that loop.c could see it. But
9940 this does not allow gen_* patterns to perform optimizations with
9941 the constants. It also produces two insns in cases like "x = 1.0;".
9942 On most machines, floating-point constants are not permitted in
9943 many insns, so we'd end up copying it to a register in any case.
9945 Now, we do the copying in expand_binop, if appropriate. */
9946 return const_double_from_real_value (TREE_REAL_CST (exp
),
9947 TYPE_MODE (TREE_TYPE (exp
)));
9950 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp
),
9951 TYPE_MODE (TREE_TYPE (exp
)));
9954 /* Handle evaluating a complex constant in a CONCAT target. */
9955 if (original_target
&& GET_CODE (original_target
) == CONCAT
)
9957 machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
9960 rtarg
= XEXP (original_target
, 0);
9961 itarg
= XEXP (original_target
, 1);
9963 /* Move the real and imaginary parts separately. */
9964 op0
= expand_expr (TREE_REALPART (exp
), rtarg
, mode
, EXPAND_NORMAL
);
9965 op1
= expand_expr (TREE_IMAGPART (exp
), itarg
, mode
, EXPAND_NORMAL
);
9968 emit_move_insn (rtarg
, op0
);
9970 emit_move_insn (itarg
, op1
);
9972 return original_target
;
9975 /* ... fall through ... */
9978 temp
= expand_expr_constant (exp
, 1, modifier
);
9980 /* temp contains a constant address.
9981 On RISC machines where a constant address isn't valid,
9982 make some insns to get that address into a register. */
9983 if (modifier
!= EXPAND_CONST_ADDRESS
9984 && modifier
!= EXPAND_INITIALIZER
9985 && modifier
!= EXPAND_SUM
9986 && ! memory_address_addr_space_p (mode
, XEXP (temp
, 0),
9987 MEM_ADDR_SPACE (temp
)))
9988 return replace_equiv_address (temp
,
9989 copy_rtx (XEXP (temp
, 0)));
9995 rtx ret
= expand_expr_real_1 (val
, target
, tmode
, modifier
, alt_rtl
,
9998 if (!SAVE_EXPR_RESOLVED_P (exp
))
10000 /* We can indeed still hit this case, typically via builtin
10001 expanders calling save_expr immediately before expanding
10002 something. Assume this means that we only have to deal
10003 with non-BLKmode values. */
10004 gcc_assert (GET_MODE (ret
) != BLKmode
);
10006 val
= build_decl (curr_insn_location (),
10007 VAR_DECL
, NULL
, TREE_TYPE (exp
));
10008 DECL_ARTIFICIAL (val
) = 1;
10009 DECL_IGNORED_P (val
) = 1;
10011 TREE_OPERAND (exp
, 0) = treeop0
;
10012 SAVE_EXPR_RESOLVED_P (exp
) = 1;
10014 if (!CONSTANT_P (ret
))
10015 ret
= copy_to_reg (ret
);
10016 SET_DECL_RTL (val
, ret
);
10024 /* If we don't need the result, just ensure we evaluate any
10028 unsigned HOST_WIDE_INT idx
;
10031 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), idx
, value
)
10032 expand_expr (value
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
10037 return expand_constructor (exp
, target
, modifier
, false);
    case TARGET_MEM_REF:
      {
	addr_space_t as
	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	enum insn_code icode;
	unsigned int align;

	op0 = addr_for_mem_ref (exp, as, true);
	op0 = memory_address_addr_space (mode, op0, as);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);
	set_mem_addr_space (temp, as);
	align = get_object_alignment (exp);
	if (modifier != EXPAND_WRITE
	    && modifier != EXPAND_MEMORY
	    && align < GET_MODE_ALIGNMENT (mode)
	    /* If the target does not have special handling for unaligned
	       loads of mode then it can use regular moves for them.  */
	    && ((icode = optab_handler (movmisalign_optab, mode))
		!= CODE_FOR_nothing))
	  {
	    struct expand_operand ops[2];

	    /* We've already validated the memory, and we're creating a
	       new pseudo destination.  The predicates really can't fail,
	       nor can the generator.  */
	    create_output_operand (&ops[0], NULL_RTX, mode);
	    create_fixed_operand (&ops[1], temp);
	    expand_insn (icode, 2, ops);
	    temp = ops[0].value;
	  }
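
	/* Illustrative sketch (plain C, not GCC internals): when no
	   movmisalign pattern exists, an unaligned access has to go
	   through moves the target can always do.  The portable C idiom
	   with the same effect is to copy the bytes into an aligned
	   temporary instead of dereferencing a misaligned pointer.
	   Kept under #if 0 so it does not affect the build.  */
#if 0
#include <stdint.h>
#include <string.h>

/* Load a 32-bit value from a possibly misaligned address.  memcpy makes
   no alignment assumption on P; the compiler lowers it to whatever
   unaligned-capable or byte-wise moves the target supports.  */
static uint32_t
load_u32_unaligned (const void *p)
{
  uint32_t v;
  memcpy (&v, p, sizeof v);
  return v;
}
#endif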
10076 const bool reverse
= REF_REVERSE_STORAGE_ORDER (exp
);
10078 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
10079 machine_mode address_mode
;
10080 tree base
= TREE_OPERAND (exp
, 0);
10082 enum insn_code icode
;
10084 /* Handle expansion of non-aliased memory with non-BLKmode. That
10085 might end up in a register. */
10086 if (mem_ref_refers_to_non_mem_p (exp
))
10088 HOST_WIDE_INT offset
= mem_ref_offset (exp
).to_short_addr ();
10089 base
= TREE_OPERAND (base
, 0);
10092 && tree_fits_uhwi_p (TYPE_SIZE (type
))
10093 && (GET_MODE_BITSIZE (DECL_MODE (base
))
10094 == tree_to_uhwi (TYPE_SIZE (type
))))
10095 return expand_expr (build1 (VIEW_CONVERT_EXPR
, type
, base
),
10096 target
, tmode
, modifier
);
10097 if (TYPE_MODE (type
) == BLKmode
)
10099 temp
= assign_stack_temp (DECL_MODE (base
),
10100 GET_MODE_SIZE (DECL_MODE (base
)));
10101 store_expr (base
, temp
, 0, false, false);
10102 temp
= adjust_address (temp
, BLKmode
, offset
);
10103 set_mem_size (temp
, int_size_in_bytes (type
));
10106 exp
= build3 (BIT_FIELD_REF
, type
, base
, TYPE_SIZE (type
),
10107 bitsize_int (offset
* BITS_PER_UNIT
));
10108 REF_REVERSE_STORAGE_ORDER (exp
) = reverse
;
10109 return expand_expr (exp
, target
, tmode
, modifier
);
10111 address_mode
= targetm
.addr_space
.address_mode (as
);
10112 base
= TREE_OPERAND (exp
, 0);
10113 if ((def_stmt
= get_def_for_expr (base
, BIT_AND_EXPR
)))
10115 tree mask
= gimple_assign_rhs2 (def_stmt
);
10116 base
= build2 (BIT_AND_EXPR
, TREE_TYPE (base
),
10117 gimple_assign_rhs1 (def_stmt
), mask
);
10118 TREE_OPERAND (exp
, 0) = base
;
10120 align
= get_object_alignment (exp
);
10121 op0
= expand_expr (base
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
10122 op0
= memory_address_addr_space (mode
, op0
, as
);
10123 if (!integer_zerop (TREE_OPERAND (exp
, 1)))
10125 rtx off
= immed_wide_int_const (mem_ref_offset (exp
), address_mode
);
10126 op0
= simplify_gen_binary (PLUS
, address_mode
, op0
, off
);
10127 op0
= memory_address_addr_space (mode
, op0
, as
);
10129 temp
= gen_rtx_MEM (mode
, op0
);
10130 set_mem_attributes (temp
, exp
, 0);
10131 set_mem_addr_space (temp
, as
);
10132 if (TREE_THIS_VOLATILE (exp
))
10133 MEM_VOLATILE_P (temp
) = 1;
10134 if (modifier
!= EXPAND_WRITE
10135 && modifier
!= EXPAND_MEMORY
10136 && !inner_reference_p
10138 && align
< GET_MODE_ALIGNMENT (mode
))
10140 if ((icode
= optab_handler (movmisalign_optab
, mode
))
10141 != CODE_FOR_nothing
)
10143 struct expand_operand ops
[2];
10145 /* We've already validated the memory, and we're creating a
10146 new pseudo destination. The predicates really can't fail,
10147 nor can the generator. */
10148 create_output_operand (&ops
[0], NULL_RTX
, mode
);
10149 create_fixed_operand (&ops
[1], temp
);
10150 expand_insn (icode
, 2, ops
);
10151 temp
= ops
[0].value
;
10153 else if (SLOW_UNALIGNED_ACCESS (mode
, align
))
10154 temp
= extract_bit_field (temp
, GET_MODE_BITSIZE (mode
),
10155 0, TYPE_UNSIGNED (TREE_TYPE (exp
)),
10156 (modifier
== EXPAND_STACK_PARM
10157 ? NULL_RTX
: target
),
10158 mode
, mode
, false);
10161 && modifier
!= EXPAND_MEMORY
10162 && modifier
!= EXPAND_WRITE
)
10163 temp
= flip_storage_order (mode
, temp
);
10170 tree array
= treeop0
;
10171 tree index
= treeop1
;
	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY)
	  {
	    tree t = fold_read_from_constant_string (exp);

	    if (t)
	      return expand_expr (t, target, tmode, modifier);
	  }
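
	/* Illustrative sketch (plain C): a concrete instance of the fold
	   above.  Indexing a narrow-character string literal with a
	   constant is just a character constant, so no memory reference
	   needs to be expanded.  Kept under #if 0.  */
#if 0
#include <assert.h>

int
main (void)
{
  /* "foo"[2] reads the third character of the literal; with a constant
     index it can be folded to 'o' outright.  */
  char c = "foo"[2];
  assert (c == 'o');
  return 0;
}
#endif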
10189 /* If this is a constant index into a constant array,
10190 just get the value from the array. Handle both the cases when
10191 we have an explicit constructor and when our operand is a variable
10192 that was declared const. */
10194 if (modifier
!= EXPAND_CONST_ADDRESS
10195 && modifier
!= EXPAND_INITIALIZER
10196 && modifier
!= EXPAND_MEMORY
10197 && TREE_CODE (array
) == CONSTRUCTOR
10198 && ! TREE_SIDE_EFFECTS (array
)
10199 && TREE_CODE (index
) == INTEGER_CST
)
10201 unsigned HOST_WIDE_INT ix
;
10204 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array
), ix
,
10206 if (tree_int_cst_equal (field
, index
))
10208 if (!TREE_SIDE_EFFECTS (value
))
10209 return expand_expr (fold (value
), target
, tmode
, modifier
);
10214 else if (optimize
>= 1
10215 && modifier
!= EXPAND_CONST_ADDRESS
10216 && modifier
!= EXPAND_INITIALIZER
10217 && modifier
!= EXPAND_MEMORY
10218 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
10219 && TREE_CODE (index
) == INTEGER_CST
10220 && (TREE_CODE (array
) == VAR_DECL
10221 || TREE_CODE (array
) == CONST_DECL
)
10222 && (init
= ctor_for_folding (array
)) != error_mark_node
)
10224 if (init
== NULL_TREE
)
10226 tree value
= build_zero_cst (type
);
10227 if (TREE_CODE (value
) == CONSTRUCTOR
)
10229 /* If VALUE is a CONSTRUCTOR, this optimization is only
10230 useful if this doesn't store the CONSTRUCTOR into
10231 memory. If it does, it is more efficient to just
10232 load the data from the array directly. */
10233 rtx ret
= expand_constructor (value
, target
,
10235 if (ret
== NULL_RTX
)
10240 return expand_expr (value
, target
, tmode
, modifier
);
10242 else if (TREE_CODE (init
) == CONSTRUCTOR
)
10244 unsigned HOST_WIDE_INT ix
;
10247 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init
), ix
,
10249 if (tree_int_cst_equal (field
, index
))
10251 if (TREE_SIDE_EFFECTS (value
))
10254 if (TREE_CODE (value
) == CONSTRUCTOR
)
10256 /* If VALUE is a CONSTRUCTOR, this
10257 optimization is only useful if
10258 this doesn't store the CONSTRUCTOR
10259 into memory. If it does, it is more
10260 efficient to just load the data from
10261 the array directly. */
10262 rtx ret
= expand_constructor (value
, target
,
10264 if (ret
== NULL_RTX
)
10269 expand_expr (fold (value
), target
, tmode
, modifier
);
10272 else if (TREE_CODE (init
) == STRING_CST
)
10274 tree low_bound
= array_ref_low_bound (exp
);
10275 tree index1
= fold_convert_loc (loc
, sizetype
, treeop1
);
		/* Optimize the special case of a zero lower bound.

		   We convert the lower bound to sizetype to avoid problems
		   with constant folding.  E.g. suppose the lower bound is
		   1 and its mode is QI.  Without the conversion
		      (ARRAY + (INDEX - (unsigned char)1))
		   becomes
		      (ARRAY + (-(unsigned char)1) + INDEX)
		   which becomes
		      (ARRAY + 255 + INDEX).  Oops!  */
		if (!integer_zerop (low_bound))
		  index1 = size_diffop_loc (loc, index1,
					    fold_convert_loc (loc, sizetype,
							      low_bound));

		if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
		  {
		    tree type = TREE_TYPE (TREE_TYPE (init));
		    machine_mode mode = TYPE_MODE (type);

		    if (GET_MODE_CLASS (mode) == MODE_INT
			&& GET_MODE_SIZE (mode) == 1)
		      return gen_int_mode (TREE_STRING_POINTER (init)
					   [TREE_INT_CST_LOW (index1)],
					   mode);
		  }

	goto normal_inner_ref;
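
      /* Illustrative sketch (plain C): the "Oops!" in the comment above
	 comes from narrow unsigned arithmetic.  Negating a QImode 1
	 yields 255, not -1, so the bias has to be applied after widening
	 to sizetype.  Kept under #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned char low_bound = 1;

  /* Negating and truncating back to the narrow unsigned type wraps...  */
  unsigned char neg_narrow = (unsigned char) -low_bound;   /* 255 */

  /* ...while widening first (the sizetype conversion) keeps -1.  */
  long neg_wide = -(long) low_bound;                        /* -1  */

  printf ("narrow: %u  wide: %ld\n", (unsigned) neg_narrow, neg_wide);
  return 0;
}
#endif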
10308 case COMPONENT_REF
:
10309 /* If the operand is a CONSTRUCTOR, we can just extract the
10310 appropriate field if it is present. */
10311 if (TREE_CODE (treeop0
) == CONSTRUCTOR
)
10313 unsigned HOST_WIDE_INT idx
;
10316 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0
),
10318 if (field
== treeop1
10319 /* We can normally use the value of the field in the
10320 CONSTRUCTOR. However, if this is a bitfield in
10321 an integral mode that we can fit in a HOST_WIDE_INT,
10322 we must mask only the number of bits in the bitfield,
10323 since this is done implicitly by the constructor. If
10324 the bitfield does not meet either of those conditions,
10325 we can't do this optimization. */
10326 && (! DECL_BIT_FIELD (field
)
10327 || ((GET_MODE_CLASS (DECL_MODE (field
)) == MODE_INT
)
10328 && (GET_MODE_PRECISION (DECL_MODE (field
))
10329 <= HOST_BITS_PER_WIDE_INT
))))
10331 if (DECL_BIT_FIELD (field
)
10332 && modifier
== EXPAND_STACK_PARM
)
10334 op0
= expand_expr (value
, target
, tmode
, modifier
);
10335 if (DECL_BIT_FIELD (field
))
10337 HOST_WIDE_INT bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
));
10338 machine_mode imode
= TYPE_MODE (TREE_TYPE (field
));
10340 if (TYPE_UNSIGNED (TREE_TYPE (field
)))
10342 op1
= gen_int_mode (((HOST_WIDE_INT
) 1 << bitsize
) - 1,
10344 op0
= expand_and (imode
, op0
, op1
, target
);
10348 int count
= GET_MODE_PRECISION (imode
) - bitsize
;
10350 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
10352 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
10360 goto normal_inner_ref
;
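
      /* Illustrative sketch (plain C, not compiler code): the constructor
	 path above either masks an unsigned bit-field down to BITSIZE bits
	 or sign-extends a signed one with a left/right shift pair.  The
	 same arithmetic is shown below; it assumes bitsize is smaller than
	 the type width and relies on GCC's arithmetic right shift of
	 signed values.  Kept under #if 0.  */
#if 0
#include <stdint.h>

/* Keep only the low BITSIZE bits of VAL (unsigned field).  */
static uint64_t
mask_bitfield (uint64_t val, int bitsize)
{
  return val & ((UINT64_C (1) << bitsize) - 1);
}

/* Sign-extend the low BITSIZE bits of VAL (signed field): shift the
   field up to the high-order end, then arithmetic-shift it back.  */
static int64_t
sext_bitfield (uint64_t val, int bitsize)
{
  int count = 64 - bitsize;
  return (int64_t) (val << count) >> count;
}
#endif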
10362 case BIT_FIELD_REF
:
10363 case ARRAY_RANGE_REF
:
10366 machine_mode mode1
, mode2
;
10367 HOST_WIDE_INT bitsize
, bitpos
;
10369 int reversep
, volatilep
= 0, must_force_mem
;
10371 = get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode1
,
10372 &unsignedp
, &reversep
, &volatilep
, true);
10373 rtx orig_op0
, memloc
;
10374 bool clear_mem_expr
= false;
10376 /* If we got back the original object, something is wrong. Perhaps
10377 we are evaluating an expression too early. In any event, don't
10378 infinitely recurse. */
10379 gcc_assert (tem
!= exp
);
10381 /* If TEM's type is a union of variable size, pass TARGET to the inner
10382 computation, since it will need a temporary and TARGET is known
10383 to have to do. This occurs in unchecked conversion in Ada. */
10385 = expand_expr_real (tem
,
10386 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
10387 && COMPLETE_TYPE_P (TREE_TYPE (tem
))
10388 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
10390 && modifier
!= EXPAND_STACK_PARM
10391 ? target
: NULL_RTX
),
10393 modifier
== EXPAND_SUM
? EXPAND_NORMAL
: modifier
,
10396 /* If the field has a mode, we want to access it in the
10397 field's mode, not the computed mode.
10398 If a MEM has VOIDmode (external with incomplete type),
10399 use BLKmode for it instead. */
10402 if (mode1
!= VOIDmode
)
10403 op0
= adjust_address (op0
, mode1
, 0);
10404 else if (GET_MODE (op0
) == VOIDmode
)
10405 op0
= adjust_address (op0
, BLKmode
, 0);
10409 = CONSTANT_P (op0
) ? TYPE_MODE (TREE_TYPE (tem
)) : GET_MODE (op0
);
10411 /* If we have either an offset, a BLKmode result, or a reference
10412 outside the underlying object, we must force it to memory.
10413 Such a case can occur in Ada if we have unchecked conversion
10414 of an expression from a scalar type to an aggregate type or
10415 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10416 passed a partially uninitialized object or a view-conversion
10417 to a larger size. */
10418 must_force_mem
= (offset
10419 || mode1
== BLKmode
10420 || bitpos
+ bitsize
> GET_MODE_BITSIZE (mode2
));
10422 /* Handle CONCAT first. */
10423 if (GET_CODE (op0
) == CONCAT
&& !must_force_mem
)
10426 && bitsize
== GET_MODE_BITSIZE (GET_MODE (op0
)))
10429 op0
= flip_storage_order (GET_MODE (op0
), op0
);
10433 && bitsize
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0)))
10436 op0
= XEXP (op0
, 0);
10437 mode2
= GET_MODE (op0
);
10439 else if (bitpos
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0)))
10440 && bitsize
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 1)))
10444 op0
= XEXP (op0
, 1);
10446 mode2
= GET_MODE (op0
);
10449 /* Otherwise force into memory. */
10450 must_force_mem
= 1;
10453 /* If this is a constant, put it in a register if it is a legitimate
10454 constant and we don't need a memory reference. */
10455 if (CONSTANT_P (op0
)
10456 && mode2
!= BLKmode
10457 && targetm
.legitimate_constant_p (mode2
, op0
)
10458 && !must_force_mem
)
10459 op0
= force_reg (mode2
, op0
);
10461 /* Otherwise, if this is a constant, try to force it to the constant
10462 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10463 is a legitimate constant. */
10464 else if (CONSTANT_P (op0
) && (memloc
= force_const_mem (mode2
, op0
)))
10465 op0
= validize_mem (memloc
);
10467 /* Otherwise, if this is a constant or the object is not in memory
10468 and need be, put it there. */
10469 else if (CONSTANT_P (op0
) || (!MEM_P (op0
) && must_force_mem
))
10471 memloc
= assign_temp (TREE_TYPE (tem
), 1, 1);
10472 emit_move_insn (memloc
, op0
);
10474 clear_mem_expr
= true;
10479 machine_mode address_mode
;
10480 rtx offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
,
10483 gcc_assert (MEM_P (op0
));
10485 address_mode
= get_address_mode (op0
);
10486 if (GET_MODE (offset_rtx
) != address_mode
)
10488 /* We cannot be sure that the RTL in offset_rtx is valid outside
10489 of a memory address context, so force it into a register
10490 before attempting to convert it to the desired mode. */
10491 offset_rtx
= force_operand (offset_rtx
, NULL_RTX
);
10492 offset_rtx
= convert_to_mode (address_mode
, offset_rtx
, 0);
10495 /* See the comment in expand_assignment for the rationale. */
10496 if (mode1
!= VOIDmode
10499 && (bitpos
% bitsize
) == 0
10500 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
10501 && MEM_ALIGN (op0
) >= GET_MODE_ALIGNMENT (mode1
))
10503 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
10507 op0
= offset_address (op0
, offset_rtx
,
10508 highest_pow2_factor (offset
));
10511 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10512 record its alignment as BIGGEST_ALIGNMENT. */
10513 if (MEM_P (op0
) && bitpos
== 0 && offset
!= 0
10514 && is_aligning_offset (offset
, tem
))
10515 set_mem_align (op0
, BIGGEST_ALIGNMENT
);
10517 /* Don't forget about volatility even if this is a bitfield. */
10518 if (MEM_P (op0
) && volatilep
&& ! MEM_VOLATILE_P (op0
))
10520 if (op0
== orig_op0
)
10521 op0
= copy_rtx (op0
);
10523 MEM_VOLATILE_P (op0
) = 1;
10526 /* In cases where an aligned union has an unaligned object
10527 as a field, we might be extracting a BLKmode value from
10528 an integer-mode (e.g., SImode) object. Handle this case
10529 by doing the extract into an object as wide as the field
10530 (which we know to be the width of a basic mode), then
10531 storing into memory, and changing the mode to BLKmode. */
10532 if (mode1
== VOIDmode
10533 || REG_P (op0
) || GET_CODE (op0
) == SUBREG
10534 || (mode1
!= BLKmode
&& ! direct_load
[(int) mode1
]
10535 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
10536 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
10537 && modifier
!= EXPAND_CONST_ADDRESS
10538 && modifier
!= EXPAND_INITIALIZER
10539 && modifier
!= EXPAND_MEMORY
)
10540 /* If the bitfield is volatile and the bitsize
10541 is narrower than the access size of the bitfield,
10542 we need to extract bitfields from the access. */
10543 || (volatilep
&& TREE_CODE (exp
) == COMPONENT_REF
10544 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp
, 1))
10545 && mode1
!= BLKmode
10546 && bitsize
< GET_MODE_SIZE (mode1
) * BITS_PER_UNIT
)
10547 /* If the field isn't aligned enough to fetch as a memref,
10548 fetch it as a bit field. */
10549 || (mode1
!= BLKmode
10550 && (((TYPE_ALIGN (TREE_TYPE (tem
)) < GET_MODE_ALIGNMENT (mode
)
10551 || (bitpos
% GET_MODE_ALIGNMENT (mode
) != 0)
10553 && (MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (mode1
)
10554 || (bitpos
% GET_MODE_ALIGNMENT (mode1
) != 0))))
10555 && modifier
!= EXPAND_MEMORY
10556 && ((modifier
== EXPAND_CONST_ADDRESS
10557 || modifier
== EXPAND_INITIALIZER
)
10559 : SLOW_UNALIGNED_ACCESS (mode1
, MEM_ALIGN (op0
))))
10560 || (bitpos
% BITS_PER_UNIT
!= 0)))
10561 /* If the type and the field are a constant size and the
10562 size of the type isn't the same size as the bitfield,
10563 we must use bitfield operations. */
10565 && TYPE_SIZE (TREE_TYPE (exp
))
10566 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
10567 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)),
10570 machine_mode ext_mode
= mode
;
10572 if (ext_mode
== BLKmode
10573 && ! (target
!= 0 && MEM_P (op0
)
10575 && bitpos
% BITS_PER_UNIT
== 0))
10576 ext_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
10578 if (ext_mode
== BLKmode
)
10581 target
= assign_temp (type
, 1, 1);
10583 /* ??? Unlike the similar test a few lines below, this one is
10584 very likely obsolete. */
10588 /* In this case, BITPOS must start at a byte boundary and
10589 TARGET, if specified, must be a MEM. */
10590 gcc_assert (MEM_P (op0
)
10591 && (!target
|| MEM_P (target
))
10592 && !(bitpos
% BITS_PER_UNIT
));
10594 emit_block_move (target
,
10595 adjust_address (op0
, VOIDmode
,
10596 bitpos
/ BITS_PER_UNIT
),
10597 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
10599 (modifier
== EXPAND_STACK_PARM
10600 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
10605 /* If we have nothing to extract, the result will be 0 for targets
10606 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10607 return 0 for the sake of consistency, as reading a zero-sized
10608 bitfield is valid in Ada and the value is fully specified. */
10612 op0
= validize_mem (op0
);
10614 if (MEM_P (op0
) && REG_P (XEXP (op0
, 0)))
10615 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
10617 /* If the result has a record type and the extraction is done in
10618 an integral mode, then the field may be not aligned on a byte
10619 boundary; in this case, if it has reverse storage order, it
10620 needs to be extracted as a scalar field with reverse storage
10621 order and put back into memory order afterwards. */
10622 if (TREE_CODE (type
) == RECORD_TYPE
10623 && GET_MODE_CLASS (ext_mode
) == MODE_INT
)
10624 reversep
= TYPE_REVERSE_STORAGE_ORDER (type
);
10626 op0
= extract_bit_field (op0
, bitsize
, bitpos
, unsignedp
,
10627 (modifier
== EXPAND_STACK_PARM
10628 ? NULL_RTX
: target
),
10629 ext_mode
, ext_mode
, reversep
);
10631 /* If the result has a record type and the mode of OP0 is an
10632 integral mode then, if BITSIZE is narrower than this mode
10633 and this is for big-endian data, we must put the field
10634 into the high-order bits. And we must also put it back
10635 into memory order if it has been previously reversed. */
10636 if (TREE_CODE (type
) == RECORD_TYPE
10637 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
)
10639 HOST_WIDE_INT size
= GET_MODE_BITSIZE (GET_MODE (op0
));
10642 && reversep
? !BYTES_BIG_ENDIAN
: BYTES_BIG_ENDIAN
)
10643 op0
= expand_shift (LSHIFT_EXPR
, GET_MODE (op0
), op0
,
10644 size
- bitsize
, op0
, 1);
10647 op0
= flip_storage_order (GET_MODE (op0
), op0
);
10650 /* If the result type is BLKmode, store the data into a temporary
10651 of the appropriate type, but with the mode corresponding to the
10652 mode for the data we have (op0's mode). */
10653 if (mode
== BLKmode
)
10656 = assign_stack_temp_for_type (ext_mode
,
10657 GET_MODE_BITSIZE (ext_mode
),
10659 emit_move_insn (new_rtx
, op0
);
10660 op0
= copy_rtx (new_rtx
);
10661 PUT_MODE (op0
, BLKmode
);
10667 /* If the result is BLKmode, use that to access the object
10669 if (mode
== BLKmode
)
10672 /* Get a reference to just this component. */
10673 if (modifier
== EXPAND_CONST_ADDRESS
10674 || modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
10675 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
10677 op0
= adjust_address (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
10679 if (op0
== orig_op0
)
10680 op0
= copy_rtx (op0
);
10682 /* Don't set memory attributes if the base expression is
10683 SSA_NAME that got expanded as a MEM. In that case, we should
10684 just honor its original memory attributes. */
10685 if (TREE_CODE (tem
) != SSA_NAME
|| !MEM_P (orig_op0
))
10686 set_mem_attributes (op0
, exp
, 0);
10688 if (REG_P (XEXP (op0
, 0)))
10689 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
10691 /* If op0 is a temporary because the original expressions was forced
10692 to memory, clear MEM_EXPR so that the original expression cannot
10693 be marked as addressable through MEM_EXPR of the temporary. */
10694 if (clear_mem_expr
)
10695 set_mem_expr (op0
, NULL_TREE
);
10697 MEM_VOLATILE_P (op0
) |= volatilep
;
10700 && modifier
!= EXPAND_MEMORY
10701 && modifier
!= EXPAND_WRITE
)
10702 op0
= flip_storage_order (mode1
, op0
);
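
	  /* Illustrative sketch (plain C): flip_storage_order puts a
	     scalar read with reverse storage order back into memory
	     order, i.e. it byte-swaps the value.  An equivalent
	     shift-based byte swap, with no compiler builtins assumed,
	     is shown below.  Kept under #if 0.  */
#if 0
#include <stdint.h>

/* Reverse the byte order of a 32-bit value, the scalar analogue of
   flipping a reverse-storage-order field back to memory order.  */
static uint32_t
byte_swap_32 (uint32_t x)
{
  return ((x & 0x000000ffu) << 24)
	 | ((x & 0x0000ff00u) << 8)
	 | ((x & 0x00ff0000u) >> 8)
	 | ((x & 0xff000000u) >> 24);
}
#endif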
10704 if (mode
== mode1
|| mode1
== BLKmode
|| mode1
== tmode
10705 || modifier
== EXPAND_CONST_ADDRESS
10706 || modifier
== EXPAND_INITIALIZER
)
10710 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
10712 convert_move (target
, op0
, unsignedp
);
10717 return expand_expr (OBJ_TYPE_REF_EXPR (exp
), target
, tmode
, modifier
);
10720 /* All valid uses of __builtin_va_arg_pack () are removed during
10722 if (CALL_EXPR_VA_ARG_PACK (exp
))
10723 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
10725 tree fndecl
= get_callee_fndecl (exp
), attr
;
10728 && (attr
= lookup_attribute ("error",
10729 DECL_ATTRIBUTES (fndecl
))) != NULL
)
10730 error ("%Kcall to %qs declared with attribute error: %s",
10731 exp
, identifier_to_locale (lang_hooks
.decl_printable_name (fndecl
, 1)),
10732 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
))));
10734 && (attr
= lookup_attribute ("warning",
10735 DECL_ATTRIBUTES (fndecl
))) != NULL
)
10736 warning_at (tree_nonartificial_location (exp
),
10737 0, "%Kcall to %qs declared with attribute warning: %s",
10738 exp
, identifier_to_locale (lang_hooks
.decl_printable_name (fndecl
, 1)),
10739 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr
))));
10741 /* Check for a built-in function. */
10742 if (fndecl
&& DECL_BUILT_IN (fndecl
))
10744 gcc_assert (DECL_BUILT_IN_CLASS (fndecl
) != BUILT_IN_FRONTEND
);
10745 if (CALL_WITH_BOUNDS_P (exp
))
10746 return expand_builtin_with_bounds (exp
, target
, subtarget
,
10749 return expand_builtin (exp
, target
, subtarget
, tmode
, ignore
);
10752 return expand_call (exp
, target
, ignore
);
10754 case VIEW_CONVERT_EXPR
:
10757 /* If we are converting to BLKmode, try to avoid an intermediate
10758 temporary by fetching an inner memory reference. */
10759 if (mode
== BLKmode
10760 && TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
10761 && TYPE_MODE (TREE_TYPE (treeop0
)) != BLKmode
10762 && handled_component_p (treeop0
))
10764 machine_mode mode1
;
10765 HOST_WIDE_INT bitsize
, bitpos
;
10767 int unsignedp
, reversep
, volatilep
= 0;
10769 = get_inner_reference (treeop0
, &bitsize
, &bitpos
, &offset
, &mode1
,
10770 &unsignedp
, &reversep
, &volatilep
, true);
10773 /* ??? We should work harder and deal with non-zero offsets. */
10775 && (bitpos
% BITS_PER_UNIT
) == 0
10778 && compare_tree_int (TYPE_SIZE (type
), bitsize
) == 0)
10780 /* See the normal_inner_ref case for the rationale. */
10782 = expand_expr_real (tem
,
10783 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
10784 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
10786 && modifier
!= EXPAND_STACK_PARM
10787 ? target
: NULL_RTX
),
10789 modifier
== EXPAND_SUM
? EXPAND_NORMAL
: modifier
,
10792 if (MEM_P (orig_op0
))
10796 /* Get a reference to just this component. */
10797 if (modifier
== EXPAND_CONST_ADDRESS
10798 || modifier
== EXPAND_SUM
10799 || modifier
== EXPAND_INITIALIZER
)
10800 op0
= adjust_address_nv (op0
, mode
, bitpos
/ BITS_PER_UNIT
);
10802 op0
= adjust_address (op0
, mode
, bitpos
/ BITS_PER_UNIT
);
10804 if (op0
== orig_op0
)
10805 op0
= copy_rtx (op0
);
10807 set_mem_attributes (op0
, treeop0
, 0);
10808 if (REG_P (XEXP (op0
, 0)))
10809 mark_reg_pointer (XEXP (op0
, 0), MEM_ALIGN (op0
));
10811 MEM_VOLATILE_P (op0
) |= volatilep
;
10817 op0
= expand_expr_real (treeop0
, NULL_RTX
, VOIDmode
, modifier
,
10818 NULL
, inner_reference_p
);
10820 /* If the input and output modes are both the same, we are done. */
10821 if (mode
== GET_MODE (op0
))
10823 /* If neither mode is BLKmode, and both modes are the same size
10824 then we can use gen_lowpart. */
10825 else if (mode
!= BLKmode
&& GET_MODE (op0
) != BLKmode
10826 && (GET_MODE_PRECISION (mode
)
10827 == GET_MODE_PRECISION (GET_MODE (op0
)))
10828 && !COMPLEX_MODE_P (GET_MODE (op0
)))
10830 if (GET_CODE (op0
) == SUBREG
)
10831 op0
= force_reg (GET_MODE (op0
), op0
);
10832 temp
= gen_lowpart_common (mode
, op0
);
10837 if (!REG_P (op0
) && !MEM_P (op0
))
10838 op0
= force_reg (GET_MODE (op0
), op0
);
10839 op0
= gen_lowpart (mode
, op0
);
10842 /* If both types are integral, convert from one mode to the other. */
10843 else if (INTEGRAL_TYPE_P (type
) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0
)))
10844 op0
= convert_modes (mode
, GET_MODE (op0
), op0
,
10845 TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
10846 /* If the output type is a bit-field type, do an extraction. */
10847 else if (reduce_bit_field
)
10848 return extract_bit_field (op0
, TYPE_PRECISION (type
), 0,
10849 TYPE_UNSIGNED (type
), NULL_RTX
,
10850 mode
, mode
, false);
10851 /* As a last resort, spill op0 to memory, and reload it in a
10853 else if (!MEM_P (op0
))
10855 /* If the operand is not a MEM, force it into memory. Since we
10856 are going to be changing the mode of the MEM, don't call
10857 force_const_mem for constants because we don't allow pool
10858 constants to change mode. */
10859 tree inner_type
= TREE_TYPE (treeop0
);
10861 gcc_assert (!TREE_ADDRESSABLE (exp
));
10863 if (target
== 0 || GET_MODE (target
) != TYPE_MODE (inner_type
))
10865 = assign_stack_temp_for_type
10866 (TYPE_MODE (inner_type
),
10867 GET_MODE_SIZE (TYPE_MODE (inner_type
)), inner_type
);
10869 emit_move_insn (target
, op0
);
10873 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10874 output type is such that the operand is known to be aligned, indicate
10875 that it is. Otherwise, we need only be concerned about alignment for
10876 non-BLKmode results. */
10879 enum insn_code icode
;
10881 if (TYPE_ALIGN_OK (type
))
10883 /* ??? Copying the MEM without substantially changing it might
10884 run afoul of the code handling volatile memory references in
10885 store_expr, which assumes that TARGET is returned unmodified
10886 if it has been used. */
10887 op0
= copy_rtx (op0
);
10888 set_mem_align (op0
, MAX (MEM_ALIGN (op0
), TYPE_ALIGN (type
)));
10890 else if (modifier
!= EXPAND_WRITE
10891 && modifier
!= EXPAND_MEMORY
10892 && !inner_reference_p
10894 && MEM_ALIGN (op0
) < GET_MODE_ALIGNMENT (mode
))
10896 /* If the target does have special handling for unaligned
10897 loads of mode then use them. */
10898 if ((icode
= optab_handler (movmisalign_optab
, mode
))
10899 != CODE_FOR_nothing
)
10903 op0
= adjust_address (op0
, mode
, 0);
10904 /* We've already validated the memory, and we're creating a
10905 new pseudo destination. The predicates really can't
10907 reg
= gen_reg_rtx (mode
);
10909 /* Nor can the insn generator. */
10910 rtx_insn
*insn
= GEN_FCN (icode
) (reg
, op0
);
10914 else if (STRICT_ALIGNMENT
)
10916 tree inner_type
= TREE_TYPE (treeop0
);
10917 HOST_WIDE_INT temp_size
10918 = MAX (int_size_in_bytes (inner_type
),
10919 (HOST_WIDE_INT
) GET_MODE_SIZE (mode
));
10921 = assign_stack_temp_for_type (mode
, temp_size
, type
);
10922 rtx new_with_op0_mode
10923 = adjust_address (new_rtx
, GET_MODE (op0
), 0);
10925 gcc_assert (!TREE_ADDRESSABLE (exp
));
10927 if (GET_MODE (op0
) == BLKmode
)
10928 emit_block_move (new_with_op0_mode
, op0
,
10929 GEN_INT (GET_MODE_SIZE (mode
)),
10930 (modifier
== EXPAND_STACK_PARM
10931 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
10933 emit_move_insn (new_with_op0_mode
, op0
);
10939 op0
= adjust_address (op0
, mode
, 0);
10946 tree lhs
= treeop0
;
10947 tree rhs
= treeop1
;
10948 gcc_assert (ignore
);
      /* Check for |= or &= of a bitfield of size one into another bitfield
	 of size 1.  In this case, (unless we need the result of the
	 assignment) we can do this more efficiently with a
	 test followed by an assignment, if necessary.

	 ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	 things change so we do, this code should be enhanced to
	 support it.  */
10958 if (TREE_CODE (lhs
) == COMPONENT_REF
10959 && (TREE_CODE (rhs
) == BIT_IOR_EXPR
10960 || TREE_CODE (rhs
) == BIT_AND_EXPR
)
10961 && TREE_OPERAND (rhs
, 0) == lhs
10962 && TREE_CODE (TREE_OPERAND (rhs
, 1)) == COMPONENT_REF
10963 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs
, 1)))
10964 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs
, 1), 1))))
10966 rtx_code_label
*label
= gen_label_rtx ();
10967 int value
= TREE_CODE (rhs
) == BIT_IOR_EXPR
;
10968 do_jump (TREE_OPERAND (rhs
, 1),
10970 value
? 0 : label
, -1);
10971 expand_assignment (lhs
, build_int_cst (TREE_TYPE (rhs
), value
),
10973 do_pending_stack_adjust ();
10974 emit_label (label
);
10978 expand_assignment (lhs
, rhs
, false);
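
      /* Illustrative sketch (plain C): the transformation above replaces
	 a read-modify-write of a one-bit field by a test of the source
	 bit followed, only when needed, by a store of the known constant.
	 The struct and field names are made up for the example.  Kept
	 under #if 0.  */
#if 0
struct flags { unsigned a : 1; unsigned b : 1; };

/* Straightforward form: load A, OR in B, store A.  */
static void
or_direct (struct flags *f)
{
  f->a |= f->b;
}

/* Rewritten form: if B is set, store the constant 1 into A.  For
   BIT_IOR the store is skipped when B is clear; for BIT_AND the
   constant would be 0 and the test inverted.  */
static void
or_rewritten (struct flags *f)
{
  if (f->b)
    f->a = 1;
}
#endif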
10983 return expand_expr_addr_expr (exp
, target
, tmode
, modifier
);
10985 case REALPART_EXPR
:
10986 op0
= expand_normal (treeop0
);
10987 return read_complex_part (op0
, false);
10989 case IMAGPART_EXPR
:
10990 op0
= expand_normal (treeop0
);
10991 return read_complex_part (op0
, true);
10998 /* Expanded in cfgexpand.c. */
10999 gcc_unreachable ();
11001 case TRY_CATCH_EXPR
:
11003 case EH_FILTER_EXPR
:
11004 case TRY_FINALLY_EXPR
:
11005 /* Lowered by tree-eh.c. */
11006 gcc_unreachable ();
11008 case WITH_CLEANUP_EXPR
:
11009 case CLEANUP_POINT_EXPR
:
11011 case CASE_LABEL_EXPR
:
11016 case COMPOUND_EXPR
:
11017 case PREINCREMENT_EXPR
:
11018 case PREDECREMENT_EXPR
:
11019 case POSTINCREMENT_EXPR
:
11020 case POSTDECREMENT_EXPR
:
11023 case COMPOUND_LITERAL_EXPR
:
11024 /* Lowered by gimplify.c. */
11025 gcc_unreachable ();
11028 /* Function descriptors are not valid except for as
11029 initialization constants, and should not be expanded. */
11030 gcc_unreachable ();
11032 case WITH_SIZE_EXPR
:
11033 /* WITH_SIZE_EXPR expands to its first argument. The caller should
11034 have pulled out the size to use in whatever context it needed. */
11035 return expand_expr_real (treeop0
, original_target
, tmode
,
11036 modifier
, alt_rtl
, inner_reference_p
);
11039 return expand_expr_real_2 (&ops
, target
, tmode
, modifier
);
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type.  */
  if (CONST_INT_P (exp))
    {
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      machine_mode mode = GET_MODE (exp);
      rtx mask = immed_wide_int_const
	(wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
      return expand_and (mode, exp, mask, target);
    }
  else
    {
      int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
			  exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
			   exp, count, target, 0);
    }
}
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (const_tree offset, const_tree exp)
{
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
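
/* Illustrative sketch (plain C): is_aligning_offset recognizes offsets of
   the form (-address) & (ALIGN - 1), the classic way to round an address
   up to a power-of-two alignment.  The helper below computes the same
   quantity; going through uintptr_t is an assumption of the sketch, not
   of the function above.  Kept under #if 0.  */
#if 0
#include <stdint.h>

/* Number of bytes to add to P so that P + result is ALIGN-aligned;
   ALIGN must be a power of two.  This is exactly the BIT_AND_EXPR of a
   NEGATE_EXPR shape that is_aligning_offset looks for.  */
static uintptr_t
aligning_offset (const void *p, uintptr_t align)
{
  return (- (uintptr_t) p) & (align - 1);
}

/* Usage: char *aligned = buf + aligning_offset (buf, 16);  */
#endif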
11112 /* Return the tree node if an ARG corresponds to a string constant or zero
11113 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
11114 in bytes within the string that ARG is accessing. The type of the
11115 offset will be `sizetype'. */
11118 string_constant (tree arg
, tree
*ptr_offset
)
11120 tree array
, offset
, lower_bound
;
11123 if (TREE_CODE (arg
) == ADDR_EXPR
)
11125 if (TREE_CODE (TREE_OPERAND (arg
, 0)) == STRING_CST
)
11127 *ptr_offset
= size_zero_node
;
11128 return TREE_OPERAND (arg
, 0);
11130 else if (TREE_CODE (TREE_OPERAND (arg
, 0)) == VAR_DECL
)
11132 array
= TREE_OPERAND (arg
, 0);
11133 offset
= size_zero_node
;
11135 else if (TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
)
11137 array
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
11138 offset
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 1);
11139 if (TREE_CODE (array
) != STRING_CST
11140 && TREE_CODE (array
) != VAR_DECL
)
11143 /* Check if the array has a nonzero lower bound. */
11144 lower_bound
= array_ref_low_bound (TREE_OPERAND (arg
, 0));
11145 if (!integer_zerop (lower_bound
))
11147 /* If the offset and base aren't both constants, return 0. */
11148 if (TREE_CODE (lower_bound
) != INTEGER_CST
)
11150 if (TREE_CODE (offset
) != INTEGER_CST
)
11152 /* Adjust offset by the lower bound. */
11153 offset
= size_diffop (fold_convert (sizetype
, offset
),
11154 fold_convert (sizetype
, lower_bound
));
11157 else if (TREE_CODE (TREE_OPERAND (arg
, 0)) == MEM_REF
)
11159 array
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
11160 offset
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 1);
11161 if (TREE_CODE (array
) != ADDR_EXPR
)
11163 array
= TREE_OPERAND (array
, 0);
11164 if (TREE_CODE (array
) != STRING_CST
11165 && TREE_CODE (array
) != VAR_DECL
)
11171 else if (TREE_CODE (arg
) == PLUS_EXPR
|| TREE_CODE (arg
) == POINTER_PLUS_EXPR
)
11173 tree arg0
= TREE_OPERAND (arg
, 0);
11174 tree arg1
= TREE_OPERAND (arg
, 1);
11179 if (TREE_CODE (arg0
) == ADDR_EXPR
11180 && (TREE_CODE (TREE_OPERAND (arg0
, 0)) == STRING_CST
11181 || TREE_CODE (TREE_OPERAND (arg0
, 0)) == VAR_DECL
))
11183 array
= TREE_OPERAND (arg0
, 0);
11186 else if (TREE_CODE (arg1
) == ADDR_EXPR
11187 && (TREE_CODE (TREE_OPERAND (arg1
, 0)) == STRING_CST
11188 || TREE_CODE (TREE_OPERAND (arg1
, 0)) == VAR_DECL
))
11190 array
= TREE_OPERAND (arg1
, 0);
11199 if (TREE_CODE (array
) == STRING_CST
)
11201 *ptr_offset
= fold_convert (sizetype
, offset
);
11204 else if (TREE_CODE (array
) == VAR_DECL
11205 || TREE_CODE (array
) == CONST_DECL
)
11208 tree init
= ctor_for_folding (array
);
11210 /* Variables initialized to string literals can be handled too. */
11211 if (init
== error_mark_node
11213 || TREE_CODE (init
) != STRING_CST
)
11216 /* Avoid const char foo[4] = "abcde"; */
11217 if (DECL_SIZE_UNIT (array
) == NULL_TREE
11218 || TREE_CODE (DECL_SIZE_UNIT (array
)) != INTEGER_CST
11219 || (length
= TREE_STRING_LENGTH (init
)) <= 0
11220 || compare_tree_int (DECL_SIZE_UNIT (array
), length
) < 0)
11223 /* If variable is bigger than the string literal, OFFSET must be constant
11224 and inside of the bounds of the string literal. */
11225 offset
= fold_convert (sizetype
, offset
);
11226 if (compare_tree_int (DECL_SIZE_UNIT (array
), length
) > 0
11227 && (! tree_fits_uhwi_p (offset
)
11228 || compare_tree_int (offset
, length
) >= 0))
11231 *ptr_offset
= offset
;
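
/* Illustrative sketch (plain C): the "const char foo[4]" guard above
   covers declarations whose array is smaller than the literal including
   its terminating NUL, so the object is not NUL-terminated and must not
   be folded as a string constant.  A standard-C instance of the same
   situation is shown below; the names are made up.  Kept under #if 0.  */
#if 0
#include <stdio.h>

/* Legal C: the array holds exactly the five characters and drops the
   implicit NUL, so it is NOT a NUL-terminated string.  Folding a use of
   it as a string constant would read past the object, which is why
   string_constant refuses such variables.  */
static const char foo[5] = "abcde";

int
main (void)
{
  /* Print with an explicit length; strlen (foo) would be undefined.  */
  printf ("%.*s\n", (int) sizeof foo, foo);
  return 0;
}
#endif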
/* Generate code to calculate the result of OPS, an exploded comparison
   expression, using a store-flag instruction, and return an rtx for the
   result.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
11256 do_store_flag (sepops ops
, rtx target
, machine_mode mode
)
11258 enum rtx_code code
;
11259 tree arg0
, arg1
, type
;
11260 machine_mode operand_mode
;
11263 rtx subtarget
= target
;
11264 location_t loc
= ops
->location
;
11269 /* Don't crash if the comparison was erroneous. */
11270 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
11273 type
= TREE_TYPE (arg0
);
11274 operand_mode
= TYPE_MODE (type
);
11275 unsignedp
= TYPE_UNSIGNED (type
);
11277 /* We won't bother with BLKmode store-flag operations because it would mean
11278 passing a lot of information to emit_store_flag. */
11279 if (operand_mode
== BLKmode
)
11282 /* We won't bother with store-flag operations involving function pointers
11283 when function pointers must be canonicalized before comparisons. */
11284 if (targetm
.have_canonicalize_funcptr_for_compare ()
11285 && ((TREE_CODE (TREE_TYPE (arg0
)) == POINTER_TYPE
11286 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
)))
11288 || (TREE_CODE (TREE_TYPE (arg1
)) == POINTER_TYPE
11289 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
)))
11290 == FUNCTION_TYPE
))))
11296 /* For vector typed comparisons emit code to generate the desired
11297 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
11298 expander for this. */
11299 if (TREE_CODE (ops
->type
) == VECTOR_TYPE
)
11301 tree ifexp
= build2 (ops
->code
, ops
->type
, arg0
, arg1
);
11302 if (VECTOR_BOOLEAN_TYPE_P (ops
->type
)
11303 && expand_vec_cmp_expr_p (TREE_TYPE (arg0
), ops
->type
))
11304 return expand_vec_cmp_expr (ops
->type
, ifexp
, target
);
11307 tree if_true
= constant_boolean_node (true, ops
->type
);
11308 tree if_false
= constant_boolean_node (false, ops
->type
);
11309 return expand_vec_cond_expr (ops
->type
, ifexp
, if_true
,
11314 /* Get the rtx comparison code to use. We know that EXP is a comparison
11315 operation of some type. Some comparisons against 1 and -1 can be
11316 converted to comparisons with zero. Do so here so that the tests
11317 below will be aware that we have a comparison with zero. These
11318 tests will not catch constants in the first operand, but constants
11319 are rarely passed as the first operand. */
11330 if (integer_onep (arg1
))
11331 arg1
= integer_zero_node
, code
= unsignedp
? LEU
: LE
;
11333 code
= unsignedp
? LTU
: LT
;
11336 if (! unsignedp
&& integer_all_onesp (arg1
))
11337 arg1
= integer_zero_node
, code
= LT
;
11339 code
= unsignedp
? LEU
: LE
;
11342 if (! unsignedp
&& integer_all_onesp (arg1
))
11343 arg1
= integer_zero_node
, code
= GE
;
11345 code
= unsignedp
? GTU
: GT
;
11348 if (integer_onep (arg1
))
11349 arg1
= integer_zero_node
, code
= unsignedp
? GTU
: GT
;
11351 code
= unsignedp
? GEU
: GE
;
11354 case UNORDERED_EXPR
:
11380 gcc_unreachable ();
11383 /* Put a constant second. */
11384 if (TREE_CODE (arg0
) == REAL_CST
|| TREE_CODE (arg0
) == INTEGER_CST
11385 || TREE_CODE (arg0
) == FIXED_CST
)
11387 std::swap (arg0
, arg1
);
11388 code
= swap_condition (code
);
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && integer_zerop (arg1)
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
    {
      gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
      if (srcstmt
	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
	{
	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
				       gimple_assign_rhs1 (srcstmt),
				       gimple_assign_rhs2 (srcstmt));
	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
	  if (temp)
	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
	}
    }
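
  /* Illustrative sketch (plain C): the single-bit rewrite described
     above turns (x & (1 << k)) != 0 into a shift of the tested bit down
     to bit 0 and a mask with 1, XORing with 1 for the EQ case.  Kept
     under #if 0.  */
#if 0
/* (X & (1u << K)) != 0, written without a comparison: shift the tested
   bit to the low-order position and mask with 1.  */
static unsigned
bit_test_ne (unsigned x, unsigned k)
{
  return (x >> k) & 1u;
}

/* The EQ form additionally XORs the result with 1.  */
static unsigned
bit_test_eq (unsigned x, unsigned k)
{
  return ((x >> k) & 1u) ^ 1u;
}
#endif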
11419 if (! get_subtarget (target
)
11420 || GET_MODE (subtarget
) != operand_mode
)
11423 expand_operands (arg0
, arg1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
11426 target
= gen_reg_rtx (mode
);
11428 /* Try a cstore if possible. */
11429 return emit_store_flag_force (target
, code
, op0
, op1
,
11430 operand_mode
, unsignedp
,
11431 (TYPE_PRECISION (ops
->type
) == 1
11432 && !TYPE_UNSIGNED (ops
->type
)) ? -1 : 1);
11435 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11436 0 otherwise (i.e. if there is no casesi instruction).
11438 DEFAULT_PROBABILITY is the probability of jumping to the default
11441 try_casesi (tree index_type
, tree index_expr
, tree minval
, tree range
,
11442 rtx table_label
, rtx default_label
, rtx fallback_label
,
11443 int default_probability
)
11445 struct expand_operand ops
[5];
11446 machine_mode index_mode
= SImode
;
11447 rtx op1
, op2
, index
;
11449 if (! targetm
.have_casesi ())
11452 /* Convert the index to SImode. */
11453 if (GET_MODE_BITSIZE (TYPE_MODE (index_type
)) > GET_MODE_BITSIZE (index_mode
))
11455 machine_mode omode
= TYPE_MODE (index_type
);
11456 rtx rangertx
= expand_normal (range
);
11458 /* We must handle the endpoints in the original mode. */
11459 index_expr
= build2 (MINUS_EXPR
, index_type
,
11460 index_expr
, minval
);
11461 minval
= integer_zero_node
;
11462 index
= expand_normal (index_expr
);
11464 emit_cmp_and_jump_insns (rangertx
, index
, LTU
, NULL_RTX
,
11465 omode
, 1, default_label
,
11466 default_probability
);
11467 /* Now we can safely truncate. */
11468 index
= convert_to_mode (index_mode
, index
, 0);
11472 if (TYPE_MODE (index_type
) != index_mode
)
11474 index_type
= lang_hooks
.types
.type_for_mode (index_mode
, 0);
11475 index_expr
= fold_convert (index_type
, index_expr
);
11478 index
= expand_normal (index_expr
);
11481 do_pending_stack_adjust ();
11483 op1
= expand_normal (minval
);
11484 op2
= expand_normal (range
);
11486 create_input_operand (&ops
[0], index
, index_mode
);
11487 create_convert_operand_from_type (&ops
[1], op1
, TREE_TYPE (minval
));
11488 create_convert_operand_from_type (&ops
[2], op2
, TREE_TYPE (range
));
11489 create_fixed_operand (&ops
[3], table_label
);
11490 create_fixed_operand (&ops
[4], (default_label
11492 : fallback_label
));
11493 expand_jump_insn (targetm
.code_for_casesi
, 5, ops
);
11497 /* Attempt to generate a tablejump instruction; same concept. */
11498 /* Subroutine of the next function.
11500 INDEX is the value being switched on, with the lowest value
11501 in the table already subtracted.
11502 MODE is its expected mode (needed if INDEX is constant).
11503 RANGE is the length of the jump table.
11504 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11506 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11507 index value is out of range.
11508 DEFAULT_PROBABILITY is the probability of jumping to
11509 the default label. */
11512 do_tablejump (rtx index
, machine_mode mode
, rtx range
, rtx table_label
,
11513 rtx default_label
, int default_probability
)
11517 if (INTVAL (range
) > cfun
->cfg
->max_jumptable_ents
)
11518 cfun
->cfg
->max_jumptable_ents
= INTVAL (range
);
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
11529 emit_cmp_and_jump_insns (index
, range
, GTU
, NULL_RTX
, mode
, 1,
11530 default_label
, default_probability
);
11533 /* If index is in range, it must fit in Pmode.
11534 Convert to Pmode so we can index with it. */
11536 index
= convert_to_mode (Pmode
, index
, 1);
11538 /* Don't let a MEM slip through, because then INDEX that comes
11539 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11540 and break_out_memory_refs will go to work on it and mess it up. */
11541 #ifdef PIC_CASE_VECTOR_ADDRESS
11542 if (flag_pic
&& !REG_P (index
))
11543 index
= copy_to_mode_reg (Pmode
, index
);
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
11550 index
= simplify_gen_binary (MULT
, Pmode
, index
,
11551 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE
),
11553 index
= simplify_gen_binary (PLUS
, Pmode
, index
,
11554 gen_rtx_LABEL_REF (Pmode
, table_label
));
11556 #ifdef PIC_CASE_VECTOR_ADDRESS
11558 index
= PIC_CASE_VECTOR_ADDRESS (index
);
11561 index
= memory_address (CASE_VECTOR_MODE
, index
);
11562 temp
= gen_reg_rtx (CASE_VECTOR_MODE
);
11563 vector
= gen_const_mem (CASE_VECTOR_MODE
, index
);
11564 convert_move (temp
, vector
, 0);
11566 emit_jump_insn (targetm
.gen_tablejump (temp
, table_label
));
11568 /* If we are generating PIC code or if the table is PC-relative, the
11569 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11570 if (! CASE_VECTOR_PC_RELATIVE
&& ! flag_pic
)
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label, int default_probability)
{
  rtx index;

  if (! targetm.have_tablejump ())
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label, default_probability);
  return 1;
}
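
/* Illustrative sketch (plain C): a switch lowered through try_tablejump
   subtracts the minimum case value and then relies on one unsigned
   comparison to reject everything outside the case range, exactly as the
   comment in do_tablejump explains.  The handler names below are invented
   for the example.  Kept under #if 0.  */
#if 0
#include <stdio.h>

static void handle_10 (void) { puts ("10"); }
static void handle_11 (void) { puts ("11"); }
static void handle_12 (void) { puts ("12"); }
static void handle_default (void) { puts ("default"); }

static void
dispatch (int x)
{
  static void (*const table[]) (void) = { handle_10, handle_11, handle_12 };

  /* Subtract the lowest case value; a single unsigned comparison then
     checks both "x >= 10" and "x <= 12" at once.  */
  unsigned int index = (unsigned int) x - 10u;
  if (index > 2u)
    handle_default ();
  else
    table[index] ();
}

int
main (void)
{
  dispatch (11);
  dispatch (42);
  return 0;
}
#endif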
/* Return a CONST_VECTOR rtx representing vector mask for
   a VECTOR_CST of booleans.  */

static rtx
const_vector_mask_from_tree (tree exp)
{
  rtvec v;
  unsigned i, units;
  tree elt;
  machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));
  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      gcc_assert (TREE_CODE (elt) == INTEGER_CST);
      if (integer_zerop (elt))
	RTVEC_ELT (v, i) = CONST0_RTX (inner);
      else if (integer_onep (elt)
	       || integer_minus_onep (elt))
	RTVEC_ELT (v, i) = CONSTM1_RTX (inner);
      else
	gcc_unreachable ();
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}
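
/* Illustrative sketch (plain C): the function above materializes a vector
   of booleans as per-element masks, the form vector compares produce:
   false stays 0, true becomes all-ones in the element mode.  The same
   encoding on an array of int8_t lanes is shown below.  Kept under #if 0.  */
#if 0
#include <stdint.h>

/* Encode N boolean lanes as SIMD-style masks: false -> 0x00,
   true -> 0xff (all bits set in the lane).  */
static void
bools_to_lane_masks (const _Bool *src, int8_t *dst, unsigned n)
{
  for (unsigned i = 0; i < n; i++)
    dst[i] = src[i] ? (int8_t) -1 : 0;
}
#endif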
/* Return a CONST_INT rtx representing vector mask for
   a VECTOR_CST of booleans.  */

static rtx
const_scalar_mask_from_tree (tree exp)
{
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  wide_int res = wi::zero (GET_MODE_PRECISION (mode));
  unsigned i;
  tree elt;

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);
      gcc_assert (TREE_CODE (elt) == INTEGER_CST);
      if (integer_all_onesp (elt))
	res = wi::set_bit (res, i);
      else
	gcc_assert (integer_zerop (elt));
    }

  return immed_wide_int_const (res, mode);
}
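
/* Illustrative sketch (plain C): here the boolean vector becomes a scalar
   bitmask instead, with bit I set when element I is true, the
   representation used by scalar mask modes.  A plain-C packing of the
   same kind is shown below.  Kept under #if 0.  */
#if 0
#include <stdint.h>

/* Pack up to 64 boolean lanes into a scalar mask, one bit per lane.  */
static uint64_t
bools_to_scalar_mask (const _Bool *src, unsigned n)
{
  uint64_t mask = 0;
  for (unsigned i = 0; i < n; i++)
    if (src[i])
      mask |= UINT64_C (1) << i;
  return mask;
}
#endif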
11655 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11657 const_vector_from_tree (tree exp
)
11663 machine_mode inner
, mode
;
11665 mode
= TYPE_MODE (TREE_TYPE (exp
));
11667 if (initializer_zerop (exp
))
11668 return CONST0_RTX (mode
);
11670 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp
)))
11671 return const_vector_mask_from_tree (exp
);
11673 units
= GET_MODE_NUNITS (mode
);
11674 inner
= GET_MODE_INNER (mode
);
11676 v
= rtvec_alloc (units
);
11678 for (i
= 0; i
< VECTOR_CST_NELTS (exp
); ++i
)
11680 elt
= VECTOR_CST_ELT (exp
, i
);
11682 if (TREE_CODE (elt
) == REAL_CST
)
11683 RTVEC_ELT (v
, i
) = const_double_from_real_value (TREE_REAL_CST (elt
),
11685 else if (TREE_CODE (elt
) == FIXED_CST
)
11686 RTVEC_ELT (v
, i
) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt
),
11689 RTVEC_ELT (v
, i
) = immed_wide_int_const (elt
, inner
);
11692 return gen_rtx_CONST_VECTOR (mode
, v
);
11695 /* Build a decl for a personality function given a language prefix. */
11698 build_personality_function (const char *lang
)
11700 const char *unwind_and_version
;
11704 switch (targetm_common
.except_unwind_info (&global_options
))
11709 unwind_and_version
= "_sj0";
11713 unwind_and_version
= "_v0";
11716 unwind_and_version
= "_seh0";
11719 gcc_unreachable ();
11722 name
= ACONCAT (("__", lang
, "_personality", unwind_and_version
, NULL
));
11724 type
= build_function_type_list (integer_type_node
, integer_type_node
,
11725 long_long_unsigned_type_node
,
11726 ptr_type_node
, ptr_type_node
, NULL_TREE
);
11727 decl
= build_decl (UNKNOWN_LOCATION
, FUNCTION_DECL
,
11728 get_identifier (name
), type
);
11729 DECL_ARTIFICIAL (decl
) = 1;
11730 DECL_EXTERNAL (decl
) = 1;
11731 TREE_PUBLIC (decl
) = 1;
11733 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11734 are the flags assigned by targetm.encode_section_info. */
11735 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl
), 0), NULL
);
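
/* Illustrative sketch (plain C): build_personality_function assembles the
   symbol name from the language prefix and an unwind-scheme suffix, e.g.
   "gxx" plus "_v0" gives the familiar __gxx_personality_v0.  The buffer
   handling below is an assumption of the example.  Kept under #if 0.  */
#if 0
#include <stdio.h>

/* Build "__<lang>_personality<unwind_and_version>", e.g.
   lang = "gxx", suffix = "_v0"  ->  "__gxx_personality_v0".  */
static void
personality_name (char *buf, size_t len, const char *lang,
		  const char *unwind_and_version)
{
  snprintf (buf, len, "__%s_personality%s", lang, unwind_and_version);
}

int
main (void)
{
  char name[64];
  personality_name (name, sizeof name, "gxx", "_v0");
  puts (name);   /* prints __gxx_personality_v0 */
  return 0;
}
#endif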
11740 /* Extracts the personality function of DECL and returns the corresponding
11744 get_personality_function (tree decl
)
11746 tree personality
= DECL_FUNCTION_PERSONALITY (decl
);
11747 enum eh_personality_kind pk
;
11749 pk
= function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl
));
11750 if (pk
== eh_personality_none
)
11754 && pk
== eh_personality_any
)
11755 personality
= lang_hooks
.eh_personality ();
11757 if (pk
== eh_personality_lang
)
11758 gcc_assert (personality
!= NULL_TREE
);
11760 return XEXP (DECL_RTL (personality
), 0);
11763 /* Returns a tree for the size of EXP in bytes. */
11766 tree_expr_size (const_tree exp
)
11769 && DECL_SIZE_UNIT (exp
) != 0)
11770 return DECL_SIZE_UNIT (exp
);
11772 return size_in_bytes (TREE_TYPE (exp
));
11775 /* Return an rtx for the size in bytes of the value of EXP. */
11778 expr_size (tree exp
)
11782 if (TREE_CODE (exp
) == WITH_SIZE_EXPR
)
11783 size
= TREE_OPERAND (exp
, 1);
11786 size
= tree_expr_size (exp
);
11788 gcc_assert (size
== SUBSTITUTE_PLACEHOLDER_IN_EXPR (size
, exp
));
11791 return expand_expr (size
, NULL_RTX
, TYPE_MODE (sizetype
), EXPAND_NORMAL
);
11794 /* Return a wide integer for the size in bytes of the value of EXP, or -1
11795 if the size can vary or is larger than an integer. */
11797 static HOST_WIDE_INT
11798 int_expr_size (tree exp
)
11802 if (TREE_CODE (exp
) == WITH_SIZE_EXPR
)
11803 size
= TREE_OPERAND (exp
, 1);
11806 size
= tree_expr_size (exp
);
11810 if (size
== 0 || !tree_fits_shwi_p (size
))
11813 return tree_to_shwi (size
);