/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs-tree.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "tree-ssa-address.h"
#include "tree-chkp.h"
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned,
					HOST_WIDE_INT, unsigned HOST_WIDE_INT,
					unsigned HOST_WIDE_INT,
					unsigned HOST_WIDE_INT);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static rtx_insn *compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, unsigned HOST_WIDE_INT,
				     unsigned HOST_WIDE_INT, machine_mode,
				     tree, int, alias_set_type, bool);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT, bool);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
			unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
			machine_mode, tree, alias_set_type, bool, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree,
							      const_tree);

static int is_aligning_offset (const_tree, const_tree);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, machine_mode);
static void emit_single_push_insn (machine_mode, rtx, tree);
static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx,
			  profile_probability);
static rtx const_vector_from_tree (tree);
static rtx const_scalar_mask_from_tree (tree);
static tree tree_expr_size (const_tree);
static HOST_WIDE_INT int_expr_size (tree);
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx pat;
  machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);

  rtx_insn *insn = as_a<rtx_insn *> (rtx_alloc (INSN));
  pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    set_mode_and_regno (reg, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if (insn_operand_matches (ic, 1, mem))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  machine_mode to_mode = GET_MODE (to);
  machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_PRECISION (to_mode))
      && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value;
      rtx_insn *insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* If the target has a converter from FROM_MODE to TO_MODE, use it.  */
  {
    convert_optab ctab;

    if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
      ctab = trunc_optab;
    else if (unsignedp)
      ctab = zext_optab;
    else
      ctab = sext_optab;

    if (convert_optab_handler (ctab, to_mode, from_mode)
	!= CODE_FOR_nothing)
      {
	emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
			to, from, UNKNOWN);
	return;
      }
  }

  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
      convert_optab ctab = unsignedp ? zext_optab : sext_optab;
      enum insn_code icode;

      icode = convert_optab_handler (ctab, full_mode, from_mode);
      gcc_assert (icode != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (icode, to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (icode, new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

   /* Make sure both are fixed-point modes or both are not.  */
   gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
	       ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
   if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
	 we won't saturate the result.
	 Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
	expand_fixed_convert (to, from, 0, 0);
      else
	expand_fixed_convert (to, from, 0, 1);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
      && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
    {
      rtx_insn *insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  rtx word_to = gen_reg_rtx (word_mode);
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_clobber (to);
	    }
	  convert_move (word_to, from, unsignedp);
	  emit_unop_insn (code, to, word_to, equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 the source does not overlap the target so force it into an isolated
	 register when maybe so.  Likewise for any MEM input, since the
	 conversion sequence might require several references to it and we
	 must ensure we're getting the same value every time.  */

      if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
					    LT, lowfrom, const0_rtx,
					    lowpart_mode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
      && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0),
					    MEM_ADDR_SPACE (from)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0),
					    MEM_ADDR_SPACE (from)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  machine_mode intermediate;
	  rtx tmp;
	  int shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = (GET_MODE_PRECISION (to_mode)
			  - GET_MODE_PRECISION (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
			     from_mode) != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
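
/* Usage sketch (illustrative only; not part of the original file):
   widening a QImode value into an SImode register with zero-extension
   could be requested as

     rtx src = gen_reg_rtx (QImode);
     rtx dst = gen_reg_rtx (SImode);
     convert_move (dst, src, 1);

   where UNSIGNEDP == 1 selects ZERO_EXTEND and UNSIGNEDP == 0 would
   select SIGN_EXTEND, as described by EQUIV_CODE above.  */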
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
    x = gen_lowpart (mode, SUBREG_REG (x));

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
    {
      /* If the caller did not tell us the old mode, then there is not
	 much to do with respect to canonicalization.  We have to
	 assume that all the bits are significant.  */
      if (GET_MODE_CLASS (oldmode) != MODE_INT)
	oldmode = MAX_MODE_INT;
      wide_int w = wide_int::from (rtx_mode_t (x, oldmode),
				   GET_MODE_PRECISION (mode),
				   unsignedp ? UNSIGNED : SIGNED);
      return immed_wide_int_const (w, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_CLASS (oldmode) == MODE_INT
      && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
      && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
	  || (REG_P (x)
	      && (!HARD_REGISTER_P (x)
		  || HARD_REGNO_MODE_OK (REGNO (x), mode))
	      && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))

    return gen_lowpart (mode, x);

  /* Converting from integer constant into mode is always equivalent to an
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */

static unsigned int
alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
  machine_mode tmode;

  tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > max_pieces
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  return align;
}
/* Return the widest integer mode no wider than SIZE.  If no such mode
   can be found, return VOIDmode.  */

static machine_mode
widest_int_mode_for_size (unsigned int size)
{
  machine_mode tmode, mode = VOIDmode;

  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
    if (GET_MODE_SIZE (tmode) < size)
      mode = tmode;

  return mode;
}
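
/* Note the strict "<" above (illustrative example): on a target with
   QI/HI/SI/DI integer modes, widest_int_mode_for_size (5) returns SImode.
   Callers that want the widest mode of at most N bytes therefore pass
   N + 1, as the by-pieces code does with MOVE_MAX_PIECES + 1 and
   STORE_MAX_PIECES + 1.  */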
/* Determine whether an operation OP on LEN bytes with alignment ALIGN can
   and should be performed piecewise.  */

static bool
can_do_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align,
		  enum by_pieces_operation op)
{
  return targetm.use_by_pieces_infrastructure_p (len, align, op,
						 optimize_insn_for_speed_p ());
}

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

bool
can_move_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align)
{
  return can_do_by_pieces (len, align, MOVE_BY_PIECES);
}
/* Return number of insns required to perform operation OP by pieces
   for L bytes.  ALIGN (in bits) is maximum alignment we can assume.  */

unsigned HOST_WIDE_INT
by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		  unsigned int max_size, by_pieces_operation op)
{
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1 && l > 0)
    {
      machine_mode mode;
      enum insn_code icode;

      mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
	break;
      unsigned int modesize = GET_MODE_SIZE (mode);

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	{
	  unsigned HOST_WIDE_INT n_pieces = l / modesize;
	  l %= modesize;
	  switch (op)
	    {
	    case CLEAR_BY_PIECES:
	    case MOVE_BY_PIECES:
	    case SET_BY_PIECES:
	    case STORE_BY_PIECES:
	      n_insns += n_pieces;
	      break;

	    case COMPARE_BY_PIECES:
	      {
		int batch = targetm.compare_by_pieces_branch_ratio (mode);
		int batch_ops = 4 * batch - 1;
		unsigned HOST_WIDE_INT full = n_pieces / batch;
		n_insns += full * batch_ops;
		if (n_pieces % batch != 0)
		  n_insns += batch_ops;
	      }
	      break;

	    default:
	      gcc_unreachable ();
	    }
	}
      max_size = modesize;
    }

  gcc_assert (!l);
  return n_insns;
}
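
/* Worked example (illustrative): suppose one mode contributes
   n_pieces == 10 and targetm.compare_by_pieces_branch_ratio returns 4.
   Then batch_ops == 4 * 4 - 1 == 15 and full == 10 / 4 == 2, so 30 insns
   are counted for the two full batches, plus another 15 for the partial
   batch since 10 % 4 != 0, giving 45 for that mode.  */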
/* Used when performing piecewise block operations, holds information
   about one of the memory objects involved.  The member functions
   can be used to generate code for loading from the object and
   updating the address when iterating.  */

class pieces_addr
{
  /* The object being referenced, a MEM.  Can be NULL_RTX to indicate
     stack pushes.  */
  rtx m_obj;
  /* The address of the object.  Can differ from that seen in the
     MEM rtx if we copied the address to a register.  */
  rtx m_addr;
  /* Nonzero if the address on the object has an autoincrement already,
     signifies whether that was an increment or decrement.  */
  signed char m_addr_inc;
  /* Nonzero if we intend to use autoinc without the address already
     having autoinc form.  We will insert add insns around each memory
     reference, expecting later passes to form autoinc addressing modes.
     The only supported options are predecrement and postincrement.  */
  signed char m_explicit_inc;
  /* True if we have either of the two possible cases of using
     autoincrement.  */
  bool m_auto;
  /* True if this is an address to be used for load operations rather
     than stores.  */
  bool m_is_load;

  /* Optionally, a function to obtain constants for any given offset into
     the objects, and data associated with it.  */
  by_pieces_constfn m_constfn;
  void *m_cfndata;
public:
  pieces_addr (rtx, bool, by_pieces_constfn, void *);
  rtx adjust (machine_mode, HOST_WIDE_INT);
  void increment_address (HOST_WIDE_INT);
  void maybe_predec (HOST_WIDE_INT);
  void maybe_postinc (HOST_WIDE_INT);
  void decide_autoinc (machine_mode, bool, HOST_WIDE_INT);
  int get_addr_inc ()
  {
    return m_addr_inc;
  }
};
/* Initialize a pieces_addr structure from an object OBJ.  IS_LOAD is
   true if the operation to be performed on this object is a load
   rather than a store.  For stores, OBJ can be NULL, in which case we
   assume the operation is a stack push.  For loads, the optional
   CONSTFN and its associated CFNDATA can be used in place of the
   memory load.  */

pieces_addr::pieces_addr (rtx obj, bool is_load, by_pieces_constfn constfn,
			  void *cfndata)
  : m_obj (obj), m_is_load (is_load), m_constfn (constfn), m_cfndata (cfndata)
{
  m_addr_inc = 0;
  m_auto = false;
  if (obj)
    {
      rtx addr = XEXP (obj, 0);
      rtx_code code = GET_CODE (addr);
      m_addr = addr;
      bool dec = code == PRE_DEC || code == POST_DEC;
      bool inc = code == PRE_INC || code == POST_INC;
      m_auto = inc || dec;
      if (m_auto)
	m_addr_inc = dec ? -1 : 1;

      /* While we have always looked for these codes here, the code
	 implementing the memory operation has never handled them.
	 Support could be added later if necessary or beneficial.  */
      gcc_assert (code != PRE_INC && code != POST_DEC);
    }
  else
    {
      m_addr = NULL_RTX;
      if (!is_load)
	{
	  m_auto = true;
	  if (STACK_GROWS_DOWNWARD)
	    m_addr_inc = -1;
	  else
	    m_addr_inc = 1;
	}
      else
	gcc_assert (constfn != NULL);
    }
  m_explicit_inc = 0;
  if (constfn)
    gcc_assert (is_load);
}
/* Decide whether to use autoinc for an address involved in a memory op.
   MODE is the mode of the accesses, REVERSE is true if we've decided to
   perform the operation starting from the end, and LEN is the length of
   the operation.  Don't override an earlier decision to set m_auto.  */

void
pieces_addr::decide_autoinc (machine_mode ARG_UNUSED (mode), bool reverse,
			     HOST_WIDE_INT len)
{
  if (m_auto || m_obj == NULL_RTX)
    return;

  bool use_predec = (m_is_load
		     ? USE_LOAD_PRE_DECREMENT (mode)
		     : USE_STORE_PRE_DECREMENT (mode));
  bool use_postinc = (m_is_load
		      ? USE_LOAD_POST_INCREMENT (mode)
		      : USE_STORE_POST_INCREMENT (mode));
  machine_mode addr_mode = get_address_mode (m_obj);

  if (use_predec && reverse)
    {
      m_addr = copy_to_mode_reg (addr_mode,
				 plus_constant (addr_mode,
						m_addr, len));
      m_auto = true;
      m_explicit_inc = -1;
    }
  else if (use_postinc && !reverse)
    {
      m_addr = copy_to_mode_reg (addr_mode, m_addr);
      m_auto = true;
      m_explicit_inc = 1;
    }
  else if (CONSTANT_P (m_addr))
    m_addr = copy_to_mode_reg (addr_mode, m_addr);
}
/* Adjust the address to refer to the data at OFFSET in MODE.  If we
   are using autoincrement for this address, we don't add the offset,
   but we still modify the MEM's properties.  */

rtx
pieces_addr::adjust (machine_mode mode, HOST_WIDE_INT offset)
{
  if (m_constfn)
    return m_constfn (m_cfndata, offset, mode);
  if (m_obj == NULL_RTX)
    return NULL_RTX;
  if (m_auto)
    return adjust_automodify_address (m_obj, mode, m_addr, offset);
  else
    return adjust_address (m_obj, mode, offset);
}
/* Emit an add instruction to increment the address by SIZE.  */

void
pieces_addr::increment_address (HOST_WIDE_INT size)
{
  rtx amount = gen_int_mode (size, GET_MODE (m_addr));
  emit_insn (gen_add2_insn (m_addr, amount));
}

/* If we are supposed to decrement the address after each access, emit code
   to do so now.  Increment by SIZE (which should have the correct sign).  */

void
pieces_addr::maybe_predec (HOST_WIDE_INT size)
{
  if (m_explicit_inc >= 0)
    return;
  gcc_assert (HAVE_PRE_DECREMENT);
  increment_address (size);
}

/* If we are supposed to increment the address after each access, emit code
   to do so now.  Increment by SIZE.  */

void
pieces_addr::maybe_postinc (HOST_WIDE_INT size)
{
  if (m_explicit_inc <= 0)
    return;
  gcc_assert (HAVE_POST_INCREMENT);
  increment_address (size);
}
/* This structure is used by do_op_by_pieces to describe the operation
   to be performed.  */

class op_by_pieces_d
{
 protected:
  pieces_addr m_to, m_from;
  unsigned HOST_WIDE_INT m_len;
  HOST_WIDE_INT m_offset;
  unsigned int m_align;
  unsigned int m_max_size;
  bool m_reverse;

  /* Virtual functions, overriden by derived classes for the specific
     operation.  */
  virtual void generate (rtx, rtx, machine_mode) = 0;
  virtual bool prepare_mode (machine_mode, unsigned int) = 0;
  virtual void finish_mode (machine_mode)
  {
  }

 public:
  op_by_pieces_d (rtx, bool, rtx, bool, by_pieces_constfn, void *,
		  unsigned HOST_WIDE_INT, unsigned int);
  void run ();
};
/* The constructor for an op_by_pieces_d structure.  We require two
   objects named TO and FROM, which are identified as loads or stores
   by TO_LOAD and FROM_LOAD.  If FROM is a load, the optional FROM_CFN
   and its associated FROM_CFN_DATA can be used to replace loads with
   constant values.  LEN describes the length of the operation.  */

op_by_pieces_d::op_by_pieces_d (rtx to, bool to_load,
				rtx from, bool from_load,
				by_pieces_constfn from_cfn,
				void *from_cfn_data,
				unsigned HOST_WIDE_INT len,
				unsigned int align)
  : m_to (to, to_load, NULL, NULL),
    m_from (from, from_load, from_cfn, from_cfn_data),
    m_len (len), m_max_size (MOVE_MAX_PIECES + 1)
{
  int toi = m_to.get_addr_inc ();
  int fromi = m_from.get_addr_inc ();
  if (toi >= 0 && fromi >= 0)
    m_reverse = false;
  else if (toi <= 0 && fromi <= 0)
    m_reverse = true;
  else
    gcc_unreachable ();

  m_offset = m_reverse ? len : 0;
  align = MIN (to ? MEM_ALIGN (to) : align,
	       from ? MEM_ALIGN (from) : align);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (by_pieces_ninsns (len, align, m_max_size, MOVE_BY_PIECES) > 2)
    {
      /* Find the mode of the largest comparison.  */
      machine_mode mode = widest_int_mode_for_size (m_max_size);

      m_from.decide_autoinc (mode, m_reverse, len);
      m_to.decide_autoinc (mode, m_reverse, len);
    }

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
  m_align = align;
}
/* This function contains the main loop used for expanding a block
   operation.  First move what we can in the largest integer mode,
   then go to successively smaller modes.  For every access, call
   GENFUN with the two operands and the EXTRA_DATA.  */

void
op_by_pieces_d::run ()
{
  while (m_max_size > 1 && m_len > 0)
    {
      machine_mode mode = widest_int_mode_for_size (m_max_size);

      if (mode == VOIDmode)
	break;

      if (prepare_mode (mode, m_align))
	{
	  unsigned int size = GET_MODE_SIZE (mode);
	  rtx to1 = NULL_RTX, from1;

	  while (m_len >= size)
	    {
	      if (m_reverse)
		m_offset -= size;

	      to1 = m_to.adjust (mode, m_offset);
	      from1 = m_from.adjust (mode, m_offset);

	      m_to.maybe_predec (-(HOST_WIDE_INT)size);
	      m_from.maybe_predec (-(HOST_WIDE_INT)size);

	      generate (to1, from1, mode);

	      m_to.maybe_postinc (size);
	      m_from.maybe_postinc (size);

	      if (!m_reverse)
		m_offset += size;

	      m_len -= size;
	    }

	  finish_mode (mode);
	}

      m_max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!m_len);
}
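
/* Example trace (illustrative): with m_len == 7, MOVE_MAX_PIECES == 8
   and ample alignment, the loop above issues one SImode piece
   (m_len 7 -> 3), then one HImode piece (3 -> 1), then one QImode piece
   (1 -> 0), calling the virtual generate () once per piece.  */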
/* Derived class from op_by_pieces_d, providing support for block move
   operations.  */

class move_by_pieces_d : public op_by_pieces_d
{
  insn_gen_fn m_gen_fun;
  void generate (rtx, rtx, machine_mode);
  bool prepare_mode (machine_mode, unsigned int);

 public:
  move_by_pieces_d (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		    unsigned int align)
    : op_by_pieces_d (to, false, from, true, NULL, NULL, len, align)
  {
  }
  rtx finish_endp (int);
};

/* Return true if MODE can be used for a set of copies, given an
   alignment ALIGN.  Prepare whatever data is necessary for later
   calls to generate.  */

bool
move_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
{
  insn_code icode = optab_handler (mov_optab, mode);
  m_gen_fun = GEN_FCN (icode);
  return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
}
/* A callback used when iterating for a move_by_pieces_operation.
   OP0 and OP1 are the values that have been loaded and should be
   compared in MODE.  If OP0 is NULL, this means we should generate a
   push; otherwise EXTRA_DATA holds a pointer to a pointer to the insn
   gen function that should be used to generate the mode.  */

void
move_by_pieces_d::generate (rtx op0, rtx op1,
			    machine_mode mode ATTRIBUTE_UNUSED)
{
#ifdef PUSH_ROUNDING
  if (op0 == NULL_RTX)
    {
      emit_single_push_insn (mode, op1, NULL);
      return;
    }
#endif
  emit_insn (m_gen_fun (op0, op1));
}
/* Perform the final adjustment at the end of a string to obtain the
   correct return value for the block operation.  If ENDP is 1 return
   memory at the end ala mempcpy, and if ENDP is 2 return memory the
   end minus one byte ala stpcpy.  */

rtx
move_by_pieces_d::finish_endp (int endp)
{
  gcc_assert (!m_reverse);
  if (endp == 2)
    {
      m_to.maybe_postinc (-1);
      --m_offset;
    }
  return m_to.adjust (QImode, m_offset);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
#ifndef PUSH_ROUNDING
  if (to == NULL)
    gcc_unreachable ();
#endif

  move_by_pieces_d data (to, from, len, align);

  data.run ();

  if (endp)
    return data.finish_endp (endp);
  else
    return to;
}
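
/* Usage sketch: emit_block_move_hints below reaches this routine as

     if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);

   passing ENDP == 0 because the block address result is not needed.  */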
/* Derived class from op_by_pieces_d, providing support for block store
   operations.  */

class store_by_pieces_d : public op_by_pieces_d
{
  insn_gen_fn m_gen_fun;
  void generate (rtx, rtx, machine_mode);
  bool prepare_mode (machine_mode, unsigned int);

 public:
  store_by_pieces_d (rtx to, by_pieces_constfn cfn, void *cfn_data,
		     unsigned HOST_WIDE_INT len, unsigned int align)
    : op_by_pieces_d (to, false, NULL_RTX, true, cfn, cfn_data, len, align)
  {
  }
  rtx finish_endp (int);
};

/* Return true if MODE can be used for a set of stores, given an
   alignment ALIGN.  Prepare whatever data is necessary for later
   calls to generate.  */

bool
store_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
{
  insn_code icode = optab_handler (mov_optab, mode);
  m_gen_fun = GEN_FCN (icode);
  return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
}
/* A callback used when iterating for a store_by_pieces_operation.
   OP0 and OP1 are the values that have been loaded and should be
   compared in MODE.  If OP0 is NULL, this means we should generate a
   push; otherwise EXTRA_DATA holds a pointer to a pointer to the insn
   gen function that should be used to generate the mode.  */

void
store_by_pieces_d::generate (rtx op0, rtx op1, machine_mode)
{
  emit_insn (m_gen_fun (op0, op1));
}
/* Perform the final adjustment at the end of a string to obtain the
   correct return value for the block operation.  If ENDP is 1 return
   memory at the end ala mempcpy, and if ENDP is 2 return memory the
   end minus one byte ala stpcpy.  */

rtx
store_by_pieces_d::finish_endp (int endp)
{
  gcc_assert (!m_reverse);
  if (endp == 2)
    {
      m_to.maybe_postinc (-1);
      --m_offset;
    }
  return m_to.adjust (QImode, m_offset);
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   Return nonzero if a call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
		     rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
		     void *constfundata, unsigned int align, bool memsetp)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  machine_mode mode;
  enum insn_code icode;
  int reverse;
  /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
  rtx cst ATTRIBUTE_UNUSED;

  if (len == 0)
    return 1;

  if (!targetm.use_by_pieces_infrastructure_p (len, align,
					       memsetp
						 ? SET_BY_PIECES
						 : STORE_BY_PIECES,
					       optimize_insn_for_speed_p ()))
    return 0;

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1 && l > 0)
	{
	  mode = widest_int_mode_for_size (max_size);

	  if (mode == VOIDmode)
	    break;

	  icode = optab_handler (mov_optab, mode);
	  if (icode != CODE_FOR_nothing
	      && align >= GET_MODE_ALIGNMENT (mode))
	    {
	      unsigned int size = GET_MODE_SIZE (mode);

	      while (l >= size)
		{
		  if (reverse)
		    offset -= size;

		  cst = (*constfun) (constfundata, offset, mode);
		  if (!targetm.legitimate_constant_p (mode, cst))
		    return 0;

		  if (!reverse)
		    offset += size;

		  l -= size;
		}
	    }

	  max_size = GET_MODE_SIZE (mode);
	}

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
		 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
		 void *constfundata, unsigned int align, bool memsetp, int endp)
{
  if (len == 0)
    {
      gcc_assert (endp != 2);
      return to;
    }

  gcc_assert (targetm.use_by_pieces_infrastructure_p
		(len, align,
		 memsetp ? SET_BY_PIECES : STORE_BY_PIECES,
		 optimize_insn_for_speed_p ()));

  store_by_pieces_d data (to, constfun, constfundata, len, align);

  data.run ();

  if (endp)
    return data.finish_endp (endp);
  else
    return to;
}
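
/* A minimal CONSTFUN sketch (hypothetical helper, not part of GCC;
   clear_by_pieces_1 below is the in-tree example):

     static rtx
     all_ones_cst (void *, HOST_WIDE_INT, machine_mode mode)
     {
       return gen_int_mode (-1, mode);
     }

   store_by_pieces (to, len, all_ones_cst, NULL, align, true, 0) would
   then fill LEN bytes of TO with 0xff, assuming the target accepts the
   constants in every mode tried.  */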
/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode)
{
  return const0_rtx;
}

/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  if (len == 0)
    return;

  store_by_pieces_d data (to, clear_by_pieces_1, NULL, len, align);
  data.run ();
}
/* Context used by compare_by_pieces_genfn.  It stores the fail label
   to jump to in case of miscomparison, and for branch ratios greater than 1,
   it stores an accumulator and the current and maximum counts before
   emitting another branch.  */

class compare_by_pieces_d : public op_by_pieces_d
{
  rtx_code_label *m_fail_label;
  rtx m_accumulator;
  int m_count, m_batch;

  void generate (rtx, rtx, machine_mode);
  bool prepare_mode (machine_mode, unsigned int);
  void finish_mode (machine_mode);
 public:
  compare_by_pieces_d (rtx op0, rtx op1, by_pieces_constfn op1_cfn,
		       void *op1_cfn_data, HOST_WIDE_INT len, int align,
		       rtx_code_label *fail_label)
    : op_by_pieces_d (op0, true, op1, true, op1_cfn, op1_cfn_data, len, align)
  {
    m_fail_label = fail_label;
  }
};
/* A callback used when iterating for a compare_by_pieces_operation.
   OP0 and OP1 are the values that have been loaded and should be
   compared in MODE.  DATA holds a pointer to the compare_by_pieces_data
   context structure.  */

void
compare_by_pieces_d::generate (rtx op0, rtx op1, machine_mode mode)
{
  if (m_batch > 1)
    {
      rtx temp = expand_binop (mode, sub_optab, op0, op1, NULL_RTX,
			       true, OPTAB_LIB_WIDEN);
      if (m_count != 0)
	temp = expand_binop (mode, ior_optab, m_accumulator, temp, temp,
			     true, OPTAB_LIB_WIDEN);
      m_accumulator = temp;

      if (++m_count < m_batch)
	return;

      m_count = 0;
      op0 = m_accumulator;
      op1 = const0_rtx;
      m_accumulator = NULL_RTX;
    }
  do_compare_rtx_and_jump (op0, op1, NE, true, mode, NULL_RTX, NULL,
			   m_fail_label, profile_probability::uninitialized ());
}
/* Return true if MODE can be used for a set of moves and comparisons,
   given an alignment ALIGN.  Prepare whatever data is necessary for
   later calls to generate.  */

bool
compare_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
{
  insn_code icode = optab_handler (mov_optab, mode);
  if (icode == CODE_FOR_nothing
      || align < GET_MODE_ALIGNMENT (mode)
      || !can_compare_p (EQ, mode, ccp_jump))
    return false;
  m_batch = targetm.compare_by_pieces_branch_ratio (mode);
  if (m_batch < 0)
    return false;
  m_accumulator = NULL_RTX;
  m_count = 0;
  return true;
}
/* Called after expanding a series of comparisons in MODE.  If we have
   accumulated results for which we haven't emitted a branch yet, do
   so now.  */

void
compare_by_pieces_d::finish_mode (machine_mode mode)
{
  if (m_accumulator != NULL_RTX)
    do_compare_rtx_and_jump (m_accumulator, const0_rtx, NE, true, mode,
			     NULL_RTX, NULL, m_fail_label,
			     profile_probability::uninitialized ());
}
/* Generate several move instructions to compare LEN bytes from blocks
   ARG0 and ARG1.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   Optionally, the caller can pass a constfn and associated data in A1_CFN
   and A1_CFN_DATA, describing that the second operand being compared is a
   known constant and how to obtain its data.  */

static rtx
compare_by_pieces (rtx arg0, rtx arg1, unsigned HOST_WIDE_INT len,
		   rtx target, unsigned int align,
		   by_pieces_constfn a1_cfn, void *a1_cfn_data)
{
  rtx_code_label *fail_label = gen_label_rtx ();
  rtx_code_label *end_label = gen_label_rtx ();

  if (target == NULL_RTX
      || !REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (TYPE_MODE (integer_type_node));

  compare_by_pieces_d data (arg0, arg1, a1_cfn, a1_cfn_data, len, align,
			    fail_label);

  data.run ();

  emit_move_insn (target, const0_rtx);
  emit_jump (end_label);
  emit_barrier ();
  emit_label (fail_label);
  emit_move_insn (target, const1_rtx);
  emit_label (end_label);

  return target;
}
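
/* As generated above, TARGET ends up 0 if every piece compared equal and
   1 on the first miscompare, i.e. an equality-only result; callers that
   need the tri-state memcmp value must use the cmpmem path in
   emit_block_cmp_hints instead.  */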
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.
   MIN_SIZE is the minimal size of block to move
   MAX_SIZE is the maximal size of block to move, if it can not be represented
   in unsigned HOST_WIDE_INT, then it is a mask of all ones.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size,
		       unsigned HOST_WIDE_INT min_size,
		       unsigned HOST_WIDE_INT max_size,
		       unsigned HOST_WIDE_INT probable_max_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  gcc_assert (size);
  if (CONST_INT_P (size) && INTVAL (size) == 0)
    return 0;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, INTVAL (size));
      set_mem_size (y, INTVAL (size));
    }

  if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
				       expected_align, expected_size,
				       min_size, max_size, probable_max_size))
    ;
  else if (may_use_call
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    {
      /* Since x and y are passed to a libcall, mark the corresponding
	 tree EXPR as addressable.  */
      tree y_expr = MEM_EXPR (y);
      tree x_expr = MEM_EXPR (x);
      if (y_expr)
	mark_addressable (y_expr);
      if (x_expr)
	mark_addressable (x_expr);
      retval = emit_block_copy_via_libcall (x, y, size,
					    method == BLOCK_OP_TAILCALL);
    }

  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return emit_block_move_hints (x, y, size, method, 0, -1,
				min, max, max);
}
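
/* Usage sketch (illustrative): copying a 32-byte BLKmode object could be
   expanded as

     emit_block_move (dst_mem, src_mem, GEN_INT (32), BLOCK_OP_NORMAL);

   where dst_mem and src_mem are hypothetical BLKmode MEMs; the constant
   size makes the MIN and MAX hints computed above exact.  */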
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far_v;
    cumulative_args_t args_so_far;
    tree fn, arg;

    fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
    INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
    args_so_far = pack_cumulative_args (&args_so_far_v);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = targetm.calls.function_arg (args_so_far, mode,
					      NULL_TREE, true);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
	  return false;
	targetm.calls.function_arg_advance (args_so_far, mode,
					    NULL_TREE, true);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
			    unsigned int expected_align,
			    HOST_WIDE_INT expected_size,
			    unsigned HOST_WIDE_INT min_size,
			    unsigned HOST_WIDE_INT max_size,
			    unsigned HOST_WIDE_INT probable_max_size)
{
  int save_volatile_ok = volatile_ok;
  machine_mode mode;

  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
    {
      if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
	expected_size = probable_max_size;
      if ((unsigned HOST_WIDE_INT)expected_size < min_size)
	expected_size = min_size;
    }

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (movmem_optab, mode);

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  Since SIZE is within the Pmode address
	     space, we limit MODE to Pmode.  */
	  && ((CONST_INT_P (size)
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || max_size <= (GET_MODE_MASK (mode) >> 1)
	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
	{
	  struct expand_operand ops[9];
	  unsigned int nops;

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */
	  nops = insn_data[(int) code].n_generator_args;
	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

	  create_fixed_operand (&ops[0], x);
	  create_fixed_operand (&ops[1], y);
	  /* The check above guarantees that this size conversion is valid.  */
	  create_convert_operand_to (&ops[2], size, mode, true);
	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
	  if (nops >= 6)
	    {
	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
	      create_integer_operand (&ops[5], expected_size);
	    }
	  if (nops >= 8)
	    {
	      create_integer_operand (&ops[6], min_size);
	      /* If we can not represent the maximal size,
		 make parameter NULL.  */
	      if ((HOST_WIDE_INT) max_size != -1)
		create_integer_operand (&ops[7], max_size);
	      else
		create_fixed_operand (&ops[7], NULL);
	    }
	  if (nops == 9)
	    {
	      /* If we can not represent the maximal size,
		 make parameter NULL.  */
	      if ((HOST_WIDE_INT) probable_max_size != -1)
		create_integer_operand (&ops[8], probable_max_size);
	      else
		create_fixed_operand (&ops[8], NULL);
	    }
	  if (maybe_expand_insn (code, nops, ops))
	    {
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}

/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx_code_label *cmp_label, *top_label;
  rtx iter, x_addr, y_addr, tmp;
  machine_mode x_addr_mode = get_address_mode (x);
  machine_mode y_addr_mode = get_address_mode (y);
  machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label,
			   profile_probability::guessed_always ()
			     .apply_scale (9, 10));
}
/* Expand a call to memcpy or memmove or memcmp, and return the result.
   TAILCALL is true if this is a tail call.  */

rtx
emit_block_op_via_libcall (enum built_in_function fncode, rtx dst, rtx src,
			   rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, dst_tree, src_tree, size_tree;
  machine_mode size_mode;

  dst_addr = copy_addr_to_reg (XEXP (dst, 0));
  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  dst_tree = make_tree (ptr_type_node, dst_addr);

  src_addr = copy_addr_to_reg (XEXP (src, 0));
  src_addr = convert_memory_address (ptr_mode, src_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);
  size_tree = make_tree (sizetype, size);

  /* It is incorrect to use the libcall calling conventions for calls to
     memcpy/memmove/memcmp because they can be provided by the user.  */
  tree fn = builtin_decl_implicit (fncode);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  return expand_call (call_expr, NULL_RTX, false);
}
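
/* For example, emit_block_move_hints above falls back to
   emit_block_copy_via_libcall, which (elsewhere in this file, if memory
   serves) simply wraps this routine with FNCODE == BUILT_IN_MEMCPY.  */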
/* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
   ARG3_TYPE is the type of ARG3_RTX.  Return the result rtx on success,
   otherwise return null.  */

rtx
expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
			  rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
			  HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  struct expand_operand ops[5];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
			       TYPE_UNSIGNED (arg3_type));
  create_integer_operand (&ops[4], align);
  if (maybe_expand_insn (icode, 5, ops))
    return ops[0].value;
  return NULL_RTX;
}
/* Expand a block compare between X and Y with length LEN using the
   cmpmem optab, placing the result in TARGET.  LEN_TYPE is the type
   of the expression that was used to calculate the length.  ALIGN
   gives the known minimum common alignment.  */

static rtx
emit_block_cmp_via_cmpmem (rtx x, rtx y, rtx len, tree len_type, rtx target,
			   unsigned align)
{
  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
  insn_code icode = direct_optab_handler (cmpmem_optab, SImode);

  if (icode == CODE_FOR_nothing)
    return NULL_RTX;

  return expand_cmpstrn_or_cmpmem (icode, target, x, y, len_type, len, align);
}
/* Emit code to compare a block Y to a block X.  This may be done with
   string-compare instructions, with multiple scalar instructions,
   or with a library call.

   Both X and Y must be MEM rtx's.  LEN is an rtx that says how long
   they are.  LEN_TYPE is the type of the expression that was used to
   calculate it.

   If EQUALITY_ONLY is true, it means we don't have to return the tri-state
   value of a normal memcmp call, instead we can just compare for equality.
   If FORCE_LIBCALL is true, we should emit a call to memcmp rather than
   returning NULL_RTX.

   Optionally, the caller can pass a constfn and associated data in Y_CFN
   and Y_CFN_DATA, describing that the second operand being compared is a
   known constant and how to obtain its data.
   Return the result of the comparison, or NULL_RTX if we failed to
   perform the operation.  */

rtx
emit_block_cmp_hints (rtx x, rtx y, rtx len, tree len_type, rtx target,
		      bool equality_only, by_pieces_constfn y_cfn,
		      void *y_cfndata)
{
  rtx result = 0;

  if (CONST_INT_P (len) && INTVAL (len) == 0)
    return const0_rtx;

  gcc_assert (MEM_P (x) && MEM_P (y));
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  if (equality_only
      && CONST_INT_P (len)
      && can_do_by_pieces (INTVAL (len), align, COMPARE_BY_PIECES))
    result = compare_by_pieces (x, y, INTVAL (len), target, align,
				y_cfn, y_cfndata);
  else
    result = emit_block_cmp_via_cmpmem (x, y, len, len_type, target, align);

  return result;
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
{
  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
  if (targetm.have_load_multiple ())
    {
      rtx_insn *last = get_last_insn ();
      rtx first = gen_rtx_REG (word_mode, regno);
      if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
						     GEN_INT (nregs)))
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }

  for (int i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
  if (targetm.have_store_multiple ())
    {
      rtx_insn *last = get_last_insn ();
      rtx first = gen_rtx_REG (word_mode, regno);
      if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
						      GEN_INT (nregs)))
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }

  for (int i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
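
/* As an illustration (the register numbers are made up, not from any
   particular target), a 16-byte value split across two non-consecutive
   hard registers might be described by

     (parallel [(expr_list (reg:DF 32) (const_int 0))
                (expr_list (reg:DF 40) (const_int 8))])

   for which gen_group_rtx returns an identical PARALLEL with fresh
   pseudo registers in place of regs 32 and 40.  */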
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize);
      else
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (COMPLEX_MODE_P (mode)
               && GET_MODE (src) == mode
               && bytelen == GET_MODE_SIZE (mode))
        /* Let emit_move_complex do the bulk of the work.  */
        tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
          unsigned int elt = bytepos / slen0;
          unsigned int subpos = bytepos % slen0;

          if (subpos + bytelen <= slen0)
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, elt);
              if (subpos != 0
                  || subpos + bytelen != slen0
                  || (!CONSTANT_P (tmps[i])
                      && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode)))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             subpos * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode, false,
                                             NULL);
            }
          else
            {
              rtx mem;

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode, false,
                                           NULL);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
        {
          HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

          if (len == ssize)
            tmps[i] = src;
          else
            {
              rtx first, second;

              /* TODO: const_wide_int can have sizes other than this...  */
              gcc_assert (2 * len == ssize);
              split_double (src, &first, &second);
              if (i)
                tmps[i] = second;
              else
                tmps[i] = first;
            }
        }
      else if (REG_P (src) && GET_MODE (src) == mode)
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode, false, NULL);

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                shift, tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
        continue;
      emit_move_insn (d, tmps[i]);
    }
}
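
/* A usage sketch with hypothetical values: loading a 16-byte BLKmode MEM
   into a two-register group like the PARALLEL illustrated above would be

     emit_group_load (group, mem, type, 16);

   which extracts bytes 0-7 and 8-15 of MEM into pseudos and then copies
   each pseudo into the corresponding (probable) hard register.  */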
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        {
          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
        }
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
      else
        dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
        {
          tmps[i] = gen_reg_rtx (GET_MODE (reg));
          emit_move_insn (tmps[i], reg);
        }
      else
        tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */
      temp = assign_stack_temp (GET_MODE (dst), ssize);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      machine_mode outer = GET_MODE (dst);
      machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
        dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
         of the destination mode, use a paradoxical subreg to
         initialize the destination.  */
      if (start < finish)
        {
          inner = GET_MODE (tmps[start]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[start], inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  start++;
                }
            }
        }

      /* If the first element wasn't the low part, try the last.  */
      if (!done
          && start < finish - 1)
        {
          inner = GET_MODE (tmps[finish - 1]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[finish - 1], inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  finish--;
                }
            }
        }

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
        emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        adj_bytelen = ssize - bytepos;
      else
        adj_bytelen = bytelen;

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + adj_bytelen
              <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              machine_mode dest_mode = GET_MODE (dest);
              machine_mode tmp_mode = GET_MODE (tmps[i]);

              gcc_assert (bytepos == 0 && XVECLEN (src, 0));

              if (GET_MODE_ALIGNMENT (dest_mode)
                  >= GET_MODE_ALIGNMENT (tmp_mode))
                {
                  dest = assign_stack_temp (dest_mode,
                                            GET_MODE_SIZE (dest_mode));
                  emit_move_insn (adjust_address (dest,
                                                  tmp_mode,
                                                  bytepos),
                                  tmps[i]);
                  dst = dest;
                }
              else
                {
                  dest = assign_stack_temp (tmp_mode,
                                            GET_MODE_SIZE (tmp_mode));
                  emit_move_insn (dest, tmps[i]);
                  dst = adjust_address (dest, dest_mode, bytepos);
                }
              break;
            }
        }

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      shift, tmps[i], 0);
            }

          /* Make sure not to write past the end of the struct.  */
          store_bit_field (dest,
                           adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                           bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
                           VOIDmode, tmps[i], false);
        }

      /* Optimize the access just a bit.  */
      else if (MEM_P (dest)
               && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
                   || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
               && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
               && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);

      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         0, 0, mode, tmps[i], false);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Return a form of X that does not use a PARALLEL.  TYPE is the type
   of the value stored in X.  */

rtx
maybe_emit_group_store (rtx x, tree type)
{
  machine_mode mode = TYPE_MODE (type);
  gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
  if (GET_CODE (x) == PARALLEL)
    {
      rtx result = gen_reg_rtx (mode);
      emit_group_store (result, x, type, int_size_in_bytes (type));
      return result;
    }
  return x;
}
/* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.

   This is used on targets that return BLKmode values in registers.  */

static void
copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
  machine_mode mode = GET_MODE (srcreg);
  machine_mode tmode = GET_MODE (target);
  machine_mode copy_mode;

  /* BLKmode registers created in the back-end shouldn't have survived.  */
  gcc_assert (mode != BLKmode);

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least end of
     the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* We can use a single move if we have an exact mode for the size.  */
  else if (MEM_P (target)
           && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
               || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
           && bytes == GET_MODE_SIZE (mode))
    {
      emit_move_insn (adjust_address (target, mode, 0), srcreg);
      return;
    }

  /* And if we additionally have the same mode for a register.  */
  else if (REG_P (target)
           && GET_MODE (target) == mode
           && bytes == GET_MODE_SIZE (mode))
    {
      emit_move_insn (target, srcreg);
      return;
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */
  if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
    {
      srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
      mode = word_mode;
    }

  /* Copy the structure BITSIZE bits at a time.  If the target lives in
     memory, take care of not reading/writing past its end by selecting
     a copy mode suited to BITSIZE.  This should always be possible given
     how it is computed.

     If the target lives in register, make sure not to select a copy mode
     larger than the mode of the register.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */

  copy_mode = word_mode;
  if (MEM_P (target))
    {
      machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
      if (mem_mode != BLKmode)
        copy_mode = mem_mode;
    }
  else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
    copy_mode = tmode;

  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
        dst = target;
      else if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, copy_mode, copy_mode,
                                          false, NULL),
                       false);
    }
}
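
/* Worked example of the padding correction (the numbers are illustrative):
   for a 6-byte structure on a 32-bit big-endian target that returns small
   aggregates at the least significant end of the register,
   bytes % UNITS_PER_WORD == 2, so
   padding_correction = 32 - 2 * 8 = 16, and the copy loop skips the 16
   pad bits at the most significant end of the first source word.  */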
/* Copy BLKmode value SRC into a register of mode MODE.  Return the
   register if it contains any data, otherwise return null.

   This is used on targets that return BLKmode values in registers.  */

rtx
copy_blkmode_to_reg (machine_mode mode, tree src)
{
  int i, n_regs;
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
  unsigned int bitsize;
  rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
  machine_mode dst_mode;

  gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);

  x = expand_normal (src);

  bytes = int_size_in_bytes (TREE_TYPE (src));
  if (bytes == 0)
    return NULL_RTX;

  /* If the structure doesn't take up a whole number of words, see
     whether the register value should be padded on the left or on
     the right.  Set PADDING_CORRECTION to the number of padding
     bits needed on the left side.

     In most ABIs, the structure will be returned at the least end of
     the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (TREE_TYPE (src))
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
                                           * BITS_PER_UNIT));

  n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  dst_words = XALLOCAVEC (rtx, n_regs);
  bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);

  /* Copy the structure BITSIZE bits at a time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new destination pseudo each time xbitpos is
         on a word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        {
          /* Generate an appropriate register.  */
          dst_word = gen_reg_rtx (word_mode);
          dst_words[xbitpos / BITS_PER_WORD] = dst_word;

          /* Clear the destination before we move anything into it.  */
          emit_move_insn (dst_word, CONST0_RTX (word_mode));
        }

      /* We need a new source operand each time bitpos is on a word
         boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);

      /* Use bitpos for the source extraction (left justified) and
         xbitpos for the destination store (right justified).  */
      store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
                       0, 0, word_mode,
                       extract_bit_field (src_word, bitsize,
                                          bitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode,
                                          false, NULL),
                       false);
    }

  if (mode == BLKmode)
    {
      /* Find the smallest integer mode large enough to hold the
         entire structure.  */
      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        /* Have we found a large enough mode?  */
        if (GET_MODE_SIZE (mode) >= bytes)
          break;

      /* A suitable mode should have been found.  */
      gcc_assert (mode != VOIDmode);
    }

  if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
    dst_mode = word_mode;
  else
    dst_mode = mode;
  dst = gen_reg_rtx (dst_mode);

  for (i = 0; i < n_regs; i++)
    emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);

  if (mode != dst_mode)
    dst = gen_lowpart (mode, dst);

  return dst;
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
  gcc_assert (REG_P (reg));

  if (!HARD_REGISTER_P (reg))
    return;

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add a CLOBBER expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
    }
}
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the code of the expression on the RHS is CODE.  Return
   NULL otherwise.  */

static gimple *
get_def_for_expr (tree name, enum tree_code code)
{
  gimple *def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || gimple_assign_rhs_code (def_stmt) != code)
    return NULL;

  return def_stmt;
}

/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the class of the expression on the RHS is CLASS.  Return
   NULL otherwise.  */

static gimple *
get_def_for_expr_class (tree name, enum tree_code_class tclass)
{
  gimple *def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
    return NULL;

  return def_stmt;
}
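
/* Usage sketch (NAME is a hypothetical SSA name, not a fixed API): when
   expanding a multiply-accumulate, an expander can ask whether an operand
   was defined by a multiplication:

     gimple *def = get_def_for_expr (name, MULT_EXPR);

   and, if DEF is non-null, inspect gimple_assign_rhs1/rhs2 of DEF to
   combine the multiply with the addition being expanded.  */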
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
                     unsigned int expected_align, HOST_WIDE_INT expected_size,
                     unsigned HOST_WIDE_INT min_size,
                     unsigned HOST_WIDE_INT max_size,
                     unsigned HOST_WIDE_INT probable_max_size)
{
  machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (mode != BLKmode
      && CONST_INT_P (size)
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
        {
          emit_move_insn (object, zero);
          return NULL;
        }

      if (COMPLEX_MODE_P (mode))
        {
          zero = CONST0_RTX (GET_MODE_INNER (mode));
          if (zero != NULL)
            {
              write_complex_part (object, zero, 0);
              write_complex_part (object, zero, 1);
              return NULL;
            }
        }
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (CONST_INT_P (size)
      && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
                                                 CLEAR_BY_PIECES,
                                                 optimize_insn_for_speed_p ()))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align,
                                   expected_align, expected_size,
                                   min_size, max_size, probable_max_size))
    ;
  else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
    return set_storage_via_libcall (object, size, const0_rtx,
                                    method == BLOCK_OP_TAILCALL);
  else
    gcc_unreachable ();

  return NULL;
}

rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return clear_storage_hints (object, size, method, 0, -1, min, max, max);
}
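
/* For instance (a sketch; OBJ is assumed to be a BLKmode MEM), zeroing a
   24-byte object would be

     clear_storage (obj, GEN_INT (24), BLOCK_OP_NORMAL);

   which clears by pieces, through a setmem pattern, or through a memset
   libcall, whichever the heuristics above select.  */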
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
  tree call_expr, fn, object_tree, size_tree, val_tree;
  machine_mode size_mode;

  object = copy_addr_to_reg (XEXP (object, 0));
  object_tree = make_tree (ptr_type_node, object);

  if (!CONST_INT_P (val))
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
  val_tree = make_tree (integer_type_node, val);

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);
  size_tree = make_tree (sizetype, size);

  /* It is incorrect to use the libcall calling conventions for calls to
     memset because it can be provided by the user.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMSET);
  call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  return expand_call (call_expr, NULL_RTX, false);
}
/* Expand a setmem pattern; return true if successful.  */

bool
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
                        unsigned int expected_align, HOST_WIDE_INT expected_size,
                        unsigned HOST_WIDE_INT min_size,
                        unsigned HOST_WIDE_INT max_size,
                        unsigned HOST_WIDE_INT probable_max_size)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  machine_mode mode;

  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
    {
      if ((unsigned HOST_WIDE_INT) expected_size > max_size)
        expected_size = max_size;
      if ((unsigned HOST_WIDE_INT) expected_size < min_size)
        expected_size = min_size;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (setmem_optab, mode);

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  Since SIZE is within the Pmode address
             space, we limit MODE to Pmode.  */
          && ((CONST_INT_P (size)
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || max_size <= (GET_MODE_MASK (mode) >> 1)
              || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
        {
          struct expand_operand ops[9];
          unsigned int nops;

          nops = insn_data[(int) code].n_generator_args;
          gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

          create_fixed_operand (&ops[0], object);
          /* The check above guarantees that this size conversion is valid.  */
          create_convert_operand_to (&ops[1], size, mode, true);
          create_convert_operand_from (&ops[2], val, byte_mode, true);
          create_integer_operand (&ops[3], align / BITS_PER_UNIT);
          if (nops >= 6)
            {
              create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
              create_integer_operand (&ops[5], expected_size);
            }
          if (nops >= 8)
            {
              create_integer_operand (&ops[6], min_size);
              /* If we can not represent the maximal size,
                 make parameter NULL.  */
              if ((HOST_WIDE_INT) max_size != -1)
                create_integer_operand (&ops[7], max_size);
              else
                create_fixed_operand (&ops[7], NULL);
            }
          if (nops == 9)
            {
              /* If we can not represent the maximal size,
                 make parameter NULL.  */
              if ((HOST_WIDE_INT) probable_max_size != -1)
                create_integer_operand (&ops[8], probable_max_size);
              else
                create_fixed_operand (&ops[8], NULL);
            }
          if (maybe_expand_insn (code, nops, ops))
            return true;
        }
    }

  return false;
}
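
/* The operand layout a 9-operand setmem pattern therefore sees is:
   0 destination MEM, 1 length, 2 fill value, 3 known alignment,
   4 expected alignment, 5 expected size, 6 minimal size, 7 maximal size,
   8 probable maximal size, where 7 and 8 are passed as NULL when they
   cannot be represented in a HOST_WIDE_INT.  */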
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  machine_mode cmode;
  machine_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
                                         imag_p ? GET_MODE_SIZE (imode) : 0),
                      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && REG_NREGS (cplx) % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
                                      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
        {
          emit_move_insn (part, val);
          return;
        }
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val,
                   false);
}
/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

rtx
read_complex_part (rtx cplx, bool imag_p)
{
  machine_mode cmode, imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
        {
          tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
          if (CONSTANT_CLASS_P (part))
            return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
        }
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
                              imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
         the original object if it spans an even number of hard regs.
         This special case is important for SCmode on 64-bit platforms
         where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
          && REGNO (cplx) < FIRST_PSEUDO_REGISTER
          && REG_NREGS (cplx) % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
                                     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
        return ret;
      else
        /* simplify_gen_subreg may fail for sub-word MEMs.  */
        gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
                            true, NULL_RTX, imode, imode, false, NULL);
}
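
/* For example, for a CONCAT such as

     (concat:SC (reg:SF 100) (reg:SF 101))

   read_complex_part simply returns reg 100 or reg 101, while for a MEM:SC
   the real and imaginary parts are read at offsets 0 and
   GET_MODE_SIZE (SFmode) respectively.  (The register numbers are
   illustrative only.)  */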
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (machine_mode new_mode,
                       machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (push_operand (x, GET_MODE (x)))
    {
      ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
      MEM_COPY_ATTRIBUTES (ret, x);
    }
  else if (MEM_P (x))
    {
      /* We don't have to worry about changing the address since the
         size in bytes is supposed to be the same.  */
      if (reload_in_progress)
        {
          /* Copy the MEM to change the mode and move any
             substitutions from the old MEM to the new one.  */
          ret = adjust_address_nv (x, new_mode, 0);
          copy_replacements (x, ret);
        }
      else
        ret = adjust_address (x, new_mode, 0);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
         that the new mode is ok for a hard register.  If we were to use
         simplify_gen_subreg, we would create the subreg, but would
         probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
         we want.  Which is needed for CCmodes on some targets.  */
      if (force)
        ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
        ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx_insn *
emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
{
  machine_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  imode = int_mode_for_mode (mode);
  if (imode == BLKmode)
    return NULL;

  /* The target must support moves in this mode.  */
  code = optab_handler (mov_optab, imode);
  if (code == CODE_FOR_nothing)
    return NULL;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL;
  return emit_insn (GEN_FCN (code) (x, y));
}
/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
   Return an equivalent MEM that does not use an auto-increment.  */

rtx
emit_move_resolve_push (machine_mode mode, rtx x)
{
  enum rtx_code code = GET_CODE (XEXP (x, 0));
  HOST_WIDE_INT adjust;
  rtx temp;

  adjust = GET_MODE_SIZE (mode);
#ifdef PUSH_ROUNDING
  adjust = PUSH_ROUNDING (adjust);
#endif
  if (code == PRE_DEC || code == POST_DEC)
    adjust = -adjust;
  else if (code == PRE_MODIFY || code == POST_MODIFY)
    {
      rtx expr = XEXP (XEXP (x, 0), 1);
      HOST_WIDE_INT val;

      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
      gcc_assert (CONST_INT_P (XEXP (expr, 1)));
      val = INTVAL (XEXP (expr, 1));
      if (GET_CODE (expr) == MINUS)
        val = -val;
      gcc_assert (adjust == val || adjust == -val);
      adjust = val;
    }

  /* Do not use anti_adjust_stack, since we don't want to update
     stack_pointer_delta.  */
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
                              gen_int_mode (adjust, Pmode), stack_pointer_rtx,
                              0, OPTAB_LIB_WIDEN);
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);

  switch (code)
    {
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      temp = stack_pointer_rtx;
      break;
    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
      temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
      break;
    default:
      gcc_unreachable ();
    }

  return replace_equiv_address (x, temp);
}
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
   X is known to satisfy push_operand, and MODE is known to be complex.
   Returns the last instruction emitted.  */

rtx_insn *
emit_move_complex_push (machine_mode mode, rtx x, rtx y)
{
  machine_mode submode = GET_MODE_INNER (mode);
  bool imag_first;

#ifdef PUSH_ROUNDING
  unsigned int submodesize = GET_MODE_SIZE (submode);

  /* In case we output to the stack, but the size is smaller than the
     machine can push exactly, we need to use move instructions.  */
  if (PUSH_ROUNDING (submodesize) != submodesize)
    {
      x = emit_move_resolve_push (mode, x);
      return emit_move_insn (x, y);
    }
#endif

  /* Note that the real part always precedes the imag part in memory
     regardless of machine's endianness.  */
  switch (GET_CODE (XEXP (x, 0)))
    {
    case PRE_DEC:
    case POST_DEC:
      imag_first = true;
      break;
    case PRE_INC:
    case POST_INC:
      imag_first = false;
      break;
    default:
      gcc_unreachable ();
    }

  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                  read_complex_part (y, imag_first));
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
                         read_complex_part (y, !imag_first));
}
/* A subroutine of emit_move_complex.  Perform the move from Y to X
   via two moves of the parts.  Returns the last instruction emitted.  */

rtx_insn *
emit_move_complex_parts (rtx x, rtx y)
{
  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_clobber (x);

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);

  return get_last_insn ();
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be complex.  Returns the last instruction emitted.  */

static rtx_insn *
emit_move_complex (machine_mode mode, rtx x, rtx y)
{
  bool try_int;

  /* Need to take special care for pushes, to maintain proper ordering
     of the data, and possibly extra padding.  */
  if (push_operand (x, mode))
    return emit_move_complex_push (mode, x, y);

  /* See if we can coerce the target into moving both values at once, except
     for floating point where we favor moving as parts if this is easy.  */
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
      && !(REG_P (x)
           && HARD_REGISTER_P (x)
           && REG_NREGS (x) == 1)
      && !(REG_P (y)
           && HARD_REGISTER_P (y)
           && REG_NREGS (y) == 1))
    try_int = false;
  /* Not possible if the values are inherently not adjacent.  */
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
    try_int = false;
  /* Is possible if both are registers (or subregs of registers).  */
  else if (register_operand (x, mode) && register_operand (y, mode))
    try_int = true;
  /* If one of the operands is a memory, and alignment constraints
     are friendly enough, we may be able to do combined memory operations.
     We do not attempt this if Y is a constant because that combination is
     usually better with the by-parts thing below.  */
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
           && (!STRICT_ALIGNMENT
               || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
    try_int = true;
  else
    try_int = false;

  if (try_int)
    {
      rtx_insn *ret;

      /* For memory to memory moves, optimal behavior can be had with the
         existing block move logic.  */
      if (MEM_P (x) && MEM_P (y))
        {
          emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
                           BLOCK_OP_NO_LIBCALL);
          return get_last_insn ();
        }

      ret = emit_move_via_integer (mode, x, y, true);
      if (ret)
        return ret;
    }

  return emit_move_complex_parts (x, y);
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx_insn *
emit_move_ccmode (machine_mode mode, rtx x, rtx y)
{
  rtx_insn *ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = optab_handler (mov_optab, CCmode);
      if (code != CODE_FOR_nothing)
        {
          x = emit_move_change_mode (CCmode, mode, x, true);
          y = emit_move_change_mode (CCmode, mode, y, true);
          return emit_insn (GEN_FCN (code) (x, y));
        }
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}
/* Return true if word I of OP lies entirely in the
   undefined bits of a paradoxical subreg.  */

static bool
undefined_operand_subword_p (const_rtx op, int i)
{
  machine_mode innermode, innermostmode;
  int offset;
  if (GET_CODE (op) != SUBREG)
    return false;
  innermode = GET_MODE (op);
  innermostmode = GET_MODE (SUBREG_REG (op));
  offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
  /* The SUBREG_BYTE represents offset, as if the value were stored in
     memory, except for a paradoxical subreg where we define
     SUBREG_BYTE to be 0; undo this exception as in
     simplify_subreg.  */
  if (SUBREG_BYTE (op) == 0
      && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
    {
      int difference = (GET_MODE_SIZE (innermostmode)
                        - GET_MODE_SIZE (innermode));
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  if (offset >= GET_MODE_SIZE (innermostmode)
      || offset <= -GET_MODE_SIZE (word_mode))
    return true;

  return false;
}
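
/* Example (illustrative; little-endian target with 32-bit words): in
   (subreg:DI (reg:SI 100) 0), word 1 lies beyond the SImode source, since
   its offset 4 >= GET_MODE_SIZE (SImode), so word 1 is undefined and
   emit_move_multi_word need not copy it.  */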
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is any multi-word or full-word mode that lacks a move_insn
   pattern.  Note that you will get better code if you define such
   patterns, even if they must turn into multiple assembler instructions.  */

static rtx_insn *
emit_move_multi_word (machine_mode mode, rtx x, rtx y)
{
  rtx_insn *last_insn = 0;
  rtx_insn *seq;
  rtx inner;
  bool need_clobber;
  int i;

  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);

  /* If X is a push on the stack, do the push now and replace
     X with a reference to the stack pointer.  */
  if (push_operand (x, mode))
    x = emit_move_resolve_push (mode, x);

  /* If we are in reload, see if either operand is a MEM whose address
     is scheduled for replacement.  */
  if (reload_in_progress && MEM_P (x)
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
    x = replace_equiv_address_nv (x, inner);
  if (reload_in_progress && MEM_P (y)
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
    y = replace_equiv_address_nv (y, inner);

  start_sequence ();

  need_clobber = false;
  for (i = 0;
       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
       i++)
    {
      rtx xpart = operand_subword (x, i, 1, mode);
      rtx ypart;

      /* Do not generate code for a move if it would come entirely
         from the undefined bits of a paradoxical subreg.  */
      if (undefined_operand_subword_p (y, i))
        continue;

      ypart = operand_subword (y, i, 1, mode);

      /* If we can't get a part of Y, put Y into memory if it is a
         constant.  Otherwise, force it into a register.  Then we must
         be able to get a part of Y.  */
      if (ypart == 0 && CONSTANT_P (y))
        {
          y = use_anchored_address (force_const_mem (mode, y));
          ypart = operand_subword (y, i, 1, mode);
        }
      else if (ypart == 0)
        ypart = operand_subword_force (y, i, mode);

      gcc_assert (xpart && ypart);

      need_clobber |= (GET_CODE (xpart) == SUBREG);

      last_insn = emit_move_insn (xpart, ypart);
    }

  seq = get_insns ();
  end_sequence ();

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.
     We never want to emit such a clobber after reload.  */
  if (x != y
      && ! (reload_in_progress || reload_completed)
      && need_clobber != 0)
    emit_clobber (x);

  emit_insn (seq);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx_insn *
emit_move_insn_1 (rtx x, rtx y)
{
  machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = optab_handler (mov_optab, mode);
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
      || ALL_FIXED_POINT_MODE_P (mode))
    {
      rtx_insn *result = emit_move_via_integer (mode, x, y, true);

      /* If we can't find an integer mode, use multi words.  */
      if (result)
        return result;
      else
        return emit_move_multi_word (mode, x, y);
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);

      if (ret)
        {
          if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
            return ret;
        }
    }

  return emit_move_multi_word (mode, x, y);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx_insn *
emit_move_insn (rtx x, rtx y)
{
  machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx_insn *last_insn;
  rtx set;

  gcc_assert (mode != BLKmode
              && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && (last_insn = compress_float_constant (x, y)))
        return last_insn;

      y_cst = y;

      if (!targetm.legitimate_constant_p (mode, y))
        {
          y = force_const_mem (mode, y);

          /* If the target's cannot_force_const_mem prevented the spill,
             assume that the target's move expanders will also take care
             of the non-legitimate constant.  */
          if (!y)
            y = y_cst;
          else
            y = use_anchored_address (y);
        }
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
                                         MEM_ADDR_SPACE (x))
          && ! push_operand (x, GET_MODE (x))))
    x = validize_mem (x);

  if (MEM_P (y)
      && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
                                        MEM_ADDR_SPACE (y)))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));

  return last_insn;
}
/* Generate the body of an instruction to copy Y into X.
   It may be a list of insns, if one insn isn't enough.  */

rtx_insn *
gen_move_insn (rtx x, rtx y)
{
  rtx_insn *seq;

  start_sequence ();
  emit_move_insn_1 (x, y);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx_insn *
compress_float_constant (rtx x, rtx y)
{
  machine_mode dstmode = GET_MODE (x);
  machine_mode orig_srcmode = GET_MODE (y);
  machine_mode srcmode;
  const REAL_VALUE_TYPE *r;
  int oldcost, newcost;
  bool speed = optimize_insn_for_speed_p ();

  r = CONST_DOUBLE_REAL_VALUE (y);

  if (targetm.legitimate_constant_p (dstmode, y))
    oldcost = set_src_cost (y, orig_srcmode, speed);
  else
    oldcost = set_src_cost (force_const_mem (dstmode, y), dstmode, speed);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y;
      rtx_insn *last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
        continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, r))
        continue;

      trunc_y = const_double_from_real_value (*r, srcmode);

      if (targetm.legitimate_constant_p (srcmode, trunc_y))
        {
          /* Skip if the target needs extra instructions to perform
             the extension.  */
          if (!insn_operand_matches (ic, 1, trunc_y))
            continue;
          /* This is valid, but may not be cheaper than the original.  */
          newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
                                  dstmode, speed);
          if (oldcost < newcost)
            continue;
        }
      else if (float_extend_from_mem[dstmode][srcmode])
        {
          trunc_y = force_const_mem (srcmode, trunc_y);
          /* This is valid, but may not be cheaper than the original.  */
          newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
                                  dstmode, speed);
          if (oldcost < newcost)
            continue;
          trunc_y = validize_mem (trunc_y);
        }
      else
        continue;

      /* For CSE's benefit, force the compressed constant pool entry
         into a new pseudo.  This constant may be used in different modes,
         and if not, combine will put things back together for us.  */
      trunc_y = force_reg (srcmode, trunc_y);

      /* If x is a hard register, perform the extension into a pseudo,
         so that e.g. stack realignment code is aware of it.  */
      rtx target = x;
      if (REG_P (x) && HARD_REGISTER_P (x))
        target = gen_reg_rtx (dstmode);

      emit_unop_insn (ic, target, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (target))
        set_unique_reg_note (last_insn, REG_EQUAL, y);

      if (target == x)
        return last_insn;
      return emit_move_insn (x, target);
    }

  return NULL;
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (Pmode, size, extra));
  else if (REG_P (size) && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp,
                             gen_int_mode (extra, Pmode),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

  if (STACK_GROWS_DOWNWARD)
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
        temp = plus_constant (Pmode, temp, extra);
    }
  else
    {
      if (CONST_INT_P (size))
        temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
                              -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, plus_constant (Pmode, size,
                                                               extra)));
      else
        temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                             negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
/* A utility routine that returns the base of an auto-inc memory, or NULL.  */

static rtx
mem_autoinc_base (rtx mem)
{
  if (MEM_P (mem))
    {
      rtx addr = XEXP (mem, 0);
      if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
        return XEXP (addr, 0);
    }
  return NULL;
}
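
/* E.g. for a push such as (mem:SI (pre_dec:SI (reg sp))) this returns the
   stack pointer register, while for a plain (mem:SI (reg sp)) it returns
   NULL.  */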
/* A utility routine used here, in reload, and in try_split.  The insns
   after PREV up to and including LAST are known to adjust the stack,
   with a final value of END_ARGS_SIZE.  Iterate backward from LAST
   placing notes as appropriate.  PREV may be NULL, indicating the
   entire insn sequence prior to LAST should be scanned.

   The set of allowed stack pointer modifications is small:
     (1) One or more auto-inc style memory references (aka pushes),
     (2) One or more addition/subtraction with the SP as destination,
     (3) A single move insn with the SP as destination,
     (4) A call_pop insn,
     (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.

   Insns in the sequence that do not modify the SP are ignored,
   except for noreturn calls.

   The return value is the amount of adjustment that can be trivially
   verified, via immediate operand or auto-inc.  If the adjustment
   cannot be trivially extracted, the return value is INT_MIN.  */

HOST_WIDE_INT
find_args_size_adjust (rtx_insn *insn)
{
  rtx dest, set, pat;
  int i;

  pat = PATTERN (insn);
  set = NULL;

  /* Look for a call_pop pattern.  */
  if (CALL_P (insn))
    {
      /* We have to allow non-call_pop patterns for the case
         of emit_single_push_insn of a TLS address.  */
      if (GET_CODE (pat) != PARALLEL)
        return 0;

      /* All call_pop have a stack pointer adjust in the parallel.
         The call itself is always first, and the stack adjust is
         usually last, so search from the end.  */
      for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
        {
          set = XVECEXP (pat, 0, i);
          if (GET_CODE (set) != SET)
            continue;
          dest = SET_DEST (set);
          if (dest == stack_pointer_rtx)
            break;
        }
      /* We'd better have found the stack pointer adjust.  */
      if (i == 0)
        return 0;
      /* Fall through to process the extracted SET and DEST
         as if it was a standalone insn.  */
    }
  else if (GET_CODE (pat) == SET)
    set = pat;
  else if ((set = single_set (insn)) != NULL)
    ;
  else if (GET_CODE (pat) == PARALLEL)
    {
      /* ??? Some older ports use a parallel with a stack adjust
         and a store for a PUSH_ROUNDING pattern, rather than a
         PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
      /* ??? See h8300 and m68k, pushqi1.  */
      for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
        {
          set = XVECEXP (pat, 0, i);
          if (GET_CODE (set) != SET)
            continue;
          dest = SET_DEST (set);
          if (dest == stack_pointer_rtx)
            break;

          /* We do not expect an auto-inc of the sp in the parallel.  */
          gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
          gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
                               != stack_pointer_rtx);
        }
      if (i < 0)
        return 0;
    }
  else
    return 0;

  dest = SET_DEST (set);

  /* Look for direct modifications of the stack pointer.  */
  if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
    {
      /* Look for a trivial adjustment, otherwise assume nothing.  */
      /* Note that the SPU restore_stack_block pattern refers to
         the stack pointer in V4SImode.  Consider that non-trivial.  */
      if (SCALAR_INT_MODE_P (GET_MODE (dest))
          && GET_CODE (SET_SRC (set)) == PLUS
          && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
          && CONST_INT_P (XEXP (SET_SRC (set), 1)))
        return INTVAL (XEXP (SET_SRC (set), 1));
      /* ??? Reload can generate no-op moves, which will be cleaned
         up later.  Recognize it and continue searching.  */
      else if (rtx_equal_p (dest, SET_SRC (set)))
        return 0;
      else
        return HOST_WIDE_INT_MIN;
    }
  else
    {
      rtx mem, addr;

      /* Otherwise only think about autoinc patterns.  */
      if (mem_autoinc_base (dest) == stack_pointer_rtx)
        {
          mem = dest;
          gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
                               != stack_pointer_rtx);
        }
      else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
        mem = SET_SRC (set);
      else
        return 0;

      addr = XEXP (mem, 0);
      switch (GET_CODE (addr))
        {
        case PRE_INC:
        case POST_INC:
          return GET_MODE_SIZE (GET_MODE (mem));
        case PRE_DEC:
        case POST_DEC:
          return -GET_MODE_SIZE (GET_MODE (mem));
        case PRE_MODIFY:
        case POST_MODIFY:
          addr = XEXP (addr, 1);
          gcc_assert (GET_CODE (addr) == PLUS);
          gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
          gcc_assert (CONST_INT_P (XEXP (addr, 1)));
          return INTVAL (XEXP (addr, 1));
        default:
          gcc_unreachable ();
        }
    }
}
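
/* Worked example (illustrative; 32-bit target whose stack grows downward):
   a push such as

     (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI 100))

   is recognized through the PRE_DEC case above and yields -4, while
   (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -12))) yields -12
   directly from the immediate operand.  */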
int
fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
{
  int args_size = end_args_size;
  bool saw_unknown = false;
  rtx_insn *insn;

  for (insn = last; insn != prev; insn = PREV_INSN (insn))
    {
      HOST_WIDE_INT this_delta;

      if (!NONDEBUG_INSN_P (insn))
	continue;

      this_delta = find_args_size_adjust (insn);
      if (this_delta == 0)
	{
	  if (!CALL_P (insn)
	      || ACCUMULATE_OUTGOING_ARGS
	      || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
	    continue;
	}

      gcc_assert (!saw_unknown);
      if (this_delta == HOST_WIDE_INT_MIN)
	saw_unknown = true;

      add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
      if (STACK_GROWS_DOWNWARD)
	this_delta = -(unsigned HOST_WIDE_INT) this_delta;

      args_size -= this_delta;
    }

  return saw_unknown ? INT_MIN : args_size;
}
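/* Illustrative sketch (hypothetical names, not GCC code): the loop above,
   modeled on a plain array of per-insn deltas.  Walking backward from the
   final args size, each insn is annotated with the args size *after* it,
   and the running value is then un-adjusted by that insn's own delta,
   negated when the stack grows downward.  */
#if 0
static void
model_fixup_args_size (const long *delta, long *note, int n_insns,
		       long end_args_size, int stack_grows_downward)
{
  long args_size = end_args_size;
  for (int i = n_insns - 1; i >= 0; --i)
    {
      note[i] = args_size;	/* the REG_ARGS_SIZE value for insn i */
      args_size -= stack_grows_downward ? -delta[i] : delta[i];
    }
}
#endif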
#ifdef PUSH_ROUNDING
/* Emit single push insn.  */

static void
emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = optab_handler (push_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];

      create_input_operand (&ops[0], x, mode);
      if (maybe_expand_insn (icode, 1, ops))
	return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
		      expand_binop (Pmode,
				    STACK_GROWS_DOWNWARD ? sub_optab
				    : add_optab,
				    stack_pointer_rtx,
				    gen_int_mode (rounded_size, Pmode),
				    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
      if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
	/* We have already decremented the stack pointer, so get the
	   previous value.  */
	offset += (HOST_WIDE_INT) rounded_size;

      if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
	/* We have already incremented the stack pointer, so get the
	   previous value.  */
	offset -= (HOST_WIDE_INT) rounded_size;

      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				gen_int_mode (offset, Pmode));
    }
  else
    {
      if (STACK_GROWS_DOWNWARD)
	/* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
	dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				  gen_int_mode (-(HOST_WIDE_INT) rounded_size,
						Pmode));
      else
	/* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
	dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				  gen_int_mode (rounded_size, Pmode));

      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (cfun->tail_call_marked)
	/* Function incoming arguments may overlap with sibling call
	   outgoing arguments and we cannot allow reordering of reads
	   from function arguments with stores to outgoing arguments
	   of sibling calls.  */
	set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
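/* Illustrative sketch (hypothetical helper, not GCC code): in the
   downward-padding case above, pushing a 5-byte value rounded to 8 gives
   padding_size = 3, so after the explicit SP adjustment the value is
   stored at SP + 3 -- plus rounded_size when a post-decrement push on a
   downward-growing stack has already moved SP past the slot.  */
#if 0
static long
model_pad_down_offset (long mode_size, long rounded_size,
		       int sp_already_past_slot)
{
  long offset = rounded_size - mode_size;	/* padding below the value */
  if (sp_already_past_slot)			/* e.g. POST_DEC, stack down */
    offset += rounded_size;
  return offset;
}
#endif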
/* Emit and annotate a single push insn.  */

static void
emit_single_push_insn (machine_mode mode, rtx x, tree type)
{
  int delta, old_delta = stack_pointer_delta;
  rtx_insn *prev = get_last_insn ();
  rtx_insn *last;

  emit_single_push_insn_1 (mode, x, type);

  last = get_last_insn ();

  /* Notice the common case where we emitted exactly one insn.  */
  if (PREV_INSN (last) == prev)
    {
      add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
      return;
    }

  delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
  gcc_assert (delta == INT_MIN || delta == old_delta);
}
#endif
/* If reading SIZE bytes from X will end up reading from
   Y return the number of bytes that overlap.  Return -1
   if there is no overlap or -2 if we can't determine
   (for example when X and Y have different base registers).  */

static int
memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
{
  rtx tmp = plus_constant (Pmode, x, size);
  rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);

  if (!CONST_INT_P (sub))
    return -2;

  HOST_WIDE_INT val = INTVAL (sub);

  return IN_RANGE (val, 1, size) ? val : -1;
}
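/* Illustrative sketch (hypothetical helper, not GCC code): with constant
   addresses the test above is plain interval arithmetic.  Reading SIZE
   bytes from X overlaps Y when X + SIZE - Y falls in [1, SIZE]; e.g.
   X = 100, Y = 104, SIZE = 8 gives 100 + 8 - 104 = 4 overlapping bytes.  */
#if 0
static long
model_load_overlap (long x, long y, long size)
{
  long val = x + size - y;
  return (val >= 1 && val <= size) ? val : -1;	/* -1: no overlap */
}
#endif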
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.
   Return true if successful.  May return false if asked to push a
   partial argument during a sibcall optimization (as specified by
   SIBCALL_P) and the incoming and outgoing pointers cannot be shown
   not to overlap.

   ALIGN (in bits) is the maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
bool
emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
		unsigned int align, int partial, rtx reg, int extra,
		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
		rtx alignment_pad, bool sibcall_p)
{
  rtx xinner;
  enum direction stack_direction = STACK_GROWS_DOWNWARD ? downward : upward;

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x;

  int nregs = partial / UNITS_PER_WORD;
  rtx *tmp_regs = NULL;
  int overlapping = 0;

  if (mode == BLKmode
      || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used;
      int offset;
      int skip;

      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      used = partial - offset;

      if (mode != BLKmode)
	{
	  /* A value is to be stored in an insufficiently aligned
	     stack slot; copy via a suitably aligned slot if
	     necessary.  */
	  size = GEN_INT (GET_MODE_SIZE (mode));
	  if (!MEM_P (xinner))
	    {
	      temp = assign_temp (type, 1, 1);
	      emit_move_insn (temp, xinner);
	      xinner = temp;
	    }
	}

      gcc_assert (size);

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && PUSH_ARGS
	  && CONST_INT_P (size)
	  && skip == 0
	  && MEM_ALIGN (xinner) >= align
	  && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
	      || align >= BIGGEST_ALIGNMENT
	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
		  == (align / BITS_PER_UNIT)))
	  && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
	}
      else
#endif /* PUSH_ROUNDING  */
	{
	  rtx target;

	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (CONST_INT_P (size))
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     gen_int_mode (used, GET_MODE (size)),
				     NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (CONST_INT_P (args_so_far))
	    temp = memory_address (BLKmode,
				   plus_constant (Pmode, args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (Pmode,
						  gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));

	  if (!ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If the source is referenced relative to the stack pointer,
		 copy it to another register to stabilize it.  We do not need
		 to do this if we know that we won't be changing sp.  */

	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
		temp = copy_to_reg (temp);
	    }

	  target = gen_rtx_MEM (BLKmode, temp);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (target, align);

	  /* If part should go in registers and pushing to that part would
	     overwrite some of the values that need to go into regs, load the
	     overlapping values into temporary pseudos to be moved into the
	     hard regs at the end after the stack pushing has completed.
	     We cannot load them directly into the hard regs here because
	     they can be clobbered by the block move expansions.  */

	  if (partial > 0 && reg != 0 && mode == BLKmode
	      && GET_CODE (reg) != PARALLEL)
	    {
	      overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
	      if (overlapping > 0)
		{
		  gcc_assert (overlapping % UNITS_PER_WORD == 0);
		  overlapping /= UNITS_PER_WORD;

		  tmp_regs = XALLOCAVEC (rtx, overlapping);

		  for (int i = 0; i < overlapping; i++)
		    tmp_regs[i] = gen_reg_rtx (word_mode);

		  for (int i = 0; i < overlapping; i++)
		    emit_move_insn (tmp_regs[i],
				    operand_subword_force (target, i, mode));
		}
	      else if (overlapping == -1)
		overlapping = 0;
	      /* Could not determine whether there is overlap.
		 Fail the sibcall.  */
	      else
		{
		  overlapping = 0;
		  if (sibcall_p)
		    return false;
		}
	    }
	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # bytes of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
      offset /= UNITS_PER_WORD;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
      for (i = size - 1; i >= not_stack; i--)
	if (i >= not_stack + offset)
	  if (!emit_push_insn (operand_subword_force (x, i, mode),
			       word_mode, NULL_TREE, NULL_RTX, align, 0,
			       NULL_RTX, 0, args_addr,
			       GEN_INT (args_offset + ((i - not_stack + skip)
						       * UNITS_PER_WORD)),
			       reg_parm_stack_space, alignment_pad, sibcall_p))
	    return false;
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
	emit_single_push_insn (mode, x, type);
      else
#endif
	{
	  if (CONST_INT_P (args_so_far))
	    addr
	      = memory_address (mode,
				plus_constant (Pmode, args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
						       args_so_far));
	  dest = gen_rtx_MEM (mode, addr);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (dest, align);

	  emit_move_insn (dest, x);
	}
    }

  /* Move the partial arguments into the registers and any overlapping
     values that we moved into the pseudos in tmp_regs.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x, type, -1);
      else
	{
	  gcc_assert (partial % UNITS_PER_WORD == 0);
	  move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);

	  for (int i = 0; i < overlapping; i++)
	    emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
					 + nregs - overlapping + i),
			    tmp_regs[i]);
	}
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);

  return true;
}
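/* Illustrative sketch (hypothetical helper, not GCC code): the bookkeeping
   above for a scalar partly in registers splits PARTIAL bytes into whole
   words plus a PARM_BOUNDARY remainder.  With 4-byte words, a 4-byte parm
   boundary and PARTIAL = 10: offset = 10 % 4 = 2 bytes of space-only
   padding and not_stack = (10 - 2) / 4 = 2 words that never touch the
   stack.  */
#if 0
static void
model_partial_split (int partial, int parm_boundary_bytes, int word_bytes,
		     int *offset_words, int *not_stack_words)
{
  int offset = partial % parm_boundary_bytes; /* bytes reserved, not stored */
  *not_stack_words = (partial - offset) / word_bytes;
  *offset_words = offset / word_bytes;
}
#endif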
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
	  || x == 0
	  /* Only registers can be subtargets.  */
	  || !REG_P (x)
	  /* Don't use hard regs to avoid extending their life.  */
	  || REGNO (x) < FIRST_PSEUDO_REGISTER
	  ? 0 : x);
}
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */

static bool
optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
				 unsigned HOST_WIDE_INT bitpos,
				 unsigned HOST_WIDE_INT bitregion_start,
				 unsigned HOST_WIDE_INT bitregion_end,
				 machine_mode mode1, rtx str_rtx,
				 tree to, tree src, bool reverse)
{
  machine_mode str_mode = GET_MODE (str_rtx);
  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
  tree op0, op1;
  rtx value, result;
  optab binop;
  gimple *srcstmt;
  enum tree_code code;

  if (mode1 != VOIDmode
      || bitsize >= BITS_PER_WORD
      || str_bitsize > BITS_PER_WORD
      || TREE_SIDE_EFFECTS (to)
      || TREE_THIS_VOLATILE (to))
    return false;

  STRIP_NOPS (src);
  if (TREE_CODE (src) != SSA_NAME)
    return false;
  if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
    return false;

  srcstmt = get_gimple_for_ssa_name (src);
  if (!srcstmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
    return false;

  code = gimple_assign_rhs_code (srcstmt);

  op0 = gimple_assign_rhs1 (srcstmt);

  /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
     to find its initialization.  Hopefully the initialization will
     be from a bitfield load.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple *op0stmt = get_gimple_for_ssa_name (op0);

      /* We want to eventually have OP0 be the same as TO, which
	 should be a bitfield.  */
      if (!op0stmt
	  || !is_gimple_assign (op0stmt)
	  || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
	return false;
      op0 = gimple_assign_rhs1 (op0stmt);
    }

  op1 = gimple_assign_rhs2 (srcstmt);

  if (!operand_equal_p (to, op0, 0))
    return false;

  if (MEM_P (str_rtx))
    {
      unsigned HOST_WIDE_INT offset1;

      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
	str_mode = word_mode;
      str_mode = get_best_mode (bitsize, bitpos,
				bitregion_start, bitregion_end,
				MEM_ALIGN (str_rtx), str_mode, 0);
      if (str_mode == VOIDmode)
	return false;
      str_bitsize = GET_MODE_BITSIZE (str_mode);

      offset1 = bitpos;
      bitpos %= str_bitsize;
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
    }
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
    return false;
  else
    gcc_assert (!reverse);

  /* If the bit field covers the whole REG/MEM, store_field
     will likely generate better code.  */
  if (bitsize >= str_bitsize)
    return false;

  /* We can't handle fields split across multiple entities.  */
  if (bitpos + bitsize > str_bitsize)
    return false;

  if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    bitpos = str_bitsize - bitpos - bitsize;

  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* For now, just optimize the case of the topmost bitfield
	 where we don't need to do any masking and also
	 1 bit bitfields where xor can be used.
	 We might win by one instruction for the other bitfields
	 too if insv/extv instructions aren't used, so that
	 can be added later.  */
      if ((reverse || bitpos + bitsize != str_bitsize)
	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
	break;

      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      if (bitsize == 1 && (reverse || bitpos + bitsize != str_bitsize))
	{
	  value = expand_and (str_mode, value, const1_rtx, NULL);
	  binop = xor_optab;
	}
      else
	binop = code == PLUS_EXPR ? add_optab : sub_optab;

      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
      if (reverse)
	value = flip_storage_order (str_mode, value);
      result = expand_binop (str_mode, binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
	break;
      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
      if (bitpos + bitsize != str_bitsize)
	{
	  rtx mask = gen_int_mode ((HOST_WIDE_INT_1U << bitsize) - 1,
				   str_mode);
	  value = expand_and (str_mode, value, mask, NULL_RTX);
	}
      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
      if (reverse)
	value = flip_storage_order (str_mode, value);
      result = expand_binop (str_mode, binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    default:
      break;
    }

  return false;
}
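/* Illustrative sketch (hypothetical helper, not GCC code): in the
   BIT_IOR_EXPR case above the constant RHS is masked to the field width
   and shifted into place, so the bitfield update becomes a single
   full-word logical operation.  For a 3-bit field at bit 4 and value
   0b101: mask = (1 << 3) - 1 = 7 and the word is OR'ed with
   (0b101 & 7) << 4 = 0x50.  */
#if 0
static unsigned long
model_bitfield_ior (unsigned long word, unsigned long value,
		    unsigned bitpos, unsigned bitsize)
{
  unsigned long mask = (1UL << bitsize) - 1;
  return word | ((value & mask) << bitpos);
}
#endif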
/* In the C++ memory model, consecutive bit fields in a structure are
   considered one memory location.

   Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
   returns the bit range of consecutive bits in which this COMPONENT_REF
   belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
   and *OFFSET may be adjusted in the process.

   If the access does not need to be restricted, 0 is returned in both
   *BITSTART and *BITEND.  */

void
get_bit_range (unsigned HOST_WIDE_INT *bitstart,
	       unsigned HOST_WIDE_INT *bitend,
	       tree exp,
	       HOST_WIDE_INT *bitpos,
	       tree *offset)
{
  HOST_WIDE_INT bitoffset;
  tree field, repr;

  gcc_assert (TREE_CODE (exp) == COMPONENT_REF);

  field = TREE_OPERAND (exp, 1);
  repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
  /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
     need to limit the range we can access.  */
  if (!repr)
    {
      *bitstart = *bitend = 0;
      return;
    }

  /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
     part of a larger bit field, then the representative does not serve any
     useful purpose.  This can occur in Ada.  */
  if (handled_component_p (TREE_OPERAND (exp, 0)))
    {
      machine_mode rmode;
      HOST_WIDE_INT rbitsize, rbitpos;
      tree roffset;
      int unsignedp, reversep, volatilep = 0;
      get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
			   &roffset, &rmode, &unsignedp, &reversep,
			   &volatilep);
      if ((rbitpos % BITS_PER_UNIT) != 0)
	{
	  *bitstart = *bitend = 0;
	  return;
	}
    }

  /* Compute the adjustment to bitpos from the offset of the field
     relative to the representative.  DECL_FIELD_OFFSET of field and
     repr are the same by construction if they are not constants,
     see finish_bitfield_layout.  */
  if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
      && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
    bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
		 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
  else
    bitoffset = 0;
  bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
		- tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));

  /* If the adjustment is larger than bitpos, we would have a negative bit
     position for the lower bound and this may wreak havoc later.  Adjust
     offset and bitpos to make the lower bound non-negative in that case.  */
  if (bitoffset > *bitpos)
    {
      HOST_WIDE_INT adjust = bitoffset - *bitpos;
      gcc_assert ((adjust % BITS_PER_UNIT) == 0);

      *bitpos += adjust;
      if (*offset == NULL_TREE)
	*offset = size_int (-adjust / BITS_PER_UNIT);
      else
	*offset
	  = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
      *bitstart = 0;
    }
  else
    *bitstart = *bitpos - bitoffset;

  *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
}
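/* Illustrative sketch (hypothetical helper, not GCC code): for
     struct S { char a; int b : 7; int c : 9; char d; };
   the representative of B and C typically covers the 16 bits they share,
   so a store to C may touch that whole region but must not touch A or D.
   With the adjustment already applied, the bounds computed above are:  */
#if 0
static void
model_bit_range (long bitpos, long bitoffset, long repr_size_bits,
		 long *bitstart, long *bitend)
{
  /* BITOFFSET is the field's distance past the representative's start,
     so the writable region begins that far before the field...  */
  *bitstart = bitpos - bitoffset;
  /* ...and spans exactly the representative's DECL_SIZE.  */
  *bitend = *bitstart + repr_size_bits - 1;
}
#endif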
/* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
   in memory and has non-BLKmode.  DECL_RTL must not be a MEM; if
   DECL_RTL was not set yet, return NORTL.  */

static inline bool
addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
{
  if (TREE_CODE (addr) != ADDR_EXPR)
    return false;

  tree base = TREE_OPERAND (addr, 0);

  if (!DECL_P (base)
      || TREE_ADDRESSABLE (base)
      || DECL_MODE (base) == BLKmode)
    return false;

  if (!DECL_RTL_SET_P (base))
    return nortl;

  return (!MEM_P (DECL_RTL (base)));
}

/* Returns true if the MEM_REF REF refers to an object that does not
   reside in memory and has non-BLKmode.  */

static inline bool
mem_ref_refers_to_non_mem_p (tree ref)
{
  tree base = TREE_OPERAND (ref, 0);
  return addr_expr_of_non_mem_decl_p_1 (base, false);
}
/* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
   is true, try generating a nontemporal store.  */

void
expand_assignment (tree to, tree from, bool nontemporal)
{
  rtx to_rtx = 0;
  rtx result;
  machine_mode mode;
  unsigned int align;
  enum insn_code icode;

  /* Don't crash if the lhs of the assignment was erroneous.  */
  if (TREE_CODE (to) == ERROR_MARK)
    {
      expand_normal (from);
      return;
    }

  /* Optimize away no-op moves without side-effects.  */
  if (operand_equal_p (to, from, 0))
    return;

  /* Handle misaligned stores.  */
  mode = TYPE_MODE (TREE_TYPE (to));
  if ((TREE_CODE (to) == MEM_REF
       || TREE_CODE (to) == TARGET_MEM_REF)
      && mode != BLKmode
      && !mem_ref_refers_to_non_mem_p (to)
      && ((align = get_object_alignment (to))
	  < GET_MODE_ALIGNMENT (mode))
      && (((icode = optab_handler (movmisalign_optab, mode))
	   != CODE_FOR_nothing)
	  || SLOW_UNALIGNED_ACCESS (mode, align)))
    {
      rtx reg, mem;

      reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      reg = force_not_mem (reg);
      mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (TREE_CODE (to) == MEM_REF && REF_REVERSE_STORAGE_ORDER (to))
	reg = flip_storage_order (mode, reg);

      if (icode != CODE_FOR_nothing)
	{
	  struct expand_operand ops[2];

	  create_fixed_operand (&ops[0], mem);
	  create_input_operand (&ops[1], reg, mode);
	  /* The movmisalign<mode> pattern cannot fail, else the assignment
	     would silently be omitted.  */
	  expand_insn (icode, 2, ops);
	}
      else
	store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg,
			 false);
      return;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  Same for (partially) storing into a non-memory object.  */
  if (handled_component_p (to)
      || (TREE_CODE (to) == MEM_REF
	  && (REF_REVERSE_STORAGE_ORDER (to)
	      || mem_ref_refers_to_non_mem_p (to)))
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      unsigned HOST_WIDE_INT bitregion_start = 0;
      unsigned HOST_WIDE_INT bitregion_end = 0;
      tree offset;
      int unsignedp, reversep, volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &reversep, &volatilep);

      /* Make sure bitpos is not negative, it can wreak havoc later.  */
      if (bitpos < 0)
	{
	  gcc_assert (offset == NULL_TREE);
	  offset = size_int (bitpos >> LOG2_BITS_PER_UNIT);
	  bitpos &= BITS_PER_UNIT - 1;
	}

      if (TREE_CODE (to) == COMPONENT_REF
	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
	get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
      /* The C++ memory model naturally applies to byte-aligned fields.
	 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
	 BITSIZE are not byte-aligned, there is no need to limit the range
	 we can access.  This can occur with packed structures in Ada.  */
      else if (bitsize > 0
	       && bitsize % BITS_PER_UNIT == 0
	       && bitpos % BITS_PER_UNIT == 0)
	{
	  bitregion_start = bitpos;
	  bitregion_end = bitpos + bitsize - 1;
	}

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* If the field has a mode, we want to access it in the
	 field's mode, not the computed mode.
	 If a MEM has VOIDmode (external with incomplete type),
	 use BLKmode for it instead.  */
      if (MEM_P (to_rtx))
	{
	  if (mode1 != VOIDmode)
	    to_rtx = adjust_address (to_rtx, mode1, 0);
	  else if (GET_MODE (to_rtx) == VOIDmode)
	    to_rtx = adjust_address (to_rtx, BLKmode, 0);
	}

      if (offset != 0)
	{
	  machine_mode address_mode;
	  rtx offset_rtx;

	  if (!MEM_P (to_rtx))
	    {
	      /* We can get constant negative offsets into arrays with broken
		 user code.  Translate this to a trap instead of ICEing.  */
	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
	      expand_builtin_trap ();
	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
	    }

	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
	  address_mode = get_address_mode (to_rtx);
	  if (GET_MODE (offset_rtx) != address_mode)
	    {
	      /* We cannot be sure that the RTL in offset_rtx is valid outside
		 of a memory address context, so force it into a register
		 before attempting to convert it to the desired mode.  */
	      offset_rtx = force_operand (offset_rtx, NULL_RTX);
	      offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
	    }

	  /* If we have an expression in OFFSET_RTX and a non-zero
	     byte offset in BITPOS, adding the byte offset before the
	     OFFSET_RTX results in better intermediate code, which makes
	     later rtl optimization passes perform better.

	     We prefer intermediate code like this:

	     r124:DI=r123:DI+0x18
	     [r124:DI]=r121:DI

	     ... instead of ...

	     r124:DI=r123:DI+0x10
	     [r124:DI+0x8]=r121:DI

	     This is only done for aligned data values, as these can
	     be expected to result in single move instructions.  */
	  if (mode1 != VOIDmode
	      && bitpos != 0
	      && bitsize > 0
	      && (bitpos % bitsize) == 0
	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
	      && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
	    {
	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
	      bitregion_start = 0;
	      if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
		bitregion_end -= bitpos;
	      bitpos = 0;
	    }

	  to_rtx = offset_address (to_rtx, offset_rtx,
				   highest_pow2_factor_for_target (to,
								   offset));
	}

      /* No action is needed if the target is not a memory and the field
	 lies completely outside that target.  This can occur if the source
	 code contains an out-of-bounds access to a small array.  */
      if (!MEM_P (to_rtx)
	  && GET_MODE (to_rtx) != BLKmode
	  && (unsigned HOST_WIDE_INT) bitpos
	     >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
	{
	  expand_normal (from);
	  result = NULL;
	}
      /* Handle expand_expr of a complex value returning a CONCAT.  */
      else if (GET_CODE (to_rtx) == CONCAT)
	{
	  unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
	  if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
	      && bitpos == 0
	      && bitsize == mode_bitsize)
	    result = store_expr (from, to_rtx, false, nontemporal, reversep);
	  else if (bitsize == mode_bitsize / 2
		   && (bitpos == 0 || bitpos == mode_bitsize / 2))
	    result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
				 nontemporal, reversep);
	  else if (bitpos + bitsize <= mode_bitsize / 2)
	    result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
				  bitregion_start, bitregion_end,
				  mode1, from, get_alias_set (to),
				  nontemporal, reversep);
	  else if (bitpos >= mode_bitsize / 2)
	    result = store_field (XEXP (to_rtx, 1), bitsize,
				  bitpos - mode_bitsize / 2,
				  bitregion_start, bitregion_end,
				  mode1, from, get_alias_set (to),
				  nontemporal, reversep);
	  else if (bitpos == 0 && bitsize == mode_bitsize)
	    {
	      rtx from_rtx;
	      result = expand_normal (from);
	      from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
					      TYPE_MODE (TREE_TYPE (from)), 0);
	      emit_move_insn (XEXP (to_rtx, 0),
			      read_complex_part (from_rtx, false));
	      emit_move_insn (XEXP (to_rtx, 1),
			      read_complex_part (from_rtx, true));
	    }
	  else
	    {
	      rtx temp = assign_stack_temp (GET_MODE (to_rtx),
					    GET_MODE_SIZE (GET_MODE (to_rtx)));
	      write_complex_part (temp, XEXP (to_rtx, 0), false);
	      write_complex_part (temp, XEXP (to_rtx, 1), true);
	      result = store_field (temp, bitsize, bitpos,
				    bitregion_start, bitregion_end,
				    mode1, from, get_alias_set (to),
				    nontemporal, reversep);
	      emit_move_insn (XEXP (to_rtx, 0),
			      read_complex_part (temp, false));
	      emit_move_insn (XEXP (to_rtx, 1),
			      read_complex_part (temp, true));
	    }
	}
      else
	{
	  if (MEM_P (to_rtx))
	    {
	      /* If the field is at offset zero, we could have been given the
		 DECL_RTX of the parent struct.  Don't munge it.  */
	      to_rtx = shallow_copy_rtx (to_rtx);
	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
	      if (volatilep)
		MEM_VOLATILE_P (to_rtx) = 1;
	    }

	  if (optimize_bitfield_assignment_op (bitsize, bitpos,
					       bitregion_start, bitregion_end,
					       mode1, to_rtx, to, from,
					       reversep))
	    result = NULL;
	  else
	    result = store_field (to_rtx, bitsize, bitpos,
				  bitregion_start, bitregion_end,
				  mode1, from, get_alias_set (to),
				  nontemporal, reversep);
	}

      if (result)
	preserve_temp_slots (result);
      pop_temp_slots ();
      return;
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  The same is true for SSA names.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && COMPLETE_TYPE_P (TREE_TYPE (from))
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! (((VAR_P (to)
	      || TREE_CODE (to) == PARM_DECL
	      || TREE_CODE (to) == RESULT_DECL)
	     && REG_P (DECL_RTL (to)))
	    || TREE_CODE (to) == SSA_NAME))
    {
      rtx value;
      rtx bounds;

      push_temp_slots ();
      value = expand_normal (from);

      /* Split value and bounds to store them separately.  */
      chkp_split_slot (value, &value, &bounds);

      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	{
	  if (GET_CODE (value) == PARALLEL)
	    emit_group_move (to_rtx, value);
	  else
	    emit_group_load (to_rtx, value, TREE_TYPE (from),
			     int_size_in_bytes (TREE_TYPE (from)));
	}
      else if (GET_CODE (value) == PARALLEL)
	emit_group_store (to_rtx, value, TREE_TYPE (from),
			  int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
	{
	  /* Handle calls that return BLKmode values in registers.  */
	  if (REG_P (value))
	    copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
	  else
	    emit_block_move (to_rtx, value, expr_size (from),
			     BLOCK_OP_NORMAL);
	}
      else
	{
	  if (POINTER_TYPE_P (TREE_TYPE (to)))
	    value = convert_memory_address_addr_space
	      (GET_MODE (to_rtx), value,
	       TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));

	  emit_move_insn (to_rtx, value);
	}

      /* Store bounds if required.  */
      if (bounds
	  && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
	{
	  gcc_assert (MEM_P (to_rtx));
	  chkp_emit_bounds_store (bounds, value, to_rtx);
	}

      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.  */
  to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();

      /* If the source is itself a return value, it still is in a pseudo at
	 this point so we can move it back to the return register directly.  */
      if (REG_P (to_rtx)
	  && TYPE_MODE (TREE_TYPE (from)) == BLKmode
	  && TREE_CODE (from) != CALL_EXPR)
	temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
      else
	temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	{
	  if (GET_CODE (temp) == PARALLEL)
	    emit_group_move (to_rtx, temp);
	  else
	    emit_group_load (to_rtx, temp, TREE_TYPE (from),
			     int_size_in_bytes (TREE_TYPE (from)));
	}
      else if (temp)
	emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL
      && TREE_CODE (from) == INDIRECT_REF
      && ADDR_SPACE_GENERIC_P
	 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
      && refs_may_alias_p (to, from)
      && cfun->returns_struct
      && !cfun->returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_normal (from);

      emit_block_move_via_libcall (XEXP (to_rtx, 0), XEXP (from_rtx, 0), size);

      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, false, to);
  preserve_temp_slots (result);
  pop_temp_slots ();
  return;
}
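/* Illustrative sketch (hypothetical helper, not GCC code): the
   misaligned-store path at the top of expand_assignment chooses between
   three outcomes, which can be summarized as a small decision table.  */
#if 0
static int
model_misaligned_store_path (int have_movmisalign, int slow_unaligned)
{
  if (!have_movmisalign && !slow_unaligned)
    return 0;	/* not taken: an ordinary move is fine */
  return have_movmisalign
	 ? 1	/* expand the movmisalign pattern */
	 : 2;	/* fall back to store_bit_field */
}
#endif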
/* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
   succeeded, false otherwise.  */

static bool
emit_storent_insn (rtx to, rtx from)
{
  struct expand_operand ops[2];
  machine_mode mode = GET_MODE (to);
  enum insn_code code = optab_handler (storent_optab, mode);

  if (code == CODE_FOR_nothing)
    return false;

  create_fixed_operand (&ops[0], to);
  create_input_operand (&ops[1], from, mode);
  return maybe_expand_insn (code, 2, ops);
}
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem, because
   C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.

   If NONTEMPORAL is true, try using a nontemporal store instruction.

   If REVERSE is true, the store is to be done in reverse order.

   If BTARGET is not NULL then computed bounds of EXP are
   associated with BTARGET.  */
rtx
store_expr_with_bounds (tree exp, rtx target, int call_param_p,
			bool nontemporal, bool reverse, tree btarget)
{
  rtx temp;
  rtx alt_rtl = NULL_RTX;
  location_t loc = curr_insn_location ();

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
	 branch and an rvalue in the other.  Here, we resolve attempts to
	 store the throw expression's nonexistent result.  */
      gcc_assert (!call_param_p);
      expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
				     call_param_p, nontemporal, reverse,
				     btarget);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1,
		 profile_probability::uninitialized ());
      store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
			      nontemporal, reverse, btarget);
      emit_jump_insn (targetm.gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
			      nontemporal, reverse, btarget);
      emit_label (lab2);
      OK_DEFER_POP;

      return NULL_RTX;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* We can do the conversion inside EXP, which will often result
	 in some optimizations.  Do the conversion in two steps: first
	 change the signedness, if needed, then the extend.  But don't
	 do this if the type of EXP is a subtype of something else
	 since then the conversion might involve more than just
	 converting modes.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0
	  && GET_MODE_PRECISION (GET_MODE (target))
	     == TYPE_PRECISION (TREE_TYPE (exp)))
	{
	  if (!SUBREG_CHECK_PROMOTED_SIGN (target,
					   TYPE_UNSIGNED (TREE_TYPE (exp))))
	    {
	      /* Some types, e.g. Fortran's logical*4, won't have a signed
		 version, so use the mode instead.  */
	      tree ntype
		= (signed_or_unsigned_type_for
		   (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
	      if (ntype == NULL)
		ntype = lang_hooks.types.type_for_mode
		  (TYPE_MODE (TREE_TYPE (exp)),
		   SUBREG_PROMOTED_SIGN (target));

	      exp = fold_convert_loc (loc, ntype, exp);
	    }

	  exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
				  (GET_MODE (SUBREG_REG (target)),
				   SUBREG_PROMOTED_SIGN (target)),
				  exp);

	  inner_target = SUBREG_REG (target);
	}

      temp = expand_expr (exp, inner_target, VOIDmode,
			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* Handle bounds returned by call.  */
      if (TREE_CODE (exp) == CALL_EXPR)
	{
	  rtx bounds;
	  chkp_split_slot (temp, &temp, &bounds);
	  if (bounds && btarget)
	    {
	      gcc_assert (TREE_CODE (btarget) == SSA_NAME);
	      rtx tmp = targetm.calls.load_returned_bounds (bounds);
	      chkp_set_rtl_bounds (btarget, tmp);
	    }
	}

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	{
	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
				temp, SUBREG_PROMOTED_SIGN (target));
	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
				GET_MODE (target), temp,
				SUBREG_PROMOTED_SIGN (target));
	}

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_SIGN (target));

      return NULL_RTX;
    }
  else if ((TREE_CODE (exp) == STRING_CST
	    || (TREE_CODE (exp) == MEM_REF
		&& TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		   == STRING_CST
		&& integer_zerop (TREE_OPERAND (exp, 1))))
	   && !nontemporal && !call_param_p
	   && MEM_P (target))
    {
      /* Optimize initialization of an array with a STRING_CST.  */
      HOST_WIDE_INT exp_len, str_copy_len;
      rtx dest_mem;
      tree str = TREE_CODE (exp) == STRING_CST
		 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);

      exp_len = int_expr_size (exp);
      if (exp_len <= 0)
	goto normal_expr;

      if (TREE_STRING_LENGTH (str) <= 0)
	goto normal_expr;

      str_copy_len = strlen (TREE_STRING_POINTER (str));
      if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
	goto normal_expr;

      str_copy_len = TREE_STRING_LENGTH (str);
      if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
	  && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
	{
	  str_copy_len += STORE_MAX_PIECES - 1;
	  str_copy_len &= ~(STORE_MAX_PIECES - 1);
	}
      str_copy_len = MIN (str_copy_len, exp_len);
      if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
				CONST_CAST (char *, TREE_STRING_POINTER (str)),
				MEM_ALIGN (target), false))
	goto normal_expr;

      dest_mem = target;

      dest_mem = store_by_pieces (dest_mem,
				  str_copy_len, builtin_strncpy_read_str,
				  CONST_CAST (char *,
					      TREE_STRING_POINTER (str)),
				  MEM_ALIGN (target), false,
				  exp_len > str_copy_len ? 1 : 0);
      if (exp_len > str_copy_len)
	clear_storage (adjust_address (dest_mem, BLKmode, 0),
		       GEN_INT (exp_len - str_copy_len),
		       BLOCK_OP_NORMAL);
      return NULL_RTX;
    }
  else
    {
      rtx tmp_target;

  normal_expr:
      /* If we want to use a nontemporal or a reverse order store, force the
	 value into a register first.  */
      tmp_target = nontemporal || reverse ? NULL_RTX : target;
      temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL),
			       &alt_rtl, false);

      /* Handle bounds returned by call.  */
      if (TREE_CODE (exp) == CALL_EXPR)
	{
	  rtx bounds;
	  chkp_split_slot (temp, &temp, &bounds);
	  if (bounds && btarget)
	    {
	      gcc_assert (TREE_CODE (btarget) == SSA_NAME);
	      rtx tmp = targetm.calls.load_returned_bounds (bounds);
	      chkp_set_rtl_bounds (btarget, tmp);
	    }
	}
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
     only a word-sized value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
	 but TARGET is not valid memory reference, TEMP will differ
	 from TARGET although it is really the same location.  */
      && !(alt_rtl
	   && rtx_equal_p (alt_rtl, target)
	   && !side_effects_p (alt_rtl)
	   && !side_effects_p (target))
      /* If there's nothing to copy, don't bother.  Don't call
	 expr_size unless necessary, because some front-ends (C++)
	 expr_size-hook must not be given objects that are not
	 supposed to be bit-copied or bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
	{
	  if (GET_MODE (target) == BLKmode)
	    {
	      /* Handle calls that return BLKmode values in registers.  */
	      if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
		copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
	      else
		store_bit_field (target,
				 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
				 0, 0, 0, GET_MODE (temp), temp, reverse);
	    }
	  else
	    convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.  The string
	     constant may be shorter than the array.  So copy just the string's
	     actual length, and clear the rest.  First get the size of the data
	     type of the string, which is actually the size of the target.  */
	  rtx size = expr_size (exp);

	  if (CONST_INT_P (size)
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     (call_param_p
			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	  else
	    {
	      machine_mode pointer_mode
		= targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
	      machine_mode address_mode = get_address_mode (target);

	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop_loc (loc, MIN_EXPR,
				  make_tree (sizetype, size),
				  size_int (TREE_STRING_LENGTH (exp)));
	      rtx copy_size_rtx
		= expand_expr (copy_size, NULL_RTX, VOIDmode,
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL));
	      rtx_code_label *label = 0;

	      /* Copy that much.  */
	      copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
					       TYPE_UNSIGNED (sizetype));
	      emit_block_move (target, temp, copy_size_rtx,
			       (call_param_p
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in pointer_mode.  */
	      if (CONST_INT_P (copy_size_rtx))
		{
		  size = plus_constant (address_mode, size,
					-INTVAL (copy_size_rtx));
		  target = adjust_address (target, BLKmode,
					   INTVAL (copy_size_rtx));
		}
	      else
		{
		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  if (GET_MODE (copy_size_rtx) != address_mode)
		    copy_size_rtx = convert_to_mode (address_mode,
						     copy_size_rtx,
						     TYPE_UNSIGNED (sizetype));

		  target = offset_address (target, copy_size_rtx,
					   highest_pow2_factor (copy_size));
		  label = gen_label_rtx ();
		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, label);
		}

	      if (size != const0_rtx)
		clear_storage (target, size, BLOCK_OP_NORMAL);

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	{
	  if (GET_CODE (temp) == PARALLEL)
	    emit_group_move (target, temp);
	  else
	    emit_group_load (target, temp, TREE_TYPE (exp),
			     int_size_in_bytes (TREE_TYPE (exp)));
	}
      else if (GET_CODE (temp) == PARALLEL)
	emit_group_store (target, temp, TREE_TYPE (exp),
			  int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 (call_param_p
			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      /* If we emit a nontemporal store, there is nothing else to do.  */
      else if (nontemporal && emit_storent_insn (target, temp))
	;
      else
	{
	  if (reverse)
	    temp = flip_storage_order (GET_MODE (target), temp);
	  temp = force_operand (temp, target);
	  if (temp != target)
	    emit_move_insn (target, temp);
	}
    }

  return NULL_RTX;
}
/* Same as store_expr_with_bounds but ignoring bounds of EXP.  */

rtx
store_expr (tree exp, rtx target, int call_param_p, bool nontemporal,
	    bool reverse)
{
  return store_expr_with_bounds (exp, target, call_param_p, nontemporal,
				 reverse, NULL);
}
/* Return true if field F of structure TYPE is a flexible array.  */

static bool
flexible_array_member_p (const_tree f, const_tree type)
{
  const_tree tf;

  tf = TREE_TYPE (f);
  return (DECL_CHAIN (f) == NULL
	  && TREE_CODE (tf) == ARRAY_TYPE
	  && TYPE_DOMAIN (tf)
	  && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
	  && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
	  && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
	  && int_size_in_bytes (type) >= 0);
}
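/* Illustrative example (not GCC code): the predicate above matches the
   C99 flexible-array-member idiom -- a trailing array field with a zero
   lower bound and no upper bound in a struct of otherwise known size.  */
#if 0
struct model_packet
{
  int len;
  char data[];		/* flexible array member: not initialized */
};
#endif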
/* If FOR_CTOR_P, return the number of top-level elements that a constructor
   must have in order for it to completely initialize a value of type TYPE.
   Return -1 if the number isn't known.

   If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */

static HOST_WIDE_INT
count_type_elements (const_tree type, bool for_ctor_p)
{
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
	tree nelts;

	nelts = array_type_nelts (type);
	if (nelts && tree_fits_uhwi_p (nelts))
	  {
	    unsigned HOST_WIDE_INT n;

	    n = tree_to_uhwi (nelts) + 1;
	    if (n == 0 || for_ctor_p)
	      return n;
	    else
	      return n * count_type_elements (TREE_TYPE (type), false);
	  }
	return for_ctor_p ? -1 : 1;
      }

    case RECORD_TYPE:
      {
	unsigned HOST_WIDE_INT n;
	tree f;

	n = 0;
	for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      if (!for_ctor_p)
		n += count_type_elements (TREE_TYPE (f), false);
	      else if (!flexible_array_member_p (f, type))
		/* Don't count flexible arrays, which are not supposed
		   to be initialized.  */
		n += 1;
	    }

	return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f;
	HOST_WIDE_INT n, m;

	gcc_assert (!for_ctor_p);
	/* Estimate the number of scalars in each field and pick the
	   maximum.  Other estimates would do instead; the idea is simply
	   to make sure that the estimate is not sensitive to the ordering
	   of the fields.  */
	n = 1;
	for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      m = count_type_elements (TREE_TYPE (f), false);
	      /* If the field doesn't span the whole union, add an extra
		 scalar for the rest.  */
	      if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
				    TYPE_SIZE (type)) != 1)
		m++;
	      if (n < m)
		n = m;
	    }
	return n;
      }

    case COMPLEX_TYPE:
      return 2;

    case VECTOR_TYPE:
      return TYPE_VECTOR_SUBPARTS (type);

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
      return 1;

    case ERROR_MARK:
      return 0;

    case VOID_TYPE:
    case METHOD_TYPE:
    case FUNCTION_TYPE:
    case LANG_TYPE:
    default:
      gcc_unreachable ();
    }
}
/* Helper for categorize_ctor_elements.  Identical interface.  */

static bool
categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			    HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  unsigned HOST_WIDE_INT idx;
  HOST_WIDE_INT nz_elts, init_elts, num_fields;
  tree value, purpose, elt_type;

  /* Whether CTOR is a valid constant initializer, in accordance with what
     initializer_constant_valid_p does.  If inferred from the constructor
     elements, true until proven otherwise.  */
  bool const_from_elts_p = constructor_static_from_elts_p (ctor);
  bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);

  nz_elts = 0;
  init_elts = 0;
  num_fields = 0;
  elt_type = NULL_TREE;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
    {
      HOST_WIDE_INT mult = 1;

      if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lo_index = TREE_OPERAND (purpose, 0);
	  tree hi_index = TREE_OPERAND (purpose, 1);

	  if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
	    mult = (tree_to_uhwi (hi_index)
		    - tree_to_uhwi (lo_index) + 1);
	}
      num_fields += mult;
      elt_type = TREE_TYPE (value);

      switch (TREE_CODE (value))
	{
	case CONSTRUCTOR:
	  {
	    HOST_WIDE_INT nz = 0, ic = 0;

	    bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
							   p_complete);

	    nz_elts += mult * nz;
	    init_elts += mult * ic;

	    if (const_from_elts_p && const_p)
	      const_p = const_elt_p;
	  }
	  break;

	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	  if (!initializer_zerop (value))
	    nz_elts += mult;
	  init_elts += mult;
	  break;

	case STRING_CST:
	  nz_elts += mult * TREE_STRING_LENGTH (value);
	  init_elts += mult * TREE_STRING_LENGTH (value);
	  break;

	case COMPLEX_CST:
	  if (!initializer_zerop (TREE_REALPART (value)))
	    nz_elts += mult;
	  if (!initializer_zerop (TREE_IMAGPART (value)))
	    nz_elts += mult;
	  init_elts += mult;
	  break;

	case VECTOR_CST:
	  {
	    unsigned i;
	    for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
	      {
		tree v = VECTOR_CST_ELT (value, i);
		if (!initializer_zerop (v))
		  nz_elts += mult;
		init_elts += mult;
	      }
	  }
	  break;

	default:
	  {
	    HOST_WIDE_INT tc = count_type_elements (elt_type, false);
	    nz_elts += mult * tc;
	    init_elts += mult * tc;

	    if (const_from_elts_p && const_p)
	      const_p
		= initializer_constant_valid_p (value,
						elt_type,
						TYPE_REVERSE_STORAGE_ORDER
						(TREE_TYPE (ctor)))
		  != NULL_TREE;
	  }
	  break;
	}
    }

  if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
						num_fields, elt_type))
    *p_complete = false;

  *p_nz_elts += nz_elts;
  *p_init_elts += init_elts;

  return const_p;
}

/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT.
   * whether the constructor is complete -- in the sense that every
     meaningful byte is explicitly given a value --
     and place it in *P_COMPLETE.

   Return whether or not CTOR is a valid static constant initializer, the same
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */

bool
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  *p_nz_elts = 0;
  *p_init_elts = 0;
  *p_complete = true;

  return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
}
/* TYPE is initialized by a constructor with NUM_ELTS elements, the last
   of which had type LAST_TYPE.  Each element was itself a complete
   initializer, in the sense that every meaningful byte was explicitly
   given a value.  Return true if the same is true for the constructor
   as a whole.  */

bool
complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
			  const_tree last_type)
{
  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      if (num_elts == 0)
	return false;

      gcc_assert (num_elts == 1 && last_type);

      /* ??? We could look at each element of the union, and find the
	 largest element.  Which would avoid comparing the size of the
	 initialized element against any tail padding in the union.
	 Doesn't seem worth the effort...  */
      return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
    }

  return count_type_elements (type, true) == num_elts;
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return !complete_p || nz_elts < init_elts / 4;
    }

  return initializer_zerop (exp);
}
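
/* Editor's illustrative example (not part of GCC): for the complete
   initializer

     int a[8] = { 0, 0, 5, 0, 0, 0, 0, 0 };

   categorize_ctor_elements yields nz_elts == 1 and init_elts == 8, so
   nz_elts < init_elts / 4 holds and mostly_zeros_p returns nonzero: it
   is cheaper to clear the whole array and then store only the 5.  With
   two or more nonzero elements the 3/4-zeros threshold is missed and
   the elements are stored individually.  */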
/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   If REVERSE is true, the store is to be done in reverse order.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
			 HOST_WIDE_INT bitpos,
			 unsigned HOST_WIDE_INT bitregion_start,
			 unsigned HOST_WIDE_INT bitregion_end,
			 machine_mode mode,
			 tree exp, int cleared,
			 alias_set_type alias_set, bool reverse)
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
	 bit position are on a byte boundary.  */
      && bitpos % BITS_PER_UNIT == 0
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
      /* If we have a nonzero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || MEM_P (target)))
    {
      if (MEM_P (target))
	target
	  = adjust_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);

      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT,
			 reverse);
    }
  else
    store_field (target, bitsize, bitpos, bitregion_start, bitregion_end, mode,
		 exp, alias_set, false, reverse);
}
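
/* Editor's illustrative example (not part of GCC): for a nested
   initializer such as

     struct inner { int a, b; };
     struct outer { struct inner i; int c; } o = { { 1, 2 }, 3 };

   the element { 1, 2 } is itself a CONSTRUCTOR whose size and position
   fall on byte boundaries, so store_constructor_field recurses straight
   into store_constructor and propagates CLEARED; a bit-field member, by
   contrast, would be routed through store_field instead.  */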
/* Returns the number of FIELD_DECLs in TYPE.  */

static int
fields_length (const_tree type)
{
  tree t = TYPE_FIELDS (type);
  int count = 0;

  for (; t; t = DECL_CHAIN (t))
    if (TREE_CODE (t) == FIELD_DECL)
      ++count;

  return count;
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.
   If REVERSE is true, the store is to be done in reverse order.  */

static void
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size,
		   bool reverse)
{
  tree type = TREE_TYPE (exp);
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
  HOST_WIDE_INT bitregion_end = size > 0 ? size * BITS_PER_UNIT - 1 : 0;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	tree field, value;

	/* The storage order is specified for every aggregate type.  */
	reverse = TYPE_REVERSE_STORAGE_ORDER (type);

	/* If size is zero or the target is already cleared, do nothing.  */
	if (size == 0 || cleared)
	  cleared = 1;
	/* We either clear the aggregate or indicate the value is dead.  */
	else if ((TREE_CODE (type) == UNION_TYPE
		  || TREE_CODE (type) == QUAL_UNION_TYPE)
		 && ! CONSTRUCTOR_ELTS (exp))
	  /* If the constructor is empty, clear the union.  */
	  {
	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* If we are building a static constructor into a register,
	   set the initial value as zero so we can fold the value into
	   a constant.  But if more than one register is involved,
	   this probably loses.  */
	else if (REG_P (target) && TREE_STATIC (exp)
		 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	  {
	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    cleared = 1;
	  }

	/* If the constructor has fewer fields than the structure or
	   if we are initializing the structure to mostly zeros, clear
	   the whole structure first.  Don't do this if TARGET is a
	   register whose mode size isn't equal to SIZE since
	   clear_storage can't handle this case.  */
	else if (size > 0
		 && (((int) CONSTRUCTOR_NELTS (exp) != fields_length (type))
		     || mostly_zeros_p (exp))
		 && (!REG_P (target)
		     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
			 == size)))
	  {
	    clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (REG_P (target) && !cleared)
	  emit_clobber (target);

	/* Store each element of the constructor into the
	   corresponding field of TARGET.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
	  {
	    machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos = 0;
	    tree offset;
	    rtx to_rtx = target;

	    /* Just ignore missing fields.  We cleared the whole
	       structure, above, if any fields are missing.  */
	    if (field == 0)
	      continue;

	    if (cleared && initializer_zerop (value))
	      continue;

	    if (tree_fits_uhwi_p (DECL_SIZE (field)))
	      bitsize = tree_to_uhwi (DECL_SIZE (field));
	    else
	      gcc_unreachable ();

	    mode = DECL_MODE (field);
	    if (DECL_BIT_FIELD (field))
	      mode = VOIDmode;

	    offset = DECL_FIELD_OFFSET (field);
	    if (tree_fits_shwi_p (offset)
		&& tree_fits_shwi_p (bit_position (field)))
	      {
		bitpos = int_bit_position (field);
		offset = NULL_TREE;
	      }
	    else
	      gcc_unreachable ();

	    /* If this initializes a field that is smaller than a
	       word, at the start of a word, try to widen it to a full
	       word.  This special case allows us to output C++ member
	       function initializations in a form that the optimizers
	       can understand.  */
	    if (WORD_REGISTER_OPERATIONS
		&& REG_P (target)
		&& bitsize < BITS_PER_WORD
		&& bitpos % BITS_PER_WORD == 0
		&& GET_MODE_CLASS (mode) == MODE_INT
		&& TREE_CODE (value) == INTEGER_CST
		&& exp_size >= 0
		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	      {
		tree type = TREE_TYPE (value);

		if (TYPE_PRECISION (type) < BITS_PER_WORD)
		  {
		    type = lang_hooks.types.type_for_mode
		      (word_mode, TYPE_UNSIGNED (type));
		    value = fold_convert (type, value);
		    /* Make sure the bits beyond the original bitsize are zero
		       so that we can correctly avoid extra zeroing stores in
		       later constructor elements.  */
		    tree bitsize_mask
		      = wide_int_to_tree (type, wi::mask (bitsize, false,
							  BITS_PER_WORD));
		    value = fold_build2 (BIT_AND_EXPR, type, value,
					 bitsize_mask);
		  }

		if (BYTES_BIG_ENDIAN)
		  value
		    = fold_build2 (LSHIFT_EXPR, type, value,
				   build_int_cst (type,
						  BITS_PER_WORD - bitsize));
		bitsize = BITS_PER_WORD;
		mode = word_mode;
	      }

	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
		&& DECL_NONADDRESSABLE_P (field))
	      {
		to_rtx = copy_rtx (to_rtx);
		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	      }

	    store_constructor_field (to_rtx, bitsize, bitpos,
				     0, bitregion_end, mode,
				     value, cleared,
				     get_alias_set (TREE_TYPE (field)),
				     reverse);
	  }
	break;
      }
    case ARRAY_TYPE:
      {
	tree value, index;
	unsigned HOST_WIDE_INT i;
	int need_to_clear;
	tree domain;
	tree elttype = TREE_TYPE (type);
	int const_bounds_p;
	HOST_WIDE_INT minelt = 0;
	HOST_WIDE_INT maxelt = 0;

	/* The storage order is specified for every aggregate type.  */
	reverse = TYPE_REVERSE_STORAGE_ORDER (type);

	domain = TYPE_DOMAIN (type);
	const_bounds_p = (TYPE_MIN_VALUE (domain)
			  && TYPE_MAX_VALUE (domain)
			  && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
			  && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));

	/* If we have constant bounds for the range of the type, get them.  */
	if (const_bounds_p)
	  {
	    minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
	    maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
	  }

	/* If the constructor has fewer elements than the array, clear
	   the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT idx;
	    tree index, value;
	    HOST_WIDE_INT count = 0, zero_count = 0;
	    need_to_clear = ! const_bounds_p;

	    /* This loop is a more accurate version of the loop in
	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
	       is also needed to check for missing elements.  */
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
	      {
		HOST_WIDE_INT this_node_count;

		if (need_to_clear)
		  break;

		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		  {
		    tree lo_index = TREE_OPERAND (index, 0);
		    tree hi_index = TREE_OPERAND (index, 1);

		    if (! tree_fits_uhwi_p (lo_index)
			|| ! tree_fits_uhwi_p (hi_index))
		      {
			need_to_clear = 1;
			break;
		      }

		    this_node_count = (tree_to_uhwi (hi_index)
				       - tree_to_uhwi (lo_index) + 1);
		  }
		else
		  this_node_count = 1;

		count += this_node_count;
		if (mostly_zeros_p (value))
		  zero_count += this_node_count;
	      }

	    /* Clear the entire array first if there are any missing
	       elements, or if the incidence of zero elements is >=
	       75%.  */
	    if (! need_to_clear
		&& (count < maxelt - minelt + 1
		    || 4 * zero_count >= 3 * count))
	      need_to_clear = 1;
	  }

	if (need_to_clear && size > 0)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (!cleared && REG_P (target))
	  /* Inform later passes that the old value is dead.  */
	  emit_clobber (target);

	/* Store each element of the constructor into the
	   corresponding element of TARGET, determined by counting the
	   elements.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
	  {
	    machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos;
	    rtx xtarget = target;

	    if (cleared && initializer_zerop (value))
	      continue;

	    mode = TYPE_MODE (elttype);
	    if (mode == BLKmode)
	      bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
			 ? tree_to_uhwi (TYPE_SIZE (elttype))
			 : -1);
	    else
	      bitsize = GET_MODE_BITSIZE (mode);

	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	      {
		tree lo_index = TREE_OPERAND (index, 0);
		tree hi_index = TREE_OPERAND (index, 1);
		rtx index_r, pos_rtx;
		HOST_WIDE_INT lo, hi, count;
		tree position;

		/* If the range is constant and "small", unroll the loop.  */
		if (const_bounds_p
		    && tree_fits_shwi_p (lo_index)
		    && tree_fits_shwi_p (hi_index)
		    && (lo = tree_to_shwi (lo_index),
			hi = tree_to_shwi (hi_index),
			count = hi - lo + 1,
			(!MEM_P (target)
			 || count <= 2
			 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
			     && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
				 <= 40 * 8)))))
		  {
		    lo -= minelt;  hi -= minelt;
		    for (; lo <= hi; lo++)
		      {
			bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));

			if (MEM_P (target)
			    && !MEM_KEEP_ALIAS_SET_P (target)
			    && TREE_CODE (type) == ARRAY_TYPE
			    && TYPE_NONALIASED_COMPONENT (type))
			  {
			    target = copy_rtx (target);
			    MEM_KEEP_ALIAS_SET_P (target) = 1;
			  }

			store_constructor_field
			  (target, bitsize, bitpos, 0, bitregion_end,
			   mode, value, cleared,
			   get_alias_set (elttype), reverse);
		      }
		  }
		else
		  {
		    rtx_code_label *loop_start = gen_label_rtx ();
		    rtx_code_label *loop_end = gen_label_rtx ();
		    tree exit_cond;

		    expand_normal (hi_index);

		    index = build_decl (EXPR_LOCATION (exp),
					VAR_DECL, NULL_TREE, domain);
		    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
		    SET_DECL_RTL (index, index_r);
		    store_expr (lo_index, index_r, 0, false, reverse);

		    /* Build the head of the loop.  */
		    do_pending_stack_adjust ();
		    emit_label (loop_start);

		    /* Assign value to element index.  */
		    position =
		      fold_convert (ssizetype,
				    fold_build2 (MINUS_EXPR,
						 TREE_TYPE (index),
						 index,
						 TYPE_MIN_VALUE (domain)));

		    position =
		      size_binop (MULT_EXPR, position,
				  fold_convert (ssizetype,
						TYPE_SIZE_UNIT (elttype)));

		    pos_rtx = expand_normal (position);
		    xtarget = offset_address (target, pos_rtx,
					      highest_pow2_factor (position));
		    xtarget = adjust_address (xtarget, mode, 0);
		    if (TREE_CODE (value) == CONSTRUCTOR)
		      store_constructor (value, xtarget, cleared,
					 bitsize / BITS_PER_UNIT, reverse);
		    else
		      store_expr (value, xtarget, 0, false, reverse);

		    /* Generate a conditional jump to exit the loop.  */
		    exit_cond = build2 (LT_EXPR, integer_type_node,
					index, hi_index);
		    jumpif (exit_cond, loop_end,
			    profile_probability::uninitialized ());

		    /* Update the loop counter, and jump to the head of
		       the loop.  */
		    expand_assignment (index,
				       build2 (PLUS_EXPR, TREE_TYPE (index),
					       index, integer_one_node),
				       false);

		    emit_jump (loop_start);

		    /* Build the end of the loop.  */
		    emit_label (loop_end);
		  }
	      }
	    else if ((index != 0 && ! tree_fits_shwi_p (index))
		     || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
	      {
		tree position;

		if (index == 0)
		  index = ssize_int (1);

		if (minelt)
		  index = fold_convert (ssizetype,
					fold_build2 (MINUS_EXPR,
						     TREE_TYPE (index),
						     index,
						     TYPE_MIN_VALUE (domain)));

		position =
		  size_binop (MULT_EXPR, index,
			      fold_convert (ssizetype,
					    TYPE_SIZE_UNIT (elttype)));
		xtarget = offset_address (target,
					  expand_normal (position),
					  highest_pow2_factor (position));
		xtarget = adjust_address (xtarget, mode, 0);
		store_expr (value, xtarget, 0, false, reverse);
	      }
	    else
	      {
		if (index != 0)
		  bitpos = ((tree_to_shwi (index) - minelt)
			    * tree_to_uhwi (TYPE_SIZE (elttype)));
		else
		  bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));

		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
		    && TREE_CODE (type) == ARRAY_TYPE
		    && TYPE_NONALIASED_COMPONENT (type))
		  {
		    target = copy_rtx (target);
		    MEM_KEEP_ALIAS_SET_P (target) = 1;
		  }
		store_constructor_field (target, bitsize, bitpos, 0,
					 bitregion_end, mode, value,
					 cleared, get_alias_set (elttype),
					 reverse);
	      }
	  }
	break;
      }

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;
	int i;
	int need_to_clear;
	int icode = CODE_FOR_nothing;
	tree elttype = TREE_TYPE (type);
	int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
	machine_mode eltmode = TYPE_MODE (elttype);
	HOST_WIDE_INT bitsize;
	HOST_WIDE_INT bitpos;
	rtvec vector = NULL;
	unsigned n_elts;
	alias_set_type alias;
	bool vec_vec_init_p = false;

	gcc_assert (eltmode != BLKmode);

	n_elts = TYPE_VECTOR_SUBPARTS (type);
	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
	  {
	    machine_mode mode = GET_MODE (target);
	    machine_mode emode = eltmode;

	    if (CONSTRUCTOR_NELTS (exp)
		&& (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value))
		    == VECTOR_TYPE))
	      {
		tree etype = TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value);
		gcc_assert (CONSTRUCTOR_NELTS (exp)
			    * TYPE_VECTOR_SUBPARTS (etype)
			    == n_elts);
		emode = TYPE_MODE (etype);
	      }
	    icode = (int) convert_optab_handler (vec_init_optab, mode, emode);
	    if (icode != CODE_FOR_nothing)
	      {
		unsigned int i, n = n_elts;

		if (emode != eltmode)
		  {
		    n = CONSTRUCTOR_NELTS (exp);
		    vec_vec_init_p = true;
		  }
		vector = rtvec_alloc (n);
		for (i = 0; i < n; i++)
		  RTVEC_ELT (vector, i) = CONST0_RTX (emode);
	      }
	  }

	/* If the constructor has fewer elements than the vector,
	   clear the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
	    tree value;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	      {
		tree sz = TYPE_SIZE (TREE_TYPE (value));
		int n_elts_here
		  = tree_to_uhwi (int_const_binop (TRUNC_DIV_EXPR, sz,
						   TYPE_SIZE (elttype)));

		count += n_elts_here;
		if (mostly_zeros_p (value))
		  zero_count += n_elts_here;
	      }

	    /* Clear the entire vector first if there are any missing elements,
	       or if the incidence of zero elements is >= 75%.  */
	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
	  }

	if (need_to_clear && size > 0 && !vector)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* Inform later passes that the old value is dead.  */
	if (!cleared && !vector && REG_P (target))
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));

	if (MEM_P (target))
	  alias = MEM_ALIAS_SET (target);
	else
	  alias = get_alias_set (elttype);

	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements.  */
	for (idx = 0, i = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
	     idx++, i += bitsize / elt_size)
	  {
	    HOST_WIDE_INT eltpos;
	    tree value = ce->value;

	    bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
	    if (cleared && initializer_zerop (value))
	      continue;

	    if (ce->index)
	      eltpos = tree_to_uhwi (ce->index);
	    else
	      eltpos = i;

	    if (vector)
	      {
		if (vec_vec_init_p)
		  {
		    gcc_assert (ce->index == NULL_TREE);
		    gcc_assert (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE);
		    eltpos = idx;
		  }
		else
		  gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
		RTVEC_ELT (vector, eltpos) = expand_normal (value);
	      }
	    else
	      {
		machine_mode value_mode
		  = (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
		     ? TYPE_MODE (TREE_TYPE (value)) : eltmode);
		bitpos = eltpos * elt_size;
		store_constructor_field (target, bitsize, bitpos, 0,
					 bitregion_end, value_mode,
					 value, cleared, alias, reverse);
	      }
	  }

	if (vector)
	  emit_insn (GEN_FCN (icode) (target,
				      gen_rtx_PARALLEL (GET_MODE (target),
							vector)));
	break;
      }

    default:
      gcc_unreachable ();
    }
}
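
/* Editor's illustrative example (not part of GCC): a designated range
   initializer such as

     int v[100] = { [10 ... 89] = 7 };

   reaches the ARRAY_TYPE case above with a RANGE_EXPR index.  Eighty
   4-byte elements exceed the 40 * 8 bit unrolling budget checked above,
   so instead of emitting 80 individual stores, an index pseudo is
   initialized from LO_INDEX and a small runtime loop stores 7 through
   offset_address once per element until the range is exhausted.  */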
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   BITREGION_START is bitpos of the first bitfield in this region.
   BITREGION_END is the bitpos of the ending bitfield in this region.
   These two fields are 0, if the C++ memory model does not apply,
   or we are not interested in keeping track of bitfield regions.

   Always return const0_rtx unless we have something particular to
   return.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.

   If NONTEMPORAL is true, try generating a nontemporal store.

   If REVERSE is true, the store is to be done in reverse order.  */

static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
	     unsigned HOST_WIDE_INT bitregion_start,
	     unsigned HOST_WIDE_INT bitregion_end,
	     machine_mode mode, tree exp,
	     alias_set_type alias_set, bool nontemporal, bool reverse)
{
  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      gcc_assert (!bitpos);
      return store_expr (exp, target, 0, nontemporal, reverse);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode
	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
		|| bitpos % GET_MODE_ALIGNMENT (mode))
	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
	      || (bitpos % BITS_PER_UNIT != 0)))
      || (bitsize >= 0 && mode != BLKmode
	  && GET_MODE_BITSIZE (mode) > bitsize)
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0
	  /* Except for initialization of full bytes from a CONSTRUCTOR, which
	     we will handle specially below.  */
	  && !(TREE_CODE (exp) == CONSTRUCTOR
	       && bitsize % BITS_PER_UNIT == 0)
	  /* And except for bitwise copying of TREE_ADDRESSABLE types,
	     where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp)
	     includes some extra padding.  store_expr / expand_expr will in
	     that case call get_inner_reference that will have the bitsize
	     we check here and thus the block move will not clobber the
	     padding that shouldn't be clobbered.  In the future we could
	     replace the TREE_ADDRESSABLE check with a check that
	     get_base_address needs to live in memory.  */
	  && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
	      || TREE_CODE (exp) != COMPONENT_REF
	      || TREE_CODE (DECL_SIZE (TREE_OPERAND (exp, 1))) != INTEGER_CST
	      || (bitsize % BITS_PER_UNIT != 0)
	      || (bitpos % BITS_PER_UNIT != 0)
	      || (compare_tree_int (DECL_SIZE (TREE_OPERAND (exp, 1)), bitsize)
		  != 0)))
      /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
	 decl we must use bitfield operations.  */
      || (bitsize >= 0
	  && TREE_CODE (exp) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
    {
      rtx temp;
      gimple *nop_def;

      /* If EXP is a NOP_EXPR of precision less than its mode, then that
	 implies a mask operation.  If the precision is the same size as
	 the field we're storing into, that mask is redundant.  This is
	 particularly common with bit field assignments generated by the
	 C front end.  */
      nop_def = get_def_for_expr (exp, NOP_EXPR);
      if (nop_def)
	{
	  tree type = TREE_TYPE (exp);
	  if (INTEGRAL_TYPE_P (type)
	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
	      && bitsize == TYPE_PRECISION (type))
	    {
	      tree op = gimple_assign_rhs1 (nop_def);
	      type = TREE_TYPE (op);
	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
		exp = op;
	    }
	}

      temp = expand_normal (exp);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (temp) == PARALLEL)
	{
	  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
	  machine_mode temp_mode
	    = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
	  rtx temp_target = gen_reg_rtx (temp_mode);
	  emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
	  temp = temp_target;
	}

      /* Handle calls that return BLKmode values in registers.  */
      else if (mode == BLKmode && REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
	{
	  rtx temp_target = gen_reg_rtx (GET_MODE (temp));
	  copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
	  temp = temp_target;
	}

      /* If the value has aggregate type and an integral mode then, if BITSIZE
	 is narrower than this mode and this is for big-endian data, we first
	 need to put the value into the low-order bits for store_bit_field,
	 except when MODE is BLKmode and BITSIZE larger than the word size
	 (see the handling of fields larger than a word in store_bit_field).
	 Moreover, the field may be not aligned on a byte boundary; in this
	 case, if it has reverse storage order, it needs to be accessed as a
	 scalar field with reverse storage order and we must first put the
	 value into target order.  */
      if (AGGREGATE_TYPE_P (TREE_TYPE (exp))
	  && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT)
	{
	  HOST_WIDE_INT size = GET_MODE_BITSIZE (GET_MODE (temp));

	  reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp));

	  if (reverse)
	    temp = flip_storage_order (GET_MODE (temp), temp);

	  if (bitsize < size
	      && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN
	      && !(mode == BLKmode && bitsize > BITS_PER_WORD))
	    temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
				 size - bitsize, NULL_RTX, 1);
	}

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the mode of TEMP and TARGET is BLKmode, both must be in memory
	 and BITPOS must be aligned on a byte boundary.  If so, we simply do
	 a block copy.  Likewise for a BLKmode-like TARGET.  */
      if (GET_MODE (temp) == BLKmode
	  && (GET_MODE (target) == BLKmode
	      || (MEM_P (target)
		  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
		  && (bitpos % BITS_PER_UNIT) == 0
		  && (bitsize % BITS_PER_UNIT) == 0)))
	{
	  gcc_assert (MEM_P (target) && MEM_P (temp)
		      && (bitpos % BITS_PER_UNIT) == 0);

	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   BLOCK_OP_NORMAL);

	  return const0_rtx;
	}

      /* If the mode of TEMP is still BLKmode and BITSIZE not larger than the
	 word size, we need to load the value (see again store_bit_field).  */
      if (GET_MODE (temp) == BLKmode && bitsize <= BITS_PER_WORD)
	{
	  machine_mode temp_mode = smallest_mode_for_size (bitsize, MODE_INT);
	  temp = extract_bit_field (temp, bitsize, 0, 1, NULL_RTX, temp_mode,
				    temp_mode, false, NULL);
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos,
		       bitregion_start, bitregion_end,
		       mode, temp, reverse);

      return const0_rtx;
    }
  else
    {
      /* Now build a reference to just the desired component.  */
      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
	 into a target smaller than its type; handle that case now.  */
      if (TREE_CODE (exp) == CONSTRUCTOR && bitsize >= 0)
	{
	  gcc_assert (bitsize % BITS_PER_UNIT == 0);
	  store_constructor (exp, to_rtx, 0, bitsize / BITS_PER_UNIT, reverse);
	  return to_rtx;
	}

      return store_expr (exp, to_rtx, 0, nontemporal, reverse);
    }
}
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
   storage order of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
   Otherwise, it is a mode that can be used to access the field.

   If the field describes a variable-sized object, *PMODE is set to
   BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
		     HOST_WIDE_INT *pbitpos, tree *poffset,
		     machine_mode *pmode, int *punsignedp,
		     int *preversep, int *pvolatilep)
{
  tree size_tree = 0;
  machine_mode mode = VOIDmode;
  bool blkmode_bitfield = false;
  tree offset = size_zero_node;
  offset_int bit_offset = 0;

  /* First get the mode, signedness, storage order and size.  We do this from
     just the outermost expression.  */
  *pbitsize = -1;
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (exp, 1);
      size_tree = DECL_SIZE (field);
      if (flag_strict_volatile_bitfields > 0
	  && TREE_THIS_VOLATILE (exp)
	  && DECL_BIT_FIELD_TYPE (field)
	  && DECL_MODE (field) != BLKmode)
	/* Volatile bitfields should be accessed in the mode of the
	   field's type, not the mode computed based on the bit
	   size.  */
	mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
      else if (!DECL_BIT_FIELD (field))
	mode = DECL_MODE (field);
      else if (DECL_MODE (field) == BLKmode)
	blkmode_bitfield = true;

      *punsignedp = DECL_UNSIGNED (field);
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
		     || TYPE_UNSIGNED (TREE_TYPE (exp)));

      /* For vector types, with the correct size of access, use the mode of
	 inner type.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
	  && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
	mode = TYPE_MODE (TREE_TYPE (exp));
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! tree_fits_uhwi_p (size_tree))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_to_uhwi (size_tree);
    }

  *preversep = reverse_storage_order_for_component_p (exp);

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    /* If this field hasn't been filled in yet, don't go past it.
	       This should only happen when folding expressions made during
	       type construction.  */
	    if (this_offset == 0)
	      break;

	    offset = size_binop (PLUS_EXPR, offset, this_offset);
	    bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));

	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound = array_ref_low_bound (exp);
	    tree unit_size = array_ref_element_size (exp);

	    /* We assume all arrays have sizes that are a multiple of a byte.
	       First subtract the lower bound, if any, in the type of the
	       index, then convert to sizetype and multiply by the size of
	       the array element.  */
	    if (! integer_zerop (low_bound))
	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				   index, low_bound);

	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (MULT_EXPR,
					     fold_convert (sizetype, index),
					     unit_size));
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  bit_offset += *pbitsize;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case MEM_REF:
	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      tree off = TREE_OPERAND (exp, 1);
	      if (!integer_zerop (off))
		{
		  offset_int boff, coff = mem_ref_offset (exp);
		  boff = coff << LOG2_BITS_PER_UNIT;
		  bit_offset += boff;
		}
	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	    }
	  goto done;

	default:
	  goto done;
	}

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Make sure to handle overflow during
     this conversion.  */
  if (TREE_CODE (offset) == INTEGER_CST)
    {
      offset_int tem = wi::sext (wi::to_offset (offset),
				 TYPE_PRECISION (sizetype));
      tem <<= LOG2_BITS_PER_UNIT;
      tem += bit_offset;
      if (wi::fits_shwi_p (tem))
	{
	  *pbitpos = tem.to_shwi ();
	  *poffset = offset = NULL_TREE;
	}
    }

  /* Otherwise, split it up.  */
  if (offset)
    {
      /* Avoid returning a negative bitpos as this may wreak havoc later.  */
      if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
	{
	  offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
	  offset_int tem = bit_offset.and_not (mask);
	  /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
	     Subtract it to BIT_OFFSET and add it (scaled) to OFFSET.  */
	  bit_offset -= tem;
	  tem >>= LOG2_BITS_PER_UNIT;
	  offset = size_binop (PLUS_EXPR, offset,
			       wide_int_to_tree (sizetype, tem));
	}

      *pbitpos = bit_offset.to_shwi ();
      *poffset = offset;
    }

  /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
  if (mode == VOIDmode
      && blkmode_bitfield
      && (*pbitpos % BITS_PER_UNIT) == 0
      && (*pbitsize % BITS_PER_UNIT) == 0)
    *pmode = BLKmode;
  else
    *pmode = mode;

  return exp;
}
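
/* Editor's illustrative example (not part of GCC): for the lvalue
   S.F[3], where field F starts at byte offset 8 in S and has 4-byte int
   elements, get_inner_reference walks the ARRAY_REF and COMPONENT_REF
   and hands back the base decl S with *PBITSIZE == 32,
   *PBITPOS == (8 + 3 * 4) * BITS_PER_UNIT == 160 (assuming 8-bit units)
   and *POFFSET == NULL_TREE, every step being constant.  With a
   variable index I, the byte offset would instead come back in *POFFSET
   as the tree I * 4 and *PBITPOS would hold only the constant part.  */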
/* Alignment in bits the TARGET of an assignment may be assumed to have.  */

static unsigned HOST_WIDE_INT
target_align (const_tree target)
{
  /* We might have a chain of nested references with intermediate misaligning
     bitfields components, so need to recurse to find out.  */

  unsigned HOST_WIDE_INT this_align, outer_align;

  switch (TREE_CODE (target))
    {
    case BIT_FIELD_REF:
      return 1;

    case COMPONENT_REF:
      this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MAX (this_align, outer_align);

    default:
      return TYPE_ALIGN (TREE_TYPE (target));
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value
	= simplify_gen_subreg (GET_MODE (value),
			       force_reg (GET_MODE (SUBREG_REG (value)),
					  force_operand (SUBREG_REG (value),
							 NULL_RTX)),
			       GET_MODE (SUBREG_REG (value)),
			       SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && CONST_INT_P (op2))
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (code == PLUS && CONST_INT_P (op2)
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && REG_P (XEXP (XEXP (value, 0), 0))
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1), 0),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);

	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }
  if (UNARY_P (value))
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      switch (code)
	{
	case ZERO_EXTEND:
	case SIGN_EXTEND:
	case TRUNCATE:
	case FLOAT_EXTEND:
	case FLOAT_TRUNCATE:
	  convert_move (target, op1, code == ZERO_EXTEND);
	  return target;

	case FIX:
	case UNSIGNED_FIX:
	  expand_fix (target, op1, code == UNSIGNED_FIX);
	  return target;

	case FLOAT:
	case UNSIGNED_FLOAT:
	  expand_float (target, op1, code == UNSIGNED_FLOAT);
	  return target;

	default:
	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
	}
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory reference to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
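
/* Editor's illustrative example (not part of GCC): given the compound
   address

     (plus:SI (mult:SI (reg:SI 100) (const_int 4)) (const_int 32))

   force_operand recurses on the MULT operand, emitting it through
   expand_mult into a fresh pseudo, and then emits the addition with
   expand_simple_binop, so the caller ends up with a plain REG rather
   than an expression that no insn pattern could match directly.  */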
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

int
safe_from_p (const_rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
	      != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (MEM_P (x)
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_declaration:
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case tcc_constant:
      return 1;

    case tcc_exceptional:
      if (TREE_CODE (exp) == TREE_LIST)
	{
	  while (1)
	    {
	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
		return 0;
	      exp = TREE_CHAIN (exp);
	      if (!exp)
		return 1;
	      if (TREE_CODE (exp) != TREE_LIST)
		return safe_from_p (x, exp, 0);
	    }
	}
      else if (TREE_CODE (exp) == CONSTRUCTOR)
	{
	  constructor_elt *ce;
	  unsigned HOST_WIDE_INT idx;

	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
		|| !safe_from_p (x, ce->value, 0))
	      return 0;
	  return 1;
	}
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case tcc_statement:
      /* The only case we look at here is the DECL_INITIAL inside a
	 DECL_EXPR.  */
      return (TREE_CODE (exp) != DECL_EXPR
	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));

    case tcc_binary:
    case tcc_comparison:
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
	return 0;
      /* Fall through.  */

    case tcc_unary:
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL a that address if part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || !MEM_P (DECL_RTL (exp)))
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case MEM_REF:
	  if (MEM_P (x)
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || MEM_P (x))
	    return 0;
	  break;

	case WITH_CLEANUP_EXPR:
	case CLEANUP_POINT_EXPR:
	  /* Lowered by gimplify.c.  */
	  gcc_unreachable ();

	case SAVE_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = TREE_OPERAND_LENGTH (exp);
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      break;

    case tcc_type:
      /* Should never get a type here.  */
      gcc_unreachable ();
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (REG_P (exp_rtl)
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (MEM_P (x) && MEM_P (exp_rtl)
		    && true_dependence (exp_rtl, VOIDmode, x)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (const_tree exp)
{
  unsigned HOST_WIDE_INT ret;
  int trailing_zeros = tree_ctz (exp);
  if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
    return BIGGEST_ALIGNMENT;
  ret = HOST_WIDE_INT_1U << trailing_zeros;
  if (ret > BIGGEST_ALIGNMENT)
    return BIGGEST_ALIGNMENT;
  return ret;
}
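
/* Editor's illustrative example (not part of GCC): if EXP is the
   integer constant 24, tree_ctz reports 3 trailing zero bits, so the
   result is 1 << 3 == 8: 24 is a multiple of 8 but not of 16.  For an
   expression like 4 * i the guaranteed factor is 4, whatever value I
   takes at run time.  */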
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree target, const_tree exp)
{
  unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
  unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);

  return MAX (factor, talign);
}
/* Convert the tree comparison code TCODE to the rtl one where the
   signedness is UNSIGNEDP.  */

static enum rtx_code
convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
{
  enum rtx_code code;
  switch (tcode)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }
  return code;
}
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
/* Return a MEM that contains constant EXP.  DEFER is as for
   output_constant_def and MODIFIER is as for expand_expr.  */

static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
  rtx mem;

  mem = output_constant_def (exp, defer);
  if (modifier != EXPAND_INITIALIZER)
    mem = use_anchored_address (mem);

  return mem;
}
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
			 enum expand_modifier modifier, addr_space_t as)
{
  rtx result, subtarget;
  tree inner, offset;
  HOST_WIDE_INT bitsize, bitpos;
  int unsignedp, reversep, volatilep = 0;
  machine_mode mode1;

  /* If we are taking the address of a constant and are at the top level,
     we have to use output_constant_def since we can't call force_const_mem
     yet.  */
  /* ??? This should be considered a front-end bug.  We should not be
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
     exception here is STRING_CST.  */
  if (CONSTANT_CLASS_P (exp))
    {
      result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
      return result;
    }

  /* Everything must be something allowed by is_gimple_addressable.  */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);

    case MEM_REF:
      {
	tree tem = TREE_OPERAND (exp, 0);
	if (!integer_zerop (TREE_OPERAND (exp, 1)))
	  tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
	return expand_expr (tem, target, tmode, modifier);
      }

    case CONST_DECL:
      /* Expand the initializer like constants above.  */
      result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
					   0, modifier), 0);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
      return result;

    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
	 the address is the same as the address of the parent object.  */
      offset = 0;
      bitpos = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
	 The expression is therefore always offset by the size of the
	 scalar type.  */
      offset = 0;
      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;

    case COMPOUND_LITERAL_EXPR:
      /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
	 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
	 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
	 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
	 the initializers aren't gimplified.  */
      if (COMPOUND_LITERAL_EXPR_DECL (exp)
	  && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
	return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
					target, tmode, modifier, as);
      /* FALLTHRU */
    default:
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
	 expand_expr, as that can have various side effects; LABEL_DECLs for
	 example, may not have their DECL_RTL set yet.  Expand the rtl of
	 CONSTRUCTORs too, which should yield a memory reference for the
	 constructor's contents.  Assume language specific tree nodes can
	 be expanded in some interesting way.  */
      gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
      if (DECL_P (exp)
	  || TREE_CODE (exp) == CONSTRUCTOR
	  || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
	{
	  result = expand_expr (exp, target, tmode,
				modifier == EXPAND_INITIALIZER
				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);

	  /* If the DECL isn't in memory, then the DECL wasn't properly
	     marked TREE_ADDRESSABLE, which will be either a front-end
	     or a tree optimizer bug.  */

	  gcc_assert (MEM_P (result));
	  result = XEXP (result, 0);

	  /* ??? Is this needed anymore?  */
	  if (DECL_P (exp))
	    TREE_USED (exp) = 1;

	  if (modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_SUM)
	    result = force_operand (result, target);
	  return result;
	}

      /* Pass FALSE as the last argument to get_inner_reference although
	 we are expanding to RTL.  The rationale is that we know how to
	 handle "aligning nodes" here: we can just bypass them because
	 they won't change the final object whose address will be returned
	 (they actually exist only for that purpose).  */
      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
				   &unsignedp, &reversep, &volatilep);
      break;
    }

  /* We must have made progress.  */
  gcc_assert (inner != exp);

  subtarget = offset || bitpos ? NULL_RTX : target;
  /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
     inner alignment, force the inner to be sufficiently aligned.  */
  if (CONSTANT_CLASS_P (inner)
      && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
    {
      inner = copy_node (inner);
      TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
      SET_TYPE_ALIGN (TREE_TYPE (inner), TYPE_ALIGN (TREE_TYPE (exp)));
      TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
    }
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);

  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
	result = force_operand (result, NULL);
      tmp = expand_expr (offset, NULL_RTX, tmode,
			 modifier == EXPAND_INITIALIZER
			 ? EXPAND_INITIALIZER : EXPAND_NORMAL);

      /* expand_expr is allowed to return an object in a mode other
	 than TMODE.  If it did, we need to convert.  */
      if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
	tmp = convert_modes (tmode, GET_MODE (tmp),
			     tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
      result = convert_memory_address_addr_space (tmode, result, as);
      tmp = convert_memory_address_addr_space (tmode, tmp, as);

      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	result = simplify_gen_binary (PLUS, tmode, result, tmp);
      else
	{
	  subtarget = bitpos ? NULL_RTX : target;
	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
					1, OPTAB_LIB_WIDEN);
	}
    }

  if (bitpos)
    {
      /* Someone beforehand should have rejected taking the address
	 of such an object.  */
      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);

      result = convert_memory_address_addr_space (tmode, result, as);
      result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
    }

  return result;
}
7889 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7892 expand_expr_addr_expr (tree exp
, rtx target
, machine_mode tmode
,
7893 enum expand_modifier modifier
)
7895 addr_space_t as
= ADDR_SPACE_GENERIC
;
7896 machine_mode address_mode
= Pmode
;
7897 machine_mode pointer_mode
= ptr_mode
;
7901 /* Target mode of VOIDmode says "whatever's natural". */
7902 if (tmode
== VOIDmode
)
7903 tmode
= TYPE_MODE (TREE_TYPE (exp
));
7905 if (POINTER_TYPE_P (TREE_TYPE (exp
)))
7907 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp
)));
7908 address_mode
= targetm
.addr_space
.address_mode (as
);
7909 pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
7912 /* We can get called with some Weird Things if the user does silliness
7913 like "(short) &a". In that case, convert_memory_address won't do
7914 the right thing, so ignore the given target mode. */
7915 if (tmode
!= address_mode
&& tmode
!= pointer_mode
)
7916 tmode
= address_mode
;
7918 result
= expand_expr_addr_expr_1 (TREE_OPERAND (exp
, 0), target
,
7919 tmode
, modifier
, as
);
7921 /* Despite expand_expr claims concerning ignoring TMODE when not
7922 strictly convenient, stuff breaks if we don't honor it. Note
7923 that combined with the above, we only do this for pointer modes. */
7924 rmode
= GET_MODE (result
);
7925 if (rmode
== VOIDmode
)
7928 result
= convert_memory_address_addr_space (tmode
, result
, as
);
/* Generate code for computing CONSTRUCTOR EXP.
   An rtx for the computed value is returned.  If AVOID_TEMP_MEM
   is TRUE, instead of creating a temporary variable in memory
   NULL is returned and the caller needs to handle it differently.  */

static rtx
expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
		    bool avoid_temp_mem)
{
  tree type = TREE_TYPE (exp);
  machine_mode mode = TYPE_MODE (type);

  /* Try to avoid creating a temporary at all.  This is possible
     if all of the initializer is zero.
     FIXME: try to handle all [0..255] initializers we can handle
     with memset.  */
  if (TREE_STATIC (exp)
      && !TREE_ADDRESSABLE (exp)
      && target != 0 && mode == BLKmode
      && all_zeros_p (exp))
    {
      clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
      return target;
    }

  /* All elts simple constants => refer to a constant in memory.  But
     if this is a non-BLKmode mode, let it store a field at a time
     since that should make a CONST_INT, CONST_WIDE_INT or
     CONST_DOUBLE when we fold.  Likewise, if we have a target we can
     use, it is best to store directly into the target unless the type
     is large enough that memcpy will be used.  If we are making an
     initializer and all operands are constant, put it in memory as
     well.

     FIXME: Avoid trying to fill vector constructors piece-meal.
     Output them with output_constant_def below unless we're sure
     they're zeros.  This should go away when vector initializers
     are treated like VECTOR_CST instead of arrays.  */
  if ((TREE_STATIC (exp)
       && ((mode == BLKmode
	    && ! (target != 0 && safe_from_p (target, exp, 1)))
	   || TREE_ADDRESSABLE (exp)
	   || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
	       && (! can_move_by_pieces
		   (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
		    TYPE_ALIGN (type)))
	       && ! mostly_zeros_p (exp))))
      || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
	  && TREE_CONSTANT (exp)))
    {
      rtx constructor;

      if (avoid_temp_mem)
	return NULL_RTX;

      constructor = expand_expr_constant (exp, 1, modifier);

      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM)
	constructor = validize_mem (constructor);

      return constructor;
    }

  /* Handle calls that pass values in multiple non-contiguous
     locations.  The Irix 6 ABI has examples of this.  */
  if (target == 0 || ! safe_from_p (target, exp, 1)
      || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
    {
      if (avoid_temp_mem)
	return NULL_RTX;

      target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
    }

  store_constructor (exp, target, 0, int_expr_size (exp), false);
  return target;
}
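
/* Editor's illustrative example (not part of GCC): when a local BLKmode
   aggregate is initialized from an all-constant, all-zero constructor,
   e.g.

     struct big { int a[64]; } b = { 0 };

   the all_zeros_p shortcut above lets expand_constructor emit a single
   clear_storage call (typically a memset) instead of materializing the
   constructor in the constant pool and block-copying it into TARGET.  */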
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.

   If INNER_REFERENCE_P is true, we are expanding an inner reference.
   In this case, we don't adjust a returned MEM rtx that wouldn't be
   sufficiently aligned for its mode; instead, it's up to the caller
   to deal with it afterwards.  This is used to make sure that unaligned
   base objects for which out-of-bounds accesses are supported, for
   example record types with trailing arrays, aren't realigned behind
   the back of the caller.
   The normal operating mode is to pass FALSE for this parameter.  */
rtx
expand_expr_real (tree exp, rtx target, machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl,
		  bool inner_reference_p)
{
  rtx ret;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
			    inner_reference_p);
  return ret;
}
/* Try to expand the conditional expression which is represented by
   TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds
   return the rtl reg which represents the result.  Otherwise return
   NULL_RTX.  */

static rtx
expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
			      tree treeop1 ATTRIBUTE_UNUSED,
			      tree treeop2 ATTRIBUTE_UNUSED)
{
  rtx insn;
  rtx op00, op01, op1, op2;
  enum rtx_code comparison_code;
  machine_mode comparison_mode;
  gimple *srcstmt;
  rtx temp;
  tree type = TREE_TYPE (treeop1);
  int unsignedp = TYPE_UNSIGNED (type);
  machine_mode mode = TYPE_MODE (type);
  machine_mode orig_mode = mode;
  static bool expanding_cond_expr_using_cmove = false;

  /* Conditional move expansion can end up TERing two operands which,
     when recursively hitting conditional expressions can result in
     exponential behavior if the cmove expansion ultimately fails.
     It's hardly profitable to TER a cmove into a cmove so avoid doing
     that by failing early if we end up recursing.  */
  if (expanding_cond_expr_using_cmove)
    return NULL_RTX;

  /* If we cannot do a conditional move on the mode, try doing it
     with the promoted mode.  */
  if (!can_conditionally_move_p (mode))
    {
      mode = promote_mode (type, mode, &unsignedp);
      if (!can_conditionally_move_p (mode))
	return NULL_RTX;
      temp = assign_temp (type, 0, 0); /* Use promoted mode for temp.  */
    }
  else
    temp = assign_temp (type, 0, 1);

  expanding_cond_expr_using_cmove = true;
  start_sequence ();
  expand_operands (treeop1, treeop2,
		   temp, &op1, &op2, EXPAND_NORMAL);

  if (TREE_CODE (treeop0) == SSA_NAME
      && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
      enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
      op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
      op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
      comparison_mode = TYPE_MODE (type);
      unsignedp = TYPE_UNSIGNED (type);
      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
    }
  else if (COMPARISON_CLASS_P (treeop0))
    {
      tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
      enum tree_code cmpcode = TREE_CODE (treeop0);
      op00 = expand_normal (TREE_OPERAND (treeop0, 0));
      op01 = expand_normal (TREE_OPERAND (treeop0, 1));
      unsignedp = TYPE_UNSIGNED (type);
      comparison_mode = TYPE_MODE (type);
      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
    }
  else
    {
      op00 = expand_normal (treeop0);
      op01 = const0_rtx;
      comparison_code = NE;
      comparison_mode = GET_MODE (op00);
      if (comparison_mode == VOIDmode)
	comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
    }
  expanding_cond_expr_using_cmove = false;
  if (GET_MODE (op1) != mode)
    op1 = gen_lowpart (mode, op1);

  if (GET_MODE (op2) != mode)
    op2 = gen_lowpart (mode, op2);

  /* Try to emit the conditional move.  */
  insn = emit_conditional_move (temp, comparison_code,
				op00, op01, comparison_mode,
				op1, op2, mode, unsignedp);

  /* If we could do the conditional move, emit the sequence,
     and return.  */
  if (insn)
    {
      rtx_insn *seq = get_insns ();
      end_sequence ();
      emit_insn (seq);
      return convert_modes (orig_mode, mode, temp, 0);
    }

  /* Otherwise discard the sequence and fall back to code with
     branches.  */
  end_sequence ();
  return NULL_RTX;
}
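/* As an illustration (target-dependent, not a guaranteed shape): for
   "x = a < b ? c : d" the function above tries to produce a single
   conditional-move sequence along the lines of

       (set (reg:CC flags) (compare:CC (reg a) (reg b)))
       (set (reg x) (if_then_else (lt (reg:CC flags) (const_int 0))
				  (reg c)
				  (reg d)))

   instead of a compare-and-branch diamond.  */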
rtx
expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
		    enum expand_modifier modifier)
{
  rtx op0, op1, op2, temp;
  rtx_code_label *lab;
  tree type;
  int unsignedp;
  machine_mode mode;
  enum tree_code code = ops->code;
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  bool reduce_bit_field;
  location_t loc = ops->location;
  tree treeop0, treeop1, treeop2;
#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field			  \
				 ? reduce_to_bit_field_precision ((expr), \
								  target, type) \
				 : (expr))

  type = ops->type;
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = ops->op0;
  treeop1 = ops->op1;
  treeop2 = ops->op2;
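/* As an illustration of REDUCE_BIT_FIELD: if TYPE is a 3-bit
   bit-field type whose mode is QImode, computing 5 + 6 in QImode
   yields 11, which exceeds the 3-bit precision; the result must be
   masked back to 11 & 7 == 3 for an unsigned type, or sign-extended
   from bit 2 for a signed one.  */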
  /* We should be called only on simple (binary or unary) expressions,
     exactly those that are valid in gimple expressions that aren't
     GIMPLE_SINGLE_RHS (or invalid).  */
  gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
	      || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
	      || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);

  ignore = (target == const0_rtx
	    || ((CONVERT_EXPR_CODE_P (code)
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* We should be called only if we need the result.  */
  gcc_assert (!ignore);

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (INTEGRAL_TYPE_P (type)
		      && !type_has_mode_precision_p (type));

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;

  switch (code)
    {
    case NON_LVALUE_EXPR:
    case PAREN_EXPR:
    CASE_CONVERT:
      if (treeop0 == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (treeop0);

	  /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attribute.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (treeop0, target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, type, 0);
	      return result;
	    }

	  if (target == 0)
	    {
	      if (TYPE_MODE (type) != BLKmode)
		target = gen_reg_rtx (TYPE_MODE (type));
	      else
		target = assign_temp (type, 1, 1);
	    }

	  if (MEM_P (target))
	    /* Store data into beginning of memory target.  */
	    store_expr (treeop0,
			adjust_address (target, TYPE_MODE (valtype), 0),
			modifier == EXPAND_STACK_PARM,
			false, TYPE_REVERSE_STORAGE_ORDER (type));
	  else
	    {
	      gcc_assert (REG_P (target)
			  && !TYPE_REVERSE_STORAGE_ORDER (type));

	      /* Store this field into a union of the proper type.  */
	      store_field (target,
			   MIN ((int_size_in_bytes (TREE_TYPE (treeop0))
				 * BITS_PER_UNIT),
				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
			   0, 0, 0, TYPE_MODE (valtype), treeop0, 0,
			   false, false);
	    }

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
	{
	  op0 = expand_expr (treeop0, target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return REDUCE_BIT_FIELD (op0);
	}

      op0 = expand_expr (treeop0, NULL_RTX, mode,
			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
      if (GET_MODE (op0) == mode)
	;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      else if (CONSTANT_P (op0))
	{
	  tree inner_type = TREE_TYPE (treeop0);
	  machine_mode inner_mode = GET_MODE (op0);

	  if (inner_mode == VOIDmode)
	    inner_mode = TYPE_MODE (inner_type);

	  if (modifier == EXPAND_INITIALIZER)
	    op0 = lowpart_subreg (mode, op0, inner_mode);
	  else
	    op0 = convert_modes (mode, inner_mode, op0,
				 TYPE_UNSIGNED (inner_type));
	}

      else if (modifier == EXPAND_INITIALIZER)
	op0 = gen_rtx_fmt_e (TYPE_UNSIGNED (TREE_TYPE (treeop0))
			     ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      else if (target == 0)
	op0 = convert_to_mode (mode, op0,
			       TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      else
	{
	  convert_move (target, op0,
			TYPE_UNSIGNED (TREE_TYPE (treeop0)));
	  op0 = target;
	}

      return REDUCE_BIT_FIELD (op0);
    case ADDR_SPACE_CONVERT_EXPR:
      {
	tree treeop0_type = TREE_TYPE (treeop0);

	gcc_assert (POINTER_TYPE_P (type));
	gcc_assert (POINTER_TYPE_P (treeop0_type));

	addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
	addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));

	/* Conversions between pointers to the same address space should
	   have been implemented via CONVERT_EXPR / NOP_EXPR.  */
	gcc_assert (as_to != as_from);

	op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);

	/* Ask target code to handle conversion between pointers
	   to overlapping address spaces.  */
	if (targetm.addr_space.subset_p (as_to, as_from)
	    || targetm.addr_space.subset_p (as_from, as_to))
	  {
	    op0 = targetm.addr_space.convert (op0, treeop0_type, type);
	    gcc_assert (op0);
	    return op0;
	  }

	/* For disjoint address spaces, converting anything but a null
	   pointer invokes undefined behavior.  We truncate or extend the
	   value as if we'd converted via integers, which handles 0 as
	   required, and all others as the programmer likely expects.  */
#ifndef POINTERS_EXTEND_UNSIGNED
	const int POINTERS_EXTEND_UNSIGNED = 1;
#endif
	op0 = convert_modes (mode, TYPE_MODE (treeop0_type),
			     op0, POINTERS_EXTEND_UNSIGNED);
	return op0;
      }
    case POINTER_PLUS_EXPR:
      /* Even though the sizetype mode and the pointer's mode can be
	 different, expand is able to handle this correctly and get the
	 correct result out of the PLUS_EXPR code.  */
      /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
	 if sizetype precision is smaller than pointer precision.  */
      if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
	treeop1 = fold_convert_loc (loc, type,
				    fold_convert_loc (loc, ssizetype,
						      treeop1));
      /* If sizetype precision is larger than pointer precision, truncate the
	 offset to have matching modes.  */
      else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
	treeop1 = fold_convert_loc (loc, type, treeop1);
      /* FALLTHRU */

    case PLUS_EXPR:
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */
      if (TREE_CODE (treeop0) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
	  && VAR_P (treeop1)
	  && (DECL_RTL (treeop1) == frame_pointer_rtx
	      || DECL_RTL (treeop1) == stack_pointer_rtx
	      || DECL_RTL (treeop1) == arg_pointer_rtx))
	gcc_unreachable ();

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  if (TREE_CODE (treeop0) == INTEGER_CST
	      && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (treeop1))
	    {
	      rtx constant_part;
	      HOST_WIDE_INT wc;
	      machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));

	      op1 = expand_expr (treeop1, subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use wi::shwi to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      wc = TREE_INT_CST_LOW (treeop0);
	      constant_part
		= immed_wide_int_const (wi::shwi (wc, wmode), wmode);
	      op1 = plus_constant (mode, op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return REDUCE_BIT_FIELD (op1);
	    }

	  else if (TREE_CODE (treeop1) == INTEGER_CST
		   && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
		   && TREE_CONSTANT (treeop0))
	    {
	      rtx constant_part;
	      HOST_WIDE_INT wc;
	      machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));

	      op0 = expand_expr (treeop0, subtarget, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (treeop1, NULL_RTX,
				     VOIDmode, modifier);
		  /* Return a PLUS if modifier says it's OK.  */
		  if (modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    return simplify_gen_binary (PLUS, mode, op0, op1);
		  goto binop2;
		}
	      /* Use wi::shwi to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      wc = TREE_INT_CST_LOW (treeop1);
	      constant_part
		= immed_wide_int_const (wi::shwi (wc, wmode), wmode);
	      op0 = plus_constant (mode, op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return REDUCE_BIT_FIELD (op0);
	    }
	}

      /* Use TER to expand pointer addition of a negated value
	 as pointer subtraction.  */
      if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
	   || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
	       && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
	  && TREE_CODE (treeop1) == SSA_NAME
	  && TYPE_MODE (TREE_TYPE (treeop0))
	     == TYPE_MODE (TREE_TYPE (treeop1)))
	{
	  gimple *def = get_def_for_expr (treeop1, NEGATE_EXPR);
	  if (def)
	    {
	      treeop1 = gimple_assign_rhs1 (def);
	      code = MINUS_EXPR;
	      goto do_minus;
	    }
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if (modifier != EXPAND_INITIALIZER
	  && (modifier != EXPAND_SUM || mode != ptr_mode))
	{
	  expand_operands (treeop0, treeop1,
			   subtarget, &op0, &op1, modifier);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}

      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, modifier);
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));

    case MINUS_EXPR:
    do_minus:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (treeop0)
	  && really_constant_p (treeop1))
	{
	  expand_operands (treeop0, treeop1,
			   NULL_RTX, &op0, &op1, modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (CONST_INT_P (op1))
	    return REDUCE_BIT_FIELD (plus_constant (mode, op0,
						    -INTVAL (op1)));
	  else
	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if (modifier != EXPAND_INITIALIZER
	  && (modifier != EXPAND_SUM || mode != ptr_mode))
	goto binop;

      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (CONST_INT_P (op1))
	{
	  op1 = negate_rtx (mode, op1);
	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
	}

      goto binop2;
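/* As an illustration of the EXPAND_SUM handling above: for an
   address computation such as &arr[10] with 4-byte elements, the
   PLUS_EXPR code may return the bare sum

       (plus:P (reg P) (const_int 40))

   rather than forcing it into a register, so the caller can fold the
   whole sum into a memory addressing mode.  */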
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
      op2 = expand_normal (treeop2);
      target = expand_widen_pattern_expr (ops, op0, op1, op2,
					  target, unsignedp);
      return target;

    case WIDEN_MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (treeop0) == INTEGER_CST)
	std::swap (treeop0, treeop1);

      /* First, check if we have a multiplication of one signed and one
	 unsigned operand.  */
      if (TREE_CODE (treeop1) != INTEGER_CST
	  && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
	      != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
	{
	  machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
	  this_optab = usmul_widen_optab;
	  if (find_widening_optab_handler (this_optab, mode, innermode, 0)
	      != CODE_FOR_nothing)
	    {
	      if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
		expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
				 EXPAND_NORMAL);
	      else
		expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
				 EXPAND_NORMAL);
	      /* op0 and op1 might still be constant, despite the above
		 != INTEGER_CST check.  Handle it.  */
	      if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
		{
		  op0 = convert_modes (innermode, mode, op0, true);
		  op1 = convert_modes (innermode, mode, op1, false);
		  return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
							target, unsignedp));
		}
	      goto binop3;
	    }
	}
      /* Check for a multiplication with matching signedness.  */
      else if ((TREE_CODE (treeop1) == INTEGER_CST
		&& int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
	       || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
		   == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
	{
	  tree op0type = TREE_TYPE (treeop0);
	  machine_mode innermode = TYPE_MODE (op0type);
	  bool zextend_p = TYPE_UNSIGNED (op0type);
	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

	  if (TREE_CODE (treeop0) != INTEGER_CST)
	    {
	      if (find_widening_optab_handler (this_optab, mode, innermode, 0)
		  != CODE_FOR_nothing)
		{
		  expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
				   EXPAND_NORMAL);
		  /* op0 and op1 might still be constant, despite the above
		     != INTEGER_CST check.  Handle it.  */
		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
		    {
		    widen_mult_const:
		      op0 = convert_modes (innermode, mode, op0, zextend_p);
		      op1
			= convert_modes (innermode, mode, op1,
					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
		      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
							    target,
							    unsignedp));
		    }
		  temp = expand_widening_mult (mode, op0, op1, target,
					       unsignedp, this_optab);
		  return REDUCE_BIT_FIELD (temp);
		}
	      if (find_widening_optab_handler (other_optab, mode, innermode, 0)
		  != CODE_FOR_nothing
		  && innermode == word_mode)
		{
		  rtx htem, hipart;
		  op0 = expand_normal (treeop0);
		  if (TREE_CODE (treeop1) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_normal (treeop1),
					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
		  else
		    op1 = expand_normal (treeop1);
		  /* op0 and op1 might still be constant, despite the above
		     != INTEGER_CST check.  Handle it.  */
		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
		    goto widen_mult_const;
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  hipart = gen_highpart (innermode, temp);
		  htem = expand_mult_highpart_adjust (innermode, hipart,
						      op0, op1, hipart,
						      zextend_p);
		  if (htem != hipart)
		    emit_move_insn (hipart, htem);
		  return REDUCE_BIT_FIELD (temp);
		}
	    }
	}
      treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
      treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
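/* As an illustration: a WIDEN_MULT_EXPR computing
   (long long) i * (long long) j from 32-bit operands is expanded via
   a 32x32->64 widening-multiply pattern when the target provides one;
   when only the pattern of the opposite signedness exists in
   word_mode, the code above uses it and corrects the high part with
   expand_mult_highpart_adjust.  */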
    case FMA_EXPR:
      {
	optab opt = fma_optab;
	gimple *def0, *def2;

	/* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
	   call.  */
	if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
	  {
	    tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
	    tree call_expr;

	    gcc_assert (fn != NULL_TREE);
	    call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
	    return expand_builtin (call_expr, target, subtarget, mode, false);
	  }

	def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
	/* The multiplication is commutative - look at its 2nd operand
	   if the first isn't fed by a negate.  */
	if (!def0)
	  {
	    def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
	    /* Swap operands if the 2nd operand is fed by a negate.  */
	    if (def0)
	      std::swap (treeop0, treeop1);
	  }
	def2 = get_def_for_expr (treeop2, NEGATE_EXPR);

	op0 = op2 = NULL;

	if (def0 && def2
	    && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
	  {
	    opt = fnms_optab;
	    op0 = expand_normal (gimple_assign_rhs1 (def0));
	    op2 = expand_normal (gimple_assign_rhs1 (def2));
	  }
	else if (def0
		 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
	  {
	    opt = fnma_optab;
	    op0 = expand_normal (gimple_assign_rhs1 (def0));
	  }
	else if (def2
		 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
	  {
	    opt = fms_optab;
	    op2 = expand_normal (gimple_assign_rhs1 (def2));
	  }

	if (op0 == NULL)
	  op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
	if (op2 == NULL)
	  op2 = expand_normal (treeop2);
	op1 = expand_normal (treeop1);

	return expand_ternary_op (TYPE_MODE (type), opt,
				  op0, op1, op2, target, 0);
      }

    case MULT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_mult" doesn't support sat/no-sat fixed-point
	 multiplications.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (treeop0) == INTEGER_CST)
	std::swap (treeop0, treeop1);

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && tree_fits_shwi_p (treeop1))
	{
	  tree exp1 = treeop1;

	  op0 = expand_expr (treeop0, subtarget, VOIDmode,
			     EXPAND_SUM);

	  if (!REG_P (op0))
	    op0 = force_operand (op0, NULL_RTX);
	  if (!REG_P (op0))
	    op0 = copy_to_mode_reg (mode, op0);

	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
			       gen_int_mode (tree_to_shwi (exp1),
					     TYPE_MODE (TREE_TYPE (exp1)))));
	}

      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      {
	/* If this is a fixed-point operation, then we cannot use the code
	   below because "expand_divmod" doesn't support sat/no-sat fixed-point
	   divisions.  */
	if (ALL_FIXED_POINT_MODE_P (mode))
	  goto binop;

	if (modifier == EXPAND_STACK_PARM)
	  target = 0;
	/* Possible optimization: compute the dividend with EXPAND_SUM
	   then if the divisor is constant can optimize the case
	   where some terms of the dividend have coeffs divisible by it.  */
	expand_operands (treeop0, treeop1,
			 subtarget, &op0, &op1, EXPAND_NORMAL);
	bool mod_p = code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR
		     || code == CEIL_MOD_EXPR || code == ROUND_MOD_EXPR;
	if (SCALAR_INT_MODE_P (mode)
	    && optimize >= 2
	    && get_range_pos_neg (treeop0) == 1
	    && get_range_pos_neg (treeop1) == 1)
	  {
	    /* If both arguments are known to be positive when interpreted
	       as signed, we can expand it as both signed and unsigned
	       division or modulo.  Choose the cheaper sequence in that case.  */
	    bool speed_p = optimize_insn_for_speed_p ();
	    do_pending_stack_adjust ();
	    start_sequence ();
	    rtx uns_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 1);
	    rtx_insn *uns_insns = get_insns ();
	    end_sequence ();
	    start_sequence ();
	    rtx sgn_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 0);
	    rtx_insn *sgn_insns = get_insns ();
	    end_sequence ();
	    unsigned uns_cost = seq_cost (uns_insns, speed_p);
	    unsigned sgn_cost = seq_cost (sgn_insns, speed_p);

	    /* If costs are the same then use the other factor as a
	       tie breaker.  */
	    if (uns_cost == sgn_cost)
	      {
		uns_cost = seq_cost (uns_insns, !speed_p);
		sgn_cost = seq_cost (sgn_insns, !speed_p);
	      }

	    if (uns_cost < sgn_cost || (uns_cost == sgn_cost && unsignedp))
	      {
		emit_insn (uns_insns);
		return uns_ret;
	      }
	    emit_insn (sgn_insns);
	    return sgn_ret;
	  }
	return expand_divmod (mod_p, code, mode, op0, op1, target, unsignedp);
      }
    case MULT_HIGHPART_EXPR:
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
      gcc_assert (temp);
      return temp;
    case FIXED_CONVERT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);

      if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
	   && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
	  || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
      else
	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
      return target;

    case FIX_TRUNC_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
				op0);
      expand_float (target, op0,
		    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      return target;
8918 op0
= expand_expr (treeop0
, subtarget
,
8919 VOIDmode
, EXPAND_NORMAL
);
8920 if (modifier
== EXPAND_STACK_PARM
)
8922 temp
= expand_unop (mode
,
8923 optab_for_tree_code (NEGATE_EXPR
, type
,
8927 return REDUCE_BIT_FIELD (temp
);
8930 op0
= expand_expr (treeop0
, subtarget
,
8931 VOIDmode
, EXPAND_NORMAL
);
8932 if (modifier
== EXPAND_STACK_PARM
)
8935 /* ABS_EXPR is not valid for complex arguments. */
8936 gcc_assert (GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
8937 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
);
8939 /* Unsigned abs is simply the operand. Testing here means we don't
8940 risk generating incorrect code below. */
8941 if (TYPE_UNSIGNED (type
))
8944 return expand_abs (mode
, op0
, target
, unsignedp
,
8945 safe_from_p (target
, treeop0
, 1));
8949 target
= original_target
;
8951 || modifier
== EXPAND_STACK_PARM
8952 || (MEM_P (target
) && MEM_VOLATILE_P (target
))
8953 || GET_MODE (target
) != mode
8955 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
8956 target
= gen_reg_rtx (mode
);
8957 expand_operands (treeop0
, treeop1
,
8958 target
, &op0
, &op1
, EXPAND_NORMAL
);
8960 /* First try to do it with a special MIN or MAX instruction.
8961 If that does not win, use a conditional jump to select the proper
8963 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
8964 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
8969 /* For vector MIN <x, y>, expand it a VEC_COND_EXPR <x <= y, x, y>
8970 and similarly for MAX <x, y>. */
8971 if (VECTOR_TYPE_P (type
))
8973 tree t0
= make_tree (type
, op0
);
8974 tree t1
= make_tree (type
, op1
);
8975 tree comparison
= build2 (code
== MIN_EXPR
? LE_EXPR
: GE_EXPR
,
8977 return expand_vec_cond_expr (type
, comparison
, t0
, t1
,
8981 /* At this point, a MEM target is no longer useful; we will get better
8984 if (! REG_P (target
))
8985 target
= gen_reg_rtx (mode
);
8987 /* If op1 was placed in target, swap op0 and op1. */
8988 if (target
!= op0
&& target
== op1
)
8989 std::swap (op0
, op1
);
8991 /* We generate better code and avoid problems with op1 mentioning
8992 target by forcing op1 into a pseudo if it isn't a constant. */
8993 if (! CONSTANT_P (op1
))
8994 op1
= force_reg (mode
, op1
);
8997 enum rtx_code comparison_code
;
9000 if (code
== MAX_EXPR
)
9001 comparison_code
= unsignedp
? GEU
: GE
;
9003 comparison_code
= unsignedp
? LEU
: LE
;
9005 /* Canonicalize to comparisons against 0. */
9006 if (op1
== const1_rtx
)
9008 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
9009 or (a != 0 ? a : 1) for unsigned.
9010 For MIN we are safe converting (a <= 1 ? a : 1)
9011 into (a <= 0 ? a : 1) */
9012 cmpop1
= const0_rtx
;
9013 if (code
== MAX_EXPR
)
9014 comparison_code
= unsignedp
? NE
: GT
;
9016 if (op1
== constm1_rtx
&& !unsignedp
)
9018 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
9019 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
9020 cmpop1
= const0_rtx
;
9021 if (code
== MIN_EXPR
)
9022 comparison_code
= LT
;
9025 /* Use a conditional move if possible. */
9026 if (can_conditionally_move_p (mode
))
9032 /* Try to emit the conditional move. */
9033 insn
= emit_conditional_move (target
, comparison_code
,
9038 /* If we could do the conditional move, emit the sequence,
9042 rtx_insn
*seq
= get_insns ();
9048 /* Otherwise discard the sequence and fall back to code with
9054 emit_move_insn (target
, op0
);
9056 lab
= gen_label_rtx ();
9057 do_compare_rtx_and_jump (target
, cmpop1
, comparison_code
,
9058 unsignedp
, mode
, NULL_RTX
, NULL
, lab
,
9059 profile_probability::uninitialized ());
9061 emit_move_insn (target
, op1
);
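/* As an illustration of the canonicalization above: for signed A,
   MAX_EXPR <a, 1> is tested as "a > 0" rather than "a >= 1", and
   MIN_EXPR <a, -1> as "a < 0" rather than "a <= -1", since
   comparisons against zero are the cheapest form on most targets.  */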
9066 op0
= expand_expr (treeop0
, subtarget
,
9067 VOIDmode
, EXPAND_NORMAL
);
9068 if (modifier
== EXPAND_STACK_PARM
)
9070 /* In case we have to reduce the result to bitfield precision
9071 for unsigned bitfield expand this as XOR with a proper constant
9073 if (reduce_bit_field
&& TYPE_UNSIGNED (type
))
9075 wide_int mask
= wi::mask (TYPE_PRECISION (type
),
9076 false, GET_MODE_PRECISION (mode
));
9078 temp
= expand_binop (mode
, xor_optab
, op0
,
9079 immed_wide_int_const (mask
, mode
),
9080 target
, 1, OPTAB_LIB_WIDEN
);
9083 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
9087 /* ??? Can optimize bitwise operations with one arg constant.
9088 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
9089 and (a bitwise1 b) bitwise2 b (etc)
9090 but that is probably not worth while. */
      gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
		  || type_has_mode_precision_p (type));
      /* FALLTHRU */

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      {
	/* If this is a fixed-point operation, then we cannot use the code
	   below because "expand_shift" doesn't support sat/no-sat fixed-point
	   shifts.  */
	if (ALL_FIXED_POINT_MODE_P (mode))
	  goto binop;

	if (! safe_from_p (subtarget, treeop1, 1))
	  subtarget = 0;
	if (modifier == EXPAND_STACK_PARM)
	  target = 0;
	op0 = expand_expr (treeop0, subtarget,
			   VOIDmode, EXPAND_NORMAL);

	/* Left shift optimization when shifting across the word_size
	   boundary.

	   If mode == GET_MODE_WIDER_MODE (word_mode), then normally
	   there is no native instruction for this wide-mode left
	   shift.  Consider the following scenario:

	       Type A = (Type) B << C

	       |<	   T	      >|
	       | dest_high |  dest_low |

			 [ B ]

	   If the shift amount C causes part of B to be shifted across
	   the word-size boundary, i.e. part of B is shifted into the
	   high half of the destination register while part remains in
	   the low half, then GCC uses the following left shift expand
	   logic:

	   1. Initialize dest_low to B.
	   2. Initialize every bit of dest_high to the sign bit of B.
	   3. Logical left shift dest_low by C bits to finalize dest_low.
	      The value of dest_low before this shift is kept in a temp D.
	   4. Logical left shift dest_high by C.
	   5. Logical right shift D by (word_size - C).
	   6. Or the results of 4 and 5 to finalize dest_high.

	   However, by checking gimple statements, if operand B is
	   coming from a signed extension, then we can simplify the
	   above expand logic into:

	      1. dest_high = src_low >> (word_size - C).
	      2. dest_low = src_low << C.

	   One arithmetic right shift accomplishes everything steps
	   2, 4, 5 and 6 did, reducing the steps needed from 6 to 2.

	   The case is similar for zero extension, except that we
	   initialize dest_high to zero rather than copies of the sign
	   bit from B.  Furthermore, we need to use a logical right shift
	   in this case.

	   The choice of sign-extension versus zero-extension is
	   determined entirely by whether or not B is signed and is
	   independent of the current setting of unsignedp.  */

	temp = NULL_RTX;
	if (code == LSHIFT_EXPR
	    && target
	    && REG_P (target)
	    && mode == GET_MODE_WIDER_MODE (word_mode)
	    && GET_MODE_SIZE (mode) == 2 * GET_MODE_SIZE (word_mode)
	    && TREE_CONSTANT (treeop1)
	    && TREE_CODE (treeop0) == SSA_NAME)
	  {
	    gimple *def = SSA_NAME_DEF_STMT (treeop0);
	    if (is_gimple_assign (def)
		&& gimple_assign_rhs_code (def) == NOP_EXPR)
	      {
		machine_mode rmode = TYPE_MODE
		  (TREE_TYPE (gimple_assign_rhs1 (def)));

		if (GET_MODE_SIZE (rmode) < GET_MODE_SIZE (mode)
		    && TREE_INT_CST_LOW (treeop1) < GET_MODE_BITSIZE (word_mode)
		    && ((TREE_INT_CST_LOW (treeop1) + GET_MODE_BITSIZE (rmode))
			>= GET_MODE_BITSIZE (word_mode)))
		  {
		    rtx_insn *seq, *seq_old;
		    unsigned int high_off = subreg_highpart_offset (word_mode,
								    mode);
		    bool extend_unsigned
		      = TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (def)));
		    rtx low = lowpart_subreg (word_mode, op0, mode);
		    rtx dest_low = lowpart_subreg (word_mode, target, mode);
		    rtx dest_high = simplify_gen_subreg (word_mode, target,
							 mode, high_off);
		    HOST_WIDE_INT ramount = (BITS_PER_WORD
					     - TREE_INT_CST_LOW (treeop1));
		    tree rshift = build_int_cst (TREE_TYPE (treeop1), ramount);

		    start_sequence ();
		    /* dest_high = src_low >> (word_size - C).  */
		    temp = expand_variable_shift (RSHIFT_EXPR, word_mode, low,
						  rshift, dest_high,
						  extend_unsigned);
		    if (temp != dest_high)
		      emit_move_insn (dest_high, temp);

		    /* dest_low = src_low << C.  */
		    temp = expand_variable_shift (LSHIFT_EXPR, word_mode, low,
						  treeop1, dest_low, unsignedp);
		    if (temp != dest_low)
		      emit_move_insn (dest_low, temp);

		    seq = get_insns ();
		    end_sequence ();
		    temp = target;

		    if (have_insn_for (ASHIFT, mode))
		      {
			bool speed_p = optimize_insn_for_speed_p ();
			start_sequence ();
			rtx ret_old = expand_variable_shift (code, mode, op0,
							     treeop1, target,
							     unsignedp);

			seq_old = get_insns ();
			end_sequence ();
			if (seq_cost (seq, speed_p)
			    >= seq_cost (seq_old, speed_p))
			  {
			    seq = seq_old;
			    temp = ret_old;
			  }
		      }
		    emit_insn (seq);
		  }
	      }
	  }

	if (temp == NULL_RTX)
	  temp = expand_variable_shift (code, mode, op0, treeop1, target,
					unsignedp);
	if (code == LSHIFT_EXPR)
	  temp = REDUCE_BIT_FIELD (temp);
	return temp;
      }
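/* As an illustration of the double-word shortcut above, with 64-bit
   words: for A = (__int128) b << 40 where B is sign-extended from a
   32-bit value, only

       dest_high = src_low >> 24;    (arithmetic shift; 24 == 64 - 40)
       dest_low  = src_low << 40;

   is emitted, because every bit shifted into the high word is a copy
   of B's sign bit anyway.  */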
      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      {
	temp = do_store_flag (ops,
			      modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			      tmode != VOIDmode ? tmode : mode);
	if (temp)
	  return temp;

	/* Use a compare and a jump for BLKmode comparisons, or for function
	   type comparisons if have_canonicalize_funcptr_for_compare.  */

	if ((target == 0
	     || modifier == EXPAND_STACK_PARM
	     || ! safe_from_p (target, treeop0, 1)
	     || ! safe_from_p (target, treeop1, 1)
	     /* Make sure we don't have a hard reg (such as function's return
		value) live across basic blocks, if not optimizing.  */
	     || (!optimize && REG_P (target)
		 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	emit_move_insn (target, const0_rtx);

	rtx_code_label *lab1 = gen_label_rtx ();
	jumpifnot_1 (code, treeop0, treeop1, lab1,
		     profile_probability::uninitialized ());

	if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
	  emit_move_insn (target, constm1_rtx);
	else
	  emit_move_insn (target, const1_rtx);

	emit_label (lab1);
	return target;
      }
    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
      op0 = expand_normal (treeop0);
      op1 = expand_normal (treeop1);

      if (!target)
	target = gen_reg_rtx (TYPE_MODE (type));
      else
	/* If target overlaps with op1, then either we need to force
	   op1 into a pseudo (if target also overlaps with op0),
	   or write the complex parts in reverse order.  */
	switch (GET_CODE (target))
	  {
	  case CONCAT:
	    if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
	      {
		if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
		  {
		  complex_expr_force_op1:
		    temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
		    emit_move_insn (temp, op1);
		    op1 = temp;
		    break;
		  }
	      complex_expr_swap_order:
		/* Move the imaginary (op1) and real (op0) parts to their
		   location.  */
		write_complex_part (target, op1, true);
		write_complex_part (target, op0, false);

		return target;
	      }
	    break;
	  case MEM:
	    temp = adjust_address_nv (target,
				      GET_MODE_INNER (GET_MODE (target)), 0);
	    if (reg_overlap_mentioned_p (temp, op1))
	      {
		machine_mode imode = GET_MODE_INNER (GET_MODE (target));
		temp = adjust_address_nv (target, imode,
					  GET_MODE_SIZE (imode));
		if (reg_overlap_mentioned_p (temp, op0))
		  goto complex_expr_force_op1;
		goto complex_expr_swap_order;
	      }
	    break;
	  default:
	    if (reg_overlap_mentioned_p (target, op1))
	      {
		if (reg_overlap_mentioned_p (target, op0))
		  goto complex_expr_force_op1;
		goto complex_expr_swap_order;
	      }
	    break;
	  }

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);

      return target;
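/* As an illustration of the overlap handling above: for
   "c = COMPLEX_EXPR <IMAGPART_EXPR <c>, REALPART_EXPR <c>>", writing
   the real part first would clobber an operand still needed for the
   imaginary part, so the parts are written in reverse order, or OP1
   is first copied into a fresh pseudo when both parts overlap.  */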
    case WIDEN_SUM_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
					    target, unsignedp);
	return target;
      }

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
      {
	op0 = expand_normal (treeop0);
	this_optab = optab_for_tree_code (code, type, optab_default);
	machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));

	struct expand_operand ops[2];
	enum insn_code icode = optab_handler (this_optab, vec_mode);

	create_output_operand (&ops[0], target, mode);
	create_input_operand (&ops[1], op0, vec_mode);
	expand_insn (icode, 2, ops);
	target = ops[0].value;
	if (GET_MODE (target) != mode)
	  return gen_lowpart (tmode, target);
	return target;
      }
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
      {
	op0 = expand_normal (treeop0);
	temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
					  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      {
	op0 = expand_normal (treeop0);
	/* The signedness is determined from the input operand.  */
	temp = expand_widen_pattern_expr
	  (ops, op0, NULL_RTX, NULL_RTX,
	   target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));

	gcc_assert (temp);
	return temp;
      }

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
      target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
					  target, unsignedp);
      gcc_assert (target);
      return target;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      mode = TYPE_MODE (TREE_TYPE (treeop0));
      goto binop;

    case VEC_PERM_EXPR:
      expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
      op2 = expand_normal (treeop2);

      /* Careful here: if the target doesn't support integral vector modes,
	 a constant selection vector could wind up smooshed into a normal
	 integral constant.  */
      if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
	{
	  tree sel_type = TREE_TYPE (treeop2);
	  machine_mode vmode
	    = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
			       TYPE_VECTOR_SUBPARTS (sel_type));
	  gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
	  op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
	  gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
	}
      else
	gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);

      temp = expand_vec_perm (mode, op0, op1, op2, target);
      gcc_assert (temp);
      return temp;
    case DOT_PROD_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;
	tree oprnd2 = treeop2;
	rtx op2;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	target = expand_widen_pattern_expr (ops, op0, op1, op2,
					    target, unsignedp);
	return target;
      }

    case SAD_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;
	tree oprnd2 = treeop2;
	rtx op2;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	target = expand_widen_pattern_expr (ops, op0, op1, op2,
					    target, unsignedp);
	return target;
      }

    case REALIGN_LOAD_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;
	tree oprnd2 = treeop2;
	rtx op2;

	this_optab = optab_for_tree_code (code, type, optab_default);
	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
				  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case COND_EXPR:
      {
	/* A COND_EXPR with its type being VOID_TYPE represents a
	   conditional jump and is handled in
	   expand_gimple_cond_expr.  */
	gcc_assert (!VOID_TYPE_P (type));

	/* Note that COND_EXPRs whose type is a structure or union
	   are required to be constructed to contain assignments of
	   a temporary variable, so that we can evaluate them here
	   for side effect only.  If type is void, we must do likewise.  */

	gcc_assert (!TREE_ADDRESSABLE (type)
		    && !ignore
		    && TREE_TYPE (treeop1) != void_type_node
		    && TREE_TYPE (treeop2) != void_type_node);

	temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
	if (temp)
	  return temp;

	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (modifier != EXPAND_STACK_PARM
	    && original_target
	    && safe_from_p (original_target, treeop0, 1)
	    && GET_MODE (original_target) == mode
	    && !MEM_P (original_target))
	  temp = original_target;
	else
	  temp = assign_temp (type, 0, 1);

	do_pending_stack_adjust ();
	NO_DEFER_POP;
	rtx_code_label *lab0 = gen_label_rtx ();
	rtx_code_label *lab1 = gen_label_rtx ();
	jumpifnot (treeop0, lab0,
		   profile_probability::uninitialized ());
	store_expr (treeop1, temp,
		    modifier == EXPAND_STACK_PARM,
		    false, false);

	emit_jump_insn (targetm.gen_jump (lab1));
	emit_barrier ();
	emit_label (lab0);
	store_expr (treeop2, temp,
		    modifier == EXPAND_STACK_PARM,
		    false, false);

	emit_label (lab1);
	OK_DEFER_POP;
	return temp;
      }

    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
      return target;
    case BIT_INSERT_EXPR:
      {
	unsigned bitpos = tree_to_uhwi (treeop2);
	unsigned bitsize;
	if (INTEGRAL_TYPE_P (TREE_TYPE (treeop1)))
	  bitsize = TYPE_PRECISION (TREE_TYPE (treeop1));
	else
	  bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (treeop1)));
	rtx op0 = expand_normal (treeop0);
	rtx op1 = expand_normal (treeop1);
	rtx dst = gen_reg_rtx (mode);
	emit_move_insn (dst, op0);
	store_bit_field (dst, bitsize, bitpos, 0, 0,
			 TYPE_MODE (TREE_TYPE (treeop1)), op1, false);
	return dst;
      }

    default:
      gcc_unreachable ();
    }
  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (treeop0, treeop1,
		   subtarget, &op0, &op1, EXPAND_NORMAL);
 binop2:
  this_optab = optab_for_tree_code (code, type, optab_default);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  /* Bitwise operations do not need bitfield reduction as we expect their
     operands being properly truncated.  */
  if (code == BIT_XOR_EXPR
      || code == BIT_AND_EXPR
      || code == BIT_IOR_EXPR)
    return temp;
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
/* Return TRUE if expression STMT is suitable for replacement.
   Never consider memory loads as replaceable, because those don't ever lead
   into constant expressions.  */

static bool
stmt_is_replaceable_p (gimple *stmt)
{
  if (ssa_is_replaceable_p (stmt))
    {
      /* Don't move around loads.  */
      if (!gimple_assign_single_p (stmt)
	  || is_gimple_val (gimple_assign_rhs1 (stmt)))
	return true;
    }

  return false;
}

rtx
expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl,
		    bool inner_reference_p)
{
  rtx op0, op1, temp, decl_rtl;
  tree type;
  int unsignedp;
  machine_mode mode, dmode;
  enum tree_code code = TREE_CODE (exp);
  rtx subtarget, original_target;
  int ignore;
  tree context;
  bool reduce_bit_field;
  location_t loc = EXPR_LOCATION (exp);
  struct separate_ops ops;
  tree treeop0, treeop1, treeop2;
  tree ssa_name = NULL_TREE;
  gimple *g;

  type = TREE_TYPE (exp);
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = treeop1 = treeop2 = NULL_TREE;
  if (!VL_EXP_CLASS_P (exp))
    switch (TREE_CODE_LENGTH (code))
      {
      default:
      case 3: treeop2 = TREE_OPERAND (exp, 2); /* FALLTHRU */
      case 2: treeop1 = TREE_OPERAND (exp, 1); /* FALLTHRU */
      case 1: treeop0 = TREE_OPERAND (exp, 0); /* FALLTHRU */
      case 0: break;
      }
  ops.code = code;
  ops.type = type;
  ops.op0 = treeop0;
  ops.op1 = treeop1;
  ops.op2 = treeop2;
  ops.location = loc;

  ignore = (target == const0_rtx
	    || ((CONVERT_EXPR_CODE_P (code)
		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));
  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (!ignore
		      && INTEGRAL_TYPE_P (type)
		      && !type_has_mode_precision_p (type));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (MEM_P (temp))
	    copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == tcc_unary
	  || code == BIT_FIELD_REF
	  || code == COMPONENT_REF
	  || code == INDIRECT_REF)
	return expand_expr (treeop0, const0_rtx, VOIDmode,
			    modifier);

      else if (TREE_CODE_CLASS (code) == tcc_binary
	       || TREE_CODE_CLASS (code) == tcc_comparison
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);

	temp = label_rtx (exp);
	temp = gen_rtx_LABEL_REF (Pmode, temp);

	if (function != current_function_decl
	    && function != 0)
	  LABEL_REF_NONLOCAL_P (temp) = 1;

	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
	return temp;
      }

    case SSA_NAME:
      /* ??? ivopts calls expander, without any preparation from
	 out-of-ssa.  So fake instructions as if this was an access to the
	 base variable.  This unnecessarily allocates a pseudo, see how we can
	 reuse it, if partition base vars have it set already.  */
      if (!currently_expanding_to_rtl)
	{
	  tree var = SSA_NAME_VAR (exp);
	  if (var && DECL_RTL_SET_P (var))
	    return DECL_RTL (var);
	  return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
			      LAST_VIRTUAL_REGISTER + 1);
	}

      g = get_gimple_for_ssa_name (exp);
      /* For EXPAND_INITIALIZER try harder to get something simpler.  */
      if (g == NULL
	  && modifier == EXPAND_INITIALIZER
	  && !SSA_NAME_IS_DEFAULT_DEF (exp)
	  && (optimize || !SSA_NAME_VAR (exp)
	      || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
	  && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
	g = SSA_NAME_DEF_STMT (exp);
      if (g)
	{
	  rtx r;
	  location_t saved_loc = curr_insn_location ();
	  location_t loc = gimple_location (g);
	  if (loc != UNKNOWN_LOCATION)
	    set_curr_insn_location (loc);
	  ops.code = gimple_assign_rhs_code (g);
	  switch (get_gimple_rhs_class (ops.code))
	    {
	    case GIMPLE_TERNARY_RHS:
	      ops.op2 = gimple_assign_rhs3 (g);
	      /* FALLTHRU */
	    case GIMPLE_BINARY_RHS:
	      ops.op1 = gimple_assign_rhs2 (g);

	      /* Try to expand conditional compare.  */
	      if (targetm.gen_ccmp_first)
		{
		  gcc_checking_assert (targetm.gen_ccmp_next != NULL);
		  r = expand_ccmp_expr (g, mode);
		  if (r)
		    break;
		}
	      /* FALLTHRU */
	    case GIMPLE_UNARY_RHS:
	      ops.op0 = gimple_assign_rhs1 (g);
	      ops.type = TREE_TYPE (gimple_assign_lhs (g));
	      ops.location = loc;
	      r = expand_expr_real_2 (&ops, target, tmode, modifier);
	      break;
	    case GIMPLE_SINGLE_RHS:
	      {
		r = expand_expr_real (gimple_assign_rhs1 (g), target,
				      tmode, modifier, alt_rtl,
				      inner_reference_p);
		break;
	      }
	    default:
	      gcc_unreachable ();
	    }
	  set_curr_insn_location (saved_loc);
	  if (REG_P (r) && !REG_EXPR (r))
	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
	  return r;
	}

      ssa_name = exp;
      decl_rtl = get_rtx_for_ssa_name (ssa_name);
      exp = SSA_NAME_VAR (ssa_name);
      goto expand_decl_rtl;
    case PARM_DECL:
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	layout_decl (exp, 0);

      /* FALLTHRU */

    case FUNCTION_DECL:
    case RESULT_DECL:
      decl_rtl = DECL_RTL (exp);
    expand_decl_rtl:
      gcc_assert (decl_rtl);

      /* DECL_MODE might change when TYPE_MODE depends on attribute target
	 settings for VECTOR_TYPE_P that might switch for the function.  */
      if (currently_expanding_to_rtl
	  && code == VAR_DECL && MEM_P (decl_rtl)
	  && VECTOR_TYPE_P (type) && exp && DECL_MODE (exp) != mode)
	decl_rtl = change_address (decl_rtl, TYPE_MODE (type), 0);
      else
	decl_rtl = copy_rtx (decl_rtl);

      /* Record writes to register variables.  */
      if (modifier == EXPAND_WRITE
	  && REG_P (decl_rtl)
	  && HARD_REGISTER_P (decl_rtl))
	add_to_hard_reg_set (&crtl->asm_clobbers,
			     GET_MODE (decl_rtl), REGNO (decl_rtl));

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (exp)
	TREE_USED (exp) = 1;

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Variables inherited from containing functions should have
	 been lowered by this point.  */
      if (exp)
	context = decl_function_context (exp);
      gcc_assert (!exp
		  || SCOPE_FILE_SCOPE_P (context)
		  || context == current_function_decl
		  || TREE_STATIC (exp)
		  || DECL_EXTERNAL (exp)
		  /* ??? C++ creates functions that are not TREE_STATIC.  */
		  || TREE_CODE (exp) == FUNCTION_DECL);

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 ??? We aren't parsing while expanding anymore.  */

      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
	temp = validize_mem (decl_rtl);

      /* If DECL_RTL is memory, we are in the normal case and the
	 address is not valid, get the address into a register.  */

      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
	{
	  if (alt_rtl)
	    *alt_rtl = decl_rtl;
	  decl_rtl = use_anchored_address (decl_rtl);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_SUM
	      && !memory_address_addr_space_p (exp ? DECL_MODE (exp)
					       : GET_MODE (decl_rtl),
					       XEXP (decl_rtl, 0),
					       MEM_ADDR_SPACE (decl_rtl)))
	    temp = replace_equiv_address (decl_rtl,
					  copy_rtx (XEXP (decl_rtl, 0)));
	}

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (exp && MEM_P (temp) && REG_P (XEXP (temp, 0)))
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}
      if (exp)
	dmode = DECL_MODE (exp);
      else
	dmode = TYPE_MODE (TREE_TYPE (ssa_name));

      /* If the mode of DECL_RTL does not match that of the decl,
	 there are two cases: we are dealing with a BLKmode value
	 that is returned in a register, or we are dealing with
	 a promoted value.  In the latter case, return a SUBREG
	 of the wanted mode, but mark it so that we know that it
	 was already extended.  */
      if (REG_P (decl_rtl)
	  && dmode != BLKmode
	  && GET_MODE (decl_rtl) != dmode)
	{
	  machine_mode pmode;

	  /* Get the signedness to be used for this variable.  Ensure we get
	     the same mode we got when the variable was declared.  */
	  if (code != SSA_NAME)
	    pmode = promote_decl_mode (exp, &unsignedp);
	  else if ((g = SSA_NAME_DEF_STMT (ssa_name))
		   && gimple_code (g) == GIMPLE_CALL
		   && !gimple_call_internal_p (g))
	    pmode = promote_function_mode (type, mode, &unsignedp,
					   gimple_call_fntype (g),
					   2);
	  else
	    pmode = promote_ssa_mode (ssa_name, &unsignedp);
	  gcc_assert (GET_MODE (decl_rtl) == pmode);

	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_SET (temp, unsignedp);
	  return temp;
	}

      return decl_rtl;

    case INTEGER_CST:
      /* Given that TYPE_PRECISION (type) is not always equal to
	 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
	 the former to the latter according to the signedness of the
	 type.  */
      temp = immed_wide_int_const (wi::to_wide
				   (exp,
				    GET_MODE_PRECISION (TYPE_MODE (type))),
				   TYPE_MODE (type));
      return temp;

    case VECTOR_CST:
      {
	tree tmp = NULL_TREE;
	if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
	  return const_vector_from_tree (exp);
	if (GET_MODE_CLASS (mode) == MODE_INT)
	  {
	    if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
	      return const_scalar_mask_from_tree (exp);
	    else
	      {
		tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
		if (type_for_mode)
		  tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR,
					type_for_mode, exp);
	      }
	  }
	if (!tmp)
	  {
	    vec<constructor_elt, va_gc> *v;
	    unsigned i;
	    vec_alloc (v, VECTOR_CST_NELTS (exp));
	    for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
	      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
	    tmp = build_constructor (type, v);
	  }
	return expand_expr (tmp, ignore ? const0_rtx : target,
			    tmode, modifier);
      }
== EXPAND_WRITE
)
9994 /* Writing into CONST_DECL is always invalid, but handle it
9996 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (exp
));
9997 machine_mode address_mode
= targetm
.addr_space
.address_mode (as
);
9998 op0
= expand_expr_addr_expr_1 (exp
, NULL_RTX
, address_mode
,
10000 op0
= memory_address_addr_space (mode
, op0
, as
);
10001 temp
= gen_rtx_MEM (mode
, op0
);
10002 set_mem_addr_space (temp
, as
);
10005 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, modifier
);
10008 /* If optimized, generate immediate CONST_DOUBLE
10009 which will be turned into memory by reload if necessary.
10011 We used to force a register so that loop.c could see it. But
10012 this does not allow gen_* patterns to perform optimizations with
10013 the constants. It also produces two insns in cases like "x = 1.0;".
10014 On most machines, floating-point constants are not permitted in
10015 many insns, so we'd end up copying it to a register in any case.
10017 Now, we do the copying in expand_binop, if appropriate. */
10018 return const_double_from_real_value (TREE_REAL_CST (exp
),
10019 TYPE_MODE (TREE_TYPE (exp
)));
10022 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp
),
10023 TYPE_MODE (TREE_TYPE (exp
)));
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
	{
	  machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	  rtx rtarg, itarg;

	  rtarg = XEXP (original_target, 0);
	  itarg = XEXP (original_target, 1);

	  /* Move the real and imaginary parts separately.  */
	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);

	  if (op0 != rtarg)
	    emit_move_insn (rtarg, op0);
	  if (op1 != itarg)
	    emit_move_insn (itarg, op1);

	  return original_target;
	}

      /* FALLTHRU */

    case STRING_CST:
      temp = expand_expr_constant (exp, 1, modifier);

      /* temp contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
					    MEM_ADDR_SPACE (temp)))
	return replace_equiv_address (temp,
				      copy_rtx (XEXP (temp, 0)));
      return temp;

    case SAVE_EXPR:
      {
    case SAVE_EXPR:
      {
	tree val = treeop0;
	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
				      inner_reference_p);

	if (!SAVE_EXPR_RESOLVED_P (exp))
	  {
	    /* We can indeed still hit this case, typically via builtin
	       expanders calling save_expr immediately before expanding
	       something.  Assume this means that we only have to deal
	       with non-BLKmode values.  */
	    gcc_assert (GET_MODE (ret) != BLKmode);

	    val = build_decl (curr_insn_location (),
			      VAR_DECL, NULL, TREE_TYPE (exp));
	    DECL_ARTIFICIAL (val) = 1;
	    DECL_IGNORED_P (val) = 1;
	    treeop0 = val;
	    TREE_OPERAND (exp, 0) = treeop0;
	    SAVE_EXPR_RESOLVED_P (exp) = 1;

	    if (!CONSTANT_P (ret))
	      ret = copy_to_reg (ret);
	    SET_DECL_RTL (val, ret);
	  }

	return ret;
      }
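      /* Illustrative sketch (not part of GCC): SAVE_EXPR semantics in plain
	 C -- the operand is computed once and the cached value is reused on
	 every later expansion (names invented for the example):

	   if (!resolved)
	     {
	       cached = compute_operand ();
	       resolved = 1;
	     }
	   return cached;  */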
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  unsigned HOST_WIDE_INT idx;
	  tree value;

	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	    expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);

	  return const0_rtx;
	}

      return expand_constructor (exp, target, modifier, false);
    case TARGET_MEM_REF:
      {
	addr_space_t as
	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	enum insn_code icode;
	unsigned int align;

	op0 = addr_for_mem_ref (exp, as, true);
	op0 = memory_address_addr_space (mode, op0, as);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);
	set_mem_addr_space (temp, as);
	align = get_object_alignment (exp);
	if (modifier != EXPAND_WRITE
	    && modifier != EXPAND_MEMORY
	    && mode != BLKmode
	    && align < GET_MODE_ALIGNMENT (mode)
	    /* If the target does not have special handling for unaligned
	       loads of mode then it can use regular moves for them.  */
	    && ((icode = optab_handler (movmisalign_optab, mode))
		!= CODE_FOR_nothing))
	  {
	    struct expand_operand ops[2];

	    /* We've already validated the memory, and we're creating a
	       new pseudo destination.  The predicates really can't fail,
	       nor can the generator.  */
	    create_output_operand (&ops[0], NULL_RTX, mode);
	    create_fixed_operand (&ops[1], temp);
	    expand_insn (icode, 2, ops);
	    temp = ops[0].value;
	  }
	return temp;
      }
    case MEM_REF:
      {
	const bool reverse = REF_REVERSE_STORAGE_ORDER (exp);
	addr_space_t as
	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	machine_mode address_mode;
	tree base = TREE_OPERAND (exp, 0);
	gimple *def_stmt;
	enum insn_code icode;
	unsigned align;
	/* Handle expansion of non-aliased memory with non-BLKmode.  That
	   might end up in a register.  */
	if (mem_ref_refers_to_non_mem_p (exp))
	  {
	    HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
	    base = TREE_OPERAND (base, 0);
	    if (offset == 0
		&& !reverse
		&& tree_fits_uhwi_p (TYPE_SIZE (type))
		&& (GET_MODE_BITSIZE (DECL_MODE (base))
		    == tree_to_uhwi (TYPE_SIZE (type))))
	      return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
				  target, tmode, modifier);
	    if (TYPE_MODE (type) == BLKmode)
	      {
		temp = assign_stack_temp (DECL_MODE (base),
					  GET_MODE_SIZE (DECL_MODE (base)));
		store_expr (base, temp, 0, false, false);
		temp = adjust_address (temp, BLKmode, offset);
		set_mem_size (temp, int_size_in_bytes (type));
		return temp;
	      }
	    exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
			  bitsize_int (offset * BITS_PER_UNIT));
	    REF_REVERSE_STORAGE_ORDER (exp) = reverse;
	    return expand_expr (exp, target, tmode, modifier);
	  }
	address_mode = targetm.addr_space.address_mode (as);
	base = TREE_OPERAND (exp, 0);
	if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
	  {
	    tree mask = gimple_assign_rhs2 (def_stmt);
	    base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
			   gimple_assign_rhs1 (def_stmt), mask);
	    TREE_OPERAND (exp, 0) = base;
	  }
	align = get_object_alignment (exp);
	op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address_addr_space (mode, op0, as);
	if (!integer_zerop (TREE_OPERAND (exp, 1)))
	  {
	    rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
	    op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
	    op0 = memory_address_addr_space (mode, op0, as);
	  }
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);
	set_mem_addr_space (temp, as);
	if (TREE_THIS_VOLATILE (exp))
	  MEM_VOLATILE_P (temp) = 1;
	if (modifier != EXPAND_WRITE
	    && modifier != EXPAND_MEMORY
	    && !inner_reference_p
	    && mode != BLKmode
	    && align < GET_MODE_ALIGNMENT (mode))
	  {
	    if ((icode = optab_handler (movmisalign_optab, mode))
		!= CODE_FOR_nothing)
	      {
		struct expand_operand ops[2];

		/* We've already validated the memory, and we're creating a
		   new pseudo destination.  The predicates really can't fail,
		   nor can the generator.  */
		create_output_operand (&ops[0], NULL_RTX, mode);
		create_fixed_operand (&ops[1], temp);
		expand_insn (icode, 2, ops);
		temp = ops[0].value;
	      }
	    else if (SLOW_UNALIGNED_ACCESS (mode, align))
	      temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
					0, TYPE_UNSIGNED (TREE_TYPE (exp)),
					(modifier == EXPAND_STACK_PARM
					 ? NULL_RTX : target),
					mode, mode, false, alt_rtl);
	  }
	if (reverse
	    && modifier != EXPAND_MEMORY
	    && modifier != EXPAND_WRITE)
	  temp = flip_storage_order (mode, temp);
	return temp;
      }
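      /* Illustrative sketch (not part of GCC): when no movmisalign pattern
	 exists and unaligned access is slow, the load above amounts to what
	 memcpy expresses portably in C:

	   unsigned int v;
	   memcpy (&v, p, sizeof v);   -- compiler picks a safe sequence
	   return v;  */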
    case ARRAY_REF:

      {
	tree array = treeop0;
	tree index = treeop1;
	tree init;

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY)
	  {
	    tree t = fold_read_from_constant_string (exp);

	    if (t)
	      return expand_expr (t, target, tmode, modifier);
	  }

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY
	    && TREE_CODE (array) == CONSTRUCTOR
	    && ! TREE_SIDE_EFFECTS (array)
	    && TREE_CODE (index) == INTEGER_CST)
	  {
	    unsigned HOST_WIDE_INT ix;
	    tree field, value;

	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
				      field, value)
	      if (tree_int_cst_equal (field, index))
		{
		  if (!TREE_SIDE_EFFECTS (value))
		    return expand_expr (fold (value), target, tmode, modifier);
		  break;
		}
	  }

	else if (optimize >= 1
		 && modifier != EXPAND_CONST_ADDRESS
		 && modifier != EXPAND_INITIALIZER
		 && modifier != EXPAND_MEMORY
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (index) == INTEGER_CST
		 && (VAR_P (array) || TREE_CODE (array) == CONST_DECL)
		 && (init = ctor_for_folding (array)) != error_mark_node)
	  {
	    if (init == NULL_TREE)
	      {
		tree value = build_zero_cst (type);
		if (TREE_CODE (value) == CONSTRUCTOR)
		  {
		    /* If VALUE is a CONSTRUCTOR, this optimization is only
		       useful if this doesn't store the CONSTRUCTOR into
		       memory.  If it does, it is more efficient to just
		       load the data from the array directly.  */
		    rtx ret = expand_constructor (value, target,
						  modifier, true);
		    if (ret == NULL_RTX)
		      value = NULL_TREE;
		  }

		if (value)
		  return expand_expr (value, target, tmode, modifier);
	      }
	    else if (TREE_CODE (init) == CONSTRUCTOR)
	      {
		unsigned HOST_WIDE_INT ix;
		tree field, value;

		FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
					  field, value)
		  if (tree_int_cst_equal (field, index))
		    {
		      if (TREE_SIDE_EFFECTS (value))
			break;

		      if (TREE_CODE (value) == CONSTRUCTOR)
			{
			  /* If VALUE is a CONSTRUCTOR, this
			     optimization is only useful if
			     this doesn't store the CONSTRUCTOR
			     into memory.  If it does, it is more
			     efficient to just load the data from
			     the array directly.  */
			  rtx ret = expand_constructor (value, target,
							modifier, true);
			  if (ret == NULL_RTX)
			    break;
			}

		      return
			expand_expr (fold (value), target, tmode, modifier);
		    }
	      }
	    else if (TREE_CODE (init) == STRING_CST)
	      {
		tree low_bound = array_ref_low_bound (exp);
		tree index1 = fold_convert_loc (loc, sizetype, treeop1);

		/* Optimize the special case of a zero lower bound.

		   We convert the lower bound to sizetype to avoid problems
		   with constant folding.  E.g. suppose the lower bound is
		   1 and its mode is QI.  Without the conversion
		      (ARRAY + (INDEX - (unsigned char)1))
		   becomes
		      (ARRAY + (-(unsigned char)1) + INDEX)
		   which becomes
		      (ARRAY + 255 + INDEX).  Oops!  */
		if (!integer_zerop (low_bound))
		  index1 = size_diffop_loc (loc, index1,
					    fold_convert_loc (loc, sizetype,
							      low_bound));

		if (tree_fits_uhwi_p (index1)
		    && compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
		  {
		    tree type = TREE_TYPE (TREE_TYPE (init));
		    machine_mode mode = TYPE_MODE (type);

		    if (GET_MODE_CLASS (mode) == MODE_INT
			&& GET_MODE_SIZE (mode) == 1)
		      return gen_int_mode (TREE_STRING_POINTER (init)
					   [TREE_INT_CST_LOW (index1)],
					   mode);
		  }
	      }
	  }
      }
      goto normal_inner_ref;
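      /* Illustrative sketch (not part of GCC): why the bounds go through
	 sizetype first.  In a narrow unsigned type the negated lower bound
	 wraps instead of going negative:

	   unsigned char lb = 1;
	   unsigned char neg = (unsigned char) -lb;   -- 255, not -1
	   size_t idx = 4;
	   size_t adjusted = idx - (size_t) lb;       -- 3, as intended  */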
    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  */
      if (TREE_CODE (treeop0) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
				    idx, field, value)
	    if (field == treeop1
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (field)
		    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
			&& (GET_MODE_PRECISION (DECL_MODE (field))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		if (DECL_BIT_FIELD (field)
		    && modifier == EXPAND_STACK_PARM)
		  target = 0;
		op0 = expand_expr (value, target, tmode, modifier);
		if (DECL_BIT_FIELD (field))
		  {
		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
		    machine_mode imode = TYPE_MODE (TREE_TYPE (field));

		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
		      {
			op1 = gen_int_mode ((HOST_WIDE_INT_1 << bitsize) - 1,
					    imode);
			op0 = expand_and (imode, op0, op1, target);
		      }
		    else
		      {
			int count = GET_MODE_PRECISION (imode) - bitsize;

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
      goto normal_inner_ref;
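      /* Illustrative sketch (not part of GCC): the bitfield adjustment
	 above, in plain C for a BITSIZE-bit field fetched from a
	 constructor value:

	   val &= (1ul << bitsize) - 1;       -- unsigned field: mask high bits

	   count = prec - bitsize;            -- signed field: shift pair;
	   val = (val << count) >> count;        >> sign-extends (arithmetic)  */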
    case BIT_FIELD_REF:
    case ARRAY_RANGE_REF:
    normal_inner_ref:
      {
	machine_mode mode1, mode2;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int reversep, volatilep = 0, must_force_mem;
	tree tem
	  = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &reversep, &volatilep);
	rtx orig_op0, memloc;
	bool clear_mem_expr = false;

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	gcc_assert (tem != exp);

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to have to do.  This occurs in unchecked conversion in Ada.  */
	orig_op0 = op0
	  = expand_expr_real (tem,
			      (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			       && COMPLETE_TYPE_P (TREE_TYPE (tem))
			       && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
				   != INTEGER_CST)
			       && modifier != EXPAND_STACK_PARM
			       ? target : NULL_RTX),
			      VOIDmode,
			      modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
			      NULL, true);

	/* If the field has a mode, we want to access it in the
	   field's mode, not the computed mode.
	   If a MEM has VOIDmode (external with incomplete type),
	   use BLKmode for it instead.  */
	if (MEM_P (op0))
	  {
	    if (mode1 != VOIDmode)
	      op0 = adjust_address (op0, mode1, 0);
	    else if (GET_MODE (op0) == VOIDmode)
	      op0 = adjust_address (op0, BLKmode, 0);
	  }

	mode2
	  = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);

	/* If we have either an offset, a BLKmode result, or a reference
	   outside the underlying object, we must force it to memory.
	   Such a case can occur in Ada if we have unchecked conversion
	   of an expression from a scalar type to an aggregate type or
	   for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
	   passed a partially uninitialized object or a view-conversion
	   to a larger size.  */
	must_force_mem = (offset
			  || mode1 == BLKmode
			  || bitpos + bitsize > GET_MODE_BITSIZE (mode2));

	/* Handle CONCAT first.  */
	if (GET_CODE (op0) == CONCAT && !must_force_mem)
	  {
	    if (bitpos == 0
		&& bitsize == GET_MODE_BITSIZE (GET_MODE (op0))
		&& COMPLEX_MODE_P (mode1)
		&& COMPLEX_MODE_P (GET_MODE (op0))
		&& (GET_MODE_PRECISION (GET_MODE_INNER (mode1))
		    == GET_MODE_PRECISION (GET_MODE_INNER (GET_MODE (op0)))))
	      {
		if (reversep)
		  op0 = flip_storage_order (GET_MODE (op0), op0);
		if (mode1 != GET_MODE (op0))
		  {
		    rtx parts[2];
		    for (int i = 0; i < 2; i++)
		      {
			rtx op = read_complex_part (op0, i != 0);
			if (GET_CODE (op) == SUBREG)
			  op = force_reg (GET_MODE (op), op);
			rtx temp = gen_lowpart_common (GET_MODE_INNER (mode1),
						       op);
			if (temp)
			  op = temp;
			else
			  {
			    if (!REG_P (op) && !MEM_P (op))
			      op = force_reg (GET_MODE (op), op);
			    op = gen_lowpart (GET_MODE_INNER (mode1), op);
			  }
			parts[i] = op;
		      }
		    op0 = gen_rtx_CONCAT (mode1, parts[0], parts[1]);
		  }
		return op0;
	      }
	    if (bitpos == 0
		&& bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
		&& bitsize != 0)
	      {
		op0 = XEXP (op0, 0);
		mode2 = GET_MODE (op0);
	      }
	    else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
		     && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
		     && bitpos != 0
		     && bitsize != 0)
	      {
		op0 = XEXP (op0, 1);
		bitpos = 0;
		mode2 = GET_MODE (op0);
	      }
	    else
	      /* Otherwise force into memory.  */
	      must_force_mem = 1;
	  }

	/* If this is a constant, put it in a register if it is a legitimate
	   constant and we don't need a memory reference.  */
	if (CONSTANT_P (op0)
	    && mode2 != BLKmode
	    && targetm.legitimate_constant_p (mode2, op0)
	    && !must_force_mem)
	  op0 = force_reg (mode2, op0);

	/* Otherwise, if this is a constant, try to force it to the constant
	   pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
	   is a legitimate constant.  */
	else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
	  op0 = validize_mem (memloc);

	/* Otherwise, if this is a constant or the object is not in memory
	   and need be, put it there.  */
	else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
	  {
	    memloc = assign_temp (TREE_TYPE (tem), 1, 1);
	    emit_move_insn (memloc, op0);
	    op0 = memloc;
	    clear_mem_expr = true;
	  }

	if (offset)
	  {
	    machine_mode address_mode;
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
					  EXPAND_SUM);

	    gcc_assert (MEM_P (op0));

	    address_mode = get_address_mode (op0);
	    if (GET_MODE (offset_rtx) != address_mode)
	      {
		/* We cannot be sure that the RTL in offset_rtx is valid outside
		   of a memory address context, so force it into a register
		   before attempting to convert it to the desired mode.  */
		offset_rtx = force_operand (offset_rtx, NULL_RTX);
		offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
	      }

	    /* See the comment in expand_assignment for the rationale.  */
	    if (mode1 != VOIDmode
		&& bitpos != 0
		&& bitsize > 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos = 0;
	      }

	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }

	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
	   record its alignment as BIGGEST_ALIGNMENT.  */
	if (MEM_P (op0) && bitpos == 0 && offset != 0
	    && is_aligning_offset (offset, tem))
	  set_mem_align (op0, BIGGEST_ALIGNMENT);

	/* Don't forget about volatility even if this is a bitfield.  */
	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || REG_P (op0) || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER
		&& modifier != EXPAND_MEMORY)
	    /* If the bitfield is volatile and the bitsize
	       is narrower than the access size of the bitfield,
	       we need to extract bitfields from the access.  */
	    || (volatilep && TREE_CODE (exp) == COMPONENT_REF
		&& DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
		&& mode1 != BLKmode
		&& bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& (((MEM_P (op0)
		      ? MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
			|| (bitpos % GET_MODE_ALIGNMENT (mode1) != 0)
		      : TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
			|| (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
		     && modifier != EXPAND_MEMORY
		     && ((modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_INITIALIZER)
			 ? STRICT_ALIGNMENT
			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
		    || (bitpos % BITS_PER_UNIT != 0)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
	    || (bitsize >= 0
		&& TYPE_SIZE (TREE_TYPE (exp))
		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
	  {
	    machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && MEM_P (op0)
		      && MEM_P (target)
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		if (target == 0)
		  target = assign_temp (type, 1, 1);

		/* ??? Unlike the similar test a few lines below, this one is
		   very likely obsolete.  */
		if (bitsize == 0)
		  return target;

		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		gcc_assert (MEM_P (op0)
			    && (!target || MEM_P (target))
			    && !(bitpos % BITS_PER_UNIT));

		emit_block_move (target,
				 adjust_address (op0, VOIDmode,
						 bitpos / BITS_PER_UNIT),
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

		return target;
	      }

	    /* If we have nothing to extract, the result will be 0 for targets
	       with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise.  Always
	       return 0 for the sake of consistency, as reading a zero-sized
	       bitfield is valid in Ada and the value is fully specified.  */
	    if (bitsize == 0)
	      return const0_rtx;

	    op0 = validize_mem (op0);

	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	    /* If the result has a record type and the extraction is done in
	       an integral mode, then the field may be not aligned on a byte
	       boundary; in this case, if it has reverse storage order, it
	       needs to be extracted as a scalar field with reverse storage
	       order and put back into memory order afterwards.  */
	    if (TREE_CODE (type) == RECORD_TYPE
		&& GET_MODE_CLASS (ext_mode) == MODE_INT)
	      reversep = TYPE_REVERSE_STORAGE_ORDER (type);

	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
				     (modifier == EXPAND_STACK_PARM
				      ? NULL_RTX : target),
				     ext_mode, ext_mode, reversep, alt_rtl);

	    /* If the result has a record type and the mode of OP0 is an
	       integral mode then, if BITSIZE is narrower than this mode
	       and this is for big-endian data, we must put the field
	       into the high-order bits.  And we must also put it back
	       into memory order if it has been previously reversed.  */
	    if (TREE_CODE (type) == RECORD_TYPE
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
	      {
		HOST_WIDE_INT size = GET_MODE_BITSIZE (GET_MODE (op0));

		if (bitsize < size
		    && reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
		  op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				      size - bitsize, op0, 1);

		if (reversep)
		  op0 = flip_storage_order (GET_MODE (op0), op0);
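		/* Illustrative sketch (not part of GCC): the big-endian
		   repositioning above.  A BITSIZE-bit field sitting in the
		   low bits of a SIZE-bit register belongs in the high-order
		   end on big-endian targets:

		     field <<= size - bitsize;  */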
	      }

	    /* If the result type is BLKmode, store the data into a temporary
	       of the appropriate type, but with the mode corresponding to the
	       mode for the data we have (op0's mode).  */
	    if (mode == BLKmode)
	      {
		rtx new_rtx
		  = assign_stack_temp_for_type (ext_mode,
						GET_MODE_BITSIZE (ext_mode),
						type);
		emit_move_insn (new_rtx, op0);
		op0 = copy_rtx (new_rtx);
		PUT_MODE (op0, BLKmode);
	      }

	    return op0;
	  }

	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	if (op0 == orig_op0)
	  op0 = copy_rtx (op0);

	/* Don't set memory attributes if the base expression is
	   SSA_NAME that got expanded as a MEM.  In that case, we should
	   just honor its original memory attributes.  */
	if (TREE_CODE (tem) != SSA_NAME || !MEM_P (orig_op0))
	  set_mem_attributes (op0, exp, 0);

	if (REG_P (XEXP (op0, 0)))
	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	/* If op0 is a temporary because the original expressions was forced
	   to memory, clear MEM_EXPR so that the original expression cannot
	   be marked as addressable through MEM_EXPR of the temporary.  */
	if (clear_mem_expr)
	  set_mem_expr (op0, NULL_TREE);

	MEM_VOLATILE_P (op0) |= volatilep;

	if (reversep
	    && modifier != EXPAND_MEMORY
	    && modifier != EXPAND_WRITE)
	  op0 = flip_storage_order (mode1, op0);

	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }
    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);

    case CALL_EXPR:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      if (CALL_EXPR_VA_ARG_PACK (exp))
	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      {
	tree fndecl = get_callee_fndecl (exp), attr;

	if (fndecl
	    && (attr = lookup_attribute ("error",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  error ("%Kcall to %qs declared with attribute error: %s",
		 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
		 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
	if (fndecl
	    && (attr = lookup_attribute ("warning",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %qs declared with attribute warning: %s",
		      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
		      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

	/* Check for a built-in function.  */
	if (fndecl && DECL_BUILT_IN (fndecl))
	  {
	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
	    if (CALL_WITH_BOUNDS_P (exp))
	      return expand_builtin_with_bounds (exp, target, subtarget,
						 tmode, ignore);
	    else
	      return expand_builtin (exp, target, subtarget, tmode, ignore);
	  }
      }
      return expand_call (exp, target, ignore);
    case VIEW_CONVERT_EXPR:
      op0 = NULL_RTX;

      /* If we are converting to BLKmode, try to avoid an intermediate
	 temporary by fetching an inner memory reference.  */
      if (mode == BLKmode
	  && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
	  && handled_component_p (treeop0))
	{
	  machine_mode mode1;
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  int unsignedp, reversep, volatilep = 0;
	  tree tem
	    = get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1,
				   &unsignedp, &reversep, &volatilep);
	  rtx orig_op0;

	  /* ??? We should work harder and deal with non-zero offsets.  */
	  if (!offset
	      && (bitpos % BITS_PER_UNIT) == 0
	      && !reversep
	      && bitsize >= 0
	      && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
	    {
	      /* See the normal_inner_ref case for the rationale.  */
	      orig_op0
		= expand_expr_real (tem,
				    (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
				     && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
					 != INTEGER_CST)
				     && modifier != EXPAND_STACK_PARM
				     ? target : NULL_RTX),
				    VOIDmode,
				    modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
				    NULL, true);

	      if (MEM_P (orig_op0))
		{
		  op0 = orig_op0;

		  /* Get a reference to just this component.  */
		  if (modifier == EXPAND_CONST_ADDRESS
		      || modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
		  else
		    op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);

		  if (op0 == orig_op0)
		    op0 = copy_rtx (op0);

		  set_mem_attributes (op0, treeop0, 0);
		  if (REG_P (XEXP (op0, 0)))
		    mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

		  MEM_VOLATILE_P (op0) |= volatilep;
		}
	    }
	}

      if (!op0)
	op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
				NULL, inner_reference_p);

      /* If the input and output modes are both the same, we are done.  */
      if (mode == GET_MODE (op0))
	;
      /* If neither mode is BLKmode, and both modes are the same size
	 then we can use gen_lowpart.  */
      else if (mode != BLKmode && GET_MODE (op0) != BLKmode
	       && (GET_MODE_PRECISION (mode)
		   == GET_MODE_PRECISION (GET_MODE (op0)))
	       && !COMPLEX_MODE_P (GET_MODE (op0)))
	{
	  if (GET_CODE (op0) == SUBREG)
	    op0 = force_reg (GET_MODE (op0), op0);
	  temp = gen_lowpart_common (mode, op0);
	  if (temp)
	    op0 = temp;
	  else
	    {
	      if (!REG_P (op0) && !MEM_P (op0))
		op0 = force_reg (GET_MODE (op0), op0);
	      op0 = gen_lowpart (mode, op0);
	    }
	}
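      /* Illustrative sketch (not part of GCC): between same-size scalar
	 modes, VIEW_CONVERT_EXPR is a pure bit reinterpretation, which
	 memcpy expresses in C:

	   float f;
	   unsigned int u;
	   memcpy (&u, &f, sizeof u);   -- same size, bits unchanged  */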
      /* If both types are integral, convert from one mode to the other.  */
      else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
	op0 = convert_modes (mode, GET_MODE (op0), op0,
			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      /* If the output type is a bit-field type, do an extraction.  */
      else if (reduce_bit_field)
	return extract_bit_field (op0, TYPE_PRECISION (type), 0,
				  TYPE_UNSIGNED (type), NULL_RTX,
				  mode, mode, false, NULL);
      /* As a last resort, spill op0 to memory, and reload it in a
	 different mode.  */
      else if (!MEM_P (op0))
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (treeop0);

	  gcc_assert (!TREE_ADDRESSABLE (exp));

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}

      /* If OP0 is (now) a MEM, we need to deal with alignment issues.  If the
	 output type is such that the operand is known to be aligned, indicate
	 that it is.  Otherwise, we need only be concerned about alignment for
	 non-BLKmode results.  */
      if (MEM_P (op0))
	{
	  enum insn_code icode;

	  if (modifier != EXPAND_WRITE
	      && modifier != EXPAND_MEMORY
	      && !inner_reference_p
	      && mode != BLKmode
	      && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
	    {
	      /* If the target does have special handling for unaligned
		 loads of mode then use them.  */
	      if ((icode = optab_handler (movmisalign_optab, mode))
		  != CODE_FOR_nothing)
		{
		  rtx reg;

		  op0 = adjust_address (op0, mode, 0);
		  /* We've already validated the memory, and we're creating a
		     new pseudo destination.  The predicates really can't
		     fail.  */
		  reg = gen_reg_rtx (mode);

		  /* Nor can the insn generator.  */
		  rtx_insn *insn = GEN_FCN (icode) (reg, op0);
		  emit_insn (insn);
		  return reg;
		}
	      else if (STRICT_ALIGNMENT)
		{
		  tree inner_type = TREE_TYPE (treeop0);
		  HOST_WIDE_INT temp_size
		    = MAX (int_size_in_bytes (inner_type),
			   (HOST_WIDE_INT) GET_MODE_SIZE (mode));
		  rtx new_rtx
		    = assign_stack_temp_for_type (mode, temp_size, type);
		  rtx new_with_op0_mode
		    = adjust_address (new_rtx, GET_MODE (op0), 0);

		  gcc_assert (!TREE_ADDRESSABLE (exp));

		  if (GET_MODE (op0) == BLKmode)
		    emit_block_move (new_with_op0_mode, op0,
				     GEN_INT (GET_MODE_SIZE (mode)),
				     (modifier == EXPAND_STACK_PARM
				      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
		  else
		    emit_move_insn (new_with_op0_mode, op0);

		  op0 = new_rtx;
		}
	    }

	  op0 = adjust_address (op0, mode, 0);
	}

      return op0;
    case MODIFY_EXPR:
      {
	tree lhs = treeop0;
	tree rhs = treeop1;
	gcc_assert (ignore);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx_code_label *label = gen_label_rtx ();
	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
	    do_jump (TREE_OPERAND (rhs, 1),
		     value ? label : 0,
		     value ? 0 : label,
		     profile_probability::uninitialized ());
	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
			       false);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	expand_assignment (lhs, rhs, false);
	return const0_rtx;
      }
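      /* Illustrative sketch (not part of GCC): the test-and-assign lowering
	 above, in plain C for single-bit fields:

	   if (rhs_bit)       -- lhs |= rhs_bit
	     lhs = 1;

	   if (!rhs_bit)      -- lhs &= rhs_bit
	     lhs = 0;  */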
    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);

    case REALPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, true);
    case RETURN_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case SWITCH_EXPR:
    case ASM_EXPR:
      /* Expanded in cfgexpand.c.  */
      gcc_unreachable ();

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONDITIONAL_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case COMPOUND_LITERAL_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (treeop0, original_target, tmode,
			       modifier, alt_rtl, inner_reference_p);

    default:
      return expand_expr_real_2 (&ops, target, tmode, modifier);
    }
}
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type. */
  if (CONST_INT_P (exp))
    {
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      machine_mode mode = GET_MODE (exp);
      rtx mask = immed_wide_int_const
	(wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
      return expand_and (mode, exp, mask, target);
    }
  else
    {
      int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
			  exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
			   exp, count, target, 0);
    }
}
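
/* Illustrative sketch (not part of GCC): the signed reduction performed
   by reduce_to_bit_field_precision, written for a 32-bit value and PREC
   significant bits.  Relies on >> of a negative value being an arithmetic
   shift, as it is on GCC targets.  */

static int
reduce_signed_example (int x, int prec)
{
  int count = 32 - prec;
  /* Shift the field to the top, then arithmetic-shift back down,
     replicating the sign bit through the upper bits.  */
  return (int) ((unsigned int) x << count) >> count;
}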
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (const_tree offset, const_tree exp)
{
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || !pow2p_hwi (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1))
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
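
/* Illustrative sketch (not part of GCC): the offset shape recognized by
   is_aligning_offset.  Adding this offset to ADDR rounds it up to the
   next multiple of ALIGN, a power of two.  */

static unsigned long
aligning_offset_example (unsigned long addr, unsigned long align)
{
  /* -addr & (align - 1): the distance from ADDR up to an ALIGN boundary.  */
  return (0UL - addr) & (align - 1);
}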
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset, lower_bound;

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST && !VAR_P (array))
	    return 0;

	  /* Check if the array has a nonzero lower bound.  */
	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
	  if (!integer_zerop (lower_bound))
	    {
	      /* If the offset and base aren't both constants, return 0.  */
	      if (TREE_CODE (lower_bound) != INTEGER_CST)
		return 0;
	      if (TREE_CODE (offset) != INTEGER_CST)
		return 0;
	      /* Adjust offset by the lower bound.  */
	      offset = size_diffop (fold_convert (sizetype, offset),
				    fold_convert (sizetype, lower_bound));
	    }
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != ADDR_EXPR)
	    return 0;
	  array = TREE_OPERAND (array, 0);
	  if (TREE_CODE (array) != STRING_CST && !VAR_P (array))
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (VAR_P (array) || TREE_CODE (array) == CONST_DECL)
    {
      int length;
      tree init = ctor_for_folding (array);

      /* Variables initialized to string literals can be handled too.  */
      if (init == error_mark_node
	  || !init
	  || TREE_CODE (init) != STRING_CST)
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (init)) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If variable is bigger than the string literal, OFFSET must be constant
	 and inside of the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! tree_fits_uhwi_p (offset)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return array;
    }

  return 0;
}
/* Generate code to calculate OPS, and exploded expression
   using a store-flag instruction and return an rtx for the result.
   OPS reflects a comparison.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (sepops ops, rtx target, machine_mode mode)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  machine_mode operand_mode;
  int unsignedp;
  rtx op0, op1;
  rtx subtarget = target;
  location_t loc = ops->location;

  arg0 = ops->op0;
  arg1 = ops->op1;

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
  if (targetm.have_canonicalize_funcptr_for_compare ()
      && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
		  == FUNCTION_TYPE))))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* For vector typed comparisons emit code to generate the desired
     all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
     expander for this.  */
  if (TREE_CODE (ops->type) == VECTOR_TYPE)
    {
      tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
      if (VECTOR_BOOLEAN_TYPE_P (ops->type)
	  && expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type, ops->code))
	return expand_vec_cmp_expr (ops->type, ifexp, target);
      else
	{
	  tree if_true = constant_boolean_node (true, ops->type);
	  tree if_false = constant_boolean_node (false, ops->type);
	  return expand_vec_cond_expr (ops->type, ifexp, if_true,
				       if_false, target);
	}
    }

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.   These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (ops->code)
    {
    case EQ_EXPR:
      code = EQ;
      break;

    case NE_EXPR:
      code = NE;
      break;

    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      std::swap (arg0, arg1);
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && integer_zerop (arg1)
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
    {
      gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
      if (srcstmt
	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
	{
	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
				       gimple_assign_rhs1 (srcstmt),
				       gimple_assign_rhs2 (srcstmt));
	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
	  if (temp)
	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
	}
    }
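  /* Illustrative sketch (not part of GCC): the single-bit lowering that
     fold_single_bit_test produces, in plain C for bit B of X:

       (x >> b) & 1         -- x & (1 << b) != 0
       ((x >> b) & 1) ^ 1   -- x & (1 << b) == 0  */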
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
				operand_mode, unsignedp,
				(TYPE_PRECISION (ops->type) == 1
				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   DEFAULT_PROBABILITY is the probability of jumping to the default
   label.  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label, rtx default_label, rtx fallback_label,
	    profile_probability default_probability)
{
  struct expand_operand ops[5];
  machine_mode index_mode = SImode;
  rtx op1, op2, index;

  if (! targetm.have_casesi ())
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
	emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
				 omode, 1, default_label,
				 default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_type = lang_hooks.types.type_for_mode (index_mode, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
				  ? default_label
				  : fallback_label));
  expand_jump_insn (targetm.code_for_casesi, 5, ops);
  return 1;
}
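
/* Illustrative sketch (not part of GCC): the index narrowing done in
   try_casesi, in plain C.  The bounds check happens in the wide type;
   only values already known to be in range are truncated.  */

static int
narrow_index_example (long long index, long long minval,
		      unsigned long long range)
{
  unsigned long long rel = (unsigned long long) (index - minval);
  if (rel > range)
    return -1;			/* out of range: go to the default label */
  return (int) rel;		/* now safe to truncate to SImode */
}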
/* Attempt to generate a tablejump instruction; same concept.  */
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */

static void
do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
	      rtx default_label, profile_probability default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			     default_label, default_probability);
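  /* Illustrative sketch (not part of GCC): the single unsigned compare
     above.  With the lower bound already subtracted, one GTU test rejects
     values on both sides of the original [lo, hi] range:

       in_range = (unsigned long) (x - lo) <= (unsigned long) (hi - lo);  */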
  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = simplify_gen_binary (MULT, Pmode, index,
			       gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
					     Pmode));
  index = simplify_gen_binary (PLUS, Pmode, index,
			       gen_rtx_LABEL_REF (Pmode, table_label));

#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (targetm.gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label,
	       profile_probability default_probability)
{
  rtx index;

  if (! targetm.have_tablejump ())
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label, default_probability);
  return 1;
}
/* Return a CONST_VECTOR rtx representing vector mask for
   a VECTOR_CST of booleans.  */
static rtx
const_vector_mask_from_tree (tree exp)
{
  rtvec v;
  unsigned i, units;
  tree elt;
  machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));
  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      gcc_assert (TREE_CODE (elt) == INTEGER_CST);
      if (integer_zerop (elt))
	RTVEC_ELT (v, i) = CONST0_RTX (inner);
      else if (integer_onep (elt)
	       || integer_minus_onep (elt))
	RTVEC_ELT (v, i) = CONSTM1_RTX (inner);
      else
	gcc_unreachable ();
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}
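
/* Illustrative sketch (not part of GCC): what const_vector_mask_from_tree
   builds, lane by lane -- each boolean becomes an all-zeros or all-ones
   element of the vector's inner mode.  */

static void
vector_mask_example (const int *bools, signed char *lanes, int n)
{
  int i;
  for (i = 0; i < n; i++)
    lanes[i] = bools[i] ? -1 : 0;	/* CONSTM1_RTX vs. CONST0_RTX */
}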
/* Return a CONST_INT rtx representing vector mask for
   a VECTOR_CST of booleans.  */
static rtx
const_scalar_mask_from_tree (tree exp)
{
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  wide_int res = wi::zero (GET_MODE_PRECISION (mode));
  tree elt;
  unsigned i;

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);
      gcc_assert (TREE_CODE (elt) == INTEGER_CST);
      if (integer_all_onesp (elt))
	res = wi::set_bit (res, i);
      else
	gcc_assert (integer_zerop (elt));
    }

  return immed_wide_int_const (res, mode);
}
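
/* Illustrative sketch (not part of GCC): the bit packing done by
   const_scalar_mask_from_tree -- one mask bit per boolean element,
   for up to 32 elements here.  */

static unsigned int
scalar_mask_example (const int *elts, int n)
{
  unsigned int res = 0;
  int i;
  for (i = 0; i < n; i++)
    if (elts[i])
      res |= 1u << i;		/* wi::set_bit (res, i) */
  return res;
}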
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  unsigned i, units;
  tree elt;
  machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
    return const_vector_mask_from_tree (exp);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = const_double_from_real_value (TREE_REAL_CST (elt),
							 inner);
      else if (TREE_CODE (elt) == FIXED_CST)
	RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}
/* Build a decl for a personality function given a language prefix.  */

tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
				   long_long_unsigned_type_node,
				   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}
/* Returns a tree for the size of EXP in bytes.  */

static tree
tree_expr_size (const_tree exp)
{
  if (DECL_P (exp)
      && DECL_SIZE_UNIT (exp) != 0)
    return DECL_SIZE_UNIT (exp);
  else
    return size_in_bytes (TREE_TYPE (exp));
}
/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}
/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

static HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    size = tree_expr_size (exp);

  if (size == 0 || !tree_fits_shwi_p (size))
    return -1;

  return tree_to_shwi (size);
}