/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "insn-codes.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "common/common-target.h"
#include "diagnostic.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "target-globals.h"
#include "tree-ssa-address.h"
#include "cfgexpand.h"
#include "tree-chkp.h"
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
/* This structure is used by move_by_pieces to describe the move to
   be generated.  */
struct move_by_pieces_d
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;

/* This structure is used by store_by_pieces to describe the clear to
   be generated.  */
struct store_by_pieces_d
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
static void move_by_pieces_1 (insn_gen_fn, machine_mode,
                              struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
                                        unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
                                        unsigned HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (insn_gen_fn, machine_mode,
                               struct store_by_pieces_d *);
static tree clear_storage_libcall_fn (int);
static rtx_insn *compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, machine_mode,
                                     tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
                        unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
                        machine_mode, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, machine_mode);
static void emit_single_push_insn (machine_mode, rtx, tree);
static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
static rtx const_vector_from_tree (tree);
static tree tree_expr_size (const_tree);
static HOST_WIDE_INT int_expr_size (tree);
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

init_expr_target (void)
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (machine_mode) ((int) mode + 1))
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */
      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
            if (! HARD_REGNO_MODE_OK (regno, mode))

            set_mode_and_regno (reg, mode, regno);

            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

  mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
      machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)

          PUT_MODE (mem, srcmode);

          if (insn_operand_matches (ic, 1, mem))
            float_extend_from_mem[mode][srcmode] = true;
/* This is run at the start of compiling a function.  */

  memset (&crtl->expr, 0, sizeof (crtl->expr));
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

convert_move (rtx to, rtx from, int unsignedp)
  machine_mode to_mode = GET_MODE (to);
  machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TOs otherwise.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_PRECISION (to_mode))
      && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
      emit_move_insn (to, from);

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
      emit_move_insn (to, from);

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      gcc_assert ((GET_MODE_PRECISION (from_mode)
                   != GET_MODE_PRECISION (to_mode))
                  || (DECIMAL_FLOAT_MODE_P (from_mode)
                      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
        /* Conversion between decimal float and binary float, same size.  */
        tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))

      /* Try converting directly if the insn is supported.  */
      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
      insns = get_insns ();

      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
  /* Handle pointer conversion.  */                     /* SPEE 900220.  */
  /* If the target has a converter from FROM_MODE to TO_MODE, use it.  */

  if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))

  if (convert_optab_handler (ctab, to_mode, from_mode)
      emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),

  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
      machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
      machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
      convert_optab ctab = unsignedp ? zext_optab : sext_optab;
      enum insn_code icode;

      icode = convert_optab_handler (ctab, full_mode, from_mode);
      gcc_assert (icode != CODE_FOR_nothing);

      if (to_mode == full_mode)
          emit_unop_insn (icode, to, from, UNKNOWN);

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (icode, new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
              ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
      /* If we widen from_mode to to_mode and they are in the same class,
         we won't saturate the result.
         Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
          && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
        expand_fixed_convert (to, from, 0, 0);
        expand_fixed_convert (to, from, 0, 1);

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
      && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
      machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
      /* Next, try converting via full word.  */
      else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
          rtx word_to = gen_reg_rtx (word_mode);
          if (reg_overlap_mentioned_p (to, from))
            from = force_reg (from_mode, from);
          convert_move (word_to, from, unsignedp);
          emit_unop_insn (code, to, word_to, equiv_code);

      /* No special multiword conversion insn; do it by hand.  */
      /* Since we will turn this into a no conflict block, we must ensure
         the source does not overlap the target so force it into an isolated
         register if necessary.  Likewise for any MEM input, since the
         conversion sequence might require several references to it and we
         must ensure we're getting the same value every time.  */
      if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
        fill_value = const0_rtx;
        fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
                                            LT, lowfrom, const0_rtx,
                                            lowpart_mode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);

      insns = get_insns ();
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
      && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
            && ! MEM_VOLATILE_P (from)
            && direct_load[(int) to_mode]
            && ! mode_dependent_address_p (XEXP (from, 0),
                                           MEM_ADDR_SPACE (from)))
           || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
            && ! MEM_VOLATILE_P (from)
            && direct_load[(int) to_mode]
            && ! mode_dependent_address_p (XEXP (from, 0),
                                           MEM_ADDR_SPACE (from)))
           || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));

  /* Handle extension.  */
  if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          emit_unop_insn (code, to, from, equiv_code);

          machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                      && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
                 && (can_extend_p (intermediate, from_mode, unsignedp)
                     != CODE_FOR_nothing))
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = (GET_MODE_PRECISION (to_mode)
                          - GET_MODE_PRECISION (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
          emit_move_insn (to, tmp);

  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
                             from_mode) != CODE_FOR_nothing)
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);

  /* Mode combination is not recognized.  */
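/* Editorial example (not part of the original expr.c): a minimal sketch of
   how convert_move is typically called.  The pseudo-registers and modes are
   hypothetical; only the calling convention is illustrated (UNSIGNEDP
   selects ZERO_EXTEND vs. SIGN_EXTEND, as dispatched on above).  */
#if 0
static void
example_widen_unsigned (rtx si_src)
{
  rtx di_dst = gen_reg_rtx (DImode);    /* hypothetical wider destination */
  convert_move (di_dst, si_src, 1);     /* 1 => unsigned, i.e. zero-extend */
}
#endif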
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

convert_to_mode (machine_mode mode, rtx x, int unsignedp)
  return convert_modes (mode, VOIDmode, x, unsignedp);
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
    x = gen_lowpart (mode, SUBREG_REG (x));

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
      /* If the caller did not tell us the old mode, then there is not
         much to do with respect to canonicalization.  We have to
         assume that all the bits are significant.  */
      if (GET_MODE_CLASS (oldmode) != MODE_INT)
        oldmode = MAX_MODE_INT;
      wide_int w = wide_int::from (std::make_pair (x, oldmode),
                                   GET_MODE_PRECISION (mode),
                                   unsignedp ? UNSIGNED : SIGNED);
      return immed_wide_int_const (w, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_CLASS (oldmode) == MODE_INT
      && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
      && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
              && (!HARD_REGISTER_P (x)
                  || HARD_REGNO_MODE_OK (REGNO (x), mode))
              && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
    return gen_lowpart (mode, x);

  /* Converting from an integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
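/* Editorial example (not part of the original expr.c): convert_to_mode and
   convert_modes are the value-returning counterparts of convert_move.  The
   names below are hypothetical; the call shapes match the definitions above.  */
#if 0
static void
example_narrow (rtx x, rtx *out)
{
  /* OLDMODE supplied explicitly; constants are canonicalized directly.  */
  out[0] = convert_modes (QImode, SImode, x, 1);
  /* Equivalent form that reads the old mode from X itself.  */
  out[1] = convert_to_mode (QImode, x, 1);
}
#endif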
/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */

alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
      tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
      if (align >= GET_MODE_ALIGNMENT (tmode))
        align = GET_MODE_ALIGNMENT (tmode);
      machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > max_pieces
            || SLOW_UNALIGNED_ACCESS (tmode, align))

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
/* Return the widest integer mode no wider than SIZE.  If no such mode
   can be found, return VOIDmode.  */

widest_int_mode_for_size (unsigned int size)
  machine_mode tmode, mode = VOIDmode;

  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
    if (GET_MODE_SIZE (tmode) < size)
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

can_move_by_pieces (unsigned HOST_WIDE_INT len,
  return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
                                                 optimize_insn_for_speed_p ());
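/* Editorial example (not part of the original expr.c): the usual pattern,
   also visible in emit_block_move_hints further below, is to let the target
   hook decide whether a piecewise copy is profitable before emitting one.
   X, Y, LEN and ALIGN are hypothetical.  */
#if 0
  if (CONST_INT_P (len) && can_move_by_pieces (INTVAL (len), align))
    move_by_pieces (x, y, INTVAL (len), align, 0);
#endif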
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
  struct move_by_pieces_d data;
  machine_mode to_addr_mode;
  machine_mode from_addr_mode = get_address_mode (from);
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.from_addr = from_addr;
      to_addr_mode = get_address_mode (to);
      to_addr = XEXP (to, 0);
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
      to_addr_mode = VOIDmode;
      if (STACK_GROWS_DOWNWARD)
  data.to_addr = to_addr;
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);
  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
      /* Find the mode of the largest move...
         MODE might not be used depending on the definitions of the
         USE_* macros below.  */
      machine_mode mode ATTRIBUTE_UNUSED
        = widest_int_mode_for_size (max_size);

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
          data.from_addr = copy_to_mode_reg (from_addr_mode,
                                             plus_constant (from_addr_mode,
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
          data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
          data.to_addr = copy_to_mode_reg (to_addr_mode,
                                           plus_constant (to_addr_mode,
          data.explicit_inc_to = -1;
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
          data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
          data.explicit_inc_to = 1;
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */
  while (max_size > 1 && data.len > 0)
      machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

      gcc_assert (!data.reverse);
          if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
            emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
            data.to_addr = copy_to_mode_reg (to_addr_mode,
                                             plus_constant (to_addr_mode,
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
        to1 = adjust_address (data.to, QImode, data.offset);
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1 && l > 0)
      enum insn_code icode;

      mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
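/* Editorial worked example (not in the original source): with
   MOVE_MAX_PIECES == 4 and alignment high enough for every mode, L == 11
   is covered as 11 = 2*4 + 1*2 + 1*1, i.e. two SImode moves, one HImode
   move and one QImode move, so move_by_pieces_ninsns returns 4.  */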
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
                  struct move_by_pieces_d *data)
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
        to1 = adjust_address (data->to, mode, data->offset);

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_int_mode (-(HOST_WIDE_INT) size,
                                                GET_MODE (data->to_addr))));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  gen_int_mode (-(HOST_WIDE_INT) size,
                                                GET_MODE (data->from_addr))));

        emit_insn ((*genfun) (to1, from1));
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GET_MODE (data->to_addr))));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GET_MODE (data->from_addr))));

      if (! data->reverse)
        data->offset += size;
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.
   MIN_SIZE is the minimal size of block to move.
   MAX_SIZE is the maximal size of block to move; if it cannot be represented
   in unsigned HOST_WIDE_INT, then it is a mask of all ones.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
                       unsigned int expected_align, HOST_WIDE_INT expected_size,
                       unsigned HOST_WIDE_INT min_size,
                       unsigned HOST_WIDE_INT max_size,
                       unsigned HOST_WIDE_INT probable_max_size)
  if (CONST_INT_P (size)
      && INTVAL (size) == 0)

    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;

  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, INTVAL (size));
      set_mem_size (y, INTVAL (size));

  if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
                                       expected_align, expected_size,
                                       min_size, max_size, probable_max_size))
  else if (may_use_call
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
      /* Since x and y are passed to a libcall, mark the corresponding
         tree EXPR as addressable.  */
      tree y_expr = MEM_EXPR (y);
      tree x_expr = MEM_EXPR (x);
        mark_addressable (y_expr);
        mark_addressable (x_expr);
      retval = emit_block_move_via_libcall (x, y, size,
                                            method == BLOCK_OP_TAILCALL);
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
    max = GET_MODE_MASK (GET_MODE (size));
  return emit_block_move_hints (x, y, size, method, 0, -1,
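/* Editorial example (not part of the original expr.c): a typical caller
   copies one BLKmode MEM to another with emit_block_move.  The MEMs and
   byte count are hypothetical.  */
#if 0
  emit_block_move (dst_mem, src_mem, GEN_INT (nbytes), BLOCK_OP_NORMAL);
#endif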
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

block_move_libcall_safe_for_call_parm (void)
#if defined (REG_PARM_STACK_SPACE)
  /* If arguments are pushed on the stack, then they're safe.  */

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
    CUMULATIVE_ARGS args_so_far_v;
    cumulative_args_t args_so_far;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
    args_so_far = pack_cumulative_args (&args_so_far_v);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node; arg = TREE_CHAIN (arg))
        machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = targetm.calls.function_arg (args_so_far, mode,
        if (!tmp || !REG_P (tmp))
        if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
        targetm.calls.function_arg_advance (args_so_far, mode,
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
                            unsigned int expected_align, HOST_WIDE_INT expected_size,
                            unsigned HOST_WIDE_INT min_size,
                            unsigned HOST_WIDE_INT max_size,
                            unsigned HOST_WIDE_INT probable_max_size)
  int save_volatile_ok = volatile_ok;

  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
      if ((unsigned HOST_WIDE_INT) expected_size > probable_max_size)
        expected_size = probable_max_size;
      if ((unsigned HOST_WIDE_INT) expected_size < min_size)
        expected_size = min_size;

  /* Since this is a move insn, we don't care about volatility.  */

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
      enum insn_code code = direct_optab_handler (movmem_optab, mode);

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  Since SIZE is within the Pmode address
             space, we limit MODE to Pmode.  */
          && ((CONST_INT_P (size)
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || max_size <= (GET_MODE_MASK (mode) >> 1)
              || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
          struct expand_operand ops[9];

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */
          nops = insn_data[(int) code].n_generator_args;
          gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

          create_fixed_operand (&ops[0], x);
          create_fixed_operand (&ops[1], y);
          /* The check above guarantees that this size conversion is valid.  */
          create_convert_operand_to (&ops[2], size, mode, true);
          create_integer_operand (&ops[3], align / BITS_PER_UNIT);
              create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
              create_integer_operand (&ops[5], expected_size);
              create_integer_operand (&ops[6], min_size);
              /* If we cannot represent the maximal size,
                 make the parameter NULL.  */
              if ((HOST_WIDE_INT) max_size != -1)
                create_integer_operand (&ops[7], max_size);
                create_fixed_operand (&ops[7], NULL);
              /* If we cannot represent the maximal size,
                 make the parameter NULL.  */
              if ((HOST_WIDE_INT) probable_max_size != -1)
                create_integer_operand (&ops[8], probable_max_size);
                create_fixed_operand (&ops[8], NULL);
          if (maybe_expand_insn (code, nops, ops))
              volatile_ok = save_volatile_ok;

  volatile_ok = save_volatile_ok;
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  machine_mode size_mode;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */
  dst_addr = copy_addr_to_reg (XEXP (dst, 0));
  src_addr = copy_addr_to_reg (XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  */

static GTY(()) tree block_move_fn;

init_block_move_fn (const char *asmspec)
      tree args, fn, attrs, attr_args;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
      attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);

      decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);

    set_user_assembler_name (block_move_fn, asmspec);

emit_block_move_libcall_fn (int for_call)
  static bool emitted_extern;

    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
      emitted_extern = true;
      make_decl_rtl (block_move_fn);

  return block_move_fn;
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
  rtx_code_label *cmp_label, *top_label;
  rtx iter, x_addr, y_addr, tmp;
  machine_mode x_addr_mode = get_address_mode (x);
  machine_mode y_addr_mode = get_address_mode (y);
  machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);

    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label, REG_BR_PROB_BASE * 90 / 100);
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
  if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
  if (HAVE_load_multiple)
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
        delete_insns_since (last);

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_from_reg (int regno, rtx x, int nregs)
  /* See if the machine can do this with a store multiple insn.  */
  if (HAVE_store_multiple)
      rtx_insn *last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
        delete_insns_since (last);

  for (i = 0; i < nregs; i++)
      rtx tem = operand_subword (x, i, 1, BLKmode);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

gen_group_rtx (rtx orig)
  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  for (; i < length; i++)
      machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
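/* Editorial sketch (not in the original file): a register "group" is a
   PARALLEL of (EXPR_LIST reg offset) pairs.  For instance, a 16-byte value
   split across two hypothetical 8-byte hard registers 0 and 1 could be
   described as

     (parallel [(expr_list (reg:DI 0) (const_int 0))
                (expr_list (reg:DI 1) (const_int 8))])

   and gen_group_rtx above clones such a group, replacing each register
   with a fresh pseudo of the same mode.  */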
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
  machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
      machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize);
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
      machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);

      /* Optimize the access just a bit.  */
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
      else if (COMPLEX_MODE_P (mode)
               && GET_MODE (src) == mode
               && bytelen == GET_MODE_SIZE (mode))
        /* Let emit_move_complex do the bulk of the work.  */
      else if (GET_CODE (src) == CONCAT)
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode);

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode);

      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
          int slen = GET_MODE_SIZE (GET_MODE (src));

          mem = assign_stack_temp (GET_MODE (src), slen);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
          HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

              /* TODO: const_wide_int can have sizes other than this...  */
              gcc_assert (2 * len == ssize);
              split_double (src, &first, &second);
      else if (REG_P (src) && GET_MODE (src) == mode)
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,

        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

emit_group_load (rtx dst, rtx src, tree type, int ssize)
  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
        emit_move_insn (d, tmps[i]);
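/* Editorial example (not part of the original expr.c): loading a BLKmode
   source into the registers described by a PARALLEL group.  The names and
   the 16-byte size are hypothetical.  */
#if 0
  emit_group_load (reg_group, blk_mem_src, blk_type, 16);
#endif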
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

emit_group_move (rtx dst, rtx src)
  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
/* Move a group of registers represented by a PARALLEL into pseudos.  */

emit_group_move_into_temps (rtx src)
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));

  for (i = 0; i < XVECLEN (src, 0); i++)
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
  int start, finish, i;
  machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
      machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
        dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
          tmps[i] = gen_reg_rtx (GET_MODE (reg));
          emit_move_insn (tmps[i], reg);

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  if (GET_CODE (dst) == PARALLEL)
      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */
      temp = assign_stack_temp (GET_MODE (dst), ssize);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
      machine_mode outer = GET_MODE (dst);
      HOST_WIDE_INT bytepos;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
        dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
         of the destination mode, use a paradoxical subreg to
         initialize the destination.  */
          inner = GET_MODE (tmps[start]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
              temp = simplify_gen_subreg (outer, tmps[start],
                  emit_move_insn (dst, temp);

      /* If the first element wasn't the low part, try the last.  */
          && start < finish - 1)
          inner = GET_MODE (tmps[finish - 1]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
              temp = simplify_gen_subreg (outer, tmps[finish - 1],
                  emit_move_insn (dst, temp);

      /* Otherwise, simply initialize the result to zero.  */
        emit_move_insn (dst, CONST0_RTX (outer));

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        adj_bytelen = ssize - bytepos;
        adj_bytelen = bytelen;

      if (GET_CODE (dst) == CONCAT)
          if (bytepos + adj_bytelen
              <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
              machine_mode dest_mode = GET_MODE (dest);
              machine_mode tmp_mode = GET_MODE (tmps[i]);

              gcc_assert (bytepos == 0 && XVECLEN (src, 0));

              if (GET_MODE_ALIGNMENT (dest_mode)
                  >= GET_MODE_ALIGNMENT (tmp_mode))
                  dest = assign_stack_temp (dest_mode,
                                            GET_MODE_SIZE (dest_mode));
                  emit_move_insn (adjust_address (dest,
                  dest = assign_stack_temp (tmp_mode,
                                            GET_MODE_SIZE (tmp_mode));
                  emit_move_insn (dest, tmps[i]);
                  dst = adjust_address (dest, dest_mode, bytepos);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],

          /* Make sure not to write past the end of the struct.  */
          store_bit_field (dest,
                           adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                           bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,

      /* Optimize the access just a bit.  */
      else if (MEM_P (dest)
               && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
                   || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
               && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
               && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);

        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         0, 0, mode, tmps[i]);

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
/* Return a form of X that does not use a PARALLEL.  TYPE is the type
   of the value stored in X.  */

maybe_emit_group_store (rtx x, tree type)
  machine_mode mode = TYPE_MODE (type);
  gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
  if (GET_CODE (x) == PARALLEL)
      rtx result = gen_reg_rtx (mode);
      emit_group_store (result, x, type, int_size_in_bytes (type));
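/* Editorial example (not part of the original expr.c): callers use
   maybe_emit_group_store to turn a possibly-PARALLEL value (e.g. a
   multi-register return value) into a single rtx.  RET and RET_TYPE are
   hypothetical.  */
#if 0
  rtx val = maybe_emit_group_store (ret, ret_type);
#endif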
/* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.

   This is used on targets that return BLKmode values in registers.  */

copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
  machine_mode mode = GET_MODE (srcreg);
  machine_mode tmode = GET_MODE (target);
  machine_mode copy_mode;

  /* BLKmode registers created in the back-end shouldn't have survived.  */
  gcc_assert (mode != BLKmode);
2126 /* If the structure doesn't take up a whole number of words, see whether
2127 SRCREG is padded on the left or on the right. If it's on the left,
2128 set PADDING_CORRECTION to the number of bits to skip.
2130 In most ABIs, the structure will be returned at the least end of
2131 the register, which translates to right padding on little-endian
2132 targets and left padding on big-endian targets. The opposite
2133 holds if the structure is returned at the most significant
2134 end of the register. */
2135 if (bytes
% UNITS_PER_WORD
!= 0
2136 && (targetm
.calls
.return_in_msb (type
)
2138 : BYTES_BIG_ENDIAN
))
2140 = (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
) * BITS_PER_UNIT
));
2142 /* We can use a single move if we have an exact mode for the size. */
2143 else if (MEM_P (target
)
2144 && (!SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
))
2145 || MEM_ALIGN (target
) >= GET_MODE_ALIGNMENT (mode
))
2146 && bytes
== GET_MODE_SIZE (mode
))
2148 emit_move_insn (adjust_address (target
, mode
, 0), srcreg
);
2152 /* And if we additionally have the same mode for a register. */
2153 else if (REG_P (target
)
2154 && GET_MODE (target
) == mode
2155 && bytes
== GET_MODE_SIZE (mode
))
2157 emit_move_insn (target
, srcreg
);
2161 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2162 into a new pseudo which is a full word. */
2163 if (GET_MODE_SIZE (mode
) < UNITS_PER_WORD
)
2165 srcreg
= convert_to_mode (word_mode
, srcreg
, TYPE_UNSIGNED (type
));
2169 /* Copy the structure BITSIZE bits at a time. If the target lives in
2170 memory, take care of not reading/writing past its end by selecting
2171 a copy mode suited to BITSIZE. This should always be possible given
2174 If the target lives in register, make sure not to select a copy mode
2175 larger than the mode of the register.
2177 We could probably emit more efficient code for machines which do not use
2178 strict alignment, but it doesn't seem worth the effort at the current
2181 copy_mode
= word_mode
;
2184 machine_mode mem_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
2185 if (mem_mode
!= BLKmode
)
2186 copy_mode
= mem_mode
;
2188 else if (REG_P (target
) && GET_MODE_BITSIZE (tmode
) < BITS_PER_WORD
)
2191 for (bitpos
= 0, xbitpos
= padding_correction
;
2192 bitpos
< bytes
* BITS_PER_UNIT
;
2193 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2195 /* We need a new source operand each time xbitpos is on a
2196 word boundary and when xbitpos == padding_correction
2197 (the first time through). */
2198 if (xbitpos
% BITS_PER_WORD
== 0 || xbitpos
== padding_correction
)
2199 src
= operand_subword_force (srcreg
, xbitpos
/ BITS_PER_WORD
, mode
);
2201 /* We need a new destination operand each time bitpos is on
2203 if (REG_P (target
) && GET_MODE_BITSIZE (tmode
) < BITS_PER_WORD
)
2205 else if (bitpos
% BITS_PER_WORD
== 0)
2206 dst
= operand_subword (target
, bitpos
/ BITS_PER_WORD
, 1, tmode
);
2208 /* Use xbitpos for the source extraction (right justified) and
2209 bitpos for the destination store (left justified). */
2210 store_bit_field (dst
, bitsize
, bitpos
% BITS_PER_WORD
, 0, 0, copy_mode
,
2211 extract_bit_field (src
, bitsize
,
2212 xbitpos
% BITS_PER_WORD
, 1,
2213 NULL_RTX
, copy_mode
, copy_mode
));
/* Copy BLKmode value SRC into a register of mode MODE.  Return the
   register if it contains any data, otherwise return null.

   This is used on targets that return BLKmode values in registers.  */

rtx
copy_blkmode_to_reg (machine_mode mode, tree src)
{
  int i, n_regs;
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
  unsigned int bitsize;
  rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
  machine_mode dst_mode;

  gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);

  x = expand_normal (src);

  bytes = int_size_in_bytes (TREE_TYPE (src));
  if (bytes == 0)
    return NULL_RTX;

  /* If the structure doesn't take up a whole number of words, see
     whether the register value should be padded on the left or on
     the right.  Set PADDING_CORRECTION to the number of padding
     bits needed on the left side.

     In most ABIs, the structure will be returned at the least end of
     the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (TREE_TYPE (src))
	  ? !BYTES_BIG_ENDIAN
	  : BYTES_BIG_ENDIAN))
    padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
					   * BITS_PER_UNIT));

  n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  dst_words = XALLOCAVEC (rtx, n_regs);
  bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);

  /* Copy the structure BITSIZE bits at a time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new destination pseudo each time xbitpos is
	 on a word boundary and when xbitpos == padding_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == padding_correction)
	{
	  /* Generate an appropriate register.  */
	  dst_word = gen_reg_rtx (word_mode);
	  dst_words[xbitpos / BITS_PER_WORD] = dst_word;

	  /* Clear the destination before we move anything into it.  */
	  emit_move_insn (dst_word, CONST0_RTX (word_mode));
	}

      /* We need a new source operand each time bitpos is on a word
	 boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);

      /* Use bitpos for the source extraction (left justified) and
	 xbitpos for the destination store (right justified).  */
      store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
		       0, 0, word_mode,
		       extract_bit_field (src_word, bitsize,
					  bitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode));
    }

  if (mode == BLKmode)
    {
      /* Find the smallest integer mode large enough to hold the
	 entire structure.  */
      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	/* Have we found a large enough mode?  */
	if (GET_MODE_SIZE (mode) >= bytes)
	  break;

      /* A suitable mode should have been found.  */
      gcc_assert (mode != VOIDmode);
    }

  if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
    dst_mode = word_mode;
  else
    dst_mode = mode;
  dst = gen_reg_rtx (dst_mode);

  for (i = 0; i < n_regs; i++)
    emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);

  if (mode != dst_mode)
    dst = gen_lowpart (mode, dst);

  return dst;
}

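/* Usage sketch (illustration only): return-expansion code that must hand
   a BLKmode aggregate back in registers can use copy_blkmode_to_reg with
   the mode selected by the target's return-value machinery.  `retval' and
   `return_reg' are hypothetical.

       rtx val = copy_blkmode_to_reg (mode, retval);
       if (val)
	 emit_move_insn (return_reg, val);

   A null result means the source type has zero size, so there is nothing
   to copy.  */
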
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
  gcc_assert (REG_P (reg));

  if (!HARD_REGISTER_P (reg))
    return;

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add a CLOBBER expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
	use_reg (call_fusage, reg);
    }
}

/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the code of the expression on the RHS is CODE.  Return
   NULL otherwise.  */

static gimple
get_def_for_expr (tree name, enum tree_code code)
{
  gimple def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || gimple_assign_rhs_code (def_stmt) != code)
    return NULL;

  return def_stmt;
}

/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the class of the expression on the RHS is CLASS.  Return
   NULL otherwise.  */

static gimple
get_def_for_expr_class (tree name, enum tree_code_class tclass)
{
  gimple def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
    return NULL;

  return def_stmt;
}

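/* Usage sketch (illustration only): expansion code typically uses
   get_def_for_expr to look through an SSA name for a feeding operation it
   can fold into the current instruction, for example a MULT_EXPR feeding
   an addition when looking for multiply-accumulate opportunities.
   `treeop0' is a hypothetical operand of the expression being expanded.

       gimple def = get_def_for_expr (treeop0, MULT_EXPR);
       if (def)
	 {
	   tree mul_op0 = gimple_assign_rhs1 (def);
	   tree mul_op1 = gimple_assign_rhs2 (def);
	   ... try to emit a fused multiply-add from mul_op0/mul_op1 ...
	 }
*/
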
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   Return nonzero if a call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
		     rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
		     void *constfundata, unsigned int align, bool memsetp)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  machine_mode mode;
  enum insn_code icode;
  int reverse;
  /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
  rtx cst ATTRIBUTE_UNUSED;

  if (len == 0)
    return 1;

  if (!targetm.use_by_pieces_infrastructure_p (len, align,
					       memsetp
						 ? SET_BY_PIECES
						 : STORE_BY_PIECES,
					       optimize_insn_for_speed_p ()))
    return 0;

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1 && l > 0)
	{
	  mode = widest_int_mode_for_size (max_size);

	  if (mode == VOIDmode)
	    break;

	  icode = optab_handler (mov_optab, mode);
	  if (icode != CODE_FOR_nothing
	      && align >= GET_MODE_ALIGNMENT (mode))
	    {
	      unsigned int size = GET_MODE_SIZE (mode);

	      while (l >= size)
		{
		  if (reverse)
		    offset -= size;

		  cst = (*constfun) (constfundata, offset, mode);
		  if (!targetm.legitimate_constant_p (mode, cst))
		    return 0;

		  if (!reverse)
		    offset += size;

		  l -= size;
		}
	    }

	  max_size = GET_MODE_SIZE (mode);
	}

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}

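/* Usage sketch (illustration only): a typical caller checks
   can_store_by_pieces before committing to store_by_pieces with the same
   callback, as the string/memset builtin expanders do.
   `builtin_strncpy_read_str' stands in for any CONSTFUN-shaped callback;
   `str', `dest_mem', `len' and `align' are hypothetical.

       if (can_store_by_pieces (len, builtin_strncpy_read_str,
				(void *) str, align, false))
	 store_by_pieces (dest_mem, len, builtin_strncpy_read_str,
			  (void *) str, align, false, 0);

   Both calls must agree on LEN, ALIGN, MEMSETP and the callback, since
   can_store_by_pieces validates exactly the constants that
   store_by_pieces will later emit.  */
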
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
		 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
		 void *constfundata, unsigned int align, bool memsetp, int endp)
{
  machine_mode to_addr_mode = get_address_mode (to);
  struct store_by_pieces_d data;

  if (len == 0)
    {
      gcc_assert (endp != 2);
      return to;
    }

  gcc_assert (targetm.use_by_pieces_infrastructure_p
		(len, align,
		 memsetp
		   ? SET_BY_PIECES
		   : STORE_BY_PIECES,
		 optimize_insn_for_speed_p ()));

  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_to_mode_reg (to_addr_mode,
						 plus_constant (to_addr_mode,
								data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}

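/* Illustrative CONSTFUN callback (not part of the original source): the
   callback receives CONSTFUNDATA, the byte OFFSET being stored and the
   MODE to store, and must return an rtx constant for that chunk.  The
   sketch below splats one byte across the chunk, in the same spirit as
   the memset expanders in builtins.c; the name `splat_byte_rtx' is
   hypothetical.

       static rtx
       splat_byte_rtx (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		       machine_mode mode)
       {
	 const char *c = (const char *) data;
	 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
	 memset (p, *c, GET_MODE_SIZE (mode));
	 return c_readstr (p, mode);
       }

   Such a callback is what store_by_pieces invokes, via store_by_pieces_2,
   once per emitted move.  */
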
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces_d data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		   machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}

/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
		   unsigned int align ATTRIBUTE_UNUSED)
{
  machine_mode to_addr_mode = get_address_mode (data->to);
  rtx to_addr = XEXP (data->to, 0);
  unsigned int max_size = STORE_MAX_PIECES + 1;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
    {
      /* Determine the main mode we'll be using.
	 MODE might not be used depending on the definitions of the
	 USE_* macros below.  */
      machine_mode mode ATTRIBUTE_UNUSED
	= widest_int_mode_for_size (max_size);

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
	{
	  data->to_addr = copy_to_mode_reg (to_addr_mode,
					    plus_constant (to_addr_mode,
							   to_addr,
							   data->len));
	  data->autinc_to = 1;
	  data->explicit_inc_to = -1;
	}

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
	  && ! data->autinc_to)
	{
	  data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
	  data->autinc_to = 1;
	  data->explicit_inc_to = 1;
	}

      if ( !data->autinc_to && CONSTANT_P (to_addr))
	data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1 && data->len > 0)
    {
      machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data->len);
}

/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
		   struct store_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->autinc_to)
	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					 data->offset);
      else
	to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  gen_int_mode (-(HOST_WIDE_INT) size,
						GET_MODE (data->to_addr))));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  gen_int_mode (size,
						GET_MODE (data->to_addr))));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}

/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
		     unsigned int expected_align, HOST_WIDE_INT expected_size,
		     unsigned HOST_WIDE_INT min_size,
		     unsigned HOST_WIDE_INT max_size,
		     unsigned HOST_WIDE_INT probable_max_size)
{
  machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (mode != BLKmode
      && CONST_INT_P (size)
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
	{
	  emit_move_insn (object, zero);
	  return NULL;
	}

      if (COMPLEX_MODE_P (mode))
	{
	  zero = CONST0_RTX (GET_MODE_INNER (mode));
	  if (zero != NULL)
	    {
	      write_complex_part (object, zero, 0);
	      write_complex_part (object, zero, 1);
	      return NULL;
	    }
	}
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (CONST_INT_P (size)
      && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
						 CLEAR_BY_PIECES,
						 optimize_insn_for_speed_p ()))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align,
				   expected_align, expected_size,
				   min_size, max_size, probable_max_size))
    ;
  else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
    return set_storage_via_libcall (object, size, const0_rtx,
				    method == BLOCK_OP_TAILCALL);
  else
    gcc_unreachable ();

  return NULL;
}

rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return clear_storage_hints (object, size, method, 0, -1, min, max, max);
}

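/* Usage sketch (illustration only): zeroing a fixed-size BLKmode object,
   for example when expanding an empty CONSTRUCTOR.  `target' is a
   hypothetical MEM for the aggregate being initialized and `type' its
   tree type.

       clear_storage (target, GEN_INT (int_size_in_bytes (type)),
		      BLOCK_OP_NORMAL);

   clear_storage derives the min/max size hints from SIZE itself and
   defers to clear_storage_hints, which picks between a plain move of
   zero, clear_by_pieces, a setmem pattern, or a memset libcall.  */
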
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
  tree call_expr, fn, object_tree, size_tree, val_tree;
  machine_mode size_mode;
  rtx retval;

  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
     place those into new pseudos into a VAR_DECL and use them later.  */

  object = copy_addr_to_reg (XEXP (object, 0));

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  object_tree = make_tree (ptr_type_node, object);
  if (!CONST_INT_P (val))
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
  size_tree = make_tree (sizetype, size);
  val_tree = make_tree (integer_type_node, val);

  fn = clear_storage_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}

/* A subroutine of set_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  */

tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      fn = get_identifier ("memset");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       integer_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_clear_fn, asmspec);
}

static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn);
    }

  return block_clear_fn;
}

/* Expand a setmem pattern; return true if successful.  */

bool
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
			unsigned int expected_align, HOST_WIDE_INT expected_size,
			unsigned HOST_WIDE_INT min_size,
			unsigned HOST_WIDE_INT max_size,
			unsigned HOST_WIDE_INT probable_max_size)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  machine_mode mode;

  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
    {
      if ((unsigned HOST_WIDE_INT)expected_size > max_size)
	expected_size = max_size;
      if ((unsigned HOST_WIDE_INT)expected_size < min_size)
	expected_size = min_size;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (setmem_optab, mode);

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  Since SIZE is within the Pmode address
	     space, we limit MODE to Pmode.  */
	  && ((CONST_INT_P (size)
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || max_size <= (GET_MODE_MASK (mode) >> 1)
	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
	{
	  struct expand_operand ops[9];
	  unsigned int nops;

	  nops = insn_data[(int) code].n_generator_args;
	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

	  create_fixed_operand (&ops[0], object);
	  /* The check above guarantees that this size conversion is valid.  */
	  create_convert_operand_to (&ops[1], size, mode, true);
	  create_convert_operand_from (&ops[2], val, byte_mode, true);
	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
	  if (nops >= 6)
	    {
	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
	      create_integer_operand (&ops[5], expected_size);
	    }
	  if (nops >= 8)
	    {
	      create_integer_operand (&ops[6], min_size);
	      /* If we can not represent the maximal size,
		 make parameter NULL.  */
	      if ((HOST_WIDE_INT) max_size != -1)
		create_integer_operand (&ops[7], max_size);
	      else
		create_fixed_operand (&ops[7], NULL);
	    }
	  if (nops == 9)
	    {
	      /* If we can not represent the maximal size,
		 make parameter NULL.  */
	      if ((HOST_WIDE_INT) probable_max_size != -1)
		create_integer_operand (&ops[8], probable_max_size);
	      else
		create_fixed_operand (&ops[8], NULL);
	    }
	  if (maybe_expand_insn (code, nops, ops))
	    return true;
	}
    }

  return false;
}

/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  machine_mode cmode;
  machine_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
					 imag_p ? GET_MODE_SIZE (imode) : 0),
		      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
	 the original object if it spans an even number of hard regs.
	 This special case is important for SCmode on 64-bit platforms
	 where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
	  && REG_NREGS (cplx) % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
				      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
	{
	  emit_move_insn (part, val);
	  return;
	}
      /* simplify_gen_subreg may fail for sub-word MEMs.  */
      gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
}

/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

rtx
read_complex_part (rtx cplx, bool imag_p)
{
  machine_mode cmode, imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
	{
	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
	  if (CONSTANT_CLASS_P (part))
	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
	}
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
			      imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
	 the original object if it spans an even number of hard regs.
	 This special case is important for SCmode on 64-bit platforms
	 where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
	  && REG_NREGS (cplx) % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
				     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
	return ret;
      /* simplify_gen_subreg may fail for sub-word MEMs.  */
      gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
			    true, NULL_RTX, imode, imode);
}

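/* Usage sketch (illustration only): these two helpers are how complex
   values are moved piecewise; emit_move_complex_parts below is essentially
   the following pattern, where `x' and `y' are rtxen of the same complex
   mode.

       write_complex_part (x, read_complex_part (y, false), false);
       write_complex_part (x, read_complex_part (y, true), true);
*/
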
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (machine_mode new_mode,
		       machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (push_operand (x, GET_MODE (x)))
    {
      ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
      MEM_COPY_ATTRIBUTES (ret, x);
    }
  else if (MEM_P (x))
    {
      /* We don't have to worry about changing the address since the
	 size in bytes is supposed to be the same.  */
      if (reload_in_progress)
	{
	  /* Copy the MEM to change the mode and move any
	     substitutions from the old MEM to the new one.  */
	  ret = adjust_address_nv (x, new_mode, 0);
	  copy_replacements (x, ret);
	}
      else
	ret = adjust_address (x, new_mode, 0);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
	 that the new mode is ok for a hard register.  If we were to use
	 simplify_gen_subreg, we would create the subreg, but would
	 probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
	 we want.  Which is needed for CCmodes on some targets.  */
      if (force)
	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
	ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}

/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx_insn *
emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
{
  machine_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  imode = int_mode_for_mode (mode);
  if (imode == BLKmode)
    return NULL;

  /* The target must support moves in this mode.  */
  code = optab_handler (mov_optab, imode);
  if (code == CODE_FOR_nothing)
    return NULL;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL;
  return emit_insn (GEN_FCN (code) (x, y));
}

/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
   Return an equivalent MEM that does not use an auto-increment.  */

rtx
emit_move_resolve_push (machine_mode mode, rtx x)
{
  enum rtx_code code = GET_CODE (XEXP (x, 0));
  HOST_WIDE_INT adjust;
  rtx temp;

  adjust = GET_MODE_SIZE (mode);
#ifdef PUSH_ROUNDING
  adjust = PUSH_ROUNDING (adjust);
#endif
  if (code == PRE_DEC || code == POST_DEC)
    adjust = -adjust;
  else if (code == PRE_MODIFY || code == POST_MODIFY)
    {
      rtx expr = XEXP (XEXP (x, 0), 1);
      HOST_WIDE_INT val;

      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
      gcc_assert (CONST_INT_P (XEXP (expr, 1)));
      val = INTVAL (XEXP (expr, 1));
      if (GET_CODE (expr) == MINUS)
	val = -val;
      gcc_assert (adjust == val || adjust == -val);
      adjust = val;
    }

  /* Do not use anti_adjust_stack, since we don't want to update
     stack_pointer_delta.  */
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
			      gen_int_mode (adjust, Pmode), stack_pointer_rtx,
			      0, OPTAB_LIB_WIDEN);
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);

  switch (code)
    {
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      temp = stack_pointer_rtx;
      break;
    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
      temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
      break;
    default:
      gcc_unreachable ();
    }

  return replace_equiv_address (x, temp);
}

/* A subroutine of emit_move_complex.  Generate a move from Y into X.
   X is known to satisfy push_operand, and MODE is known to be complex.
   Returns the last instruction emitted.  */

rtx_insn *
emit_move_complex_push (machine_mode mode, rtx x, rtx y)
{
  machine_mode submode = GET_MODE_INNER (mode);
  bool imag_first;

#ifdef PUSH_ROUNDING
  unsigned int submodesize = GET_MODE_SIZE (submode);

  /* In case we output to the stack, but the size is smaller than the
     machine can push exactly, we need to use move instructions.  */
  if (PUSH_ROUNDING (submodesize) != submodesize)
    {
      x = emit_move_resolve_push (mode, x);
      return emit_move_insn (x, y);
    }
#endif

  /* Note that the real part always precedes the imag part in memory
     regardless of machine's endianness.  */
  switch (GET_CODE (XEXP (x, 0)))
    {
    case PRE_DEC:
    case POST_DEC:
      imag_first = true;
      break;
    case PRE_INC:
    case POST_INC:
      imag_first = false;
      break;
    default:
      gcc_unreachable ();
    }

  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
		  read_complex_part (y, imag_first));
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
			 read_complex_part (y, !imag_first));
}

/* A subroutine of emit_move_complex.  Perform the move from Y to X
   via two moves of the parts.  Returns the last instruction emitted.  */

rtx_insn *
emit_move_complex_parts (rtx x, rtx y)
{
  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_clobber (x);

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);

  return get_last_insn ();
}

/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be complex.  Returns the last instruction emitted.  */

static rtx_insn *
emit_move_complex (machine_mode mode, rtx x, rtx y)
{
  bool try_int;

  /* Need to take special care for pushes, to maintain proper ordering
     of the data, and possibly extra padding.  */
  if (push_operand (x, mode))
    return emit_move_complex_push (mode, x, y);

  /* See if we can coerce the target into moving both values at once, except
     for floating point where we favor moving as parts if this is easy.  */
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
      && !(REG_P (x)
	   && HARD_REGISTER_P (x)
	   && REG_NREGS (x) == 1)
      && !(REG_P (y)
	   && HARD_REGISTER_P (y)
	   && REG_NREGS (y) == 1))
    try_int = false;
  /* Not possible if the values are inherently not adjacent.  */
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
    try_int = false;
  /* Is possible if both are registers (or subregs of registers).  */
  else if (register_operand (x, mode) && register_operand (y, mode))
    try_int = true;
  /* If one of the operands is a memory, and alignment constraints
     are friendly enough, we may be able to do combined memory operations.
     We do not attempt this if Y is a constant because that combination is
     usually better with the by-parts thing below.  */
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
	   && (!STRICT_ALIGNMENT
	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
    try_int = true;
  else
    try_int = false;

  if (try_int)
    {
      rtx_insn *ret;

      /* For memory to memory moves, optimal behavior can be had with the
	 existing block move logic.  */
      if (MEM_P (x) && MEM_P (y))
	{
	  emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
			   BLOCK_OP_NO_LIBCALL);
	  return get_last_insn ();
	}

      ret = emit_move_via_integer (mode, x, y, true);
      if (ret)
	return ret;
    }

  return emit_move_complex_parts (x, y);
}

/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx_insn *
emit_move_ccmode (machine_mode mode, rtx x, rtx y)
{
  rtx_insn *ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = optab_handler (mov_optab, CCmode);
      if (code != CODE_FOR_nothing)
	{
	  x = emit_move_change_mode (CCmode, mode, x, true);
	  y = emit_move_change_mode (CCmode, mode, y, true);
	  return emit_insn (GEN_FCN (code) (x, y));
	}
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}

/* Return true if word I of OP lies entirely in the
   undefined bits of a paradoxical subreg.  */

static bool
undefined_operand_subword_p (const_rtx op, int i)
{
  machine_mode innermode, innermostmode;
  int offset;
  if (GET_CODE (op) != SUBREG)
    return false;
  innermode = GET_MODE (op);
  innermostmode = GET_MODE (SUBREG_REG (op));
  offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
  /* The SUBREG_BYTE represents offset, as if the value were stored in
     memory, except for a paradoxical subreg where we define
     SUBREG_BYTE to be 0; undo this exception as in
     simplify_subreg.  */
  if (SUBREG_BYTE (op) == 0
      && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
    {
      int difference = (GET_MODE_SIZE (innermostmode)
			- GET_MODE_SIZE (innermode));
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }
  if (offset >= GET_MODE_SIZE (innermostmode)
      || offset <= -GET_MODE_SIZE (word_mode))
    return true;
  return false;
}

/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is any multi-word or full-word mode that lacks a move_insn
   pattern.  Note that you will get better code if you define such
   patterns, even if they must turn into multiple assembler instructions.  */

static rtx_insn *
emit_move_multi_word (machine_mode mode, rtx x, rtx y)
{
  rtx_insn *last_insn = 0;
  rtx_insn *seq;
  rtx inner;
  bool need_clobber;
  int i;

  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);

  /* If X is a push on the stack, do the push now and replace
     X with a reference to the stack pointer.  */
  if (push_operand (x, mode))
    x = emit_move_resolve_push (mode, x);

  /* If we are in reload, see if either operand is a MEM whose address
     is scheduled for replacement.  */
  if (reload_in_progress && MEM_P (x)
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
    x = replace_equiv_address_nv (x, inner);
  if (reload_in_progress && MEM_P (y)
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
    y = replace_equiv_address_nv (y, inner);

  start_sequence ();

  need_clobber = false;
  for (i = 0;
       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
       i++)
    {
      rtx xpart = operand_subword (x, i, 1, mode);
      rtx ypart;

      /* Do not generate code for a move if it would come entirely
	 from the undefined bits of a paradoxical subreg.  */
      if (undefined_operand_subword_p (y, i))
	continue;

      ypart = operand_subword (y, i, 1, mode);

      /* If we can't get a part of Y, put Y into memory if it is a
	 constant.  Otherwise, force it into a register.  Then we must
	 be able to get a part of Y.  */
      if (ypart == 0 && CONSTANT_P (y))
	{
	  y = use_anchored_address (force_const_mem (mode, y));
	  ypart = operand_subword (y, i, 1, mode);
	}
      else if (ypart == 0)
	ypart = operand_subword_force (y, i, mode);

      gcc_assert (xpart && ypart);

      need_clobber |= (GET_CODE (xpart) == SUBREG);

      last_insn = emit_move_insn (xpart, ypart);
    }

  seq = get_insns ();
  end_sequence ();

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.
     We never want to emit such a clobber after reload.  */
  if (x != y
      && ! (reload_in_progress || reload_completed)
      && need_clobber != 0)
    emit_clobber (x);

  emit_insn (seq);

  return last_insn;
}

/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx_insn *
emit_move_insn_1 (rtx x, rtx y)
{
  machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = optab_handler (mov_optab, mode);
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
      || ALL_FIXED_POINT_MODE_P (mode))
    {
      rtx_insn *result = emit_move_via_integer (mode, x, y, true);

      /* If we can't find an integer mode, use multi words.  */
      if (result)
	return result;
      else
	return emit_move_multi_word (mode, x, y);
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);

      if (ret)
	{
	  if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
	    return ret;
	}
    }

  return emit_move_multi_word (mode, x, y);
}

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx_insn *
emit_move_insn (rtx x, rtx y)
{
  machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx_insn *last_insn;
  rtx set;

  gcc_assert (mode != BLKmode
	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
	  && (last_insn = compress_float_constant (x, y)))
	return last_insn;

      y_cst = y;

      if (!targetm.legitimate_constant_p (mode, y))
	{
	  y = force_const_mem (mode, y);

	  /* If the target's cannot_force_const_mem prevented the spill,
	     assume that the target's move expanders will also take care
	     of the non-legitimate constant.  */
	  if (!y)
	    y = y_cst;
	  else
	    y = use_anchored_address (y);
	}
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
					 MEM_ADDR_SPACE (x))
	  && ! push_operand (x, GET_MODE (x))))
    x = validize_mem (x);

  if (MEM_P (y)
      && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
					MEM_ADDR_SPACE (y)))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));

  return last_insn;
}

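/* Usage sketch (illustration only): emit_move_insn is the generic entry
   point for copying between two rtxen of the same non-BLK mode; constant
   sources are legitimized or spilled to the constant pool as needed.
   `mem' below is a hypothetical SImode MEM.

       rtx temp = gen_reg_rtx (SImode);
       emit_move_insn (temp, GEN_INT (42));
       emit_move_insn (mem, temp);

   For BLKmode copies use emit_block_move instead.  */
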
/* Generate the body of an instruction to copy Y into X.
   It may be a list of insns, if one insn isn't enough.  */

rtx_insn *
gen_move_insn (rtx x, rtx y)
{
  rtx_insn *seq;

  start_sequence ();
  emit_move_insn_1 (x, y);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Same as above, but return rtx (used as a callback, which must have
   prototype compatible with other functions returning rtx).  */

rtx
gen_move_insn_uncast (rtx x, rtx y)
{
  return gen_move_insn (x, y);
}

/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx_insn *
compress_float_constant (rtx x, rtx y)
{
  machine_mode dstmode = GET_MODE (x);
  machine_mode orig_srcmode = GET_MODE (y);
  machine_mode srcmode;
  REAL_VALUE_TYPE r;
  int oldcost, newcost;
  bool speed = optimize_insn_for_speed_p ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  if (targetm.legitimate_constant_p (dstmode, y))
    oldcost = set_src_cost (y, speed);
  else
    oldcost = set_src_cost (force_const_mem (dstmode, y), speed);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y;
      rtx target;
      rtx_insn *last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
	continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
	continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (targetm.legitimate_constant_p (srcmode, trunc_y))
	{
	  /* Skip if the target needs extra instructions to perform
	     the extension.  */
	  if (!insn_operand_matches (ic, 1, trunc_y))
	    continue;
	  /* This is valid, but may not be cheaper than the original.  */
	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
				  speed);
	  if (oldcost < newcost)
	    continue;
	}
      else if (float_extend_from_mem[dstmode][srcmode])
	{
	  trunc_y = force_const_mem (srcmode, trunc_y);
	  /* This is valid, but may not be cheaper than the original.  */
	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
				  speed);
	  if (oldcost < newcost)
	    continue;
	  trunc_y = validize_mem (trunc_y);
	}
      else
	continue;

      /* For CSE's benefit, force the compressed constant pool entry
	 into a new pseudo.  This constant may be used in different modes,
	 and if not, combine will put things back together for us.  */
      trunc_y = force_reg (srcmode, trunc_y);

      /* If x is a hard register, perform the extension into a pseudo,
	 so that e.g. stack realignment code is aware of it.  */
      target = x;
      if (REG_P (x) && HARD_REGISTER_P (x))
	target = gen_reg_rtx (dstmode);

      emit_unop_insn (ic, target, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (target))
	set_unique_reg_note (last_insn, REG_EQUAL, y);

      if (target != x)
	return emit_move_insn (x, target);
      return last_insn;
    }

  return NULL;
}

/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (Pmode, size, extra));
  else if (REG_P (size) && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp,
			     gen_int_mode (extra, Pmode),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

  if (STACK_GROWS_DOWNWARD)
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
	temp = plus_constant (Pmode, temp, extra);
    }
  else
    {
      if (CONST_INT_P (size))
	temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
			      -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, plus_constant (Pmode, size,
							       extra)));
      else
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

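/* Usage sketch (illustration only): callers that must place a
   variable-sized argument on the stack typically allocate the outgoing
   area with push_block and then block-copy into it.  `size_rtx' and
   `arg_mem' are hypothetical.

       rtx addr = push_block (size_rtx, 0, 0);
       rtx dest = gen_rtx_MEM (BLKmode, addr);
       emit_block_move (dest, arg_mem, size_rtx, BLOCK_OP_NORMAL);
*/
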
/* A utility routine that returns the base of an auto-inc memory, or NULL.  */

static rtx
mem_autoinc_base (rtx mem)
{
  if (MEM_P (mem))
    {
      rtx addr = XEXP (mem, 0);
      if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
	return XEXP (addr, 0);
    }
  return NULL;
}

/* A utility routine used here, in reload, and in try_split.  The insns
   after PREV up to and including LAST are known to adjust the stack,
   with a final value of END_ARGS_SIZE.  Iterate backward from LAST
   placing notes as appropriate.  PREV may be NULL, indicating the
   entire insn sequence prior to LAST should be scanned.

   The set of allowed stack pointer modifications is small:
     (1) One or more auto-inc style memory references (aka pushes),
     (2) One or more addition/subtraction with the SP as destination,
     (3) A single move insn with the SP as destination,
     (4) A call_pop insn,
     (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.

   Insns in the sequence that do not modify the SP are ignored,
   except for noreturn calls.

   The return value is the amount of adjustment that can be trivially
   verified, via immediate operand or auto-inc.  If the adjustment
   cannot be trivially extracted, the return value is INT_MIN.  */

HOST_WIDE_INT
find_args_size_adjust (rtx_insn *insn)
{
  rtx dest, set, pat;
  int i;

  pat = PATTERN (insn);
  set = NULL;

  /* Look for a call_pop pattern.  */
  if (CALL_P (insn))
    {
      /* We have to allow non-call_pop patterns for the case
	 of emit_single_push_insn of a TLS address.  */
      if (GET_CODE (pat) != PARALLEL)
	return 0;

      /* All call_pop have a stack pointer adjust in the parallel.
	 The call itself is always first, and the stack adjust is
	 usually last, so search from the end.  */
      for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
	{
	  set = XVECEXP (pat, 0, i);
	  if (GET_CODE (set) != SET)
	    continue;
	  dest = SET_DEST (set);
	  if (dest == stack_pointer_rtx)
	    break;
	}
      /* We'd better have found the stack pointer adjust.  */
      if (i == 0)
	return 0;
      /* Fall through to process the extracted SET and DEST
	 as if it was a standalone insn.  */
    }
  else if (GET_CODE (pat) == SET)
    set = pat;
  else if ((set = single_set (insn)) != NULL)
    ;
  else if (GET_CODE (pat) == PARALLEL)
    {
      /* ??? Some older ports use a parallel with a stack adjust
	 and a store for a PUSH_ROUNDING pattern, rather than a
	 PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
      /* ??? See h8300 and m68k, pushqi1.  */
      for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
	{
	  set = XVECEXP (pat, 0, i);
	  if (GET_CODE (set) != SET)
	    continue;
	  dest = SET_DEST (set);
	  if (dest == stack_pointer_rtx)
	    break;

	  /* We do not expect an auto-inc of the sp in the parallel.  */
	  gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
			       != stack_pointer_rtx);
	}
      if (i < 0)
	return 0;
    }
  else
    return 0;

  dest = SET_DEST (set);

  /* Look for direct modifications of the stack pointer.  */
  if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
    {
      /* Look for a trivial adjustment, otherwise assume nothing.  */
      /* Note that the SPU restore_stack_block pattern refers to
	 the stack pointer in V4SImode.  Consider that non-trivial.  */
      if (SCALAR_INT_MODE_P (GET_MODE (dest))
	  && GET_CODE (SET_SRC (set)) == PLUS
	  && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
	  && CONST_INT_P (XEXP (SET_SRC (set), 1)))
	return INTVAL (XEXP (SET_SRC (set), 1));
      /* ??? Reload can generate no-op moves, which will be cleaned
	 up later.  Recognize it and continue searching.  */
      else if (rtx_equal_p (dest, SET_SRC (set)))
	return 0;
      else
	return HOST_WIDE_INT_MIN;
    }
  else
    {
      rtx mem, addr;

      /* Otherwise only think about autoinc patterns.  */
      if (mem_autoinc_base (dest) == stack_pointer_rtx)
	{
	  mem = dest;
	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
			       != stack_pointer_rtx);
	}
      else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
	mem = SET_SRC (set);
      else
	return 0;

      addr = XEXP (mem, 0);
      switch (GET_CODE (addr))
	{
	case PRE_INC:
	case POST_INC:
	  return GET_MODE_SIZE (GET_MODE (mem));
	case PRE_DEC:
	case POST_DEC:
	  return -GET_MODE_SIZE (GET_MODE (mem));
	case PRE_MODIFY:
	case POST_MODIFY:
	  addr = XEXP (addr, 1);
	  gcc_assert (GET_CODE (addr) == PLUS);
	  gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
	  gcc_assert (CONST_INT_P (XEXP (addr, 1)));
	  return INTVAL (XEXP (addr, 1));
	default:
	  gcc_unreachable ();
	}
    }
}

int
fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
{
  int args_size = end_args_size;
  bool saw_unknown = false;
  rtx_insn *insn;

  for (insn = last; insn != prev; insn = PREV_INSN (insn))
    {
      HOST_WIDE_INT this_delta;

      if (!NONDEBUG_INSN_P (insn))
	continue;

      this_delta = find_args_size_adjust (insn);
      if (this_delta == 0)
	{
	  if (!CALL_P (insn)
	      || ACCUMULATE_OUTGOING_ARGS
	      || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
	    continue;
	}

      gcc_assert (!saw_unknown);
      if (this_delta == HOST_WIDE_INT_MIN)
	saw_unknown = true;

      add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
      if (STACK_GROWS_DOWNWARD)
	this_delta = -(unsigned HOST_WIDE_INT) this_delta;

      args_size -= this_delta;
    }

  return saw_unknown ? INT_MIN : args_size;
}

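/* Usage sketch (illustration only): emit_single_push_insn below is a
   typical caller.  It remembers the last insn before expanding a push,
   then lets this routine walk the newly emitted insns backwards and
   attach REG_ARGS_SIZE notes recording the running stack_pointer_delta.

       rtx_insn *prev = get_last_insn ();
       ... emit one or more insns that adjust the stack ...
       fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
*/
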
#ifdef PUSH_ROUNDING
/* Emit single push insn.  */

static void
emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = optab_handler (push_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];

      create_input_operand (&ops[0], x, mode);
      if (maybe_expand_insn (icode, 1, ops))
	return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
		      expand_binop (Pmode,
				    STACK_GROWS_DOWNWARD ? sub_optab
				    : add_optab,
				    stack_pointer_rtx,
				    gen_int_mode (rounded_size, Pmode),
				    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
      if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
	/* We have already decremented the stack pointer, so get the
	   previous value.  */
	offset += (HOST_WIDE_INT) rounded_size;

      if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
	/* We have already incremented the stack pointer, so get the
	   previous value.  */
	offset -= (HOST_WIDE_INT) rounded_size;

      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				gen_int_mode (offset, Pmode));
    }
  else
    {
      if (STACK_GROWS_DOWNWARD)
	/* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
	dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				  gen_int_mode (-(HOST_WIDE_INT) rounded_size,
						Pmode));
      else
	/* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
	dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				  gen_int_mode (rounded_size, Pmode));

      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (cfun->tail_call_marked)
	/* Function incoming arguments may overlap with sibling call
	   outgoing arguments and we cannot allow reordering of reads
	   from function arguments with stores to outgoing arguments
	   of sibling calls.  */
	set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}

/* Emit and annotate a single push insn.  */

static void
emit_single_push_insn (machine_mode mode, rtx x, tree type)
{
  int delta, old_delta = stack_pointer_delta;
  rtx_insn *prev = get_last_insn ();
  rtx_insn *last;

  emit_single_push_insn_1 (mode, x, type);

  last = get_last_insn ();

  /* Notice the common case where we emitted exactly one insn.  */
  if (PREV_INSN (last) == prev)
    {
      add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
      return;
    }

  delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
  gcc_assert (delta == INT_MIN || delta == old_delta);
}
#endif

/* If reading SIZE bytes from X will end up reading from
   Y return the number of bytes that overlap.  Return -1
   if there is no overlap or -2 if we can't determine
   (for example when X and Y have different base registers).  */

static int
memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
{
  rtx tmp = plus_constant (Pmode, x, size);
  rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);

  if (!CONST_INT_P (sub))
    return -2;

  HOST_WIDE_INT val = INTVAL (sub);

  return IN_RANGE (val, 1, size) ? val : -1;
}

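/* Worked example (illustration only): with SIZE == 16, X == sp + 8 and
   Y == sp + 16, reading 16 bytes starting at X covers [sp+8, sp+24), so
   the bytes at and after Y that get re-read amount to (X + SIZE) - Y == 8
   and the function returns 8.  If the difference is not a compile-time
   constant (different base registers), it returns -2; if the ranges are
   disjoint, -1.  */
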
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.
   Return true if successful.  May return false if asked to push a
   partial argument during a sibcall optimization (as specified by
   SIBCALL_P) and the incoming and outgoing pointers cannot be shown
   not to overlap.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
bool
emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
		unsigned int align, int partial, rtx reg, int extra,
		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
		rtx alignment_pad, bool sibcall_p)
{
  rtx xinner;
  enum direction stack_direction = STACK_GROWS_DOWNWARD ? downward : upward;
  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);
  int nregs = partial / UNITS_PER_WORD;
  rtx *tmp_regs = NULL;
  int overlapping = 0;
      || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
    {
      /* Copy a block into the stack, entirely or partially.  */

      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      used = partial - offset;
      if (mode != BLKmode)
	{
	  /* A value is to be stored in an insufficiently aligned
	     stack slot; copy via a suitably aligned slot if
	     necessary.  */
	  size = GEN_INT (GET_MODE_SIZE (mode));
	  if (!MEM_P (xinner))
	    {
	      temp = assign_temp (type, 1, 1);
	      emit_move_insn (temp, xinner);
	      xinner = temp;
	    }
	}
      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;
#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && PUSH_ARGS
	  && CONST_INT_P (size)
	  && skip == 0
	  && MEM_ALIGN (xinner) >= align
	  && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
	      || align >= BIGGEST_ALIGNMENT
	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
		  == (align / BITS_PER_UNIT)))
	  && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
	}
      else
#endif /* PUSH_ROUNDING  */
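/* Illustration of the rounding hazard guarded against above (numbers assumed
   for the example): if PUSH_ROUNDING rounds every push up to 4 bytes, pushing
   a weakly aligned structure one byte at a time would consume 4 bytes of
   stack per byte pushed, leaving gaps between the pieces.  The
   move_by_pieces path is therefore only taken when pieces of the available
   alignment are not rounded up, when the data is already maximally aligned,
   or when unaligned word-sized accesses are cheap, and only when the total
   size itself is unchanged by PUSH_ROUNDING.  */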
4246 /* Otherwise make space on the stack and copy the data
4247 to the address of that space. */
4249 /* Deduct words put into registers from the size we must copy. */
4252 if (CONST_INT_P (size
))
4253 size
= GEN_INT (INTVAL (size
) - used
);
4255 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
4256 gen_int_mode (used
, GET_MODE (size
)),
4257 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
4260 /* Get the address of the stack space.
4261 In this case, we do not deal with EXTRA separately.
4262 A single stack adjust will do. */
4265 temp
= push_block (size
, extra
, where_pad
== downward
);
4268 else if (CONST_INT_P (args_so_far
))
4269 temp
= memory_address (BLKmode
,
4270 plus_constant (Pmode
, args_addr
,
4271 skip
+ INTVAL (args_so_far
)));
4273 temp
= memory_address (BLKmode
,
4274 plus_constant (Pmode
,
4275 gen_rtx_PLUS (Pmode
,
4280 if (!ACCUMULATE_OUTGOING_ARGS
)
4282 /* If the source is referenced relative to the stack pointer,
4283 copy it to another register to stabilize it. We do not need
4284 to do this if we know that we won't be changing sp. */
4286 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
4287 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
4288 temp
= copy_to_reg (temp
);
4291 target
= gen_rtx_MEM (BLKmode
, temp
);
4293 /* We do *not* set_mem_attributes here, because incoming arguments
4294 may overlap with sibling call outgoing arguments and we cannot
4295 allow reordering of reads from function arguments with stores
4296 to outgoing arguments of sibling calls. We do, however, want
4297 to record the alignment of the stack slot. */
4298 /* ALIGN may well be better aligned than TYPE, e.g. due to
4299 PARM_BOUNDARY. Assume the caller isn't lying. */
4300 set_mem_align (target
, align
);
4302 /* If part should go in registers and pushing to that part would
4303 overwrite some of the values that need to go into regs, load the
4304 overlapping values into temporary pseudos to be moved into the hard
4305 regs at the end after the stack pushing has completed.
4306 We cannot load them directly into the hard regs here because
4307 they can be clobbered by the block move expansions.
4310 if (partial
> 0 && reg
!= 0 && mode
== BLKmode
4311 && GET_CODE (reg
) != PARALLEL
)
4313 overlapping
= memory_load_overlap (XEXP (x
, 0), temp
, partial
);
4314 if (overlapping
> 0)
4316 gcc_assert (overlapping
% UNITS_PER_WORD
== 0);
4317 overlapping
/= UNITS_PER_WORD
;
4319 tmp_regs
= XALLOCAVEC (rtx
, overlapping
);
4321 for (int i
= 0; i
< overlapping
; i
++)
4322 tmp_regs
[i
] = gen_reg_rtx (word_mode
);
4324 for (int i
= 0; i
< overlapping
; i
++)
4325 emit_move_insn (tmp_regs
[i
],
4326 operand_subword_force (target
, i
, mode
));
4328 else if (overlapping
== -1)
4330 /* Could not determine whether there is overlap.
4331 Fail the sibcall. */
4339 emit_block_move (target
, xinner
, size
, BLOCK_OP_CALL_PARM
);
4342 else if (partial
> 0)
4344 /* Scalar partly in registers. */
4346 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
4349 /* # bytes of start of argument
4350 that we must make space for but need not store. */
4351 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
4352 int args_offset
= INTVAL (args_so_far
);
4355 /* Push padding now if padding above and stack grows down,
4356 or if padding below and stack grows up.
4357 But if space already allocated, this has already been done. */
4358 if (extra
&& args_addr
== 0
4359 && where_pad
!= none
&& where_pad
!= stack_direction
)
4360 anti_adjust_stack (GEN_INT (extra
));
4362 /* If we make space by pushing it, we might as well push
4363 the real data. Otherwise, we can leave OFFSET nonzero
4364 and leave the space uninitialized. */
4368 /* Now NOT_STACK gets the number of words that we don't need to
4369 allocate on the stack. Convert OFFSET to words too. */
4370 not_stack
= (partial
- offset
) / UNITS_PER_WORD
;
4371 offset
/= UNITS_PER_WORD
;
4373 /* If the partial register-part of the arg counts in its stack size,
4374 skip the part of stack space corresponding to the registers.
4375 Otherwise, start copying to the beginning of the stack space,
4376 by setting SKIP to 0. */
4377 skip
= (reg_parm_stack_space
== 0) ? 0 : not_stack
;
4379 if (CONSTANT_P (x
) && !targetm
.legitimate_constant_p (mode
, x
))
4380 x
= validize_mem (force_const_mem (mode
, x
));
4382 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4383 SUBREGs of such registers are not allowed. */
4384 if ((REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
4385 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
4386 x
= copy_to_reg (x
);
4388 /* Loop over all the words allocated on the stack for this arg. */
4389 /* We can do it by words, because any scalar bigger than a word
4390 has a size a multiple of a word. */
4391 for (i
= size
- 1; i
>= not_stack
; i
--)
4392 if (i
>= not_stack
+ offset
)
4393 if (!emit_push_insn (operand_subword_force (x
, i
, mode
),
4394 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
4396 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
4398 reg_parm_stack_space
, alignment_pad
, sibcall_p
))
4406 /* Push padding now if padding above and stack grows down,
4407 or if padding below and stack grows up.
4408 But if space already allocated, this has already been done. */
4409 if (extra
&& args_addr
== 0
4410 && where_pad
!= none
&& where_pad
!= stack_direction
)
4411 anti_adjust_stack (GEN_INT (extra
));
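      /* Example of the padding rule used just above (layout assumed for
	 illustration): with a downward-growing stack and an argument padded
	 upward, the EXTRA padding bytes sit at higher addresses than the
	 data, so they must be allocated before the data is pushed; hence the
	 stack is adjusted by EXTRA first.  When the padding direction
	 matches the direction of stack growth, the adjustment is instead
	 done after the data has been pushed (see the end of this
	 function).  */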
4413 #ifdef PUSH_ROUNDING
4414 if (args_addr
== 0 && PUSH_ARGS
)
4415 emit_single_push_insn (mode
, x
, type
);
4419 if (CONST_INT_P (args_so_far
))
4421 = memory_address (mode
,
4422 plus_constant (Pmode
, args_addr
,
4423 INTVAL (args_so_far
)));
4425 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
4427 dest
= gen_rtx_MEM (mode
, addr
);
4429 /* We do *not* set_mem_attributes here, because incoming arguments
4430 may overlap with sibling call outgoing arguments and we cannot
4431 allow reordering of reads from function arguments with stores
4432 to outgoing arguments of sibling calls. We do, however, want
4433 to record the alignment of the stack slot. */
4434 /* ALIGN may well be better aligned than TYPE, e.g. due to
4435 PARM_BOUNDARY. Assume the caller isn't lying. */
4436 set_mem_align (dest
, align
);
4438 emit_move_insn (dest
, x
);
4442 /* Move the partial arguments into the registers and any overlapping
4443 values that we moved into the pseudos in tmp_regs. */
4444 if (partial
> 0 && reg
!= 0)
4446 /* Handle calls that pass values in multiple non-contiguous locations.
4447 The Irix 6 ABI has examples of this. */
4448 if (GET_CODE (reg
) == PARALLEL
)
4449 emit_group_load (reg
, x
, type
, -1);
4452 gcc_assert (partial
% UNITS_PER_WORD
== 0);
4453 move_block_to_reg (REGNO (reg
), x
, nregs
- overlapping
, mode
);
4455 for (int i
= 0; i
< overlapping
; i
++)
4456 emit_move_insn (gen_rtx_REG (word_mode
, REGNO (reg
)
4457 + nregs
- overlapping
+ i
),
4463 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
4464 anti_adjust_stack (GEN_INT (extra
));
4466 if (alignment_pad
&& args_addr
== 0)
4467 anti_adjust_stack (alignment_pad
);
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
	  || x == 0
	  /* Only registers can be subtargets.  */
	  || !REG_P (x)
	  /* Don't use hard regs to avoid extending their life.  */
	  || REGNO (x) < FIRST_PSEUDO_REGISTER
	  ? 0 : x);
}
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */
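/* A concrete case this optimization targets (example source assumed, not
   taken from the surrounding code):

     struct bits { unsigned lo : 29; unsigned top : 3; } x;
     x.top += 2;

   Because TOP is the topmost bitfield in its word, the addition can be done
   by adding (2 << 29) to the containing word with no masking, since any
   carry falls off the top of the word; similarly a 1-bit field update can be
   done with an xor of the shifted bit.  */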
static bool
optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
				 unsigned HOST_WIDE_INT bitpos,
				 unsigned HOST_WIDE_INT bitregion_start,
				 unsigned HOST_WIDE_INT bitregion_end,
				 machine_mode mode1, rtx str_rtx,
				 tree to, tree src)
{
  machine_mode str_mode = GET_MODE (str_rtx);
  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
  tree op0, op1;
  rtx value, result;
  optab binop;
  gimple srcstmt;
  enum tree_code code;
4507 if (mode1
!= VOIDmode
4508 || bitsize
>= BITS_PER_WORD
4509 || str_bitsize
> BITS_PER_WORD
4510 || TREE_SIDE_EFFECTS (to
)
4511 || TREE_THIS_VOLATILE (to
))
4515 if (TREE_CODE (src
) != SSA_NAME
)
4517 if (TREE_CODE (TREE_TYPE (src
)) != INTEGER_TYPE
)
4520 srcstmt
= get_gimple_for_ssa_name (src
);
4522 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt
)) != tcc_binary
)
4525 code
= gimple_assign_rhs_code (srcstmt
);
4527 op0
= gimple_assign_rhs1 (srcstmt
);
4529 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4530 to find its initialization. Hopefully the initialization will
4531 be from a bitfield load. */
4532 if (TREE_CODE (op0
) == SSA_NAME
)
4534 gimple op0stmt
= get_gimple_for_ssa_name (op0
);
4536 /* We want to eventually have OP0 be the same as TO, which
4537 should be a bitfield. */
4539 || !is_gimple_assign (op0stmt
)
4540 || gimple_assign_rhs_code (op0stmt
) != TREE_CODE (to
))
4542 op0
= gimple_assign_rhs1 (op0stmt
);
4545 op1
= gimple_assign_rhs2 (srcstmt
);
4547 if (!operand_equal_p (to
, op0
, 0))
4550 if (MEM_P (str_rtx
))
4552 unsigned HOST_WIDE_INT offset1
;
4554 if (str_bitsize
== 0 || str_bitsize
> BITS_PER_WORD
)
4555 str_mode
= word_mode
;
4556 str_mode
= get_best_mode (bitsize
, bitpos
,
4557 bitregion_start
, bitregion_end
,
4558 MEM_ALIGN (str_rtx
), str_mode
, 0);
4559 if (str_mode
== VOIDmode
)
4561 str_bitsize
= GET_MODE_BITSIZE (str_mode
);
4564 bitpos
%= str_bitsize
;
4565 offset1
= (offset1
- bitpos
) / BITS_PER_UNIT
;
4566 str_rtx
= adjust_address (str_rtx
, str_mode
, offset1
);
4568 else if (!REG_P (str_rtx
) && GET_CODE (str_rtx
) != SUBREG
)
4571 /* If the bit field covers the whole REG/MEM, store_field
4572 will likely generate better code. */
4573 if (bitsize
>= str_bitsize
)
4576 /* We can't handle fields split across multiple entities. */
4577 if (bitpos
+ bitsize
> str_bitsize
)
4580 if (BYTES_BIG_ENDIAN
)
4581 bitpos
= str_bitsize
- bitpos
- bitsize
;
4587 /* For now, just optimize the case of the topmost bitfield
4588 where we don't need to do any masking and also
4589 1 bit bitfields where xor can be used.
4590 We might win by one instruction for the other bitfields
4591 too if insv/extv instructions aren't used, so that
4592 can be added later. */
4593 if (bitpos
+ bitsize
!= str_bitsize
4594 && (bitsize
!= 1 || TREE_CODE (op1
) != INTEGER_CST
))
4597 value
= expand_expr (op1
, NULL_RTX
, str_mode
, EXPAND_NORMAL
);
4598 value
= convert_modes (str_mode
,
4599 TYPE_MODE (TREE_TYPE (op1
)), value
,
4600 TYPE_UNSIGNED (TREE_TYPE (op1
)));
4602 /* We may be accessing data outside the field, which means
4603 we can alias adjacent data. */
4604 if (MEM_P (str_rtx
))
4606 str_rtx
= shallow_copy_rtx (str_rtx
);
4607 set_mem_alias_set (str_rtx
, 0);
4608 set_mem_expr (str_rtx
, 0);
4611 binop
= code
== PLUS_EXPR
? add_optab
: sub_optab
;
4612 if (bitsize
== 1 && bitpos
+ bitsize
!= str_bitsize
)
4614 value
= expand_and (str_mode
, value
, const1_rtx
, NULL
);
4617 value
= expand_shift (LSHIFT_EXPR
, str_mode
, value
, bitpos
, NULL_RTX
, 1);
4618 result
= expand_binop (str_mode
, binop
, str_rtx
,
4619 value
, str_rtx
, 1, OPTAB_WIDEN
);
4620 if (result
!= str_rtx
)
4621 emit_move_insn (str_rtx
, result
);
4626 if (TREE_CODE (op1
) != INTEGER_CST
)
4628 value
= expand_expr (op1
, NULL_RTX
, str_mode
, EXPAND_NORMAL
);
4629 value
= convert_modes (str_mode
,
4630 TYPE_MODE (TREE_TYPE (op1
)), value
,
4631 TYPE_UNSIGNED (TREE_TYPE (op1
)));
4633 /* We may be accessing data outside the field, which means
4634 we can alias adjacent data. */
4635 if (MEM_P (str_rtx
))
4637 str_rtx
= shallow_copy_rtx (str_rtx
);
4638 set_mem_alias_set (str_rtx
, 0);
4639 set_mem_expr (str_rtx
, 0);
4642 binop
= code
== BIT_IOR_EXPR
? ior_optab
: xor_optab
;
4643 if (bitpos
+ bitsize
!= str_bitsize
)
4645 rtx mask
= gen_int_mode (((unsigned HOST_WIDE_INT
) 1 << bitsize
) - 1,
4647 value
= expand_and (str_mode
, value
, mask
, NULL_RTX
);
4649 value
= expand_shift (LSHIFT_EXPR
, str_mode
, value
, bitpos
, NULL_RTX
, 1);
4650 result
= expand_binop (str_mode
, binop
, str_rtx
,
4651 value
, str_rtx
, 1, OPTAB_WIDEN
);
4652 if (result
!= str_rtx
)
4653 emit_move_insn (str_rtx
, result
);
/* In the C++ memory model, consecutive bit fields in a structure are
   considered one memory location.

   Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
   returns the bit range of consecutive bits in which this COMPONENT_REF
   belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
   and *OFFSET may be adjusted in the process.

   If the access does not need to be restricted, 0 is returned in both
   *BITSTART and *BITEND.  */
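/* For illustration (declaration assumed):

     struct S { char a; int b : 7; int c : 9; char d; };

   B and C share one memory location, described by their common
   DECL_BIT_FIELD_REPRESENTATIVE.  A store to C must therefore be confined to
   the bit range covering B and C; it may not touch A or D, which are
   separate memory locations under the C++ memory model.  */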
static void
get_bit_range (unsigned HOST_WIDE_INT *bitstart,
	       unsigned HOST_WIDE_INT *bitend,
	       tree exp,
	       HOST_WIDE_INT *bitpos,
	       tree *offset)
{
  HOST_WIDE_INT bitoffset;
  tree field, repr;
4684 gcc_assert (TREE_CODE (exp
) == COMPONENT_REF
);
4686 field
= TREE_OPERAND (exp
, 1);
4687 repr
= DECL_BIT_FIELD_REPRESENTATIVE (field
);
4688 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4689 need to limit the range we can access. */
4692 *bitstart
= *bitend
= 0;
4696 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4697 part of a larger bit field, then the representative does not serve any
4698 useful purpose. This can occur in Ada. */
4699 if (handled_component_p (TREE_OPERAND (exp
, 0)))
4702 HOST_WIDE_INT rbitsize
, rbitpos
;
4706 get_inner_reference (TREE_OPERAND (exp
, 0), &rbitsize
, &rbitpos
,
4707 &roffset
, &rmode
, &unsignedp
, &volatilep
, false);
4708 if ((rbitpos
% BITS_PER_UNIT
) != 0)
4710 *bitstart
= *bitend
= 0;
4715 /* Compute the adjustment to bitpos from the offset of the field
4716 relative to the representative. DECL_FIELD_OFFSET of field and
4717 repr are the same by construction if they are not constants,
4718 see finish_bitfield_layout. */
4719 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field
))
4720 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr
)))
4721 bitoffset
= (tree_to_uhwi (DECL_FIELD_OFFSET (field
))
4722 - tree_to_uhwi (DECL_FIELD_OFFSET (repr
))) * BITS_PER_UNIT
;
4725 bitoffset
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
))
4726 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr
)));
4728 /* If the adjustment is larger than bitpos, we would have a negative bit
4729 position for the lower bound and this may wreak havoc later. Adjust
4730 offset and bitpos to make the lower bound non-negative in that case. */
4731 if (bitoffset
> *bitpos
)
4733 HOST_WIDE_INT adjust
= bitoffset
- *bitpos
;
4734 gcc_assert ((adjust
% BITS_PER_UNIT
) == 0);
4737 if (*offset
== NULL_TREE
)
4738 *offset
= size_int (-adjust
/ BITS_PER_UNIT
);
4741 = size_binop (MINUS_EXPR
, *offset
, size_int (adjust
/ BITS_PER_UNIT
));
4745 *bitstart
= *bitpos
- bitoffset
;
4747 *bitend
= *bitstart
+ tree_to_uhwi (DECL_SIZE (repr
)) - 1;
/* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
   in memory and has non-BLKmode.  DECL_RTL must not be a MEM; if
   DECL_RTL was not set yet, return NORTL.  */

static bool
addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
{
  if (TREE_CODE (addr) != ADDR_EXPR)
    return false;

  tree base = TREE_OPERAND (addr, 0);

  if (!DECL_P (base)
      || TREE_ADDRESSABLE (base)
      || DECL_MODE (base) == BLKmode)
    return false;

  if (!DECL_RTL_SET_P (base))
    return nortl;

  return (!MEM_P (DECL_RTL (base)));
}

/* Returns true if the MEM_REF REF refers to an object that does not
   reside in memory and has non-BLKmode.  */

static bool
mem_ref_refers_to_non_mem_p (tree ref)
{
  tree base = TREE_OPERAND (ref, 0);
  return addr_expr_of_non_mem_decl_p_1 (base, false);
}
/* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
   is true, try generating a nontemporal store.  */

void
expand_assignment (tree to, tree from, bool nontemporal)
{
  rtx to_rtx = 0;
  rtx result;
  machine_mode mode;
  unsigned int align;
  enum insn_code icode;

  /* Don't crash if the lhs of the assignment was erroneous.  */
  if (TREE_CODE (to) == ERROR_MARK)
    {
      expand_normal (from);
      return;
    }

  /* Optimize away no-op moves without side-effects.  */
  if (operand_equal_p (to, from, 0))
    return;
4806 /* Handle misaligned stores. */
4807 mode
= TYPE_MODE (TREE_TYPE (to
));
4808 if ((TREE_CODE (to
) == MEM_REF
4809 || TREE_CODE (to
) == TARGET_MEM_REF
)
4811 && !mem_ref_refers_to_non_mem_p (to
)
4812 && ((align
= get_object_alignment (to
))
4813 < GET_MODE_ALIGNMENT (mode
))
4814 && (((icode
= optab_handler (movmisalign_optab
, mode
))
4815 != CODE_FOR_nothing
)
4816 || SLOW_UNALIGNED_ACCESS (mode
, align
)))
4820 reg
= expand_expr (from
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4821 reg
= force_not_mem (reg
);
4822 mem
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4824 if (icode
!= CODE_FOR_nothing
)
4826 struct expand_operand ops
[2];
4828 create_fixed_operand (&ops
[0], mem
);
4829 create_input_operand (&ops
[1], reg
, mode
);
4830 /* The movmisalign<mode> pattern cannot fail, else the assignment
4831 would silently be omitted. */
4832 expand_insn (icode
, 2, ops
);
4835 store_bit_field (mem
, GET_MODE_BITSIZE (mode
), 0, 0, 0, mode
, reg
);
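      /* To illustrate this path (scenario assumed): an SImode store through
	 a MEM_REF whose object is known to be only byte-aligned, as can
	 arise for packed or otherwise under-aligned objects, is expanded
	 through the target's movmisalign<mode> pattern when one exists;
	 otherwise, on targets where unaligned accesses are slow, the value
	 is written with store_bit_field instead of a plain move.  */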
4839 /* Assignment of a structure component needs special treatment
4840 if the structure component's rtx is not simply a MEM.
4841 Assignment of an array element at a constant index, and assignment of
4842 an array element in an unaligned packed structure field, has the same
4843 problem. Same for (partially) storing into a non-memory object. */
4844 if (handled_component_p (to
)
4845 || (TREE_CODE (to
) == MEM_REF
4846 && mem_ref_refers_to_non_mem_p (to
))
4847 || TREE_CODE (TREE_TYPE (to
)) == ARRAY_TYPE
)
4850 HOST_WIDE_INT bitsize
, bitpos
;
4851 unsigned HOST_WIDE_INT bitregion_start
= 0;
4852 unsigned HOST_WIDE_INT bitregion_end
= 0;
4859 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
4860 &unsignedp
, &volatilep
, true);
4862 /* Make sure bitpos is not negative, it can wreak havoc later. */
4865 gcc_assert (offset
== NULL_TREE
);
4866 offset
= size_int (bitpos
>> (BITS_PER_UNIT
== 8
4867 ? 3 : exact_log2 (BITS_PER_UNIT
)));
4868 bitpos
&= BITS_PER_UNIT
- 1;
4871 if (TREE_CODE (to
) == COMPONENT_REF
4872 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to
, 1)))
4873 get_bit_range (&bitregion_start
, &bitregion_end
, to
, &bitpos
, &offset
);
4874 /* The C++ memory model naturally applies to byte-aligned fields.
4875 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4876 BITSIZE are not byte-aligned, there is no need to limit the range
4877 we can access. This can occur with packed structures in Ada. */
4878 else if (bitsize
> 0
4879 && bitsize
% BITS_PER_UNIT
== 0
4880 && bitpos
% BITS_PER_UNIT
== 0)
4882 bitregion_start
= bitpos
;
4883 bitregion_end
= bitpos
+ bitsize
- 1;
4886 to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4888 /* If the field has a mode, we want to access it in the
4889 field's mode, not the computed mode.
4890 If a MEM has VOIDmode (external with incomplete type),
4891 use BLKmode for it instead. */
4894 if (mode1
!= VOIDmode
)
4895 to_rtx
= adjust_address (to_rtx
, mode1
, 0);
4896 else if (GET_MODE (to_rtx
) == VOIDmode
)
4897 to_rtx
= adjust_address (to_rtx
, BLKmode
, 0);
4902 machine_mode address_mode
;
4905 if (!MEM_P (to_rtx
))
4907 /* We can get constant negative offsets into arrays with broken
4908 user code. Translate this to a trap instead of ICEing. */
4909 gcc_assert (TREE_CODE (offset
) == INTEGER_CST
);
4910 expand_builtin_trap ();
4911 to_rtx
= gen_rtx_MEM (BLKmode
, const0_rtx
);
4914 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
4915 address_mode
= get_address_mode (to_rtx
);
4916 if (GET_MODE (offset_rtx
) != address_mode
)
4918 /* We cannot be sure that the RTL in offset_rtx is valid outside
4919 of a memory address context, so force it into a register
4920 before attempting to convert it to the desired mode. */
4921 offset_rtx
= force_operand (offset_rtx
, NULL_RTX
);
4922 offset_rtx
= convert_to_mode (address_mode
, offset_rtx
, 0);
4925 /* If we have an expression in OFFSET_RTX and a non-zero
4926 byte offset in BITPOS, adding the byte offset before the
4927 OFFSET_RTX results in better intermediate code, which makes
4928 later rtl optimization passes perform better.
4930 We prefer intermediate code like this:
4932 r124:DI=r123:DI+0x18
4937 r124:DI=r123:DI+0x10
4938 [r124:DI+0x8]=r121:DI
4940 This is only done for aligned data values, as these can
4941 be expected to result in single move instructions. */
4942 if (mode1
!= VOIDmode
4945 && (bitpos
% bitsize
) == 0
4946 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
4947 && MEM_ALIGN (to_rtx
) >= GET_MODE_ALIGNMENT (mode1
))
4949 to_rtx
= adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
4950 bitregion_start
= 0;
4951 if (bitregion_end
>= (unsigned HOST_WIDE_INT
) bitpos
)
4952 bitregion_end
-= bitpos
;
4956 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4957 highest_pow2_factor_for_target (to
,
4961 /* No action is needed if the target is not a memory and the field
4962 lies completely outside that target. This can occur if the source
4963 code contains an out-of-bounds access to a small array. */
4965 && GET_MODE (to_rtx
) != BLKmode
4966 && (unsigned HOST_WIDE_INT
) bitpos
4967 >= GET_MODE_PRECISION (GET_MODE (to_rtx
)))
4969 expand_normal (from
);
4972 /* Handle expand_expr of a complex value returning a CONCAT. */
4973 else if (GET_CODE (to_rtx
) == CONCAT
)
4975 unsigned short mode_bitsize
= GET_MODE_BITSIZE (GET_MODE (to_rtx
));
4976 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from
)))
4978 && bitsize
== mode_bitsize
)
4979 result
= store_expr (from
, to_rtx
, false, nontemporal
);
4980 else if (bitsize
== mode_bitsize
/ 2
4981 && (bitpos
== 0 || bitpos
== mode_bitsize
/ 2))
4982 result
= store_expr (from
, XEXP (to_rtx
, bitpos
!= 0), false,
4984 else if (bitpos
+ bitsize
<= mode_bitsize
/ 2)
4985 result
= store_field (XEXP (to_rtx
, 0), bitsize
, bitpos
,
4986 bitregion_start
, bitregion_end
,
4988 get_alias_set (to
), nontemporal
);
4989 else if (bitpos
>= mode_bitsize
/ 2)
4990 result
= store_field (XEXP (to_rtx
, 1), bitsize
,
4991 bitpos
- mode_bitsize
/ 2,
4992 bitregion_start
, bitregion_end
,
4994 get_alias_set (to
), nontemporal
);
4995 else if (bitpos
== 0 && bitsize
== mode_bitsize
)
4998 result
= expand_normal (from
);
4999 from_rtx
= simplify_gen_subreg (GET_MODE (to_rtx
), result
,
5000 TYPE_MODE (TREE_TYPE (from
)), 0);
5001 emit_move_insn (XEXP (to_rtx
, 0),
5002 read_complex_part (from_rtx
, false));
5003 emit_move_insn (XEXP (to_rtx
, 1),
5004 read_complex_part (from_rtx
, true));
5008 rtx temp
= assign_stack_temp (GET_MODE (to_rtx
),
5009 GET_MODE_SIZE (GET_MODE (to_rtx
)));
5010 write_complex_part (temp
, XEXP (to_rtx
, 0), false);
5011 write_complex_part (temp
, XEXP (to_rtx
, 1), true);
5012 result
= store_field (temp
, bitsize
, bitpos
,
5013 bitregion_start
, bitregion_end
,
5015 get_alias_set (to
), nontemporal
);
5016 emit_move_insn (XEXP (to_rtx
, 0), read_complex_part (temp
, false));
5017 emit_move_insn (XEXP (to_rtx
, 1), read_complex_part (temp
, true));
5024 /* If the field is at offset zero, we could have been given the
5025 DECL_RTX of the parent struct. Don't munge it. */
5026 to_rtx
= shallow_copy_rtx (to_rtx
);
5027 set_mem_attributes_minus_bitpos (to_rtx
, to
, 0, bitpos
);
5029 MEM_VOLATILE_P (to_rtx
) = 1;
5032 if (optimize_bitfield_assignment_op (bitsize
, bitpos
,
5033 bitregion_start
, bitregion_end
,
5038 result
= store_field (to_rtx
, bitsize
, bitpos
,
5039 bitregion_start
, bitregion_end
,
5041 get_alias_set (to
), nontemporal
);
5045 preserve_temp_slots (result
);
5050 /* If the rhs is a function call and its value is not an aggregate,
5051 call the function before we start to compute the lhs.
5052 This is needed for correct code for cases such as
5053 val = setjmp (buf) on machines where reference to val
5054 requires loading up part of an address in a separate insn.
5056 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5057 since it might be a promoted variable where the zero- or sign- extension
5058 needs to be done. Handling this in the normal way is safe because no
5059 computation is done before the call. The same is true for SSA names. */
5060 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
, from
)
5061 && COMPLETE_TYPE_P (TREE_TYPE (from
))
5062 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
5063 && ! (((TREE_CODE (to
) == VAR_DECL
5064 || TREE_CODE (to
) == PARM_DECL
5065 || TREE_CODE (to
) == RESULT_DECL
)
5066 && REG_P (DECL_RTL (to
)))
5067 || TREE_CODE (to
) == SSA_NAME
))
5073 value
= expand_normal (from
);
5075 /* Split value and bounds to store them separately. */
5076 chkp_split_slot (value
, &value
, &bounds
);
5079 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5081 /* Handle calls that return values in multiple non-contiguous locations.
5082 The Irix 6 ABI has examples of this. */
5083 if (GET_CODE (to_rtx
) == PARALLEL
)
5085 if (GET_CODE (value
) == PARALLEL
)
5086 emit_group_move (to_rtx
, value
);
5088 emit_group_load (to_rtx
, value
, TREE_TYPE (from
),
5089 int_size_in_bytes (TREE_TYPE (from
)));
5091 else if (GET_CODE (value
) == PARALLEL
)
5092 emit_group_store (to_rtx
, value
, TREE_TYPE (from
),
5093 int_size_in_bytes (TREE_TYPE (from
)));
5094 else if (GET_MODE (to_rtx
) == BLKmode
)
5096 /* Handle calls that return BLKmode values in registers. */
5098 copy_blkmode_from_reg (to_rtx
, value
, TREE_TYPE (from
));
5100 emit_block_move (to_rtx
, value
, expr_size (from
), BLOCK_OP_NORMAL
);
5104 if (POINTER_TYPE_P (TREE_TYPE (to
)))
5105 value
= convert_memory_address_addr_space
5106 (GET_MODE (to_rtx
), value
,
5107 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to
))));
5109 emit_move_insn (to_rtx
, value
);
5112 /* Store bounds if required. */
5114 && (BOUNDED_P (to
) || chkp_type_has_pointer (TREE_TYPE (to
))))
5116 gcc_assert (MEM_P (to_rtx
));
5117 chkp_emit_bounds_store (bounds
, value
, to_rtx
);
5120 preserve_temp_slots (to_rtx
);
5125 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5126 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5128 /* Don't move directly into a return register. */
5129 if (TREE_CODE (to
) == RESULT_DECL
5130 && (REG_P (to_rtx
) || GET_CODE (to_rtx
) == PARALLEL
))
5136 /* If the source is itself a return value, it still is in a pseudo at
5137 this point so we can move it back to the return register directly. */
5139 && TYPE_MODE (TREE_TYPE (from
)) == BLKmode
5140 && TREE_CODE (from
) != CALL_EXPR
)
5141 temp
= copy_blkmode_to_reg (GET_MODE (to_rtx
), from
);
5143 temp
= expand_expr (from
, NULL_RTX
, GET_MODE (to_rtx
), EXPAND_NORMAL
);
5145 /* Handle calls that return values in multiple non-contiguous locations.
5146 The Irix 6 ABI has examples of this. */
5147 if (GET_CODE (to_rtx
) == PARALLEL
)
5149 if (GET_CODE (temp
) == PARALLEL
)
5150 emit_group_move (to_rtx
, temp
);
5152 emit_group_load (to_rtx
, temp
, TREE_TYPE (from
),
5153 int_size_in_bytes (TREE_TYPE (from
)));
5156 emit_move_insn (to_rtx
, temp
);
5158 preserve_temp_slots (to_rtx
);
5163 /* In case we are returning the contents of an object which overlaps
5164 the place the value is being stored, use a safe function when copying
5165 a value through a pointer into a structure value return block. */
5166 if (TREE_CODE (to
) == RESULT_DECL
5167 && TREE_CODE (from
) == INDIRECT_REF
5168 && ADDR_SPACE_GENERIC_P
5169 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from
, 0)))))
5170 && refs_may_alias_p (to
, from
)
5171 && cfun
->returns_struct
5172 && !cfun
->returns_pcc_struct
)
5177 size
= expr_size (from
);
5178 from_rtx
= expand_normal (from
);
5180 emit_library_call (memmove_libfunc
, LCT_NORMAL
,
5181 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
5182 XEXP (from_rtx
, 0), Pmode
,
5183 convert_to_mode (TYPE_MODE (sizetype
),
5184 size
, TYPE_UNSIGNED (sizetype
)),
5185 TYPE_MODE (sizetype
));
5187 preserve_temp_slots (to_rtx
);
5192 /* Compute FROM and store the value in the rtx we got. */
5195 result
= store_expr_with_bounds (from
, to_rtx
, 0, nontemporal
, to
);
5196 preserve_temp_slots (result
);
/* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
   succeeded, false otherwise.  */

static bool
emit_storent_insn (rtx to, rtx from)
{
  struct expand_operand ops[2];
  machine_mode mode = GET_MODE (to);
  enum insn_code code = optab_handler (storent_optab, mode);

  if (code == CODE_FOR_nothing)
    return false;

  create_fixed_operand (&ops[0], to);
  create_input_operand (&ops[1], from, mode);
  return maybe_expand_insn (code, 2, ops);
}
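/* Usage note (target names given only as typical examples): store_expr
   routes nontemporal stores here, and the optab only succeeds on targets
   that provide a storent<mode> pattern, such as a streaming store on x86.
   Callers must therefore be prepared for a false return and fall back to an
   ordinary move.  */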
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.

   If NONTEMPORAL is true, try using a nontemporal store instruction.

   If BTARGET is not NULL then computed bounds of EXP are
   associated with BTARGET.  */
5238 store_expr_with_bounds (tree exp
, rtx target
, int call_param_p
,
5239 bool nontemporal
, tree btarget
)
5242 rtx alt_rtl
= NULL_RTX
;
5243 location_t loc
= curr_insn_location ();
5245 if (VOID_TYPE_P (TREE_TYPE (exp
)))
5247 /* C++ can generate ?: expressions with a throw expression in one
5248 branch and an rvalue in the other. Here, we resolve attempts to
5249 store the throw expression's nonexistent result. */
5250 gcc_assert (!call_param_p
);
5251 expand_expr (exp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5254 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
5256 /* Perform first part of compound expression, then assign from second
5258 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
5259 call_param_p
? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
5260 return store_expr_with_bounds (TREE_OPERAND (exp
, 1), target
,
5261 call_param_p
, nontemporal
, btarget
);
5263 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
5265 /* For conditional expression, get safe form of the target. Then
5266 test the condition, doing the appropriate assignment on either
5267 side. This avoids the creation of unnecessary temporaries.
5268 For non-BLKmode, it is more efficient not to do this. */
5270 rtx_code_label
*lab1
= gen_label_rtx (), *lab2
= gen_label_rtx ();
5272 do_pending_stack_adjust ();
5274 jumpifnot (TREE_OPERAND (exp
, 0), lab1
, -1);
5275 store_expr_with_bounds (TREE_OPERAND (exp
, 1), target
, call_param_p
,
5276 nontemporal
, btarget
);
5277 emit_jump_insn (gen_jump (lab2
));
5280 store_expr_with_bounds (TREE_OPERAND (exp
, 2), target
, call_param_p
,
5281 nontemporal
, btarget
);
5287 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;
5295 /* We can do the conversion inside EXP, which will often result
5296 in some optimizations. Do the conversion in two steps: first
5297 change the signedness, if needed, then the extend. But don't
5298 do this if the type of EXP is a subtype of something else
5299 since then the conversion might involve more than just
5300 converting modes. */
5301 if (INTEGRAL_TYPE_P (TREE_TYPE (exp
))
5302 && TREE_TYPE (TREE_TYPE (exp
)) == 0
5303 && GET_MODE_PRECISION (GET_MODE (target
))
5304 == TYPE_PRECISION (TREE_TYPE (exp
)))
5306 if (!SUBREG_CHECK_PROMOTED_SIGN (target
,
5307 TYPE_UNSIGNED (TREE_TYPE (exp
))))
5309 /* Some types, e.g. Fortran's logical*4, won't have a signed
5310 version, so use the mode instead. */
5312 = (signed_or_unsigned_type_for
5313 (SUBREG_PROMOTED_SIGN (target
), TREE_TYPE (exp
)));
5315 ntype
= lang_hooks
.types
.type_for_mode
5316 (TYPE_MODE (TREE_TYPE (exp
)),
5317 SUBREG_PROMOTED_SIGN (target
));
5319 exp
= fold_convert_loc (loc
, ntype
, exp
);
5322 exp
= fold_convert_loc (loc
, lang_hooks
.types
.type_for_mode
5323 (GET_MODE (SUBREG_REG (target
)),
5324 SUBREG_PROMOTED_SIGN (target
)),
5327 inner_target
= SUBREG_REG (target
);
5330 temp
= expand_expr (exp
, inner_target
, VOIDmode
,
5331 call_param_p
? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
5333 /* Handle bounds returned by call. */
5334 if (TREE_CODE (exp
) == CALL_EXPR
)
5337 chkp_split_slot (temp
, &temp
, &bounds
);
5338 if (bounds
&& btarget
)
5340 gcc_assert (TREE_CODE (btarget
) == SSA_NAME
);
5341 rtx tmp
= targetm
.calls
.load_returned_bounds (bounds
);
5342 chkp_set_rtl_bounds (btarget
, tmp
);
5346 /* If TEMP is a VOIDmode constant, use convert_modes to make
5347 sure that we properly convert it. */
5348 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
5350 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
5351 temp
, SUBREG_PROMOTED_SIGN (target
));
5352 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
5353 GET_MODE (target
), temp
,
5354 SUBREG_PROMOTED_SIGN (target
));
5357 convert_move (SUBREG_REG (target
), temp
,
5358 SUBREG_PROMOTED_SIGN (target
));
5362 else if ((TREE_CODE (exp
) == STRING_CST
5363 || (TREE_CODE (exp
) == MEM_REF
5364 && TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
5365 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
5367 && integer_zerop (TREE_OPERAND (exp
, 1))))
5368 && !nontemporal
&& !call_param_p
5371 /* Optimize initialization of an array with a STRING_CST. */
5372 HOST_WIDE_INT exp_len
, str_copy_len
;
5374 tree str
= TREE_CODE (exp
) == STRING_CST
5375 ? exp
: TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5377 exp_len
= int_expr_size (exp
);
5381 if (TREE_STRING_LENGTH (str
) <= 0)
5384 str_copy_len
= strlen (TREE_STRING_POINTER (str
));
5385 if (str_copy_len
< TREE_STRING_LENGTH (str
) - 1)
5388 str_copy_len
= TREE_STRING_LENGTH (str
);
5389 if ((STORE_MAX_PIECES
& (STORE_MAX_PIECES
- 1)) == 0
5390 && TREE_STRING_POINTER (str
)[TREE_STRING_LENGTH (str
) - 1] == '\0')
5392 str_copy_len
+= STORE_MAX_PIECES
- 1;
5393 str_copy_len
&= ~(STORE_MAX_PIECES
- 1);
5395 str_copy_len
= MIN (str_copy_len
, exp_len
);
5396 if (!can_store_by_pieces (str_copy_len
, builtin_strncpy_read_str
,
5397 CONST_CAST (char *, TREE_STRING_POINTER (str
)),
5398 MEM_ALIGN (target
), false))
5403 dest_mem
= store_by_pieces (dest_mem
,
5404 str_copy_len
, builtin_strncpy_read_str
,
5406 TREE_STRING_POINTER (str
)),
5407 MEM_ALIGN (target
), false,
5408 exp_len
> str_copy_len
? 1 : 0);
5409 if (exp_len
> str_copy_len
)
5410 clear_storage (adjust_address (dest_mem
, BLKmode
, 0),
5411 GEN_INT (exp_len
- str_copy_len
),
5420 /* If we want to use a nontemporal store, force the value to
5422 tmp_target
= nontemporal
? NULL_RTX
: target
;
5423 temp
= expand_expr_real (exp
, tmp_target
, GET_MODE (target
),
5425 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
),
5428 /* Handle bounds returned by call. */
5429 if (TREE_CODE (exp
) == CALL_EXPR
)
5432 chkp_split_slot (temp
, &temp
, &bounds
);
5433 if (bounds
&& btarget
)
5435 gcc_assert (TREE_CODE (btarget
) == SSA_NAME
);
5436 rtx tmp
= targetm
.calls
.load_returned_bounds (bounds
);
5437 chkp_set_rtl_bounds (btarget
, tmp
);
5442 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5443 the same as that of TARGET, adjust the constant. This is needed, for
5444 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5445 only a word-sized value. */
5446 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
5447 && TREE_CODE (exp
) != ERROR_MARK
5448 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
5449 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
5450 temp
, TYPE_UNSIGNED (TREE_TYPE (exp
)));
5452 /* If value was not generated in the target, store it there.
5453 Convert the value to TARGET's type first if necessary and emit the
5454 pending incrementations that have been queued when expanding EXP.
5455 Note that we cannot emit the whole queue blindly because this will
5456 effectively disable the POST_INC optimization later.
5458 If TEMP and TARGET compare equal according to rtx_equal_p, but
5459 one or both of them are volatile memory refs, we have to distinguish
5461 - expand_expr has used TARGET. In this case, we must not generate
5462 another copy. This can be detected by TARGET being equal according
5464 - expand_expr has not used TARGET - that means that the source just
5465 happens to have the same RTX form. Since temp will have been created
5466 by expand_expr, it will compare unequal according to == .
5467 We must generate a copy in this case, to reach the correct number
5468 of volatile memory references. */
5470 if ((! rtx_equal_p (temp
, target
)
5471 || (temp
!= target
&& (side_effects_p (temp
)
5472 || side_effects_p (target
))))
5473 && TREE_CODE (exp
) != ERROR_MARK
5474 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5475 but TARGET is not valid memory reference, TEMP will differ
5476 from TARGET although it is really the same location. */
5478 && rtx_equal_p (alt_rtl
, target
)
5479 && !side_effects_p (alt_rtl
)
5480 && !side_effects_p (target
))
5481 /* If there's nothing to copy, don't bother. Don't call
5482 expr_size unless necessary, because some front-ends (C++)
5483 expr_size-hook must not be given objects that are not
5484 supposed to be bit-copied or bit-initialized. */
5485 && expr_size (exp
) != const0_rtx
)
5487 if (GET_MODE (temp
) != GET_MODE (target
) && GET_MODE (temp
) != VOIDmode
)
5489 if (GET_MODE (target
) == BLKmode
)
5491 /* Handle calls that return BLKmode values in registers. */
5492 if (REG_P (temp
) && TREE_CODE (exp
) == CALL_EXPR
)
5493 copy_blkmode_from_reg (target
, temp
, TREE_TYPE (exp
));
5495 store_bit_field (target
,
5496 INTVAL (expr_size (exp
)) * BITS_PER_UNIT
,
5497 0, 0, 0, GET_MODE (temp
), temp
);
5500 convert_move (target
, temp
, TYPE_UNSIGNED (TREE_TYPE (exp
)));
5503 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
5505 /* Handle copying a string constant into an array. The string
5506 constant may be shorter than the array. So copy just the string's
5507 actual length, and clear the rest. First get the size of the data
5508 type of the string, which is actually the size of the target. */
5509 rtx size
= expr_size (exp
);
5511 if (CONST_INT_P (size
)
5512 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
5513 emit_block_move (target
, temp
, size
,
5515 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
5518 machine_mode pointer_mode
5519 = targetm
.addr_space
.pointer_mode (MEM_ADDR_SPACE (target
));
5520 machine_mode address_mode
= get_address_mode (target
);
5522 /* Compute the size of the data to copy from the string. */
5524 = size_binop_loc (loc
, MIN_EXPR
,
5525 make_tree (sizetype
, size
),
5526 size_int (TREE_STRING_LENGTH (exp
)));
5528 = expand_expr (copy_size
, NULL_RTX
, VOIDmode
,
5530 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
5531 rtx_code_label
*label
= 0;
5533 /* Copy that much. */
5534 copy_size_rtx
= convert_to_mode (pointer_mode
, copy_size_rtx
,
5535 TYPE_UNSIGNED (sizetype
));
5536 emit_block_move (target
, temp
, copy_size_rtx
,
5538 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
5540 /* Figure out how much is left in TARGET that we have to clear.
5541 Do all calculations in pointer_mode. */
5542 if (CONST_INT_P (copy_size_rtx
))
5544 size
= plus_constant (address_mode
, size
,
5545 -INTVAL (copy_size_rtx
));
5546 target
= adjust_address (target
, BLKmode
,
5547 INTVAL (copy_size_rtx
));
5551 size
= expand_binop (TYPE_MODE (sizetype
), sub_optab
, size
,
5552 copy_size_rtx
, NULL_RTX
, 0,
5555 if (GET_MODE (copy_size_rtx
) != address_mode
)
5556 copy_size_rtx
= convert_to_mode (address_mode
,
5558 TYPE_UNSIGNED (sizetype
));
5560 target
= offset_address (target
, copy_size_rtx
,
5561 highest_pow2_factor (copy_size
));
5562 label
= gen_label_rtx ();
5563 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
5564 GET_MODE (size
), 0, label
);
5567 if (size
!= const0_rtx
)
5568 clear_storage (target
, size
, BLOCK_OP_NORMAL
);
5574 /* Handle calls that return values in multiple non-contiguous locations.
5575 The Irix 6 ABI has examples of this. */
5576 else if (GET_CODE (target
) == PARALLEL
)
5578 if (GET_CODE (temp
) == PARALLEL
)
5579 emit_group_move (target
, temp
);
5581 emit_group_load (target
, temp
, TREE_TYPE (exp
),
5582 int_size_in_bytes (TREE_TYPE (exp
)));
5584 else if (GET_CODE (temp
) == PARALLEL
)
5585 emit_group_store (target
, temp
, TREE_TYPE (exp
),
5586 int_size_in_bytes (TREE_TYPE (exp
)));
5587 else if (GET_MODE (temp
) == BLKmode
)
5588 emit_block_move (target
, temp
, expr_size (exp
),
5590 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
5591 /* If we emit a nontemporal store, there is nothing else to do. */
5592 else if (nontemporal
&& emit_storent_insn (target
, temp
))
5596 temp
= force_operand (temp
, target
);
5598 emit_move_insn (target
, temp
);
/* Same as store_expr_with_bounds but ignoring bounds of EXP.  */

rtx
store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
{
  return store_expr_with_bounds (exp, target, call_param_p, nontemporal, NULL);
}
/* Return true if field F of structure TYPE is a flexible array.  */

static bool
flexible_array_member_p (const_tree f, const_tree type)
{
  const_tree tf;

  tf = TREE_TYPE (f);
  return (DECL_CHAIN (f) == NULL
	  && TREE_CODE (tf) == ARRAY_TYPE
	  && TYPE_DOMAIN (tf)
	  && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
	  && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
	  && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
	  && int_size_in_bytes (type) >= 0);
}
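/* Example of a flexible array member (declaration assumed):

     struct msg { int len; char data[]; };

   DATA is the last field, its array type has a zero lower bound and no upper
   bound, and the enclosing struct still has a known constant size, so
   flexible_array_member_p returns true for it.  */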
5629 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5630 must have in order for it to completely initialize a value of type TYPE.
5631 Return -1 if the number isn't known.
5633 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5635 static HOST_WIDE_INT
5636 count_type_elements (const_tree type
, bool for_ctor_p
)
5638 switch (TREE_CODE (type
))
5644 nelts
= array_type_nelts (type
);
5645 if (nelts
&& tree_fits_uhwi_p (nelts
))
5647 unsigned HOST_WIDE_INT n
;
5649 n
= tree_to_uhwi (nelts
) + 1;
5650 if (n
== 0 || for_ctor_p
)
5653 return n
* count_type_elements (TREE_TYPE (type
), false);
5655 return for_ctor_p
? -1 : 1;
5660 unsigned HOST_WIDE_INT n
;
5664 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
5665 if (TREE_CODE (f
) == FIELD_DECL
)
5668 n
+= count_type_elements (TREE_TYPE (f
), false);
5669 else if (!flexible_array_member_p (f
, type
))
5670 /* Don't count flexible arrays, which are not supposed
5671 to be initialized. */
5679 case QUAL_UNION_TYPE
:
5684 gcc_assert (!for_ctor_p
);
      /* Estimate the number of scalars in each field and pick the
	 maximum.  Other estimates would do instead; the idea is simply
	 to make sure that the estimate is not sensitive to the ordering
	 of the fields.  */
5690 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
5691 if (TREE_CODE (f
) == FIELD_DECL
)
5693 m
= count_type_elements (TREE_TYPE (f
), false);
5694 /* If the field doesn't span the whole union, add an extra
5695 scalar for the rest. */
5696 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f
)),
5697 TYPE_SIZE (type
)) != 1)
5709 return TYPE_VECTOR_SUBPARTS (type
);
5713 case FIXED_POINT_TYPE
:
5718 case REFERENCE_TYPE
:
5734 /* Helper for categorize_ctor_elements. Identical interface. */
5737 categorize_ctor_elements_1 (const_tree ctor
, HOST_WIDE_INT
*p_nz_elts
,
5738 HOST_WIDE_INT
*p_init_elts
, bool *p_complete
)
5740 unsigned HOST_WIDE_INT idx
;
5741 HOST_WIDE_INT nz_elts
, init_elts
, num_fields
;
5742 tree value
, purpose
, elt_type
;
5744 /* Whether CTOR is a valid constant initializer, in accordance with what
5745 initializer_constant_valid_p does. If inferred from the constructor
5746 elements, true until proven otherwise. */
5747 bool const_from_elts_p
= constructor_static_from_elts_p (ctor
);
5748 bool const_p
= const_from_elts_p
? true : TREE_STATIC (ctor
);
5753 elt_type
= NULL_TREE
;
5755 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), idx
, purpose
, value
)
5757 HOST_WIDE_INT mult
= 1;
5759 if (purpose
&& TREE_CODE (purpose
) == RANGE_EXPR
)
5761 tree lo_index
= TREE_OPERAND (purpose
, 0);
5762 tree hi_index
= TREE_OPERAND (purpose
, 1);
5764 if (tree_fits_uhwi_p (lo_index
) && tree_fits_uhwi_p (hi_index
))
5765 mult
= (tree_to_uhwi (hi_index
)
5766 - tree_to_uhwi (lo_index
) + 1);
5769 elt_type
= TREE_TYPE (value
);
5771 switch (TREE_CODE (value
))
5775 HOST_WIDE_INT nz
= 0, ic
= 0;
5777 bool const_elt_p
= categorize_ctor_elements_1 (value
, &nz
, &ic
,
5780 nz_elts
+= mult
* nz
;
5781 init_elts
+= mult
* ic
;
5783 if (const_from_elts_p
&& const_p
)
5784 const_p
= const_elt_p
;
5791 if (!initializer_zerop (value
))
5797 nz_elts
+= mult
* TREE_STRING_LENGTH (value
);
5798 init_elts
+= mult
* TREE_STRING_LENGTH (value
);
5802 if (!initializer_zerop (TREE_REALPART (value
)))
5804 if (!initializer_zerop (TREE_IMAGPART (value
)))
5812 for (i
= 0; i
< VECTOR_CST_NELTS (value
); ++i
)
5814 tree v
= VECTOR_CST_ELT (value
, i
);
5815 if (!initializer_zerop (v
))
5824 HOST_WIDE_INT tc
= count_type_elements (elt_type
, false);
5825 nz_elts
+= mult
* tc
;
5826 init_elts
+= mult
* tc
;
5828 if (const_from_elts_p
&& const_p
)
5829 const_p
= initializer_constant_valid_p (value
, elt_type
)
5836 if (*p_complete
&& !complete_ctor_at_level_p (TREE_TYPE (ctor
),
5837 num_fields
, elt_type
))
5838 *p_complete
= false;
5840 *p_nz_elts
+= nz_elts
;
5841 *p_init_elts
+= init_elts
;
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT.
   * whether the constructor is complete -- in the sense that every
     meaningful byte is explicitly given a value --
     and place it in *P_COMPLETE.

   Return whether or not CTOR is a valid static constant initializer, the same
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */
bool
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  *p_nz_elts = 0;
  *p_init_elts = 0;
  *p_complete = true;

  return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
}
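/* Worked example (initializer assumed):

     struct { int a, b, c; } x = { 1, 0, 2 };

   categorize_ctor_elements reports 2 nonzero scalar fields and 3 initialized
   scalar fields, and marks the constructor complete.  The callers below rely
   on such counts; for instance mostly_zeros_p checks whether fewer than a
   quarter of the initialized fields are nonzero.  */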
/* TYPE is initialized by a constructor with NUM_ELTS elements, the last
   of which had type LAST_TYPE.  Each element was itself a complete
   initializer, in the sense that every meaningful byte was explicitly
   given a value.  Return true if the same is true for the constructor
   as a whole.  */

bool
complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
			  const_tree last_type)
{
  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      if (num_elts == 0)
	return false;

      gcc_assert (num_elts == 1 && last_type);

      /* ??? We could look at each element of the union, and find the
	 largest element, which would avoid comparing the size of the
	 initialized element against any tail padding in the union.
	 Doesn't seem worth the effort...  */
      return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
    }

  return count_type_elements (type, true) == num_elts;
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return !complete_p || nz_elts < init_elts / 4;
    }

  return initializer_zerop (exp);
}
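/* For instance (initializer assumed), for

     int a[8] = { [1] = 5 };

   seven of the eight elements are left zero, so the constructor is not
   complete and mostly_zeros_p returns true; store_constructor then clears
   the whole array first and stores only the nonzero element.  */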
/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
5931 /* Helper function for store_constructor.
5932 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5933 CLEARED is as for store_constructor.
5934 ALIAS_SET is the alias set to use for any stores.
5936 This provides a recursive shortcut back to store_constructor when it isn't
5937 necessary to go through store_field. This is so that we can pass through
5938 the cleared field to let store_constructor know that we may not have to
5939 clear a substructure if the outer structure has already been cleared. */
5942 store_constructor_field (rtx target
, unsigned HOST_WIDE_INT bitsize
,
5943 HOST_WIDE_INT bitpos
, machine_mode mode
,
5944 tree exp
, int cleared
, alias_set_type alias_set
)
5946 if (TREE_CODE (exp
) == CONSTRUCTOR
5947 /* We can only call store_constructor recursively if the size and
5948 bit position are on a byte boundary. */
5949 && bitpos
% BITS_PER_UNIT
== 0
5950 && (bitsize
> 0 && bitsize
% BITS_PER_UNIT
== 0)
5951 /* If we have a nonzero bitpos for a register target, then we just
5952 let store_field do the bitfield handling. This is unlikely to
5953 generate unnecessary clear instructions anyways. */
5954 && (bitpos
== 0 || MEM_P (target
)))
5958 = adjust_address (target
,
5959 GET_MODE (target
) == BLKmode
5961 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
5962 ? BLKmode
: VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5965 /* Update the alias set, if required. */
5966 if (MEM_P (target
) && ! MEM_KEEP_ALIAS_SET_P (target
)
5967 && MEM_ALIAS_SET (target
) != 0)
5969 target
= copy_rtx (target
);
5970 set_mem_alias_set (target
, alias_set
);
5973 store_constructor (exp
, target
, cleared
, bitsize
/ BITS_PER_UNIT
);
5976 store_field (target
, bitsize
, bitpos
, 0, 0, mode
, exp
, alias_set
, false);
5980 /* Returns the number of FIELD_DECLs in TYPE. */
5983 fields_length (const_tree type
)
5985 tree t
= TYPE_FIELDS (type
);
5988 for (; t
; t
= DECL_CHAIN (t
))
5989 if (TREE_CODE (t
) == FIELD_DECL
)
5996 /* Store the value of constructor EXP into the rtx TARGET.
5997 TARGET is either a REG or a MEM; we know it cannot conflict, since
5998 safe_from_p has been called.
5999 CLEARED is true if TARGET is known to have been zero'd.
6000 SIZE is the number of bytes of TARGET we are allowed to modify: this
6001 may not be the same as the size of EXP if we are assigning to a field
6002 which has been packed to exclude padding bits. */
6005 store_constructor (tree exp
, rtx target
, int cleared
, HOST_WIDE_INT size
)
6007 tree type
= TREE_TYPE (exp
);
6008 #ifdef WORD_REGISTER_OPERATIONS
6009 HOST_WIDE_INT exp_size
= int_size_in_bytes (type
);
6012 switch (TREE_CODE (type
))
6016 case QUAL_UNION_TYPE
:
6018 unsigned HOST_WIDE_INT idx
;
6021 /* If size is zero or the target is already cleared, do nothing. */
6022 if (size
== 0 || cleared
)
6024 /* We either clear the aggregate or indicate the value is dead. */
6025 else if ((TREE_CODE (type
) == UNION_TYPE
6026 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
6027 && ! CONSTRUCTOR_ELTS (exp
))
6028 /* If the constructor is empty, clear the union. */
6030 clear_storage (target
, expr_size (exp
), BLOCK_OP_NORMAL
);
6034 /* If we are building a static constructor into a register,
6035 set the initial value as zero so we can fold the value into
6036 a constant. But if more than one register is involved,
6037 this probably loses. */
6038 else if (REG_P (target
) && TREE_STATIC (exp
)
6039 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
6041 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
6045 /* If the constructor has fewer fields than the structure or
6046 if we are initializing the structure to mostly zeros, clear
6047 the whole structure first. Don't do this if TARGET is a
6048 register whose mode size isn't equal to SIZE since
6049 clear_storage can't handle this case. */
6051 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp
))
6052 != fields_length (type
))
6053 || mostly_zeros_p (exp
))
6055 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
6058 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
6062 if (REG_P (target
) && !cleared
)
6063 emit_clobber (target
);
6065 /* Store each element of the constructor into the
6066 corresponding field of TARGET. */
6067 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), idx
, field
, value
)
6070 HOST_WIDE_INT bitsize
;
6071 HOST_WIDE_INT bitpos
= 0;
6073 rtx to_rtx
= target
;
6075 /* Just ignore missing fields. We cleared the whole
6076 structure, above, if any fields are missing. */
6080 if (cleared
&& initializer_zerop (value
))
6083 if (tree_fits_uhwi_p (DECL_SIZE (field
)))
6084 bitsize
= tree_to_uhwi (DECL_SIZE (field
));
6088 mode
= DECL_MODE (field
);
6089 if (DECL_BIT_FIELD (field
))
6092 offset
= DECL_FIELD_OFFSET (field
);
6093 if (tree_fits_shwi_p (offset
)
6094 && tree_fits_shwi_p (bit_position (field
)))
6096 bitpos
= int_bit_position (field
);
6100 bitpos
= tree_to_shwi (DECL_FIELD_BIT_OFFSET (field
));
6104 machine_mode address_mode
;
6108 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset
,
6109 make_tree (TREE_TYPE (exp
),
6112 offset_rtx
= expand_normal (offset
);
6113 gcc_assert (MEM_P (to_rtx
));
6115 address_mode
= get_address_mode (to_rtx
);
6116 if (GET_MODE (offset_rtx
) != address_mode
)
6117 offset_rtx
= convert_to_mode (address_mode
, offset_rtx
, 0);
6119 to_rtx
= offset_address (to_rtx
, offset_rtx
,
6120 highest_pow2_factor (offset
));
6123 #ifdef WORD_REGISTER_OPERATIONS
6124 /* If this initializes a field that is smaller than a
6125 word, at the start of a word, try to widen it to a full
6126 word. This special case allows us to output C++ member
6127 function initializations in a form that the optimizers
6130 && bitsize
< BITS_PER_WORD
6131 && bitpos
% BITS_PER_WORD
== 0
6132 && GET_MODE_CLASS (mode
) == MODE_INT
6133 && TREE_CODE (value
) == INTEGER_CST
6135 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
6137 tree type
= TREE_TYPE (value
);
6139 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
6141 type
= lang_hooks
.types
.type_for_mode
6142 (word_mode
, TYPE_UNSIGNED (type
));
6143 value
= fold_convert (type
, value
);
6146 if (BYTES_BIG_ENDIAN
)
6148 = fold_build2 (LSHIFT_EXPR
, type
, value
,
6149 build_int_cst (type
,
6150 BITS_PER_WORD
- bitsize
));
6151 bitsize
= BITS_PER_WORD
;
6156 if (MEM_P (to_rtx
) && !MEM_KEEP_ALIAS_SET_P (to_rtx
)
6157 && DECL_NONADDRESSABLE_P (field
))
6159 to_rtx
= copy_rtx (to_rtx
);
6160 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
6163 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
6165 get_alias_set (TREE_TYPE (field
)));
6172 unsigned HOST_WIDE_INT i
;
6175 tree elttype
= TREE_TYPE (type
);
6177 HOST_WIDE_INT minelt
= 0;
6178 HOST_WIDE_INT maxelt
= 0;
6180 domain
= TYPE_DOMAIN (type
);
6181 const_bounds_p
= (TYPE_MIN_VALUE (domain
)
6182 && TYPE_MAX_VALUE (domain
)
6183 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain
))
6184 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain
)));
6186 /* If we have constant bounds for the range of the type, get them. */
6189 minelt
= tree_to_shwi (TYPE_MIN_VALUE (domain
));
6190 maxelt
= tree_to_shwi (TYPE_MAX_VALUE (domain
));
6193 /* If the constructor has fewer elements than the array, clear
6194 the whole array first. Similarly if this is static
6195 constructor of a non-BLKmode object. */
6198 else if (REG_P (target
) && TREE_STATIC (exp
))
6202 unsigned HOST_WIDE_INT idx
;
6204 HOST_WIDE_INT count
= 0, zero_count
= 0;
6205 need_to_clear
= ! const_bounds_p
;
6207 /* This loop is a more accurate version of the loop in
6208 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6209 is also needed to check for missing elements. */
6210 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), idx
, index
, value
)
6212 HOST_WIDE_INT this_node_count
;
6217 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
6219 tree lo_index
= TREE_OPERAND (index
, 0);
6220 tree hi_index
= TREE_OPERAND (index
, 1);
6222 if (! tree_fits_uhwi_p (lo_index
)
6223 || ! tree_fits_uhwi_p (hi_index
))
6229 this_node_count
= (tree_to_uhwi (hi_index
)
6230 - tree_to_uhwi (lo_index
) + 1);
6233 this_node_count
= 1;
6235 count
+= this_node_count
;
6236 if (mostly_zeros_p (value
))
6237 zero_count
+= this_node_count
;
6240 /* Clear the entire array first if there are any missing
6241 elements, or if the incidence of zero elements is >=
6244 && (count
< maxelt
- minelt
+ 1
6245 || 4 * zero_count
>= 3 * count
))
6249 if (need_to_clear
&& size
> 0)
6252 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
6254 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
6258 if (!cleared
&& REG_P (target
))
6259 /* Inform later passes that the old value is dead. */
6260 emit_clobber (target
);
6262 /* Store each element of the constructor into the
6263 corresponding element of TARGET, determined by counting the
6265 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), i
, index
, value
)
6268 HOST_WIDE_INT bitsize
;
6269 HOST_WIDE_INT bitpos
;
6270 rtx xtarget
= target
;
6272 if (cleared
&& initializer_zerop (value
))
6275 mode
= TYPE_MODE (elttype
);
6276 if (mode
== BLKmode
)
6277 bitsize
= (tree_fits_uhwi_p (TYPE_SIZE (elttype
))
6278 ? tree_to_uhwi (TYPE_SIZE (elttype
))
6281 bitsize
= GET_MODE_BITSIZE (mode
);
6283 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
6285 tree lo_index
= TREE_OPERAND (index
, 0);
6286 tree hi_index
= TREE_OPERAND (index
, 1);
6287 rtx index_r
, pos_rtx
;
6288 HOST_WIDE_INT lo
, hi
, count
;
6291 /* If the range is constant and "small", unroll the loop. */
6293 && tree_fits_shwi_p (lo_index
)
6294 && tree_fits_shwi_p (hi_index
)
6295 && (lo
= tree_to_shwi (lo_index
),
6296 hi
= tree_to_shwi (hi_index
),
6297 count
= hi
- lo
+ 1,
6300 || (tree_fits_uhwi_p (TYPE_SIZE (elttype
))
6301 && (tree_to_uhwi (TYPE_SIZE (elttype
)) * count
6304 lo
-= minelt
; hi
-= minelt
;
6305 for (; lo
<= hi
; lo
++)
6307 bitpos
= lo
* tree_to_shwi (TYPE_SIZE (elttype
));
6310 && !MEM_KEEP_ALIAS_SET_P (target
)
6311 && TREE_CODE (type
) == ARRAY_TYPE
6312 && TYPE_NONALIASED_COMPONENT (type
))
6314 target
= copy_rtx (target
);
6315 MEM_KEEP_ALIAS_SET_P (target
) = 1;
6318 store_constructor_field
6319 (target
, bitsize
, bitpos
, mode
, value
, cleared
,
6320 get_alias_set (elttype
));
6325 rtx_code_label
*loop_start
= gen_label_rtx ();
6326 rtx_code_label
*loop_end
= gen_label_rtx ();
6329 expand_normal (hi_index
);
6331 index
= build_decl (EXPR_LOCATION (exp
),
6332 VAR_DECL
, NULL_TREE
, domain
);
6333 index_r
= gen_reg_rtx (promote_decl_mode (index
, NULL
));
6334 SET_DECL_RTL (index
, index_r
);
6335 store_expr (lo_index
, index_r
, 0, false);
6337 /* Build the head of the loop. */
6338 do_pending_stack_adjust ();
6339 emit_label (loop_start
);
6341 /* Assign value to element index. */
6343 fold_convert (ssizetype
,
6344 fold_build2 (MINUS_EXPR
,
6347 TYPE_MIN_VALUE (domain
)));
6350 size_binop (MULT_EXPR
, position
,
6351 fold_convert (ssizetype
,
6352 TYPE_SIZE_UNIT (elttype
)));
6354 pos_rtx
= expand_normal (position
);
6355 xtarget
= offset_address (target
, pos_rtx
,
6356 highest_pow2_factor (position
));
6357 xtarget
= adjust_address (xtarget
, mode
, 0);
6358 if (TREE_CODE (value
) == CONSTRUCTOR
)
6359 store_constructor (value
, xtarget
, cleared
,
6360 bitsize
/ BITS_PER_UNIT
);
6362 store_expr (value
, xtarget
, 0, false);
6364 /* Generate a conditional jump to exit the loop. */
6365 exit_cond
= build2 (LT_EXPR
, integer_type_node
,
6367 jumpif (exit_cond
, loop_end
, -1);
6369 /* Update the loop counter, and jump to the head of
6371 expand_assignment (index
,
6372 build2 (PLUS_EXPR
, TREE_TYPE (index
),
6373 index
, integer_one_node
),
6376 emit_jump (loop_start
);
6378 /* Build the end of the loop. */
6379 emit_label (loop_end
);
6382 else if ((index
!= 0 && ! tree_fits_shwi_p (index
))
6383 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype
)))
6388 index
= ssize_int (1);
6391 index
= fold_convert (ssizetype
,
6392 fold_build2 (MINUS_EXPR
,
6395 TYPE_MIN_VALUE (domain
)));
6398 size_binop (MULT_EXPR
, index
,
6399 fold_convert (ssizetype
,
6400 TYPE_SIZE_UNIT (elttype
)));
6401 xtarget
= offset_address (target
,
6402 expand_normal (position
),
6403 highest_pow2_factor (position
));
6404 xtarget
= adjust_address (xtarget
, mode
, 0);
6405 store_expr (value
, xtarget
, 0, false);
6410 bitpos
= ((tree_to_shwi (index
) - minelt
)
6411 * tree_to_uhwi (TYPE_SIZE (elttype
)));
6413 bitpos
= (i
* tree_to_uhwi (TYPE_SIZE (elttype
)));
6415 if (MEM_P (target
) && !MEM_KEEP_ALIAS_SET_P (target
)
6416 && TREE_CODE (type
) == ARRAY_TYPE
6417 && TYPE_NONALIASED_COMPONENT (type
))
6419 target
= copy_rtx (target
);
6420 MEM_KEEP_ALIAS_SET_P (target
) = 1;
6422 store_constructor_field (target
, bitsize
, bitpos
, mode
, value
,
6423 cleared
, get_alias_set (elttype
));
6431 unsigned HOST_WIDE_INT idx
;
6432 constructor_elt
*ce
;
6435 int icode
= CODE_FOR_nothing
;
6436 tree elttype
= TREE_TYPE (type
);
6437 int elt_size
= tree_to_uhwi (TYPE_SIZE (elttype
));
6438 machine_mode eltmode
= TYPE_MODE (elttype
);
6439 HOST_WIDE_INT bitsize
;
6440 HOST_WIDE_INT bitpos
;
6441 rtvec vector
= NULL
;
6443 alias_set_type alias
;
6445 gcc_assert (eltmode
!= BLKmode
);
6447 n_elts
= TYPE_VECTOR_SUBPARTS (type
);
6448 if (REG_P (target
) && VECTOR_MODE_P (GET_MODE (target
)))
6450 machine_mode mode
= GET_MODE (target
);
6452 icode
= (int) optab_handler (vec_init_optab
, mode
);
6453 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6454 if (icode
!= CODE_FOR_nothing
)
6458 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), idx
, value
)
6459 if (TREE_CODE (TREE_TYPE (value
)) == VECTOR_TYPE
)
6461 icode
= CODE_FOR_nothing
;
6465 if (icode
!= CODE_FOR_nothing
)
6469 vector
= rtvec_alloc (n_elts
);
6470 for (i
= 0; i
< n_elts
; i
++)
6471 RTVEC_ELT (vector
, i
) = CONST0_RTX (GET_MODE_INNER (mode
));
6475 /* If the constructor has fewer elements than the vector,
6476 clear the whole array first. Similarly if this is static
6477 constructor of a non-BLKmode object. */
6480 else if (REG_P (target
) && TREE_STATIC (exp
))
6484 unsigned HOST_WIDE_INT count
= 0, zero_count
= 0;
6487 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), idx
, value
)
6489 int n_elts_here
= tree_to_uhwi
6490 (int_const_binop (TRUNC_DIV_EXPR
,
6491 TYPE_SIZE (TREE_TYPE (value
)),
6492 TYPE_SIZE (elttype
)));
6494 count
+= n_elts_here
;
6495 if (mostly_zeros_p (value
))
6496 zero_count
+= n_elts_here
;
6499 /* Clear the entire vector first if there are any missing elements,
6500 or if the incidence of zero elements is >= 75%. */
6501 need_to_clear
= (count
< n_elts
|| 4 * zero_count
>= 3 * count
);
6504 if (need_to_clear
&& size
> 0 && !vector
)
6507 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
6509 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
6513 /* Inform later passes that the old value is dead. */
6514 if (!cleared
&& !vector
&& REG_P (target
))
6515 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
6518 alias
= MEM_ALIAS_SET (target
);
6520 alias
= get_alias_set (elttype
);
6522 /* Store each element of the constructor into the corresponding
6523 element of TARGET, determined by counting the elements. */
6524 for (idx
= 0, i
= 0;
6525 vec_safe_iterate (CONSTRUCTOR_ELTS (exp
), idx
, &ce
);
6526 idx
++, i
+= bitsize
/ elt_size
)
6528 HOST_WIDE_INT eltpos
;
6529 tree value
= ce
->value
;
6531 bitsize
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value
)));
6532 if (cleared
&& initializer_zerop (value
))
6536 eltpos
= tree_to_uhwi (ce
->index
);
6542 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6544 gcc_assert (TREE_CODE (TREE_TYPE (value
)) != VECTOR_TYPE
);
6545 RTVEC_ELT (vector
, eltpos
)
6546 = expand_normal (value
);
6550 machine_mode value_mode
=
6551 TREE_CODE (TREE_TYPE (value
)) == VECTOR_TYPE
6552 ? TYPE_MODE (TREE_TYPE (value
))
6554 bitpos
= eltpos
* elt_size
;
6555 store_constructor_field (target
, bitsize
, bitpos
, value_mode
,
6556 value
, cleared
, alias
);
6561 emit_insn (GEN_FCN (icode
)
6563 gen_rtx_PARALLEL (GET_MODE (target
), vector
)));
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   BITREGION_START is bitpos of the first bitfield in this region.
   BITREGION_END is the bitpos of the ending bitfield in this region.
   These two fields are 0, if the C++ memory model does not apply,
   or we are not interested in keeping track of bitfield regions.

   Always return const0_rtx unless we have something particular to
   return.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.

   If NONTEMPORAL is true, try generating a nontemporal store.  */

static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
	     unsigned HOST_WIDE_INT bitregion_start,
	     unsigned HOST_WIDE_INT bitregion_end,
	     machine_mode mode, tree exp,
	     alias_set_type alias_set, bool nontemporal)
{
  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6606 if (GET_CODE (target
) == CONCAT
)
6608 /* We're storing into a struct containing a single __complex. */
6610 gcc_assert (!bitpos
);
6611 return store_expr (exp
, target
, 0, nontemporal
);
6614 /* If the structure is in a register or if the component
6615 is a bit field, we cannot use addressing to access it.
6616 Use bit-field techniques or SUBREG to store in it. */
6618 if (mode
== VOIDmode
6619 || (mode
!= BLKmode
&& ! direct_store
[(int) mode
]
6620 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
6621 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
)
6623 || GET_CODE (target
) == SUBREG
6624 /* If the field isn't aligned enough to store as an ordinary memref,
6625 store it as a bit field. */
6627 && ((((MEM_ALIGN (target
) < GET_MODE_ALIGNMENT (mode
))
6628 || bitpos
% GET_MODE_ALIGNMENT (mode
))
6629 && SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
)))
6630 || (bitpos
% BITS_PER_UNIT
!= 0)))
6631 || (bitsize
>= 0 && mode
!= BLKmode
6632 && GET_MODE_BITSIZE (mode
) > bitsize
)
6633 /* If the RHS and field are a constant size and the size of the
6634 RHS isn't the same size as the bitfield, we must use bitfield
6637 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) == INTEGER_CST
6638 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp
)), bitsize
) != 0)
6639 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6640 decl we must use bitfield operations. */
6642 && TREE_CODE (exp
) == MEM_REF
6643 && TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
6644 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
6645 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp
, 0),0 ))
6646 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)) != BLKmode
))
6651 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6652 implies a mask operation. If the precision is the same size as
6653 the field we're storing into, that mask is redundant. This is
6654 particularly common with bit field assignments generated by the
6656 nop_def
= get_def_for_expr (exp
, NOP_EXPR
);
6659 tree type
= TREE_TYPE (exp
);
6660 if (INTEGRAL_TYPE_P (type
)
6661 && TYPE_PRECISION (type
) < GET_MODE_BITSIZE (TYPE_MODE (type
))
6662 && bitsize
== TYPE_PRECISION (type
))
6664 tree op
= gimple_assign_rhs1 (nop_def
);
6665 type
= TREE_TYPE (op
);
6666 if (INTEGRAL_TYPE_P (type
) && TYPE_PRECISION (type
) >= bitsize
)
6671 temp
= expand_normal (exp
);
6673 /* If BITSIZE is narrower than the size of the type of EXP
6674 we will be narrowing TEMP. Normally, what's wanted are the
6675 low-order bits. However, if EXP's type is a record and this is
6676 big-endian machine, we want the upper BITSIZE bits. */
6677 if (BYTES_BIG_ENDIAN
&& GET_MODE_CLASS (GET_MODE (temp
)) == MODE_INT
6678 && bitsize
< (HOST_WIDE_INT
) GET_MODE_BITSIZE (GET_MODE (temp
))
6679 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
)
6680 temp
= expand_shift (RSHIFT_EXPR
, GET_MODE (temp
), temp
,
6681 GET_MODE_BITSIZE (GET_MODE (temp
)) - bitsize
,
6684 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6685 if (mode
!= VOIDmode
&& mode
!= BLKmode
6686 && mode
!= TYPE_MODE (TREE_TYPE (exp
)))
6687 temp
= convert_modes (mode
, TYPE_MODE (TREE_TYPE (exp
)), temp
, 1);
6689 /* If TEMP is not a PARALLEL (see below) and its mode and that of TARGET
6690 are both BLKmode, both must be in memory and BITPOS must be aligned
6691 on a byte boundary. If so, we simply do a block copy. Likewise for
6692 a BLKmode-like TARGET. */
6693 if (GET_CODE (temp
) != PARALLEL
6694 && GET_MODE (temp
) == BLKmode
6695 && (GET_MODE (target
) == BLKmode
6697 && GET_MODE_CLASS (GET_MODE (target
)) == MODE_INT
6698 && (bitpos
% BITS_PER_UNIT
) == 0
6699 && (bitsize
% BITS_PER_UNIT
) == 0)))
6701 gcc_assert (MEM_P (target
) && MEM_P (temp
)
6702 && (bitpos
% BITS_PER_UNIT
) == 0);
6704 target
= adjust_address (target
, VOIDmode
, bitpos
/ BITS_PER_UNIT
);
6705 emit_block_move (target
, temp
,
6706 GEN_INT ((bitsize
+ BITS_PER_UNIT
- 1)
6713 /* Handle calls that return values in multiple non-contiguous locations.
6714 The Irix 6 ABI has examples of this. */
6715 if (GET_CODE (temp
) == PARALLEL
)
6717 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
6719 if (mode
== BLKmode
|| mode
== VOIDmode
)
6720 mode
= smallest_mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
);
6721 temp_target
= gen_reg_rtx (mode
);
6722 emit_group_store (temp_target
, temp
, TREE_TYPE (exp
), size
);
6725 else if (mode
== BLKmode
)
6727 /* Handle calls that return BLKmode values in registers. */
6728 if (REG_P (temp
) && TREE_CODE (exp
) == CALL_EXPR
)
6730 rtx temp_target
= gen_reg_rtx (GET_MODE (temp
));
6731 copy_blkmode_from_reg (temp_target
, temp
, TREE_TYPE (exp
));
6736 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
6738 mode
= smallest_mode_for_size (size
* BITS_PER_UNIT
, MODE_INT
);
6739 temp_target
= gen_reg_rtx (mode
);
6741 = extract_bit_field (temp
, size
* BITS_PER_UNIT
, 0, 1,
6742 temp_target
, mode
, mode
);
6747 /* Store the value in the bitfield. */
6748 store_bit_field (target
, bitsize
, bitpos
,
6749 bitregion_start
, bitregion_end
,
6756 /* Now build a reference to just the desired component. */
6757 rtx to_rtx
= adjust_address (target
, mode
, bitpos
/ BITS_PER_UNIT
);
6759 if (to_rtx
== target
)
6760 to_rtx
= copy_rtx (to_rtx
);
6762 if (!MEM_KEEP_ALIAS_SET_P (to_rtx
) && MEM_ALIAS_SET (to_rtx
) != 0)
6763 set_mem_alias_set (to_rtx
, alias_set
);
6765 return store_expr (exp
, to_rtx
, 0, nontemporal
);
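
/* Illustrative sketch (not part of the original source): how a caller in
   this file, such as expand_assignment or store_constructor, typically
   stores an expression into one component of an aggregate with
   store_field.  DEST and FIELD_EXP below are hypothetical placeholders.  */
#if 0
{
  rtx dest = NULL_RTX;        /* Hypothetical MEM for the containing object.  */
  tree field_exp = NULL_TREE; /* Hypothetical RHS tree to store.  */

  /* Store FIELD_EXP into an 8-bit field located 16 bits into DEST,
     using the field's own alias set and no bit-region information.  */
  store_field (dest, /*bitsize=*/8, /*bitpos=*/16,
	       /*bitregion_start=*/0, /*bitregion_end=*/0,
	       QImode, field_exp,
	       get_alias_set (TREE_TYPE (field_exp)),
	       /*nontemporal=*/false);
}
#endif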
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
   Otherwise, it is a mode that can be used to access the field.

   If the field describes a variable-sized object, *PMODE is set to
   BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.

   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
   look through nodes that serve as markers of a greater alignment than
   the one that can be deduced from the expression.  These nodes make it
   possible for front-ends to prevent temporaries from being created by
   the middle-end on alignment considerations.  For that purpose, the
   normal operating mode at high-level is to always pass FALSE so that
   the ultimate containing object is really returned; moreover, the
   associated predicate handled_component_p will always return TRUE
   on these nodes, thus indicating that they are essentially handled
   by get_inner_reference.  TRUE should only be passed when the caller
   is scanning the expression in order to build another representation
   and specifically knows how to handle these nodes; as such, this is
   the normal operating mode in the RTL expanders.  */

tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
		     HOST_WIDE_INT *pbitpos, tree *poffset,
		     machine_mode *pmode, int *punsignedp,
		     int *pvolatilep, bool keep_aligning)
{
  tree size_tree = 0;
  machine_mode mode = VOIDmode;
  bool blkmode_bitfield = false;
  tree offset = size_zero_node;
  offset_int bit_offset = 0;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
6819 if (TREE_CODE (exp
) == COMPONENT_REF
)
6821 tree field
= TREE_OPERAND (exp
, 1);
6822 size_tree
= DECL_SIZE (field
);
6823 if (flag_strict_volatile_bitfields
> 0
6824 && TREE_THIS_VOLATILE (exp
)
6825 && DECL_BIT_FIELD_TYPE (field
)
6826 && DECL_MODE (field
) != BLKmode
)
6827 /* Volatile bitfields should be accessed in the mode of the
6828 field's type, not the mode computed based on the bit
6830 mode
= TYPE_MODE (DECL_BIT_FIELD_TYPE (field
));
6831 else if (!DECL_BIT_FIELD (field
))
6832 mode
= DECL_MODE (field
);
6833 else if (DECL_MODE (field
) == BLKmode
)
6834 blkmode_bitfield
= true;
6836 *punsignedp
= DECL_UNSIGNED (field
);
6838 else if (TREE_CODE (exp
) == BIT_FIELD_REF
)
6840 size_tree
= TREE_OPERAND (exp
, 1);
6841 *punsignedp
= (! INTEGRAL_TYPE_P (TREE_TYPE (exp
))
6842 || TYPE_UNSIGNED (TREE_TYPE (exp
)));
6844 /* For vector types, with the correct size of access, use the mode of
6846 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == VECTOR_TYPE
6847 && TREE_TYPE (exp
) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0)))
6848 && tree_int_cst_equal (size_tree
, TYPE_SIZE (TREE_TYPE (exp
))))
6849 mode
= TYPE_MODE (TREE_TYPE (exp
));
6853 mode
= TYPE_MODE (TREE_TYPE (exp
));
6854 *punsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
6856 if (mode
== BLKmode
)
6857 size_tree
= TYPE_SIZE (TREE_TYPE (exp
));
6859 *pbitsize
= GET_MODE_BITSIZE (mode
);
6864 if (! tree_fits_uhwi_p (size_tree
))
6865 mode
= BLKmode
, *pbitsize
= -1;
6867 *pbitsize
= tree_to_uhwi (size_tree
);
6870 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6871 and find the ultimate containing object. */
6874 switch (TREE_CODE (exp
))
6877 bit_offset
+= wi::to_offset (TREE_OPERAND (exp
, 2));
6882 tree field
= TREE_OPERAND (exp
, 1);
6883 tree this_offset
= component_ref_field_offset (exp
);
6885 /* If this field hasn't been filled in yet, don't go past it.
6886 This should only happen when folding expressions made during
6887 type construction. */
6888 if (this_offset
== 0)
6891 offset
= size_binop (PLUS_EXPR
, offset
, this_offset
);
6892 bit_offset
+= wi::to_offset (DECL_FIELD_BIT_OFFSET (field
));
6894 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6899 case ARRAY_RANGE_REF
:
6901 tree index
= TREE_OPERAND (exp
, 1);
6902 tree low_bound
= array_ref_low_bound (exp
);
6903 tree unit_size
= array_ref_element_size (exp
);
6905 /* We assume all arrays have sizes that are a multiple of a byte.
6906 First subtract the lower bound, if any, in the type of the
6907 index, then convert to sizetype and multiply by the size of
6908 the array element. */
6909 if (! integer_zerop (low_bound
))
6910 index
= fold_build2 (MINUS_EXPR
, TREE_TYPE (index
),
6913 offset
= size_binop (PLUS_EXPR
, offset
,
6914 size_binop (MULT_EXPR
,
6915 fold_convert (sizetype
, index
),
6924 bit_offset
+= *pbitsize
;
6927 case VIEW_CONVERT_EXPR
:
6928 if (keep_aligning
&& STRICT_ALIGNMENT
6929 && (TYPE_ALIGN (TREE_TYPE (exp
))
6930 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0))))
6931 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp
, 0)))
6932 < BIGGEST_ALIGNMENT
)
6933 && (TYPE_ALIGN_OK (TREE_TYPE (exp
))
6934 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
6939 /* Hand back the decl for MEM[&decl, off]. */
6940 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
)
6942 tree off
= TREE_OPERAND (exp
, 1);
6943 if (!integer_zerop (off
))
6945 offset_int boff
, coff
= mem_ref_offset (exp
);
6946 boff
= wi::lshift (coff
, LOG2_BITS_PER_UNIT
);
6949 exp
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
6957 /* If any reference in the chain is volatile, the effect is volatile. */
6958 if (TREE_THIS_VOLATILE (exp
))
6961 exp
= TREE_OPERAND (exp
, 0);
6965 /* If OFFSET is constant, see if we can return the whole thing as a
6966 constant bit position. Make sure to handle overflow during
6968 if (TREE_CODE (offset
) == INTEGER_CST
)
6970 offset_int tem
= wi::sext (wi::to_offset (offset
),
6971 TYPE_PRECISION (sizetype
));
6972 tem
= wi::lshift (tem
, LOG2_BITS_PER_UNIT
);
6974 if (wi::fits_shwi_p (tem
))
6976 *pbitpos
= tem
.to_shwi ();
6977 *poffset
= offset
= NULL_TREE
;
6981 /* Otherwise, split it up. */
6984 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6985 if (wi::neg_p (bit_offset
) || !wi::fits_shwi_p (bit_offset
))
6987 offset_int mask
= wi::mask
<offset_int
> (LOG2_BITS_PER_UNIT
, false);
6988 offset_int tem
= bit_offset
.and_not (mask
);
6989 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6990 Subtract it to BIT_OFFSET and add it (scaled) to OFFSET. */
6992 tem
= wi::arshift (tem
, LOG2_BITS_PER_UNIT
);
6993 offset
= size_binop (PLUS_EXPR
, offset
,
6994 wide_int_to_tree (sizetype
, tem
));
6997 *pbitpos
= bit_offset
.to_shwi ();
7001 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
7002 if (mode
== VOIDmode
7004 && (*pbitpos
% BITS_PER_UNIT
) == 0
7005 && (*pbitsize
% BITS_PER_UNIT
) == 0)
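
/* Illustrative sketch (not part of the original source): decomposing a
   reference with get_inner_reference.  REF is a hypothetical
   COMPONENT_REF/ARRAY_REF tree; the call fills in the size, position,
   variable offset, mode, signedness and volatility of the access and
   returns the ultimate containing object.  */
#if 0
{
  tree ref = NULL_TREE;   /* Hypothetical handled_component_p reference.  */
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode1;
  int unsignedp, volatilep = 0;

  tree base = get_inner_reference (ref, &bitsize, &bitpos, &offset,
				   &mode1, &unsignedp, &volatilep,
				   /*keep_aligning=*/false);
  /* BASE is the outermost object; the access covers BITSIZE bits at
     BITPOS, plus OFFSET (in units) when OFFSET is non-null.  */
}
#endif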
/* Alignment in bits the TARGET of an assignment may be assumed to have.  */

static unsigned HOST_WIDE_INT
target_align (const_tree target)
{
  /* We might have a chain of nested references with intermediate misaligning
     bitfields components, so need to recurse to find out.  */

  unsigned HOST_WIDE_INT this_align, outer_align;

  switch (TREE_CODE (target))
    {
    case BIT_FIELD_REF:
      return 1;

    case COMPONENT_REF:
      this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MAX (this_align, outer_align);

    default:
      return TYPE_ALIGN (TREE_TYPE (target));
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);
7067 /* Check for subreg applied to an expression produced by loop optimizer. */
7069 && !REG_P (SUBREG_REG (value
))
7070 && !MEM_P (SUBREG_REG (value
)))
7073 = simplify_gen_subreg (GET_MODE (value
),
7074 force_reg (GET_MODE (SUBREG_REG (value
)),
7075 force_operand (SUBREG_REG (value
),
7077 GET_MODE (SUBREG_REG (value
)),
7078 SUBREG_BYTE (value
));
7079 code
= GET_CODE (value
);
7082 /* Check for a PIC address load. */
7083 if ((code
== PLUS
|| code
== MINUS
)
7084 && XEXP (value
, 0) == pic_offset_table_rtx
7085 && (GET_CODE (XEXP (value
, 1)) == SYMBOL_REF
7086 || GET_CODE (XEXP (value
, 1)) == LABEL_REF
7087 || GET_CODE (XEXP (value
, 1)) == CONST
))
7090 subtarget
= gen_reg_rtx (GET_MODE (value
));
7091 emit_move_insn (subtarget
, value
);
7095 if (ARITHMETIC_P (value
))
7097 op2
= XEXP (value
, 1);
7098 if (!CONSTANT_P (op2
) && !(REG_P (op2
) && op2
!= subtarget
))
7100 if (code
== MINUS
&& CONST_INT_P (op2
))
7103 op2
= negate_rtx (GET_MODE (value
), op2
);
7106 /* Check for an addition with OP2 a constant integer and our first
7107 operand a PLUS of a virtual register and something else. In that
7108 case, we want to emit the sum of the virtual register and the
7109 constant first and then add the other value. This allows virtual
7110 register instantiation to simply modify the constant rather than
7111 creating another one around this addition. */
7112 if (code
== PLUS
&& CONST_INT_P (op2
)
7113 && GET_CODE (XEXP (value
, 0)) == PLUS
7114 && REG_P (XEXP (XEXP (value
, 0), 0))
7115 && REGNO (XEXP (XEXP (value
, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7116 && REGNO (XEXP (XEXP (value
, 0), 0)) <= LAST_VIRTUAL_REGISTER
)
7118 rtx temp
= expand_simple_binop (GET_MODE (value
), code
,
7119 XEXP (XEXP (value
, 0), 0), op2
,
7120 subtarget
, 0, OPTAB_LIB_WIDEN
);
7121 return expand_simple_binop (GET_MODE (value
), code
, temp
,
7122 force_operand (XEXP (XEXP (value
,
7124 target
, 0, OPTAB_LIB_WIDEN
);
7127 op1
= force_operand (XEXP (value
, 0), subtarget
);
7128 op2
= force_operand (op2
, NULL_RTX
);
7132 return expand_mult (GET_MODE (value
), op1
, op2
, target
, 1);
7134 if (!INTEGRAL_MODE_P (GET_MODE (value
)))
7135 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
7136 target
, 1, OPTAB_LIB_WIDEN
);
7138 return expand_divmod (0,
7139 FLOAT_MODE_P (GET_MODE (value
))
7140 ? RDIV_EXPR
: TRUNC_DIV_EXPR
,
7141 GET_MODE (value
), op1
, op2
, target
, 0);
7143 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
7146 return expand_divmod (0, TRUNC_DIV_EXPR
, GET_MODE (value
), op1
, op2
,
7149 return expand_divmod (1, TRUNC_MOD_EXPR
, GET_MODE (value
), op1
, op2
,
7152 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
7153 target
, 0, OPTAB_LIB_WIDEN
);
7155 return expand_simple_binop (GET_MODE (value
), code
, op1
, op2
,
7156 target
, 1, OPTAB_LIB_WIDEN
);
7159 if (UNARY_P (value
))
7162 target
= gen_reg_rtx (GET_MODE (value
));
7163 op1
= force_operand (XEXP (value
, 0), NULL_RTX
);
7170 case FLOAT_TRUNCATE
:
7171 convert_move (target
, op1
, code
== ZERO_EXTEND
);
7176 expand_fix (target
, op1
, code
== UNSIGNED_FIX
);
7180 case UNSIGNED_FLOAT
:
7181 expand_float (target
, op1
, code
== UNSIGNED_FLOAT
);
7185 return expand_simple_unop (GET_MODE (value
), code
, op1
, target
, 0);
7189 #ifdef INSN_SCHEDULING
7190 /* On machines that have insn scheduling, we want all memory reference to be
7191 explicit, so we need to deal with such paradoxical SUBREGs. */
7192 if (paradoxical_subreg_p (value
) && MEM_P (SUBREG_REG (value
)))
7194 = simplify_gen_subreg (GET_MODE (value
),
7195 force_reg (GET_MODE (SUBREG_REG (value
)),
7196 force_operand (SUBREG_REG (value
),
7198 GET_MODE (SUBREG_REG (value
)),
7199 SUBREG_BYTE (value
));
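
/* Illustrative sketch (not part of the original source): forcing an
   arithmetic address computation into an operand.  PTR is a hypothetical
   pseudo holding a pointer; the PLUS built below is generally not a valid
   operand by itself, so force_operand emits the addition and hands back a
   pseudo (or simpler rtx) holding the result.  */
#if 0
{
  rtx ptr = NULL_RTX;   /* Hypothetical pointer-valued pseudo.  */
  rtx sum = gen_rtx_PLUS (Pmode, ptr, GEN_INT (16));
  rtx addr = force_operand (sum, NULL_RTX);
  /* ADDR is now a REG, SUBREG, MEM or constant usable as an operand.  */
}
#endif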
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

static int
safe_from_p (const_rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;
7220 /* If EXP has varying size, we MUST use a target since we currently
7221 have no way of allocating temporaries of variable size
7222 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7223 So we assume here that something at a higher level has prevented a
7224 clash. This is somewhat bogus, but the best we can do. Only
7225 do this when X is BLKmode and when we are at the top level. */
7226 || (top_p
&& TREE_TYPE (exp
) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp
))
7227 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp
))) != INTEGER_CST
7228 && (TREE_CODE (TREE_TYPE (exp
)) != ARRAY_TYPE
7229 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)) == NULL_TREE
7230 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp
)))
7232 && GET_MODE (x
) == BLKmode
)
7233 /* If X is in the outgoing argument area, it is always safe. */
7235 && (XEXP (x
, 0) == virtual_outgoing_args_rtx
7236 || (GET_CODE (XEXP (x
, 0)) == PLUS
7237 && XEXP (XEXP (x
, 0), 0) == virtual_outgoing_args_rtx
))))
7240 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7241 find the underlying pseudo. */
7242 if (GET_CODE (x
) == SUBREG
)
7245 if (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
7249 /* Now look at our tree code and possibly recurse. */
7250 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
7252 case tcc_declaration
:
7253 exp_rtl
= DECL_RTL_IF_SET (exp
);
7259 case tcc_exceptional
:
7260 if (TREE_CODE (exp
) == TREE_LIST
)
7264 if (TREE_VALUE (exp
) && !safe_from_p (x
, TREE_VALUE (exp
), 0))
7266 exp
= TREE_CHAIN (exp
);
7269 if (TREE_CODE (exp
) != TREE_LIST
)
7270 return safe_from_p (x
, exp
, 0);
7273 else if (TREE_CODE (exp
) == CONSTRUCTOR
)
7275 constructor_elt
*ce
;
7276 unsigned HOST_WIDE_INT idx
;
7278 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp
), idx
, ce
)
7279 if ((ce
->index
!= NULL_TREE
&& !safe_from_p (x
, ce
->index
, 0))
7280 || !safe_from_p (x
, ce
->value
, 0))
7284 else if (TREE_CODE (exp
) == ERROR_MARK
)
7285 return 1; /* An already-visited SAVE_EXPR? */
7290 /* The only case we look at here is the DECL_INITIAL inside a
7292 return (TREE_CODE (exp
) != DECL_EXPR
7293 || TREE_CODE (DECL_EXPR_DECL (exp
)) != VAR_DECL
7294 || !DECL_INITIAL (DECL_EXPR_DECL (exp
))
7295 || safe_from_p (x
, DECL_INITIAL (DECL_EXPR_DECL (exp
)), 0));
7298 case tcc_comparison
:
7299 if (!safe_from_p (x
, TREE_OPERAND (exp
, 1), 0))
7304 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
7306 case tcc_expression
:
7309 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7310 the expression. If it is set, we conflict iff we are that rtx or
7311 both are in memory. Otherwise, we check all operands of the
7312 expression recursively. */
7314 switch (TREE_CODE (exp
))
7317 /* If the operand is static or we are static, we can't conflict.
7318 Likewise if we don't conflict with the operand at all. */
7319 if (staticp (TREE_OPERAND (exp
, 0))
7320 || TREE_STATIC (exp
)
7321 || safe_from_p (x
, TREE_OPERAND (exp
, 0), 0))
7324 /* Otherwise, the only way this can conflict is if we are taking
7325 the address of a DECL a that address if part of X, which is
7327 exp
= TREE_OPERAND (exp
, 0);
7330 if (!DECL_RTL_SET_P (exp
)
7331 || !MEM_P (DECL_RTL (exp
)))
7334 exp_rtl
= XEXP (DECL_RTL (exp
), 0);
7340 && alias_sets_conflict_p (MEM_ALIAS_SET (x
),
7341 get_alias_set (exp
)))
7346 /* Assume that the call will clobber all hard registers and
7348 if ((REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
7353 case WITH_CLEANUP_EXPR
:
7354 case CLEANUP_POINT_EXPR
:
7355 /* Lowered by gimplify.c. */
7359 return safe_from_p (x
, TREE_OPERAND (exp
, 0), 0);
7365 /* If we have an rtx, we do not need to scan our operands. */
7369 nops
= TREE_OPERAND_LENGTH (exp
);
7370 for (i
= 0; i
< nops
; i
++)
7371 if (TREE_OPERAND (exp
, i
) != 0
7372 && ! safe_from_p (x
, TREE_OPERAND (exp
, i
), 0))
7378 /* Should never get a type here. */
7382 /* If we have an rtl, find any enclosed object. Then see if we conflict
7386 if (GET_CODE (exp_rtl
) == SUBREG
)
7388 exp_rtl
= SUBREG_REG (exp_rtl
);
7390 && REGNO (exp_rtl
) < FIRST_PSEUDO_REGISTER
)
7394 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7395 are memory and they conflict. */
7396 return ! (rtx_equal_p (x
, exp_rtl
)
7397 || (MEM_P (x
) && MEM_P (exp_rtl
)
7398 && true_dependence (exp_rtl
, VOIDmode
, x
)));
7401 /* If we reach here, it is safe. */
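
/* Illustrative sketch (not part of the original source): the usual way
   this predicate is consulted before suggesting TARGET as a home for one
   operand while another operand still has to be expanded (compare
   expand_operands below).  TARGET and EXP1 are hypothetical placeholders.  */
#if 0
{
  rtx target = NULL_RTX;   /* Hypothetical suggested target.  */
  tree exp1 = NULL_TREE;   /* Hypothetical second operand.  */

  if (! safe_from_p (target, exp1, 1))
    /* Expanding EXP1 might clobber TARGET, so don't suggest it.  */
    target = 0;
}
#endif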
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (const_tree exp)
{
  unsigned HOST_WIDE_INT ret;
  int trailing_zeros = tree_ctz (exp);
  if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
    return BIGGEST_ALIGNMENT;
  ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
  if (ret > BIGGEST_ALIGNMENT)
    return BIGGEST_ALIGNMENT;
  return ret;
}
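
/* Illustrative sketch (not part of the original source): the factor is
   used as an alignment hint when forming an address from a variable byte
   offset, as in the offset_address calls above.  OFFSET and TO_RTX are
   hypothetical placeholders.  */
#if 0
{
  tree offset = NULL_TREE;   /* Hypothetical byte offset as a tree.  */
  rtx to_rtx = NULL_RTX;     /* Hypothetical MEM being addressed.  */

  rtx offset_rtx = expand_normal (offset);
  to_rtx = offset_address (to_rtx, offset_rtx,
			   highest_pow2_factor (offset));
}
#endif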
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree target, const_tree exp)
{
  unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
  unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);

  return MAX (factor, talign);
}
/* Convert the tree comparison code TCODE to the rtl one where the
   signedness is UNSIGNEDP.  */

static enum rtx_code
convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
{
  enum rtx_code code;
  switch (tcode)
    {
    /* ... (equality and other cases elided) ...  */
    case LT_EXPR:
      code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    /* ... (remaining unordered cases and the default elided) ...  */
    }
  return code;
}
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
	 temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
	exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
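
/* Illustrative sketch (not part of the original source): expanding the
   two operands of a binary operation and then emitting the operation
   itself, roughly as expand_expr_real_2 does further below.  TREEOP0,
   TREEOP1, MODE and TARGET are hypothetical placeholders.  */
#if 0
{
  tree treeop0 = NULL_TREE, treeop1 = NULL_TREE;  /* Hypothetical operands.  */
  machine_mode mode = SImode;                     /* Hypothetical mode.  */
  rtx target = NULL_RTX, op0, op1;
  rtx subtarget = get_subtarget (target);

  expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
  rtx res = expand_simple_binop (mode, PLUS, op0, op1,
				 target, /*unsignedp=*/0, OPTAB_LIB_WIDEN);
}
#endif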
/* Return a MEM that contains constant EXP.  DEFER is as for
   output_constant_def and MODIFIER is as for expand_expr.  */

static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
  rtx mem;

  mem = output_constant_def (exp, defer);
  if (modifier != EXPAND_INITIALIZER)
    mem = use_anchored_address (mem);

  return mem;
}
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
			 enum expand_modifier modifier, addr_space_t as)
{
  rtx result, subtarget;
  tree inner, offset;
  HOST_WIDE_INT bitsize, bitpos;
  int volatilep, unsignedp;
  machine_mode mode1;
7549 /* If we are taking the address of a constant and are at the top level,
7550 we have to use output_constant_def since we can't call force_const_mem
7552 /* ??? This should be considered a front-end bug. We should not be
7553 generating ADDR_EXPR of something that isn't an LVALUE. The only
7554 exception here is STRING_CST. */
7555 if (CONSTANT_CLASS_P (exp
))
7557 result
= XEXP (expand_expr_constant (exp
, 0, modifier
), 0);
7558 if (modifier
< EXPAND_SUM
)
7559 result
= force_operand (result
, target
);
7563 /* Everything must be something allowed by is_gimple_addressable. */
7564 switch (TREE_CODE (exp
))
7567 /* This case will happen via recursion for &a->b. */
7568 return expand_expr (TREE_OPERAND (exp
, 0), target
, tmode
, modifier
);
7572 tree tem
= TREE_OPERAND (exp
, 0);
7573 if (!integer_zerop (TREE_OPERAND (exp
, 1)))
7574 tem
= fold_build_pointer_plus (tem
, TREE_OPERAND (exp
, 1));
7575 return expand_expr (tem
, target
, tmode
, modifier
);
7579 /* Expand the initializer like constants above. */
7580 result
= XEXP (expand_expr_constant (DECL_INITIAL (exp
),
7582 if (modifier
< EXPAND_SUM
)
7583 result
= force_operand (result
, target
);
7587 /* The real part of the complex number is always first, therefore
7588 the address is the same as the address of the parent object. */
7591 inner
= TREE_OPERAND (exp
, 0);
7595 /* The imaginary part of the complex number is always second.
7596 The expression is therefore always offset by the size of the
7599 bitpos
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp
)));
7600 inner
= TREE_OPERAND (exp
, 0);
7603 case COMPOUND_LITERAL_EXPR
:
7604 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7605 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7606 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7607 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7608 the initializers aren't gimplified. */
7609 if (COMPOUND_LITERAL_EXPR_DECL (exp
)
7610 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp
)))
7611 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp
),
7612 target
, tmode
, modifier
, as
);
7615 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7616 expand_expr, as that can have various side effects; LABEL_DECLs for
7617 example, may not have their DECL_RTL set yet. Expand the rtl of
7618 CONSTRUCTORs too, which should yield a memory reference for the
7619 constructor's contents. Assume language specific tree nodes can
7620 be expanded in some interesting way. */
7621 gcc_assert (TREE_CODE (exp
) < LAST_AND_UNUSED_TREE_CODE
);
7623 || TREE_CODE (exp
) == CONSTRUCTOR
7624 || TREE_CODE (exp
) == COMPOUND_LITERAL_EXPR
)
7626 result
= expand_expr (exp
, target
, tmode
,
7627 modifier
== EXPAND_INITIALIZER
7628 ? EXPAND_INITIALIZER
: EXPAND_CONST_ADDRESS
);
7630 /* If the DECL isn't in memory, then the DECL wasn't properly
7631 marked TREE_ADDRESSABLE, which will be either a front-end
7632 or a tree optimizer bug. */
7634 if (TREE_ADDRESSABLE (exp
)
7636 && ! targetm
.calls
.allocate_stack_slots_for_args ())
7638 error ("local frame unavailable (naked function?)");
7642 gcc_assert (MEM_P (result
));
7643 result
= XEXP (result
, 0);
7645 /* ??? Is this needed anymore? */
7647 TREE_USED (exp
) = 1;
7649 if (modifier
!= EXPAND_INITIALIZER
7650 && modifier
!= EXPAND_CONST_ADDRESS
7651 && modifier
!= EXPAND_SUM
)
7652 result
= force_operand (result
, target
);
7656 /* Pass FALSE as the last argument to get_inner_reference although
7657 we are expanding to RTL. The rationale is that we know how to
7658 handle "aligning nodes" here: we can just bypass them because
7659 they won't change the final object whose address will be returned
7660 (they actually exist only for that purpose). */
7661 inner
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
7662 &mode1
, &unsignedp
, &volatilep
, false);
7666 /* We must have made progress. */
7667 gcc_assert (inner
!= exp
);
7669 subtarget
= offset
|| bitpos
? NULL_RTX
: target
;
7670 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7671 inner alignment, force the inner to be sufficiently aligned. */
7672 if (CONSTANT_CLASS_P (inner
)
7673 && TYPE_ALIGN (TREE_TYPE (inner
)) < TYPE_ALIGN (TREE_TYPE (exp
)))
7675 inner
= copy_node (inner
);
7676 TREE_TYPE (inner
) = copy_node (TREE_TYPE (inner
));
7677 TYPE_ALIGN (TREE_TYPE (inner
)) = TYPE_ALIGN (TREE_TYPE (exp
));
7678 TYPE_USER_ALIGN (TREE_TYPE (inner
)) = 1;
7680 result
= expand_expr_addr_expr_1 (inner
, subtarget
, tmode
, modifier
, as
);
7686 if (modifier
!= EXPAND_NORMAL
)
7687 result
= force_operand (result
, NULL
);
7688 tmp
= expand_expr (offset
, NULL_RTX
, tmode
,
7689 modifier
== EXPAND_INITIALIZER
7690 ? EXPAND_INITIALIZER
: EXPAND_NORMAL
);
7692 /* expand_expr is allowed to return an object in a mode other
7693 than TMODE. If it did, we need to convert. */
7694 if (GET_MODE (tmp
) != VOIDmode
&& tmode
!= GET_MODE (tmp
))
7695 tmp
= convert_modes (tmode
, GET_MODE (tmp
),
7696 tmp
, TYPE_UNSIGNED (TREE_TYPE (offset
)));
7697 result
= convert_memory_address_addr_space (tmode
, result
, as
);
7698 tmp
= convert_memory_address_addr_space (tmode
, tmp
, as
);
7700 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
7701 result
= simplify_gen_binary (PLUS
, tmode
, result
, tmp
);
7704 subtarget
= bitpos
? NULL_RTX
: target
;
7705 result
= expand_simple_binop (tmode
, PLUS
, result
, tmp
, subtarget
,
7706 1, OPTAB_LIB_WIDEN
);
7712 /* Someone beforehand should have rejected taking the address
7713 of such an object. */
7714 gcc_assert ((bitpos
% BITS_PER_UNIT
) == 0);
7716 result
= convert_memory_address_addr_space (tmode
, result
, as
);
7717 result
= plus_constant (tmode
, result
, bitpos
/ BITS_PER_UNIT
);
7718 if (modifier
< EXPAND_SUM
)
7719 result
= force_operand (result
, target
);
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
		       enum expand_modifier modifier)
{
  addr_space_t as = ADDR_SPACE_GENERIC;
  machine_mode address_mode = Pmode;
  machine_mode pointer_mode = ptr_mode;
  machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  if (POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      address_mode = targetm.addr_space.address_mode (as);
      pointer_mode = targetm.addr_space.pointer_mode (as);
    }

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  if (tmode != address_mode && tmode != pointer_mode)
    tmode = address_mode;

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
				    tmode, modifier, as);

  /* Despite expand_expr claims concerning ignoring TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = tmode;
  if (rmode != tmode)
    result = convert_memory_address_addr_space (tmode, result, as);

  return result;
}
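
/* Illustrative sketch (not part of the original source): how an address
   is obtained for a tree object.  DECL is a hypothetical TREE_ADDRESSABLE
   VAR_DECL; build_fold_addr_expr wraps it in an ADDR_EXPR, and expand_expr
   routes that through expand_expr_addr_expr above.  */
#if 0
{
  tree decl = NULL_TREE;   /* Hypothetical addressable VAR_DECL.  */
  tree addr = build_fold_addr_expr (decl);
  rtx addr_rtx = expand_expr (addr, NULL_RTX, Pmode, EXPAND_NORMAL);
}
#endif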
/* Generate code for computing CONSTRUCTOR EXP.
   An rtx for the computed value is returned.  If AVOID_TEMP_MEM
   is TRUE, instead of creating a temporary variable in memory
   NULL is returned and the caller needs to handle it differently.  */

static rtx
expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
		    bool avoid_temp_mem)
{
  tree type = TREE_TYPE (exp);
  machine_mode mode = TYPE_MODE (type);

  /* Try to avoid creating a temporary at all.  This is possible
     if all of the initializer is zero.
     FIXME: try to handle all [0..255] initializers we can handle
     with memset.  */
  if (TREE_STATIC (exp)
      && !TREE_ADDRESSABLE (exp)
      && target != 0 && mode == BLKmode
      && all_zeros_p (exp))
    {
      clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
      return target;
    }

  /* All elts simple constants => refer to a constant in memory.  But
     if this is a non-BLKmode mode, let it store a field at a time
     since that should make a CONST_INT, CONST_WIDE_INT or
     CONST_DOUBLE when we fold.  Likewise, if we have a target we can
     use, it is best to store directly into the target unless the type
     is large enough that memcpy will be used.  If we are making an
     initializer and all operands are constant, put it in memory as
     well.

     FIXME: Avoid trying to fill vector constructors piece-meal.
     Output them with output_constant_def below unless we're sure
     they're zeros.  This should go away when vector initializers
     are treated like VECTOR_CST instead of arrays.  */
  if ((TREE_STATIC (exp)
       && ((mode == BLKmode
	    && ! (target != 0 && safe_from_p (target, exp, 1)))
	   || TREE_ADDRESSABLE (exp)
	   || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
	       && (! can_move_by_pieces
		   (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
		    TYPE_ALIGN (type)))
	       && ! mostly_zeros_p (exp))))
      || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
	  && TREE_CONSTANT (exp)))
    {
      rtx constructor;

      if (avoid_temp_mem)
	return NULL_RTX;

      constructor = expand_expr_constant (exp, 1, modifier);

      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM)
	constructor = validize_mem (constructor);

      return constructor;
    }

  /* Handle calls that pass values in multiple non-contiguous
     locations.  The Irix 6 ABI has examples of this.  */
  if (target == 0 || ! safe_from_p (target, exp, 1)
      || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
    {
      if (avoid_temp_mem)
	return NULL_RTX;

      target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
    }

  store_constructor (exp, target, 0, int_expr_size (exp));
  return target;
}
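
/* Illustrative sketch (not part of the original source): expanding a
   CONSTRUCTOR tree.  CTOR is a hypothetical CONSTRUCTOR; passing a null
   target and avoid_temp_mem = false lets the routine create and fill a
   temporary itself when needed.  */
#if 0
{
  tree ctor = NULL_TREE;   /* Hypothetical CONSTRUCTOR tree.  */
  rtx val = expand_constructor (ctor, NULL_RTX, EXPAND_NORMAL,
				/*avoid_temp_mem=*/false);
}
#endif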
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   Intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.

   If INNER_REFERENCE_P is true, we are expanding an inner reference.
   In this case, we don't adjust a returned MEM rtx that wouldn't be
   sufficiently aligned for its mode; instead, it's up to the caller
   to deal with it afterwards.  This is used to make sure that unaligned
   base objects for which out-of-bounds accesses are supported, for
   example record types with trailing arrays, aren't realigned behind
   the back of the caller.
   The normal operating mode is to pass FALSE for this parameter.  */

rtx
expand_expr_real (tree exp, rtx target, machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl,
		  bool inner_reference_p)
{
  rtx ret;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
			    inner_reference_p);
  return ret;
}
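
/* Illustrative sketch (not part of the original source): the common entry
   points wrap expand_expr_real.  A caller that just wants the value of EXP
   in whatever mode is natural writes something like the following (EXP is
   a hypothetical tree).  */
#if 0
{
  tree exp = NULL_TREE;   /* Hypothetical expression to expand.  */
  rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  /* Or, equivalently for this simple case, val = expand_normal (exp);  */
}
#endif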
/* Try to expand the conditional expression which is represented by
   TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds,
   return the rtl reg which represents the result.  Otherwise return
   NULL_RTX.  */

static rtx
expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
			      tree treeop1 ATTRIBUTE_UNUSED,
			      tree treeop2 ATTRIBUTE_UNUSED)
{
7939 rtx op00
, op01
, op1
, op2
;
7940 enum rtx_code comparison_code
;
7941 machine_mode comparison_mode
;
7944 tree type
= TREE_TYPE (treeop1
);
7945 int unsignedp
= TYPE_UNSIGNED (type
);
7946 machine_mode mode
= TYPE_MODE (type
);
7947 machine_mode orig_mode
= mode
;
7949 /* If we cannot do a conditional move on the mode, try doing it
7950 with the promoted mode. */
7951 if (!can_conditionally_move_p (mode
))
7953 mode
= promote_mode (type
, mode
, &unsignedp
);
7954 if (!can_conditionally_move_p (mode
))
7956 temp
= assign_temp (type
, 0, 0); /* Use promoted mode for temp. */
7959 temp
= assign_temp (type
, 0, 1);
7962 expand_operands (treeop1
, treeop2
,
7963 temp
, &op1
, &op2
, EXPAND_NORMAL
);
7965 if (TREE_CODE (treeop0
) == SSA_NAME
7966 && (srcstmt
= get_def_for_expr_class (treeop0
, tcc_comparison
)))
7968 tree type
= TREE_TYPE (gimple_assign_rhs1 (srcstmt
));
7969 enum tree_code cmpcode
= gimple_assign_rhs_code (srcstmt
);
7970 op00
= expand_normal (gimple_assign_rhs1 (srcstmt
));
7971 op01
= expand_normal (gimple_assign_rhs2 (srcstmt
));
7972 comparison_mode
= TYPE_MODE (type
);
7973 unsignedp
= TYPE_UNSIGNED (type
);
7974 comparison_code
= convert_tree_comp_to_rtx (cmpcode
, unsignedp
);
7976 else if (COMPARISON_CLASS_P (treeop0
))
7978 tree type
= TREE_TYPE (TREE_OPERAND (treeop0
, 0));
7979 enum tree_code cmpcode
= TREE_CODE (treeop0
);
7980 op00
= expand_normal (TREE_OPERAND (treeop0
, 0));
7981 op01
= expand_normal (TREE_OPERAND (treeop0
, 1));
7982 unsignedp
= TYPE_UNSIGNED (type
);
7983 comparison_mode
= TYPE_MODE (type
);
7984 comparison_code
= convert_tree_comp_to_rtx (cmpcode
, unsignedp
);
7988 op00
= expand_normal (treeop0
);
7990 comparison_code
= NE
;
7991 comparison_mode
= GET_MODE (op00
);
7992 if (comparison_mode
== VOIDmode
)
7993 comparison_mode
= TYPE_MODE (TREE_TYPE (treeop0
));
7996 if (GET_MODE (op1
) != mode
)
7997 op1
= gen_lowpart (mode
, op1
);
7999 if (GET_MODE (op2
) != mode
)
8000 op2
= gen_lowpart (mode
, op2
);
8002 /* Try to emit the conditional move. */
8003 insn
= emit_conditional_move (temp
, comparison_code
,
8004 op00
, op01
, comparison_mode
,
8008 /* If we could do the conditional move, emit the sequence,
8012 rtx_insn
*seq
= get_insns ();
8015 return convert_modes (orig_mode
, mode
, temp
, 0);
8018 /* Otherwise discard the sequence and fall back to code with
8025 expand_expr_real_2 (sepops ops
, rtx target
, machine_mode tmode
,
8026 enum expand_modifier modifier
)
8028 rtx op0
, op1
, op2
, temp
;
8029 rtx_code_label
*lab
;
8033 enum tree_code code
= ops
->code
;
8035 rtx subtarget
, original_target
;
8037 bool reduce_bit_field
;
8038 location_t loc
= ops
->location
;
8039 tree treeop0
, treeop1
, treeop2
;
8040 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8041 ? reduce_to_bit_field_precision ((expr), \
8047 mode
= TYPE_MODE (type
);
8048 unsignedp
= TYPE_UNSIGNED (type
);
8054 /* We should be called only on simple (binary or unary) expressions,
8055 exactly those that are valid in gimple expressions that aren't
8056 GIMPLE_SINGLE_RHS (or invalid). */
8057 gcc_assert (get_gimple_rhs_class (code
) == GIMPLE_UNARY_RHS
8058 || get_gimple_rhs_class (code
) == GIMPLE_BINARY_RHS
8059 || get_gimple_rhs_class (code
) == GIMPLE_TERNARY_RHS
);
8061 ignore
= (target
== const0_rtx
8062 || ((CONVERT_EXPR_CODE_P (code
)
8063 || code
== COND_EXPR
|| code
== VIEW_CONVERT_EXPR
)
8064 && TREE_CODE (type
) == VOID_TYPE
));
8066 /* We should be called only if we need the result. */
8067 gcc_assert (!ignore
);
8069 /* An operation in what may be a bit-field type needs the
8070 result to be reduced to the precision of the bit-field type,
8071 which is narrower than that of the type's mode. */
8072 reduce_bit_field
= (INTEGRAL_TYPE_P (type
)
8073 && GET_MODE_PRECISION (mode
) > TYPE_PRECISION (type
));
8075 if (reduce_bit_field
&& modifier
== EXPAND_STACK_PARM
)
8078 /* Use subtarget as the target for operand 0 of a binary operation. */
8079 subtarget
= get_subtarget (target
);
8080 original_target
= target
;
8084 case NON_LVALUE_EXPR
:
8087 if (treeop0
== error_mark_node
)
8090 if (TREE_CODE (type
) == UNION_TYPE
)
8092 tree valtype
= TREE_TYPE (treeop0
);
8094 /* If both input and output are BLKmode, this conversion isn't doing
8095 anything except possibly changing memory attribute. */
8096 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
8098 rtx result
= expand_expr (treeop0
, target
, tmode
,
8101 result
= copy_rtx (result
);
8102 set_mem_attributes (result
, type
, 0);
8108 if (TYPE_MODE (type
) != BLKmode
)
8109 target
= gen_reg_rtx (TYPE_MODE (type
));
8111 target
= assign_temp (type
, 1, 1);
8115 /* Store data into beginning of memory target. */
8116 store_expr (treeop0
,
8117 adjust_address (target
, TYPE_MODE (valtype
), 0),
8118 modifier
== EXPAND_STACK_PARM
,
8123 gcc_assert (REG_P (target
));
8125 /* Store this field into a union of the proper type. */
8126 store_field (target
,
8127 MIN ((int_size_in_bytes (TREE_TYPE
8130 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
8131 0, 0, 0, TYPE_MODE (valtype
), treeop0
, 0, false);
8134 /* Return the entire union. */
8138 if (mode
== TYPE_MODE (TREE_TYPE (treeop0
)))
8140 op0
= expand_expr (treeop0
, target
, VOIDmode
,
8143 /* If the signedness of the conversion differs and OP0 is
8144 a promoted SUBREG, clear that indication since we now
8145 have to do the proper extension. */
8146 if (TYPE_UNSIGNED (TREE_TYPE (treeop0
)) != unsignedp
8147 && GET_CODE (op0
) == SUBREG
)
8148 SUBREG_PROMOTED_VAR_P (op0
) = 0;
8150 return REDUCE_BIT_FIELD (op0
);
8153 op0
= expand_expr (treeop0
, NULL_RTX
, mode
,
8154 modifier
== EXPAND_SUM
? EXPAND_NORMAL
: modifier
);
8155 if (GET_MODE (op0
) == mode
)
8158 /* If OP0 is a constant, just convert it into the proper mode. */
8159 else if (CONSTANT_P (op0
))
8161 tree inner_type
= TREE_TYPE (treeop0
);
8162 machine_mode inner_mode
= GET_MODE (op0
);
8164 if (inner_mode
== VOIDmode
)
8165 inner_mode
= TYPE_MODE (inner_type
);
8167 if (modifier
== EXPAND_INITIALIZER
)
8168 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
,
8169 subreg_lowpart_offset (mode
,
8172 op0
= convert_modes (mode
, inner_mode
, op0
,
8173 TYPE_UNSIGNED (inner_type
));
8176 else if (modifier
== EXPAND_INITIALIZER
)
8177 op0
= gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
8179 else if (target
== 0)
8180 op0
= convert_to_mode (mode
, op0
,
8181 TYPE_UNSIGNED (TREE_TYPE
8185 convert_move (target
, op0
,
8186 TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
8190 return REDUCE_BIT_FIELD (op0
);
    case ADDR_SPACE_CONVERT_EXPR:
      {
	tree treeop0_type = TREE_TYPE (treeop0);
	addr_space_t as_to;
	addr_space_t as_from;

	gcc_assert (POINTER_TYPE_P (type));
	gcc_assert (POINTER_TYPE_P (treeop0_type));

	as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
	as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));

	/* Conversions between pointers to the same address space should
	   have been implemented via CONVERT_EXPR / NOP_EXPR.  */
	gcc_assert (as_to != as_from);

	/* Ask target code to handle conversion between pointers
	   to overlapping address spaces.  */
	if (targetm.addr_space.subset_p (as_to, as_from)
	    || targetm.addr_space.subset_p (as_from, as_to))
	  {
	    op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
	    op0 = targetm.addr_space.convert (op0, treeop0_type, type);
	    gcc_assert (op0);
	    return op0;
	  }

	/* For disjoint address spaces, converting anything but
	   a null pointer invokes undefined behaviour.  We simply
	   always return a null pointer here.  */
	return CONST0_RTX (mode);
      }
    case POINTER_PLUS_EXPR:
      /* Even though the sizetype mode and the pointer's mode can be different
	 expand is able to handle this correctly and get the correct result out
	 of the PLUS_EXPR code.  */
      /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
	 if sizetype precision is smaller than pointer precision.  */
      if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
	treeop1 = fold_convert_loc (loc, type,
				    fold_convert_loc (loc, ssizetype,
						      treeop1));
      /* If sizetype precision is larger than pointer precision, truncate the
	 offset to have matching modes.  */
      else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
	treeop1 = fold_convert_loc (loc, type, treeop1);
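
      /* For example, on a target with 64-bit pointers but 32-bit sizetype,
	 an offset of (sizetype) -4 has to be sign-extended through ssizetype
	 so that the POINTER_PLUS_EXPR really moves the pointer back by 4
	 instead of adding 0xfffffffc.  */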
8241 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8242 something else, make sure we add the register to the constant and
8243 then to the other thing. This case can occur during strength
8244 reduction and doing it this way will produce better code if the
8245 frame pointer or argument pointer is eliminated.
8247 fold-const.c will ensure that the constant is always in the inner
8248 PLUS_EXPR, so the only case we need to do anything about is if
8249 sp, ap, or fp is our second argument, in which case we must swap
8250 the innermost first argument and our second argument. */
8252 if (TREE_CODE (treeop0
) == PLUS_EXPR
8253 && TREE_CODE (TREE_OPERAND (treeop0
, 1)) == INTEGER_CST
8254 && TREE_CODE (treeop1
) == VAR_DECL
8255 && (DECL_RTL (treeop1
) == frame_pointer_rtx
8256 || DECL_RTL (treeop1
) == stack_pointer_rtx
8257 || DECL_RTL (treeop1
) == arg_pointer_rtx
))
8262 /* If the result is to be ptr_mode and we are adding an integer to
8263 something, we might be forming a constant. So try to use
8264 plus_constant. If it produces a sum and we can't accept it,
8265 use force_operand. This allows P = &ARR[const] to generate
8266 efficient code on machines where a SYMBOL_REF is not a valid
8269 If this is an EXPAND_SUM call, always return the sum. */
8270 if (modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
8271 || (mode
== ptr_mode
&& (unsignedp
|| ! flag_trapv
)))
8273 if (modifier
== EXPAND_STACK_PARM
)
8275 if (TREE_CODE (treeop0
) == INTEGER_CST
8276 && GET_MODE_PRECISION (mode
) <= HOST_BITS_PER_WIDE_INT
8277 && TREE_CONSTANT (treeop1
))
8281 machine_mode wmode
= TYPE_MODE (TREE_TYPE (treeop1
));
8283 op1
= expand_expr (treeop1
, subtarget
, VOIDmode
,
8285 /* Use wi::shwi to ensure that the constant is
8286 truncated according to the mode of OP1, then sign extended
8287 to a HOST_WIDE_INT. Using the constant directly can result
8288 in non-canonical RTL in a 64x32 cross compile. */
8289 wc
= TREE_INT_CST_LOW (treeop0
);
8291 immed_wide_int_const (wi::shwi (wc
, wmode
), wmode
);
8292 op1
= plus_constant (mode
, op1
, INTVAL (constant_part
));
8293 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8294 op1
= force_operand (op1
, target
);
8295 return REDUCE_BIT_FIELD (op1
);
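
      /* The wi::shwi detour above matters on a 64-bit host targeting a
	 32-bit machine: a constant such as 0x80000000 in SImode must be
	 re-canonicalized as a sign-extended HOST_WIDE_INT, otherwise
	 plus_constant would be handed non-canonical RTL.  */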
8298 else if (TREE_CODE (treeop1
) == INTEGER_CST
8299 && GET_MODE_PRECISION (mode
) <= HOST_BITS_PER_WIDE_INT
8300 && TREE_CONSTANT (treeop0
))
8304 machine_mode wmode
= TYPE_MODE (TREE_TYPE (treeop0
));
8306 op0
= expand_expr (treeop0
, subtarget
, VOIDmode
,
8307 (modifier
== EXPAND_INITIALIZER
8308 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
8309 if (! CONSTANT_P (op0
))
8311 op1
= expand_expr (treeop1
, NULL_RTX
,
8312 VOIDmode
, modifier
);
8313 /* Return a PLUS if modifier says it's OK. */
8314 if (modifier
== EXPAND_SUM
8315 || modifier
== EXPAND_INITIALIZER
)
8316 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
8319 /* Use wi::shwi to ensure that the constant is
8320 truncated according to the mode of OP1, then sign extended
8321 to a HOST_WIDE_INT. Using the constant directly can result
8322 in non-canonical RTL in a 64x32 cross compile. */
8323 wc
= TREE_INT_CST_LOW (treeop1
);
8325 = immed_wide_int_const (wi::shwi (wc
, wmode
), wmode
);
8326 op0
= plus_constant (mode
, op0
, INTVAL (constant_part
));
8327 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8328 op0
= force_operand (op0
, target
);
8329 return REDUCE_BIT_FIELD (op0
);
8333 /* Use TER to expand pointer addition of a negated value
8334 as pointer subtraction. */
8335 if ((POINTER_TYPE_P (TREE_TYPE (treeop0
))
8336 || (TREE_CODE (TREE_TYPE (treeop0
)) == VECTOR_TYPE
8337 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0
)))))
8338 && TREE_CODE (treeop1
) == SSA_NAME
8339 && TYPE_MODE (TREE_TYPE (treeop0
))
8340 == TYPE_MODE (TREE_TYPE (treeop1
)))
8342 gimple def
= get_def_for_expr (treeop1
, NEGATE_EXPR
);
8345 treeop1
= gimple_assign_rhs1 (def
);
8351 /* No sense saving up arithmetic to be done
8352 if it's all in the wrong mode to form part of an address.
8353 And force_operand won't know whether to sign-extend or
8355 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8356 || mode
!= ptr_mode
)
8358 expand_operands (treeop0
, treeop1
,
8359 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8360 if (op0
== const0_rtx
)
8362 if (op1
== const0_rtx
)
8367 expand_operands (treeop0
, treeop1
,
8368 subtarget
, &op0
, &op1
, modifier
);
8369 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS
, mode
, op0
, op1
));
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (treeop0)
	  && really_constant_p (treeop1))
	{
	  expand_operands (treeop0, treeop1,
			   NULL_RTX, &op0, &op1, modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (CONST_INT_P (op1))
	    return REDUCE_BIT_FIELD (plus_constant (mode, op0,
						    -INTVAL (op1)));
	  else
	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (CONST_INT_P (op1))
	{
	  op1 = negate_rtx (mode, op1);
	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
	}
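
      /* Rewriting A - const as A + (-const) gives the address-formation
	 and simplification code a single canonical shape to deal with;
	 e.g. "p - 4" becomes "p + (-4)", which plus_constant and
	 simplify_gen_binary already know how to fold.  */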
8414 case WIDEN_MULT_PLUS_EXPR
:
8415 case WIDEN_MULT_MINUS_EXPR
:
8416 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
8417 op2
= expand_normal (treeop2
);
8418 target
= expand_widen_pattern_expr (ops
, op0
, op1
, op2
,
8422 case WIDEN_MULT_EXPR
:
8423 /* If first operand is constant, swap them.
8424 Thus the following special case checks need only
8425 check the second operand. */
8426 if (TREE_CODE (treeop0
) == INTEGER_CST
)
8427 std::swap (treeop0
, treeop1
);
8429 /* First, check if we have a multiplication of one signed and one
8430 unsigned operand. */
8431 if (TREE_CODE (treeop1
) != INTEGER_CST
8432 && (TYPE_UNSIGNED (TREE_TYPE (treeop0
))
8433 != TYPE_UNSIGNED (TREE_TYPE (treeop1
))))
8435 machine_mode innermode
= TYPE_MODE (TREE_TYPE (treeop0
));
8436 this_optab
= usmul_widen_optab
;
8437 if (find_widening_optab_handler (this_optab
, mode
, innermode
, 0)
8438 != CODE_FOR_nothing
)
8440 if (TYPE_UNSIGNED (TREE_TYPE (treeop0
)))
8441 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
,
8444 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op1
, &op0
,
8446 /* op0 and op1 might still be constant, despite the above
8447 != INTEGER_CST check. Handle it. */
8448 if (GET_MODE (op0
) == VOIDmode
&& GET_MODE (op1
) == VOIDmode
)
8450 op0
= convert_modes (innermode
, mode
, op0
, true);
8451 op1
= convert_modes (innermode
, mode
, op1
, false);
8452 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
,
8453 target
, unsignedp
));
8458 /* Check for a multiplication with matching signedness. */
8459 else if ((TREE_CODE (treeop1
) == INTEGER_CST
8460 && int_fits_type_p (treeop1
, TREE_TYPE (treeop0
)))
8461 || (TYPE_UNSIGNED (TREE_TYPE (treeop1
))
8462 == TYPE_UNSIGNED (TREE_TYPE (treeop0
))))
8464 tree op0type
= TREE_TYPE (treeop0
);
8465 machine_mode innermode
= TYPE_MODE (op0type
);
8466 bool zextend_p
= TYPE_UNSIGNED (op0type
);
8467 optab other_optab
= zextend_p
? smul_widen_optab
: umul_widen_optab
;
8468 this_optab
= zextend_p
? umul_widen_optab
: smul_widen_optab
;
8470 if (TREE_CODE (treeop0
) != INTEGER_CST
)
8472 if (find_widening_optab_handler (this_optab
, mode
, innermode
, 0)
8473 != CODE_FOR_nothing
)
8475 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
,
8477 /* op0 and op1 might still be constant, despite the above
8478 != INTEGER_CST check. Handle it. */
8479 if (GET_MODE (op0
) == VOIDmode
&& GET_MODE (op1
) == VOIDmode
)
8482 op0
= convert_modes (innermode
, mode
, op0
, zextend_p
);
8484 = convert_modes (innermode
, mode
, op1
,
8485 TYPE_UNSIGNED (TREE_TYPE (treeop1
)));
8486 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
,
8490 temp
= expand_widening_mult (mode
, op0
, op1
, target
,
8491 unsignedp
, this_optab
);
8492 return REDUCE_BIT_FIELD (temp
);
8494 if (find_widening_optab_handler (other_optab
, mode
, innermode
, 0)
8496 && innermode
== word_mode
)
8499 op0
= expand_normal (treeop0
);
8500 if (TREE_CODE (treeop1
) == INTEGER_CST
)
8501 op1
= convert_modes (innermode
, mode
,
8502 expand_normal (treeop1
),
8503 TYPE_UNSIGNED (TREE_TYPE (treeop1
)));
8505 op1
= expand_normal (treeop1
);
8506 /* op0 and op1 might still be constant, despite the above
8507 != INTEGER_CST check. Handle it. */
8508 if (GET_MODE (op0
) == VOIDmode
&& GET_MODE (op1
) == VOIDmode
)
8509 goto widen_mult_const
;
8510 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
8511 unsignedp
, OPTAB_LIB_WIDEN
);
8512 hipart
= gen_highpart (innermode
, temp
);
8513 htem
= expand_mult_highpart_adjust (innermode
, hipart
,
8517 emit_move_insn (hipart
, htem
);
8518 return REDUCE_BIT_FIELD (temp
);
8522 treeop0
= fold_build1 (CONVERT_EXPR
, type
, treeop0
);
8523 treeop1
= fold_build1 (CONVERT_EXPR
, type
, treeop1
);
8524 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8525 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
, target
, unsignedp
));
8529 optab opt
= fma_optab
;
8532 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8534 if (optab_handler (fma_optab
, mode
) == CODE_FOR_nothing
)
8536 tree fn
= mathfn_built_in (TREE_TYPE (treeop0
), BUILT_IN_FMA
);
8539 gcc_assert (fn
!= NULL_TREE
);
8540 call_expr
= build_call_expr (fn
, 3, treeop0
, treeop1
, treeop2
);
8541 return expand_builtin (call_expr
, target
, subtarget
, mode
, false);
8544 def0
= get_def_for_expr (treeop0
, NEGATE_EXPR
);
8545 /* The multiplication is commutative - look at its 2nd operand
8546 if the first isn't fed by a negate. */
8549 def0
= get_def_for_expr (treeop1
, NEGATE_EXPR
);
8550 /* Swap operands if the 2nd operand is fed by a negate. */
8552 std::swap (treeop0
, treeop1
);
8554 def2
= get_def_for_expr (treeop2
, NEGATE_EXPR
);
8559 && optab_handler (fnms_optab
, mode
) != CODE_FOR_nothing
)
8562 op0
= expand_normal (gimple_assign_rhs1 (def0
));
8563 op2
= expand_normal (gimple_assign_rhs1 (def2
));
8566 && optab_handler (fnma_optab
, mode
) != CODE_FOR_nothing
)
8569 op0
= expand_normal (gimple_assign_rhs1 (def0
));
8572 && optab_handler (fms_optab
, mode
) != CODE_FOR_nothing
)
8575 op2
= expand_normal (gimple_assign_rhs1 (def2
));
8579 op0
= expand_expr (treeop0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
8581 op2
= expand_normal (treeop2
);
8582 op1
= expand_normal (treeop1
);
8584 return expand_ternary_op (TYPE_MODE (type
), opt
,
8585 op0
, op1
, op2
, target
, 0);
8589 /* If this is a fixed-point operation, then we cannot use the code
8590 below because "expand_mult" doesn't support sat/no-sat fixed-point
8592 if (ALL_FIXED_POINT_MODE_P (mode
))
8595 /* If first operand is constant, swap them.
8596 Thus the following special case checks need only
8597 check the second operand. */
8598 if (TREE_CODE (treeop0
) == INTEGER_CST
)
8599 std::swap (treeop0
, treeop1
);
8601 /* Attempt to return something suitable for generating an
8602 indexed address, for machines that support that. */
8604 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
8605 && tree_fits_shwi_p (treeop1
))
8607 tree exp1
= treeop1
;
8609 op0
= expand_expr (treeop0
, subtarget
, VOIDmode
,
8613 op0
= force_operand (op0
, NULL_RTX
);
8615 op0
= copy_to_mode_reg (mode
, op0
);
8617 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode
, op0
,
8618 gen_int_mode (tree_to_shwi (exp1
),
8619 TYPE_MODE (TREE_TYPE (exp1
)))));
8622 if (modifier
== EXPAND_STACK_PARM
)
8625 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8626 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
, target
, unsignedp
));
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_divmod" doesn't support sat/no-sat fixed-point
	 divisions.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, EXPAND_NORMAL);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case MULT_HIGHPART_EXPR:
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
      gcc_assert (temp);
      return temp;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      expand_operands (treeop0, treeop1,
		       subtarget, &op0, &op1, EXPAND_NORMAL);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
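
      /* The first argument of expand_divmod selects the result: 0 above
	 yields the quotient for the *_DIV_EXPR codes, 1 here yields the
	 remainder for the *_MOD_EXPR codes; the rounding variant (trunc,
	 floor, round, exact) is carried by CODE itself.  */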
    case FIXED_CONVERT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);

      if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
	   && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
	  || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
      else
	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
      return target;

    case FIX_TRUNC_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)), op0);
      expand_float (target, op0,
		    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      return target;
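
      /* Both conversions above go through the target's conversion patterns
	 or a library fallback: expand_fix turns a floating-point value into
	 an integer and expand_float turns an integer into a floating-point
	 value, with UNSIGNEDP selecting the unsigned flavour.  */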
    case NEGATE_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode,
			  optab_for_tree_code (NEGATE_EXPR, type,
					       optab_default),
			  op0, target, 0);
      gcc_assert (temp);
      return REDUCE_BIT_FIELD (temp);
    case ABS_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, treeop0, 1));
8732 target
= original_target
;
8734 || modifier
== EXPAND_STACK_PARM
8735 || (MEM_P (target
) && MEM_VOLATILE_P (target
))
8736 || GET_MODE (target
) != mode
8738 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
8739 target
= gen_reg_rtx (mode
);
8740 expand_operands (treeop0
, treeop1
,
8741 target
, &op0
, &op1
, EXPAND_NORMAL
);
8743 /* First try to do it with a special MIN or MAX instruction.
8744 If that does not win, use a conditional jump to select the proper
8746 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
8747 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
8752 /* At this point, a MEM target is no longer useful; we will get better
8755 if (! REG_P (target
))
8756 target
= gen_reg_rtx (mode
);
8758 /* If op1 was placed in target, swap op0 and op1. */
8759 if (target
!= op0
&& target
== op1
)
8760 std::swap (op0
, op1
);
8762 /* We generate better code and avoid problems with op1 mentioning
8763 target by forcing op1 into a pseudo if it isn't a constant. */
8764 if (! CONSTANT_P (op1
))
8765 op1
= force_reg (mode
, op1
);
8768 enum rtx_code comparison_code
;
8771 if (code
== MAX_EXPR
)
8772 comparison_code
= unsignedp
? GEU
: GE
;
8774 comparison_code
= unsignedp
? LEU
: LE
;
8776 /* Canonicalize to comparisons against 0. */
8777 if (op1
== const1_rtx
)
8779 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8780 or (a != 0 ? a : 1) for unsigned.
8781 For MIN we are safe converting (a <= 1 ? a : 1)
8782 into (a <= 0 ? a : 1) */
8783 cmpop1
= const0_rtx
;
8784 if (code
== MAX_EXPR
)
8785 comparison_code
= unsignedp
? NE
: GT
;
8787 if (op1
== constm1_rtx
&& !unsignedp
)
8789 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8790 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8791 cmpop1
= const0_rtx
;
8792 if (code
== MIN_EXPR
)
8793 comparison_code
= LT
;
8796 /* Use a conditional move if possible. */
8797 if (can_conditionally_move_p (mode
))
8803 /* Try to emit the conditional move. */
8804 insn
= emit_conditional_move (target
, comparison_code
,
8809 /* If we could do the conditional move, emit the sequence,
8813 rtx_insn
*seq
= get_insns ();
8819 /* Otherwise discard the sequence and fall back to code with
8825 emit_move_insn (target
, op0
);
8827 lab
= gen_label_rtx ();
8828 do_compare_rtx_and_jump (target
, cmpop1
, comparison_code
,
8829 unsignedp
, mode
, NULL_RTX
, NULL
, lab
,
8832 emit_move_insn (target
, op1
);
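
      /* If neither a {s,u}{min,max} instruction nor a conditional move was
	 usable, the code above falls back to the branchy form: OP0 is
	 copied into TARGET, TARGET is compared against CMPOP1 with
	 COMPARISON_CODE, and only when that comparison fails is OP1 copied
	 over TARGET.  */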
    case BIT_NOT_EXPR:
      op0 = expand_expr (treeop0, subtarget,
			 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* In case we have to reduce the result to bitfield precision
	 for unsigned bitfield expand this as XOR with a proper constant
	 instead.  */
      if (reduce_bit_field && TYPE_UNSIGNED (type))
	{
	  wide_int mask = wi::mask (TYPE_PRECISION (type),
				    false, GET_MODE_PRECISION (mode));

	  temp = expand_binop (mode, xor_optab, op0,
			       immed_wide_int_const (mask, mode),
			       target, 1, OPTAB_LIB_WIDEN);
	}
      else
	temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;
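
      /* The XOR form is needed for a reduced-precision unsigned type
	 because a full one's complement would also set the bits above the
	 field: e.g. with TYPE_PRECISION 3 in QImode, XORing 0b101 with the
	 mask 0b111 gives 0b010, whereas one_cmpl would produce 0b11111010
	 and require a further reduction.  */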
8858 /* ??? Can optimize bitwise operations with one arg constant.
8859 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8860 and (a bitwise1 b) bitwise2 b (etc)
8861 but that is probably not worth while. */
8870 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type
))
8871 || (GET_MODE_PRECISION (TYPE_MODE (type
))
8872 == TYPE_PRECISION (type
)));
8877 /* If this is a fixed-point operation, then we cannot use the code
8878 below because "expand_shift" doesn't support sat/no-sat fixed-point
8880 if (ALL_FIXED_POINT_MODE_P (mode
))
8883 if (! safe_from_p (subtarget
, treeop1
, 1))
8885 if (modifier
== EXPAND_STACK_PARM
)
8887 op0
= expand_expr (treeop0
, subtarget
,
8888 VOIDmode
, EXPAND_NORMAL
);
8889 temp
= expand_variable_shift (code
, mode
, op0
, treeop1
, target
,
      if (code == LSHIFT_EXPR)
	temp = REDUCE_BIT_FIELD (temp);
      return temp;

      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */

    case UNORDERED_EXPR:
      temp = do_store_flag (ops,
			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode);
      if (temp)
	return temp;

      /* Use a compare and a jump for BLKmode comparisons, or for function
	 type comparisons if HAVE_canonicalize_funcptr_for_compare.  */

      if ((target == 0
	   || modifier == EXPAND_STACK_PARM
	   || ! safe_from_p (target, treeop0, 1)
	   || ! safe_from_p (target, treeop1, 1)
	   /* Make sure we don't have a hard reg (such as function's return
	      value) live across basic blocks, if not optimizing.  */
	   || (!optimize && REG_P (target)
	       && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      emit_move_insn (target, const0_rtx);

      rtx_code_label *lab1 = gen_label_rtx ();
      jumpifnot_1 (code, treeop0, treeop1, lab1, -1);

      if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
	emit_move_insn (target, constm1_rtx);
      else
	emit_move_insn (target, const1_rtx);

      emit_label (lab1);
      return target;
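
      /* This is the generic store-flag fallback: TARGET is cleared first,
	 then the conditional jump around the final move leaves it at 0 when
	 the comparison is false and at 1 (or -1 for a signed 1-bit type)
	 when it is true.  */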
8945 /* Get the rtx code of the operands. */
8946 op0
= expand_normal (treeop0
);
8947 op1
= expand_normal (treeop1
);
8950 target
= gen_reg_rtx (TYPE_MODE (type
));
8952 /* If target overlaps with op1, then either we need to force
8953 op1 into a pseudo (if target also overlaps with op0),
8954 or write the complex parts in reverse order. */
8955 switch (GET_CODE (target
))
8958 if (reg_overlap_mentioned_p (XEXP (target
, 0), op1
))
8960 if (reg_overlap_mentioned_p (XEXP (target
, 1), op0
))
8962 complex_expr_force_op1
:
8963 temp
= gen_reg_rtx (GET_MODE_INNER (GET_MODE (target
)));
8964 emit_move_insn (temp
, op1
);
8968 complex_expr_swap_order
:
8969 /* Move the imaginary (op1) and real (op0) parts to their
8971 write_complex_part (target
, op1
, true);
8972 write_complex_part (target
, op0
, false);
8978 temp
= adjust_address_nv (target
,
8979 GET_MODE_INNER (GET_MODE (target
)), 0);
8980 if (reg_overlap_mentioned_p (temp
, op1
))
8982 machine_mode imode
= GET_MODE_INNER (GET_MODE (target
));
8983 temp
= adjust_address_nv (target
, imode
,
8984 GET_MODE_SIZE (imode
));
8985 if (reg_overlap_mentioned_p (temp
, op0
))
8986 goto complex_expr_force_op1
;
8987 goto complex_expr_swap_order
;
8991 if (reg_overlap_mentioned_p (target
, op1
))
8993 if (reg_overlap_mentioned_p (target
, op0
))
8994 goto complex_expr_force_op1
;
8995 goto complex_expr_swap_order
;
9000 /* Move the real (op0) and imaginary (op1) parts to their location. */
9001 write_complex_part (target
, op0
, false);
9002 write_complex_part (target
, op1
, true);
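
      /* The switch on GET_CODE (target) above only decides in which order
	 the two write_complex_part calls can safely be issued, or whether
	 OP1 must first be copied into a fresh pseudo, so that storing the
	 real part never clobbers a register or memory word still needed
	 for the imaginary part, and vice versa.  */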
    case WIDEN_SUM_EXPR:
      {
	tree oprnd0 = treeop0;
	tree oprnd1 = treeop1;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
					    target, unsignedp);
	return target;
      }
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
      {
	op0 = expand_normal (treeop0);
	this_optab = optab_for_tree_code (code, type, optab_default);
	machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));

	if (optab_handler (this_optab, vec_mode) != CODE_FOR_nothing)
	  {
	    struct expand_operand ops[2];
	    enum insn_code icode = optab_handler (this_optab, vec_mode);

	    create_output_operand (&ops[0], target, mode);
	    create_input_operand (&ops[1], op0, vec_mode);
	    if (maybe_expand_insn (icode, 2, ops))
	      {
		target = ops[0].value;
		if (GET_MODE (target) != mode)
		  return gen_lowpart (tmode, target);
		return target;
	      }
	  }
	/* Fall back to optab with vector result, and then extract scalar.  */
	this_optab = scalar_reduc_to_vector (this_optab, type);
	temp = expand_unop (vec_mode, this_optab, op0, NULL_RTX, unsignedp);
	/* The tree code produces a scalar result, but (somewhat by convention)
	   the optab produces a vector with the result in element 0 if
	   little-endian, or element N-1 if big-endian.  So pull the scalar
	   result out of that element.  */
	int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
	int bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (vec_mode));
	temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
				  target, mode, mode);
	return temp;
      }
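
      /* The reduction optab leaves the scalar answer in lane 0 on a
	 little-endian target and in lane N-1 on a big-endian one; e.g. for
	 a V4SImode reduction on a big-endian machine the extract_bit_field
	 call above reads the 32 bits at bit offset 96.  */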
9056 case VEC_UNPACK_HI_EXPR
:
9057 case VEC_UNPACK_LO_EXPR
:
9059 op0
= expand_normal (treeop0
);
9060 temp
= expand_widen_pattern_expr (ops
, op0
, NULL_RTX
, NULL_RTX
,
9066 case VEC_UNPACK_FLOAT_HI_EXPR
:
9067 case VEC_UNPACK_FLOAT_LO_EXPR
:
9069 op0
= expand_normal (treeop0
);
9070 /* The signedness is determined from input operand. */
9071 temp
= expand_widen_pattern_expr
9072 (ops
, op0
, NULL_RTX
, NULL_RTX
,
9073 target
, TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
9079 case VEC_WIDEN_MULT_HI_EXPR
:
9080 case VEC_WIDEN_MULT_LO_EXPR
:
9081 case VEC_WIDEN_MULT_EVEN_EXPR
:
9082 case VEC_WIDEN_MULT_ODD_EXPR
:
9083 case VEC_WIDEN_LSHIFT_HI_EXPR
:
9084 case VEC_WIDEN_LSHIFT_LO_EXPR
:
9085 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9086 target
= expand_widen_pattern_expr (ops
, op0
, op1
, NULL_RTX
,
9088 gcc_assert (target
);
9091 case VEC_PACK_TRUNC_EXPR
:
9092 case VEC_PACK_SAT_EXPR
:
9093 case VEC_PACK_FIX_TRUNC_EXPR
:
9094 mode
= TYPE_MODE (TREE_TYPE (treeop0
));
9098 expand_operands (treeop0
, treeop1
, target
, &op0
, &op1
, EXPAND_NORMAL
);
9099 op2
= expand_normal (treeop2
);
9101 /* Careful here: if the target doesn't support integral vector modes,
9102 a constant selection vector could wind up smooshed into a normal
9103 integral constant. */
9104 if (CONSTANT_P (op2
) && GET_CODE (op2
) != CONST_VECTOR
)
9106 tree sel_type
= TREE_TYPE (treeop2
);
9108 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type
)),
9109 TYPE_VECTOR_SUBPARTS (sel_type
));
9110 gcc_assert (GET_MODE_CLASS (vmode
) == MODE_VECTOR_INT
);
9111 op2
= simplify_subreg (vmode
, op2
, TYPE_MODE (sel_type
), 0);
9112 gcc_assert (op2
&& GET_CODE (op2
) == CONST_VECTOR
);
9115 gcc_assert (GET_MODE_CLASS (GET_MODE (op2
)) == MODE_VECTOR_INT
);
9117 temp
= expand_vec_perm (mode
, op0
, op1
, op2
, target
);
9123 tree oprnd0
= treeop0
;
9124 tree oprnd1
= treeop1
;
9125 tree oprnd2
= treeop2
;
9128 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9129 op2
= expand_normal (oprnd2
);
9130 target
= expand_widen_pattern_expr (ops
, op0
, op1
, op2
,
9137 tree oprnd0
= treeop0
;
9138 tree oprnd1
= treeop1
;
9139 tree oprnd2
= treeop2
;
9142 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9143 op2
= expand_normal (oprnd2
);
9144 target
= expand_widen_pattern_expr (ops
, op0
, op1
, op2
,
9149 case REALIGN_LOAD_EXPR
:
9151 tree oprnd0
= treeop0
;
9152 tree oprnd1
= treeop1
;
9153 tree oprnd2
= treeop2
;
9156 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
9157 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9158 op2
= expand_normal (oprnd2
);
9159 temp
= expand_ternary_op (mode
, this_optab
, op0
, op1
, op2
,
9167 /* A COND_EXPR with its type being VOID_TYPE represents a
9168 conditional jump and is handled in
9169 expand_gimple_cond_expr. */
9170 gcc_assert (!VOID_TYPE_P (type
));
9172 /* Note that COND_EXPRs whose type is a structure or union
9173 are required to be constructed to contain assignments of
9174 a temporary variable, so that we can evaluate them here
9175 for side effect only. If type is void, we must do likewise. */
9177 gcc_assert (!TREE_ADDRESSABLE (type
)
9179 && TREE_TYPE (treeop1
) != void_type_node
9180 && TREE_TYPE (treeop2
) != void_type_node
);
9182 temp
= expand_cond_expr_using_cmove (treeop0
, treeop1
, treeop2
);
9186 /* If we are not to produce a result, we have no target. Otherwise,
9187 if a target was specified use it; it will not be used as an
9188 intermediate target unless it is safe. If no target, use a
9191 if (modifier
!= EXPAND_STACK_PARM
9193 && safe_from_p (original_target
, treeop0
, 1)
9194 && GET_MODE (original_target
) == mode
9195 && !MEM_P (original_target
))
9196 temp
= original_target
;
9198 temp
= assign_temp (type
, 0, 1);
9200 do_pending_stack_adjust ();
9202 rtx_code_label
*lab0
= gen_label_rtx ();
9203 rtx_code_label
*lab1
= gen_label_rtx ();
9204 jumpifnot (treeop0
, lab0
, -1);
9205 store_expr (treeop1
, temp
,
9206 modifier
== EXPAND_STACK_PARM
,
9209 emit_jump_insn (gen_jump (lab1
));
9212 store_expr (treeop2
, temp
,
9213 modifier
== EXPAND_STACK_PARM
,
9222 target
= expand_vec_cond_expr (type
, treeop0
, treeop1
, treeop2
, target
);
  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (treeop0, treeop1,
		   subtarget, &op0, &op1, EXPAND_NORMAL);
  this_optab = optab_for_tree_code (code, type, optab_default);
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  /* Bitwise operations do not need bitfield reduction as we expect their
     operands being properly truncated.  */
  if (code == BIT_XOR_EXPR
      || code == BIT_AND_EXPR
      || code == BIT_IOR_EXPR)
    return temp;
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD


/* Return TRUE if expression STMT is suitable for replacement.
   Never consider memory loads as replaceable, because those don't ever lead
   into constant expressions.  */

static bool
stmt_is_replaceable_p (gimple stmt)
{
  if (ssa_is_replaceable_p (stmt))
    {
      /* Don't move around loads.  */
      if (!gimple_assign_single_p (stmt)
	  || is_gimple_val (gimple_assign_rhs1 (stmt)))
	return true;
    }

  return false;
}
expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl,
		    bool inner_reference_p)
{
  rtx op0, op1, temp, decl_rtl;
  enum tree_code code = TREE_CODE (exp);
  rtx subtarget, original_target;
  bool reduce_bit_field;
  location_t loc = EXPR_LOCATION (exp);
  struct separate_ops ops;
  tree treeop0, treeop1, treeop2;
  tree ssa_name = NULL_TREE;

  type = TREE_TYPE (exp);
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = treeop1 = treeop2 = NULL_TREE;
  if (!VL_EXP_CLASS_P (exp))
    switch (TREE_CODE_LENGTH (code))
      {
      case 3: treeop2 = TREE_OPERAND (exp, 2);
      case 2: treeop1 = TREE_OPERAND (exp, 1);
      case 1: treeop0 = TREE_OPERAND (exp, 0);
      }
9310 ignore
= (target
== const0_rtx
9311 || ((CONVERT_EXPR_CODE_P (code
)
9312 || code
== COND_EXPR
|| code
== VIEW_CONVERT_EXPR
)
9313 && TREE_CODE (type
) == VOID_TYPE
));
9315 /* An operation in what may be a bit-field type needs the
9316 result to be reduced to the precision of the bit-field type,
9317 which is narrower than that of the type's mode. */
9318 reduce_bit_field
= (!ignore
9319 && INTEGRAL_TYPE_P (type
)
9320 && GET_MODE_PRECISION (mode
) > TYPE_PRECISION (type
));
9322 /* If we are going to ignore this result, we need only do something
9323 if there is a side-effect somewhere in the expression. If there
9324 is, short-circuit the most common cases here. Note that we must
9325 not call expand_expr with anything but const0_rtx in case this
9326 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9330 if (! TREE_SIDE_EFFECTS (exp
))
9333 /* Ensure we reference a volatile object even if value is ignored, but
9334 don't do this if all we are doing is taking its address. */
9335 if (TREE_THIS_VOLATILE (exp
)
9336 && TREE_CODE (exp
) != FUNCTION_DECL
9337 && mode
!= VOIDmode
&& mode
!= BLKmode
9338 && modifier
!= EXPAND_CONST_ADDRESS
)
9340 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, modifier
);
9346 if (TREE_CODE_CLASS (code
) == tcc_unary
9347 || code
== BIT_FIELD_REF
9348 || code
== COMPONENT_REF
9349 || code
== INDIRECT_REF
)
9350 return expand_expr (treeop0
, const0_rtx
, VOIDmode
,
9353 else if (TREE_CODE_CLASS (code
) == tcc_binary
9354 || TREE_CODE_CLASS (code
) == tcc_comparison
9355 || code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
9357 expand_expr (treeop0
, const0_rtx
, VOIDmode
, modifier
);
9358 expand_expr (treeop1
, const0_rtx
, VOIDmode
, modifier
);
9365 if (reduce_bit_field
&& modifier
== EXPAND_STACK_PARM
)
9368 /* Use subtarget as the target for operand 0 of a binary operation. */
9369 subtarget
= get_subtarget (target
);
9370 original_target
= target
;
9376 tree function
= decl_function_context (exp
);
9378 temp
= label_rtx (exp
);
9379 temp
= gen_rtx_LABEL_REF (Pmode
, temp
);
9381 if (function
!= current_function_decl
9383 LABEL_REF_NONLOCAL_P (temp
) = 1;
9385 temp
= gen_rtx_MEM (FUNCTION_MODE
, temp
);
9390 /* ??? ivopts calls expander, without any preparation from
9391 out-of-ssa. So fake instructions as if this was an access to the
9392 base variable. This unnecessarily allocates a pseudo, see how we can
9393 reuse it, if partition base vars have it set already. */
9394 if (!currently_expanding_to_rtl
)
9396 tree var
= SSA_NAME_VAR (exp
);
9397 if (var
&& DECL_RTL_SET_P (var
))
9398 return DECL_RTL (var
);
9399 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp
)),
9400 LAST_VIRTUAL_REGISTER
+ 1);
9403 g
= get_gimple_for_ssa_name (exp
);
9404 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9406 && modifier
== EXPAND_INITIALIZER
9407 && !SSA_NAME_IS_DEFAULT_DEF (exp
)
9408 && (optimize
|| DECL_IGNORED_P (SSA_NAME_VAR (exp
)))
9409 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp
)))
9410 g
= SSA_NAME_DEF_STMT (exp
);
9414 ops
.code
= gimple_assign_rhs_code (g
);
9415 switch (get_gimple_rhs_class (ops
.code
))
9417 case GIMPLE_TERNARY_RHS
:
9418 ops
.op2
= gimple_assign_rhs3 (g
);
9420 case GIMPLE_BINARY_RHS
:
9421 ops
.op1
= gimple_assign_rhs2 (g
);
9423 /* Try to expand conditonal compare. */
9424 if (targetm
.gen_ccmp_first
)
9426 gcc_checking_assert (targetm
.gen_ccmp_next
!= NULL
);
9427 r
= expand_ccmp_expr (g
);
9432 case GIMPLE_UNARY_RHS
:
9433 ops
.op0
= gimple_assign_rhs1 (g
);
9434 ops
.type
= TREE_TYPE (gimple_assign_lhs (g
));
9435 ops
.location
= gimple_location (g
);
9436 r
= expand_expr_real_2 (&ops
, target
, tmode
, modifier
);
9438 case GIMPLE_SINGLE_RHS
:
9440 location_t saved_loc
= curr_insn_location ();
9441 set_curr_insn_location (gimple_location (g
));
9442 r
= expand_expr_real (gimple_assign_rhs1 (g
), target
,
9443 tmode
, modifier
, NULL
, inner_reference_p
);
9444 set_curr_insn_location (saved_loc
);
9450 if (REG_P (r
) && !REG_EXPR (r
))
9451 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp
), r
);
9456 decl_rtl
= get_rtx_for_ssa_name (ssa_name
);
9457 exp
= SSA_NAME_VAR (ssa_name
);
9458 goto expand_decl_rtl
;
9462 /* If a static var's type was incomplete when the decl was written,
9463 but the type is complete now, lay out the decl now. */
9464 if (DECL_SIZE (exp
) == 0
9465 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp
))
9466 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
9467 layout_decl (exp
, 0);
9469 /* ... fall through ... */
9473 decl_rtl
= DECL_RTL (exp
);
9475 gcc_assert (decl_rtl
);
9476 decl_rtl
= copy_rtx (decl_rtl
);
9477 /* Record writes to register variables. */
9478 if (modifier
== EXPAND_WRITE
9480 && HARD_REGISTER_P (decl_rtl
))
9481 add_to_hard_reg_set (&crtl
->asm_clobbers
,
9482 GET_MODE (decl_rtl
), REGNO (decl_rtl
));
9484 /* Ensure variable marked as used even if it doesn't go through
9485 a parser. If it hasn't be used yet, write out an external
9487 TREE_USED (exp
) = 1;
9489 /* Show we haven't gotten RTL for this yet. */
9492 /* Variables inherited from containing functions should have
9493 been lowered by this point. */
9494 context
= decl_function_context (exp
);
9495 gcc_assert (SCOPE_FILE_SCOPE_P (context
)
9496 || context
== current_function_decl
9497 || TREE_STATIC (exp
)
9498 || DECL_EXTERNAL (exp
)
9499 /* ??? C++ creates functions that are not TREE_STATIC. */
9500 || TREE_CODE (exp
) == FUNCTION_DECL
);
9502 /* This is the case of an array whose size is to be determined
9503 from its initializer, while the initializer is still being parsed.
9504 ??? We aren't parsing while expanding anymore. */
9506 if (MEM_P (decl_rtl
) && REG_P (XEXP (decl_rtl
, 0)))
9507 temp
= validize_mem (decl_rtl
);
9509 /* If DECL_RTL is memory, we are in the normal case and the
9510 address is not valid, get the address into a register. */
9512 else if (MEM_P (decl_rtl
) && modifier
!= EXPAND_INITIALIZER
)
9515 *alt_rtl
= decl_rtl
;
9516 decl_rtl
= use_anchored_address (decl_rtl
);
9517 if (modifier
!= EXPAND_CONST_ADDRESS
9518 && modifier
!= EXPAND_SUM
9519 && !memory_address_addr_space_p (DECL_MODE (exp
),
9521 MEM_ADDR_SPACE (decl_rtl
)))
9522 temp
= replace_equiv_address (decl_rtl
,
9523 copy_rtx (XEXP (decl_rtl
, 0)));
9526 /* If we got something, return it. But first, set the alignment
9527 if the address is a register. */
9530 if (MEM_P (temp
) && REG_P (XEXP (temp
, 0)))
9531 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
9536 /* If the mode of DECL_RTL does not match that of the decl,
9537 there are two cases: we are dealing with a BLKmode value
9538 that is returned in a register, or we are dealing with
9539 a promoted value. In the latter case, return a SUBREG
9540 of the wanted mode, but mark it so that we know that it
9541 was already extended. */
9542 if (REG_P (decl_rtl
)
9543 && DECL_MODE (exp
) != BLKmode
9544 && GET_MODE (decl_rtl
) != DECL_MODE (exp
))
9548 /* Get the signedness to be used for this variable. Ensure we get
9549 the same mode we got when the variable was declared. */
9550 if (code
== SSA_NAME
9551 && (g
= SSA_NAME_DEF_STMT (ssa_name
))
9552 && gimple_code (g
) == GIMPLE_CALL
9553 && !gimple_call_internal_p (g
))
9554 pmode
= promote_function_mode (type
, mode
, &unsignedp
,
9555 gimple_call_fntype (g
),
9558 pmode
= promote_decl_mode (exp
, &unsignedp
);
9559 gcc_assert (GET_MODE (decl_rtl
) == pmode
);
9561 temp
= gen_lowpart_SUBREG (mode
, decl_rtl
);
9562 SUBREG_PROMOTED_VAR_P (temp
) = 1;
9563 SUBREG_PROMOTED_SET (temp
, unsignedp
);
9570 /* Given that TYPE_PRECISION (type) is not always equal to
9571 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9572 the former to the latter according to the signedness of the
9574 temp
= immed_wide_int_const (wide_int::from
9576 GET_MODE_PRECISION (TYPE_MODE (type
)),
9583 tree tmp
= NULL_TREE
;
9584 if (GET_MODE_CLASS (mode
) == MODE_VECTOR_INT
9585 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FLOAT
9586 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FRACT
9587 || GET_MODE_CLASS (mode
) == MODE_VECTOR_UFRACT
9588 || GET_MODE_CLASS (mode
) == MODE_VECTOR_ACCUM
9589 || GET_MODE_CLASS (mode
) == MODE_VECTOR_UACCUM
)
9590 return const_vector_from_tree (exp
);
9591 if (GET_MODE_CLASS (mode
) == MODE_INT
)
9593 tree type_for_mode
= lang_hooks
.types
.type_for_mode (mode
, 1);
9595 tmp
= fold_unary_loc (loc
, VIEW_CONVERT_EXPR
, type_for_mode
, exp
);
9599 vec
<constructor_elt
, va_gc
> *v
;
9601 vec_alloc (v
, VECTOR_CST_NELTS (exp
));
9602 for (i
= 0; i
< VECTOR_CST_NELTS (exp
); ++i
)
9603 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, VECTOR_CST_ELT (exp
, i
));
9604 tmp
= build_constructor (type
, v
);
9606 return expand_expr (tmp
, ignore
? const0_rtx
: target
,
9611 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, modifier
);
9614 /* If optimized, generate immediate CONST_DOUBLE
9615 which will be turned into memory by reload if necessary.
9617 We used to force a register so that loop.c could see it. But
9618 this does not allow gen_* patterns to perform optimizations with
9619 the constants. It also produces two insns in cases like "x = 1.0;".
9620 On most machines, floating-point constants are not permitted in
9621 many insns, so we'd end up copying it to a register in any case.
9623 Now, we do the copying in expand_binop, if appropriate. */
9624 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp
),
9625 TYPE_MODE (TREE_TYPE (exp
)));
9628 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp
),
9629 TYPE_MODE (TREE_TYPE (exp
)));
9632 /* Handle evaluating a complex constant in a CONCAT target. */
9633 if (original_target
&& GET_CODE (original_target
) == CONCAT
)
9635 machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
9638 rtarg
= XEXP (original_target
, 0);
9639 itarg
= XEXP (original_target
, 1);
9641 /* Move the real and imaginary parts separately. */
9642 op0
= expand_expr (TREE_REALPART (exp
), rtarg
, mode
, EXPAND_NORMAL
);
9643 op1
= expand_expr (TREE_IMAGPART (exp
), itarg
, mode
, EXPAND_NORMAL
);
9646 emit_move_insn (rtarg
, op0
);
9648 emit_move_insn (itarg
, op1
);
9650 return original_target
;
9653 /* ... fall through ... */
9656 temp
= expand_expr_constant (exp
, 1, modifier
);
9658 /* temp contains a constant address.
9659 On RISC machines where a constant address isn't valid,
9660 make some insns to get that address into a register. */
9661 if (modifier
!= EXPAND_CONST_ADDRESS
9662 && modifier
!= EXPAND_INITIALIZER
9663 && modifier
!= EXPAND_SUM
9664 && ! memory_address_addr_space_p (mode
, XEXP (temp
, 0),
9665 MEM_ADDR_SPACE (temp
)))
9666 return replace_equiv_address (temp
,
9667 copy_rtx (XEXP (temp
, 0)));
9673 rtx ret
= expand_expr_real_1 (val
, target
, tmode
, modifier
, alt_rtl
,
9676 if (!SAVE_EXPR_RESOLVED_P (exp
))
9678 /* We can indeed still hit this case, typically via builtin
9679 expanders calling save_expr immediately before expanding
9680 something. Assume this means that we only have to deal
9681 with non-BLKmode values. */
9682 gcc_assert (GET_MODE (ret
) != BLKmode
);
9684 val
= build_decl (curr_insn_location (),
9685 VAR_DECL
, NULL
, TREE_TYPE (exp
));
9686 DECL_ARTIFICIAL (val
) = 1;
9687 DECL_IGNORED_P (val
) = 1;
9689 TREE_OPERAND (exp
, 0) = treeop0
;
9690 SAVE_EXPR_RESOLVED_P (exp
) = 1;
9692 if (!CONSTANT_P (ret
))
9693 ret
= copy_to_reg (ret
);
9694 SET_DECL_RTL (val
, ret
);
9702 /* If we don't need the result, just ensure we evaluate any
9706 unsigned HOST_WIDE_INT idx
;
9709 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), idx
, value
)
9710 expand_expr (value
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
9715 return expand_constructor (exp
, target
, modifier
, false);
    case TARGET_MEM_REF:
      {
	addr_space_t as
	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	enum insn_code icode;
	unsigned int align;

	op0 = addr_for_mem_ref (exp, as, true);
	op0 = memory_address_addr_space (mode, op0, as);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);
	set_mem_addr_space (temp, as);
	align = get_object_alignment (exp);
	if (modifier != EXPAND_WRITE
	    && modifier != EXPAND_MEMORY
	    && align < GET_MODE_ALIGNMENT (mode)
	    /* If the target does not have special handling for unaligned
	       loads of mode then it can use regular moves for them.  */
	    && ((icode = optab_handler (movmisalign_optab, mode))
		!= CODE_FOR_nothing))
	  {
	    struct expand_operand ops[2];

	    /* We've already validated the memory, and we're creating a
	       new pseudo destination.  The predicates really can't fail,
	       nor can the generator.  */
	    create_output_operand (&ops[0], NULL_RTX, mode);
	    create_fixed_operand (&ops[1], temp);
	    expand_insn (icode, 2, ops);
	    temp = ops[0].value;
	  }
	return temp;
      }
9755 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
9756 machine_mode address_mode
;
9757 tree base
= TREE_OPERAND (exp
, 0);
9759 enum insn_code icode
;
9761 /* Handle expansion of non-aliased memory with non-BLKmode. That
9762 might end up in a register. */
9763 if (mem_ref_refers_to_non_mem_p (exp
))
9765 HOST_WIDE_INT offset
= mem_ref_offset (exp
).to_short_addr ();
9766 base
= TREE_OPERAND (base
, 0);
9768 && tree_fits_uhwi_p (TYPE_SIZE (type
))
9769 && (GET_MODE_BITSIZE (DECL_MODE (base
))
9770 == tree_to_uhwi (TYPE_SIZE (type
))))
9771 return expand_expr (build1 (VIEW_CONVERT_EXPR
, type
, base
),
9772 target
, tmode
, modifier
);
9773 if (TYPE_MODE (type
) == BLKmode
)
9775 temp
= assign_stack_temp (DECL_MODE (base
),
9776 GET_MODE_SIZE (DECL_MODE (base
)));
9777 store_expr (base
, temp
, 0, false);
9778 temp
= adjust_address (temp
, BLKmode
, offset
);
9779 set_mem_size (temp
, int_size_in_bytes (type
));
9782 exp
= build3 (BIT_FIELD_REF
, type
, base
, TYPE_SIZE (type
),
9783 bitsize_int (offset
* BITS_PER_UNIT
));
9784 return expand_expr (exp
, target
, tmode
, modifier
);
9786 address_mode
= targetm
.addr_space
.address_mode (as
);
9787 base
= TREE_OPERAND (exp
, 0);
9788 if ((def_stmt
= get_def_for_expr (base
, BIT_AND_EXPR
)))
9790 tree mask
= gimple_assign_rhs2 (def_stmt
);
9791 base
= build2 (BIT_AND_EXPR
, TREE_TYPE (base
),
9792 gimple_assign_rhs1 (def_stmt
), mask
);
9793 TREE_OPERAND (exp
, 0) = base
;
9795 align
= get_object_alignment (exp
);
9796 op0
= expand_expr (base
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
9797 op0
= memory_address_addr_space (mode
, op0
, as
);
9798 if (!integer_zerop (TREE_OPERAND (exp
, 1)))
9800 rtx off
= immed_wide_int_const (mem_ref_offset (exp
), address_mode
);
9801 op0
= simplify_gen_binary (PLUS
, address_mode
, op0
, off
);
9802 op0
= memory_address_addr_space (mode
, op0
, as
);
9804 temp
= gen_rtx_MEM (mode
, op0
);
9805 set_mem_attributes (temp
, exp
, 0);
9806 set_mem_addr_space (temp
, as
);
9807 if (TREE_THIS_VOLATILE (exp
))
9808 MEM_VOLATILE_P (temp
) = 1;
9809 if (modifier
!= EXPAND_WRITE
9810 && modifier
!= EXPAND_MEMORY
9811 && !inner_reference_p
9813 && align
< GET_MODE_ALIGNMENT (mode
))
9815 if ((icode
= optab_handler (movmisalign_optab
, mode
))
9816 != CODE_FOR_nothing
)
9818 struct expand_operand ops
[2];
9820 /* We've already validated the memory, and we're creating a
9821 new pseudo destination. The predicates really can't fail,
9822 nor can the generator. */
9823 create_output_operand (&ops
[0], NULL_RTX
, mode
);
9824 create_fixed_operand (&ops
[1], temp
);
9825 expand_insn (icode
, 2, ops
);
9826 temp
= ops
[0].value
;
9828 else if (SLOW_UNALIGNED_ACCESS (mode
, align
))
9829 temp
= extract_bit_field (temp
, GET_MODE_BITSIZE (mode
),
9830 0, TYPE_UNSIGNED (TREE_TYPE (exp
)),
9831 (modifier
== EXPAND_STACK_PARM
9832 ? NULL_RTX
: target
),
9841 tree array
= treeop0
;
9842 tree index
= treeop1
;
9845 /* Fold an expression like: "foo"[2].
9846 This is not done in fold so it won't happen inside &.
9847 Don't fold if this is for wide characters since it's too
9848 difficult to do correctly and this is a very rare case. */
9850 if (modifier
!= EXPAND_CONST_ADDRESS
9851 && modifier
!= EXPAND_INITIALIZER
9852 && modifier
!= EXPAND_MEMORY
)
9854 tree t
= fold_read_from_constant_string (exp
);
9857 return expand_expr (t
, target
, tmode
, modifier
);
9860 /* If this is a constant index into a constant array,
9861 just get the value from the array. Handle both the cases when
9862 we have an explicit constructor and when our operand is a variable
9863 that was declared const. */
9865 if (modifier
!= EXPAND_CONST_ADDRESS
9866 && modifier
!= EXPAND_INITIALIZER
9867 && modifier
!= EXPAND_MEMORY
9868 && TREE_CODE (array
) == CONSTRUCTOR
9869 && ! TREE_SIDE_EFFECTS (array
)
9870 && TREE_CODE (index
) == INTEGER_CST
)
9872 unsigned HOST_WIDE_INT ix
;
9875 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array
), ix
,
9877 if (tree_int_cst_equal (field
, index
))
9879 if (!TREE_SIDE_EFFECTS (value
))
9880 return expand_expr (fold (value
), target
, tmode
, modifier
);
9885 else if (optimize
>= 1
9886 && modifier
!= EXPAND_CONST_ADDRESS
9887 && modifier
!= EXPAND_INITIALIZER
9888 && modifier
!= EXPAND_MEMORY
9889 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
9890 && TREE_CODE (index
) == INTEGER_CST
9891 && (TREE_CODE (array
) == VAR_DECL
9892 || TREE_CODE (array
) == CONST_DECL
)
9893 && (init
= ctor_for_folding (array
)) != error_mark_node
)
9895 if (init
== NULL_TREE
)
9897 tree value
= build_zero_cst (type
);
9898 if (TREE_CODE (value
) == CONSTRUCTOR
)
9900 /* If VALUE is a CONSTRUCTOR, this optimization is only
9901 useful if this doesn't store the CONSTRUCTOR into
9902 memory. If it does, it is more efficient to just
9903 load the data from the array directly. */
9904 rtx ret
= expand_constructor (value
, target
,
9906 if (ret
== NULL_RTX
)
9911 return expand_expr (value
, target
, tmode
, modifier
);
9913 else if (TREE_CODE (init
) == CONSTRUCTOR
)
9915 unsigned HOST_WIDE_INT ix
;
9918 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init
), ix
,
9920 if (tree_int_cst_equal (field
, index
))
9922 if (TREE_SIDE_EFFECTS (value
))
9925 if (TREE_CODE (value
) == CONSTRUCTOR
)
9927 /* If VALUE is a CONSTRUCTOR, this
9928 optimization is only useful if
9929 this doesn't store the CONSTRUCTOR
9930 into memory. If it does, it is more
9931 efficient to just load the data from
9932 the array directly. */
9933 rtx ret
= expand_constructor (value
, target
,
9935 if (ret
== NULL_RTX
)
9940 expand_expr (fold (value
), target
, tmode
, modifier
);
9943 else if (TREE_CODE (init
) == STRING_CST
)
9945 tree low_bound
= array_ref_low_bound (exp
);
9946 tree index1
= fold_convert_loc (loc
, sizetype
, treeop1
);
9948 /* Optimize the special case of a zero lower bound.
9950 We convert the lower bound to sizetype to avoid problems
9951 with constant folding. E.g. suppose the lower bound is
9952 1 and its mode is QI. Without the conversion
9953 (ARRAY + (INDEX - (unsigned char)1))
9955 (ARRAY + (-(unsigned char)1) + INDEX)
9957 (ARRAY + 255 + INDEX). Oops! */
9958 if (!integer_zerop (low_bound
))
9959 index1
= size_diffop_loc (loc
, index1
,
9960 fold_convert_loc (loc
, sizetype
,
9963 if (compare_tree_int (index1
, TREE_STRING_LENGTH (init
)) < 0)
9965 tree type
= TREE_TYPE (TREE_TYPE (init
));
9966 machine_mode mode
= TYPE_MODE (type
);
9968 if (GET_MODE_CLASS (mode
) == MODE_INT
9969 && GET_MODE_SIZE (mode
) == 1)
9970 return gen_int_mode (TREE_STRING_POINTER (init
)
9971 [TREE_INT_CST_LOW (index1
)],
9977 goto normal_inner_ref
;
9980 /* If the operand is a CONSTRUCTOR, we can just extract the
9981 appropriate field if it is present. */
9982 if (TREE_CODE (treeop0
) == CONSTRUCTOR
)
9984 unsigned HOST_WIDE_INT idx
;
9987 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0
),
9989 if (field
== treeop1
9990 /* We can normally use the value of the field in the
9991 CONSTRUCTOR. However, if this is a bitfield in
9992 an integral mode that we can fit in a HOST_WIDE_INT,
9993 we must mask only the number of bits in the bitfield,
9994 since this is done implicitly by the constructor. If
9995 the bitfield does not meet either of those conditions,
9996 we can't do this optimization. */
9997 && (! DECL_BIT_FIELD (field
)
9998 || ((GET_MODE_CLASS (DECL_MODE (field
)) == MODE_INT
)
9999 && (GET_MODE_PRECISION (DECL_MODE (field
))
10000 <= HOST_BITS_PER_WIDE_INT
))))
10002 if (DECL_BIT_FIELD (field
)
10003 && modifier
== EXPAND_STACK_PARM
)
10005 op0
= expand_expr (value
, target
, tmode
, modifier
);
10006 if (DECL_BIT_FIELD (field
))
10008 HOST_WIDE_INT bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
));
10009 machine_mode imode
= TYPE_MODE (TREE_TYPE (field
));
10011 if (TYPE_UNSIGNED (TREE_TYPE (field
)))
10013 op1
= gen_int_mode (((HOST_WIDE_INT
) 1 << bitsize
) - 1,
10015 op0
= expand_and (imode
, op0
, op1
, target
);
10019 int count
= GET_MODE_PRECISION (imode
) - bitsize
;
10021 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
10023 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
10031 goto normal_inner_ref
;
10033 case BIT_FIELD_REF
:
10034 case ARRAY_RANGE_REF
:
10037 machine_mode mode1
, mode2
;
10038 HOST_WIDE_INT bitsize
, bitpos
;
10040 int volatilep
= 0, must_force_mem
;
10041 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
10042 &mode1
, &unsignedp
, &volatilep
, true);
10043 rtx orig_op0
, memloc
;
10044 bool clear_mem_expr
= false;
10046 /* If we got back the original object, something is wrong. Perhaps
10047 we are evaluating an expression too early. In any event, don't
10048 infinitely recurse. */
10049 gcc_assert (tem
!= exp
);
10051 /* If TEM's type is a union of variable size, pass TARGET to the inner
10052 computation, since it will need a temporary and TARGET is known
10053 to have to do. This occurs in unchecked conversion in Ada. */
10055 = expand_expr_real (tem
,
10056 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
10057 && COMPLETE_TYPE_P (TREE_TYPE (tem
))
10058 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
10060 && modifier
!= EXPAND_STACK_PARM
10061 ? target
: NULL_RTX
),
10063 modifier
== EXPAND_SUM
? EXPAND_NORMAL
: modifier
,
10066 /* If the field has a mode, we want to access it in the
10067 field's mode, not the computed mode.
10068 If a MEM has VOIDmode (external with incomplete type),
10069 use BLKmode for it instead. */
10072 if (mode1
!= VOIDmode
)
10073 op0
= adjust_address (op0
, mode1
, 0);
10074 else if (GET_MODE (op0
) == VOIDmode
)
10075 op0
= adjust_address (op0
, BLKmode
, 0);
10079 = CONSTANT_P (op0
) ? TYPE_MODE (TREE_TYPE (tem
)) : GET_MODE (op0
);
10081 /* If we have either an offset, a BLKmode result, or a reference
10082 outside the underlying object, we must force it to memory.
10083 Such a case can occur in Ada if we have unchecked conversion
10084 of an expression from a scalar type to an aggregate type or
10085 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10086 passed a partially uninitialized object or a view-conversion
10087 to a larger size. */
10088 must_force_mem
= (offset
10089 || mode1
== BLKmode
10090 || bitpos
+ bitsize
> GET_MODE_BITSIZE (mode2
));
10092 /* Handle CONCAT first. */
10093 if (GET_CODE (op0
) == CONCAT
&& !must_force_mem
)
10096 && bitsize
== GET_MODE_BITSIZE (GET_MODE (op0
)))
10099 && bitsize
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0)))
10102 op0
= XEXP (op0
, 0);
10103 mode2
= GET_MODE (op0
);
10105 else if (bitpos
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0)))
10106 && bitsize
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 1)))
10110 op0
= XEXP (op0
, 1);
10112 mode2
= GET_MODE (op0
);
10115 /* Otherwise force into memory. */
10116 must_force_mem
= 1;
10119 /* If this is a constant, put it in a register if it is a legitimate
10120 constant and we don't need a memory reference. */
10121 if (CONSTANT_P (op0
)
10122 && mode2
!= BLKmode
10123 && targetm
.legitimate_constant_p (mode2
, op0
)
10124 && !must_force_mem
)
10125 op0
= force_reg (mode2
, op0
);
10127 /* Otherwise, if this is a constant, try to force it to the constant
10128 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10129 is a legitimate constant. */
10130 else if (CONSTANT_P (op0
) && (memloc
= force_const_mem (mode2
, op0
)))
10131 op0
= validize_mem (memloc
);
10133 /* Otherwise, if this is a constant or the object is not in memory
10134 and need be, put it there. */
10135 else if (CONSTANT_P (op0
) || (!MEM_P (op0
) && must_force_mem
))
10137 memloc
= assign_temp (TREE_TYPE (tem
), 1, 1);
10138 emit_move_insn (memloc
, op0
);
10140 clear_mem_expr
= true;
      if (offset)
        {
          machine_mode address_mode;
          rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
                                        EXPAND_SUM);

          gcc_assert (MEM_P (op0));

          address_mode = get_address_mode (op0);
          if (GET_MODE (offset_rtx) != address_mode)
            {
              /* We cannot be sure that the RTL in offset_rtx is valid outside
                 of a memory address context, so force it into a register
                 before attempting to convert it to the desired mode.  */
              offset_rtx = force_operand (offset_rtx, NULL_RTX);
              offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
            }
          /* See the comment in expand_assignment for the rationale.  */
          if (mode1 != VOIDmode
              && (bitpos % bitsize) == 0
              && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
              && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
            op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

          op0 = offset_address (op0, offset_rtx,
                                highest_pow2_factor (offset));
        }
      /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
         record its alignment as BIGGEST_ALIGNMENT.  */
      if (MEM_P (op0) && bitpos == 0 && offset != 0
          && is_aligning_offset (offset, tem))
        set_mem_align (op0, BIGGEST_ALIGNMENT);

      /* Don't forget about volatility even if this is a bitfield.  */
      if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
        {
          if (op0 == orig_op0)
            op0 = copy_rtx (op0);

          MEM_VOLATILE_P (op0) = 1;
        }
      /* In cases where an aligned union has an unaligned object
         as a field, we might be extracting a BLKmode value from
         an integer-mode (e.g., SImode) object.  Handle this case
         by doing the extract into an object as wide as the field
         (which we know to be the width of a basic mode), then
         storing into memory, and changing the mode to BLKmode.  */
      if (mode1 == VOIDmode
          || REG_P (op0) || GET_CODE (op0) == SUBREG
          || (mode1 != BLKmode && ! direct_load[(int) mode1]
              && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
              && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
              && modifier != EXPAND_CONST_ADDRESS
              && modifier != EXPAND_INITIALIZER
              && modifier != EXPAND_MEMORY)
          /* If the bitfield is volatile and the bitsize
             is narrower than the access size of the bitfield,
             we need to extract bitfields from the access.  */
          || (volatilep && TREE_CODE (exp) == COMPONENT_REF
              && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
              && mode1 != BLKmode
              && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
          /* If the field isn't aligned enough to fetch as a memref,
             fetch it as a bit field.  */
          || (mode1 != BLKmode
              && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
                    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
                    || (MEM_P (op0)
                        && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
                            || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
                   && modifier != EXPAND_MEMORY
                   && ((modifier == EXPAND_CONST_ADDRESS
                        || modifier == EXPAND_INITIALIZER)
                       ? STRICT_ALIGNMENT
                       : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
                  || (bitpos % BITS_PER_UNIT != 0)))
          /* If the type and the field are a constant size and the
             size of the type isn't the same size as the bitfield,
             we must use bitfield operations.  */
          || (bitsize >= 0
              && TYPE_SIZE (TREE_TYPE (exp))
              && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
              && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
                                        bitsize)))
        {
          machine_mode ext_mode = mode;
          if (ext_mode == BLKmode
              && ! (target != 0 && MEM_P (op0)
                    && bitpos % BITS_PER_UNIT == 0))
            ext_mode = mode_for_size (bitsize, MODE_INT, 1);

          if (ext_mode == BLKmode)
            {
              if (target == 0)
                target = assign_temp (type, 1, 1);

              /* ??? Unlike the similar test a few lines below, this one is
                 very likely obsolete.  */
              if (bitsize == 0)
                return target;

              /* In this case, BITPOS must start at a byte boundary and
                 TARGET, if specified, must be a MEM.  */
              gcc_assert (MEM_P (op0)
                          && (!target || MEM_P (target))
                          && !(bitpos % BITS_PER_UNIT));

              emit_block_move (target,
                               adjust_address (op0, VOIDmode,
                                               bitpos / BITS_PER_UNIT),
                               GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                        / BITS_PER_UNIT),
                               (modifier == EXPAND_STACK_PARM
                                ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

              return target;
            }
          /* If we have nothing to extract, the result will be 0 for targets
             with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise.  Always
             return 0 for the sake of consistency, as reading a zero-sized
             bitfield is valid in Ada and the value is fully specified.  */
          if (bitsize == 0)
            return const0_rtx;

          op0 = validize_mem (op0);

          if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
            mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

          op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
                                   (modifier == EXPAND_STACK_PARM
                                    ? NULL_RTX : target),
                                   ext_mode, ext_mode);
          /* If the result is a record type and BITSIZE is narrower than
             the mode of OP0, an integral mode, and this is a big endian
             machine, we must put the field into the high-order bits.  */
          if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
              && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
              && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
            op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
                                GET_MODE_BITSIZE (GET_MODE (op0))
                                - bitsize, op0, 1);
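          /* Illustrative note (not from the original source): on a
             big-endian target, extract_bit_field leaves the field in the
             low-order bits of the wider register, while a BLKmode
             RECORD_TYPE value is addressed from its first byte, i.e. the
             high-order end.  For example, an 8-bit field read into a
             32-bit SImode register must be shifted left by 32 - 8 = 24
             bits so that storing the register's leading bytes yields the
             field's bytes.  */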
          /* If the result type is BLKmode, store the data into a temporary
             of the appropriate type, but with the mode corresponding to the
             mode for the data we have (op0's mode).  */
          if (mode == BLKmode)
            {
              rtx new_rtx
                = assign_stack_temp_for_type (ext_mode,
                                              GET_MODE_BITSIZE (ext_mode),
                                              type);
              emit_move_insn (new_rtx, op0);
              op0 = copy_rtx (new_rtx);
              PUT_MODE (op0, BLKmode);
            }

          return op0;
        }
      /* If the result is BLKmode, use that to access the object
         now as well.  */
      if (mode == BLKmode)
        mode1 = BLKmode;

      /* Get a reference to just this component.  */
      if (modifier == EXPAND_CONST_ADDRESS
          || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
        op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
      else
        op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

      if (op0 == orig_op0)
        op0 = copy_rtx (op0);

      set_mem_attributes (op0, exp, 0);

      if (REG_P (XEXP (op0, 0)))
        mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

      /* If op0 is a temporary because the original expression was forced
         to memory, clear MEM_EXPR so that the original expression cannot
         be marked as addressable through MEM_EXPR of the temporary.  */
      if (clear_mem_expr)
        set_mem_expr (op0, NULL_TREE);

      MEM_VOLATILE_P (op0) |= volatilep;
      if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
          || modifier == EXPAND_CONST_ADDRESS
          || modifier == EXPAND_INITIALIZER)
        return op0;

      if (target == 0)
        target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      convert_move (target, op0, unsignedp);
      return target;
    }
    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
    case CALL_EXPR:
      /* All valid uses of __builtin_va_arg_pack () are removed during
         inlining.  */
      if (CALL_EXPR_VA_ARG_PACK (exp))
        error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      {
        tree fndecl = get_callee_fndecl (exp), attr;

        if (fndecl
            && (attr = lookup_attribute ("error",
                                         DECL_ATTRIBUTES (fndecl))) != NULL)
          error ("%Kcall to %qs declared with attribute error: %s",
                 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
                 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
        if (fndecl
            && (attr = lookup_attribute ("warning",
                                         DECL_ATTRIBUTES (fndecl))) != NULL)
          warning_at (tree_nonartificial_location (exp),
                      0, "%Kcall to %qs declared with attribute warning: %s",
                      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
                      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

        /* Check for a built-in function.  */
        if (fndecl && DECL_BUILT_IN (fndecl))
          {
            gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
            if (CALL_WITH_BOUNDS_P (exp))
              return expand_builtin_with_bounds (exp, target, subtarget,
                                                 tmode, ignore);
            else
              return expand_builtin (exp, target, subtarget, tmode, ignore);
          }
      }
      return expand_call (exp, target, ignore);
    case VIEW_CONVERT_EXPR:
      op0 = NULL_RTX;

      /* If we are converting to BLKmode, try to avoid an intermediate
         temporary by fetching an inner memory reference.  */
      if (mode == BLKmode
          && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
          && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
          && handled_component_p (treeop0))
        {
          machine_mode mode1;
          HOST_WIDE_INT bitsize, bitpos;
          tree offset;
          int unsignedp;
          int volatilep = 0;
          tree tem
            = get_inner_reference (treeop0, &bitsize, &bitpos,
                                   &offset, &mode1, &unsignedp, &volatilep,
                                   true);
          rtx orig_op0;

          /* ??? We should work harder and deal with non-zero offsets.  */
          if (!offset
              && (bitpos % BITS_PER_UNIT) == 0
              && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
            {
              /* See the normal_inner_ref case for the rationale.  */
              orig_op0
                = expand_expr_real (tem,
                                    (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
                                     && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
                                         != INTEGER_CST)
                                     && modifier != EXPAND_STACK_PARM
                                     ? target : NULL_RTX),
                                    VOIDmode,
                                    modifier == EXPAND_SUM
                                    ? EXPAND_NORMAL : modifier,
                                    NULL, true);

              if (MEM_P (orig_op0))
                {
                  op0 = orig_op0;

                  /* Get a reference to just this component.  */
                  if (modifier == EXPAND_CONST_ADDRESS
                      || modifier == EXPAND_SUM
                      || modifier == EXPAND_INITIALIZER)
                    op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
                  else
                    op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);

                  if (op0 == orig_op0)
                    op0 = copy_rtx (op0);

                  set_mem_attributes (op0, treeop0, 0);
                  if (REG_P (XEXP (op0, 0)))
                    mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

                  MEM_VOLATILE_P (op0) |= volatilep;
                }
            }
        }

      if (!op0)
        op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
                                NULL, inner_reference_p);
      /* If the input and output modes are both the same, we are done.  */
      if (mode == GET_MODE (op0))
        ;
      /* If neither mode is BLKmode, and both modes are the same size
         then we can use gen_lowpart.  */
      else if (mode != BLKmode && GET_MODE (op0) != BLKmode
               && (GET_MODE_PRECISION (mode)
                   == GET_MODE_PRECISION (GET_MODE (op0)))
               && !COMPLEX_MODE_P (GET_MODE (op0)))
        {
          if (GET_CODE (op0) == SUBREG)
            op0 = force_reg (GET_MODE (op0), op0);
          temp = gen_lowpart_common (mode, op0);
          if (temp)
            op0 = temp;
          else
            {
              if (!REG_P (op0) && !MEM_P (op0))
                op0 = force_reg (GET_MODE (op0), op0);
              op0 = gen_lowpart (mode, op0);
            }
        }
      /* If both types are integral, convert from one mode to the other.  */
      else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
        op0 = convert_modes (mode, GET_MODE (op0), op0,
                             TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      /* If the output type is a bit-field type, do an extraction.  */
      else if (reduce_bit_field)
        return extract_bit_field (op0, TYPE_PRECISION (type), 0,
                                  TYPE_UNSIGNED (type), NULL_RTX,
                                  mode, mode);
      /* As a last resort, spill op0 to memory, and reload it in a
         different mode.  */
      else if (!MEM_P (op0))
        {
          /* If the operand is not a MEM, force it into memory.  Since we
             are going to be changing the mode of the MEM, don't call
             force_const_mem for constants because we don't allow pool
             constants to change mode.  */
          tree inner_type = TREE_TYPE (treeop0);

          gcc_assert (!TREE_ADDRESSABLE (exp));

          if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
            target
              = assign_stack_temp_for_type
                (TYPE_MODE (inner_type),
                 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);

          emit_move_insn (target, op0);
          op0 = target;
        }
      /* If OP0 is (now) a MEM, we need to deal with alignment issues.  If the
         output type is such that the operand is known to be aligned, indicate
         that it is.  Otherwise, we need only be concerned about alignment for
         non-BLKmode results.  */
      if (MEM_P (op0))
        {
          enum insn_code icode;

          if (TYPE_ALIGN_OK (type))
            {
              /* ??? Copying the MEM without substantially changing it might
                 run afoul of the code handling volatile memory references in
                 store_expr, which assumes that TARGET is returned unmodified
                 if it has been used.  */
              op0 = copy_rtx (op0);
              set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
            }
          else if (modifier != EXPAND_WRITE
                   && modifier != EXPAND_MEMORY
                   && !inner_reference_p
                   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
            {
              /* If the target does have special handling for unaligned
                 loads of mode then use them.  */
              if ((icode = optab_handler (movmisalign_optab, mode))
                  != CODE_FOR_nothing)
                {
                  rtx reg;

                  op0 = adjust_address (op0, mode, 0);
                  /* We've already validated the memory, and we're creating a
                     new pseudo destination.  The predicates really can't
                     fail.  */
                  reg = gen_reg_rtx (mode);

                  /* Nor can the insn generator.  */
                  rtx_insn *insn = GEN_FCN (icode) (reg, op0);
                  emit_insn (insn);
                  return reg;
                }
              else if (STRICT_ALIGNMENT)
                {
                  tree inner_type = TREE_TYPE (treeop0);
                  HOST_WIDE_INT temp_size
                    = MAX (int_size_in_bytes (inner_type),
                           (HOST_WIDE_INT) GET_MODE_SIZE (mode));
                  rtx new_rtx
                    = assign_stack_temp_for_type (mode, temp_size, type);
                  rtx new_with_op0_mode
                    = adjust_address (new_rtx, GET_MODE (op0), 0);

                  gcc_assert (!TREE_ADDRESSABLE (exp));

                  if (GET_MODE (op0) == BLKmode)
                    emit_block_move (new_with_op0_mode, op0,
                                     GEN_INT (GET_MODE_SIZE (mode)),
                                     (modifier == EXPAND_STACK_PARM
                                      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
                  else
                    emit_move_insn (new_with_op0_mode, op0);

                  op0 = new_rtx;
                }
            }

          op0 = adjust_address (op0, mode, 0);
        }

      return op0;
    case MODIFY_EXPR:
      {
        tree lhs = treeop0;
        tree rhs = treeop1;
        gcc_assert (ignore);

        /* Check for |= or &= of a bitfield of size one into another bitfield
           of size 1.  In this case, (unless we need the result of the
           assignment) we can do this more efficiently with a
           test followed by an assignment, if necessary.

           ??? At this point, we can't get a BIT_FIELD_REF here.  But if
           things change so we do, this code should be enhanced to
           support it.  */
        if (TREE_CODE (lhs) == COMPONENT_REF
            && (TREE_CODE (rhs) == BIT_IOR_EXPR
                || TREE_CODE (rhs) == BIT_AND_EXPR)
            && TREE_OPERAND (rhs, 0) == lhs
            && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
            && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
            && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
          {
            rtx_code_label *label = gen_label_rtx ();
            int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
            do_jump (TREE_OPERAND (rhs, 1),
                     value ? label : 0,
                     value ? 0 : label, -1);
            expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
                               false);
            do_pending_stack_adjust ();
            emit_label (label);
            return const0_rtx;
          }

        expand_assignment (lhs, rhs, false);
        return const0_rtx;
      }
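      /* Illustrative example (not part of the original source): for

           struct s { unsigned a : 1; unsigned b : 1; } x;
           x.a |= x.b;

         the single-bit optimization above tests x.b and stores the constant
         1 into x.a only when x.b is set, instead of reading x.a, OR-ing in
         x.b and writing the result back.  */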
    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);
    case REALPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, true);
      /* Expanded in cfgexpand.c.  */
      gcc_unreachable ();

    case TRY_CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case CASE_LABEL_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case COMPOUND_LITERAL_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
         initialization constants, and should not be expanded.  */
      gcc_unreachable ();
    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
         have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (treeop0, original_target, tmode,
                               modifier, alt_rtl, inner_reference_p);

    default:
      return expand_expr_real_2 (&ops, target, tmode, modifier);
    }
}
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type.  */
  if (CONST_INT_P (exp))
    {
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      machine_mode mode = GET_MODE (exp);
      rtx mask = immed_wide_int_const
        (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
      return expand_and (mode, exp, mask, target);
    }
  else
    {
      int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
                          exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
                           exp, count, target, 0);
    }
}
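/* Illustrative example (not from the original source): reducing a value to a
   3-bit field held in a 32-bit SImode register.  For an unsigned type the
   value is simply masked:

     0b101101 & ((1 << 3) - 1)  ==  0b101  (5)

   For a signed type it is shifted up so the field's sign bit becomes the
   mode's sign bit and then arithmetically shifted back down:

     (0b101 << 29) >> 29  ==  -3   (sign-extended from the 3-bit field).  */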
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (const_tree offset, const_tree exp)
{
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
      || compare_tree_int (TREE_OPERAND (offset, 1),
                           BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
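/* Illustrative example (not from the original source): the tree pattern
   recognized above corresponds to source-level over-alignment such as

     offset = (-(uintptr_t) &buf) & (64 - 1);

   where the mask 64 - 1 is larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT.
   Adding that offset to &buf rounds the address up to the next 64-byte
   boundary, so the resulting MEM may be marked with BIGGEST_ALIGNMENT.  */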
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset, lower_bound;

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
        {
          *ptr_offset = size_zero_node;
          return TREE_OPERAND (arg, 0);
        }
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
        {
          array = TREE_OPERAND (arg, 0);
          offset = size_zero_node;
        }
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
        {
          array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
          if (TREE_CODE (array) != STRING_CST
              && TREE_CODE (array) != VAR_DECL)
            return 0;

          /* Check if the array has a nonzero lower bound.  */
          lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
          if (!integer_zerop (lower_bound))
            {
              /* If the offset and base aren't both constants, return 0.  */
              if (TREE_CODE (lower_bound) != INTEGER_CST)
                return 0;
              if (TREE_CODE (offset) != INTEGER_CST)
                return 0;
              /* Adjust offset by the lower bound.  */
              offset = size_diffop (fold_convert (sizetype, offset),
                                    fold_convert (sizetype, lower_bound));
            }
        }
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
        {
          array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
          if (TREE_CODE (array) != ADDR_EXPR)
            return 0;
          array = TREE_OPERAND (array, 0);
          if (TREE_CODE (array) != STRING_CST
              && TREE_CODE (array) != VAR_DECL)
            return 0;
        }
      else
        return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      if (TREE_CODE (arg0) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
              || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
        {
          array = TREE_OPERAND (arg0, 0);
          offset = arg1;
        }
      else if (TREE_CODE (arg1) == ADDR_EXPR
               && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
                   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
        {
          array = TREE_OPERAND (arg1, 0);
          offset = arg0;
        }
      else
        return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL
           || TREE_CODE (array) == CONST_DECL)
    {
      int length;
      tree init = ctor_for_folding (array);

      /* Variables initialized to string literals can be handled too.  */
      if (init == error_mark_node
          || !init
          || TREE_CODE (init) != STRING_CST)
        return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
          || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
          || (length = TREE_STRING_LENGTH (init)) <= 0
          || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
        return 0;

      /* If the variable is bigger than the string literal, OFFSET must be
         constant and inside the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
          && (! tree_fits_uhwi_p (offset)
              || compare_tree_int (offset, length) >= 0))
        return 0;

      *ptr_offset = offset;
      return init;
    }

  return 0;
}
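/* Illustrative example (not from the original source): for an argument of
   the form "hello" + 2 (a POINTER_PLUS_EXPR of an ADDR_EXPR of a
   STRING_CST), string_constant returns the STRING_CST "hello" and sets
   *PTR_OFFSET to the sizetype constant 2, allowing callers such as the
   string builtin expanders to fold accesses into the literal.  */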
/* Generate code to calculate the exploded expression OPS using a
   store-flag instruction, and return an rtx for the result.
   OPS reflects a comparison.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (sepops ops, rtx target, machine_mode mode)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  machine_mode operand_mode;
  int unsignedp;
  rtx op0, op1;
  rtx subtarget = target;
  location_t loc = ops->location;

  arg0 = ops->op0;
  arg1 = ops->op1;

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  /* For vector typed comparisons emit code to generate the desired
     all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
     expander for this.  */
  if (TREE_CODE (ops->type) == VECTOR_TYPE)
    {
      tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
      tree if_true = constant_boolean_node (true, ops->type);
      tree if_false = constant_boolean_node (false, ops->type);
      return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false,
                                   target);
    }

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (ops->code)
    {
    case EQ_EXPR:
      code = EQ;
      break;

    case NE_EXPR:
      code = NE;
      break;

    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;

    default:
      gcc_unreachable ();
    }
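  /* Illustrative example (not from the original source): for a signed
     operand, "x < 1" is rewritten above as "x <= 0" (LT_EXPR with ARG1 == 1
     becomes LE against zero) and "x > -1" becomes "x >= 0", so the
     single-bit and store-flag tests that follow only need to recognize
     comparisons against zero.  */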
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      std::swap (arg0, arg1);
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && integer_zerop (arg1)
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
    {
      gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);

      if (srcstmt
          && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
        {
          enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
          tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
          tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
                                       gimple_assign_rhs1 (srcstmt),
                                       gimple_assign_rhs2 (srcstmt));
          temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
          if (temp)
            return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
        }
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
                                operand_mode, unsignedp,
                                (TYPE_PRECISION (ops->type) == 1
                                 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
}
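/* Illustrative example (not from the original source): for

     int lt (int a, int b) { return a < b; }

   the comparison is expanded through do_store_flag, and on targets with a
   usable cstore/scc pattern emit_store_flag_force materializes the 0/1
   result directly in a register instead of emitting a branch sequence.  */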
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   DEFAULT_PROBABILITY is the probability of jumping to the default
   label.  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label, rtx default_label, rtx fallback_label,
            int default_probability)
{
  struct expand_operand ops[5];
  machine_mode index_mode = SImode;
  rtx op1, op2, index;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
                           index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label,
                               default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_type = lang_hooks.types.type_for_mode (index_mode, 0);
          index_expr = fold_convert (index_type, index_expr);
        }

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
                                  ? default_label
                                  : fallback_label));
  expand_jump_insn (CODE_FOR_casesi, 5, ops);
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */

static void
do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
              rtx default_label, int default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label, default_probability);
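  /* Illustrative example (not from the original source): for case values
     5 .. 20 the caller has already computed INDEX = i - 5 and RANGE = 15.
     The single unsigned comparison "INDEX > 15" then rejects both i < 5
     (which wraps around to a huge unsigned value) and i > 20, so one
     branch performs both bounds checks.  */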
  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = simplify_gen_binary (MULT, Pmode, index,
                               gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
                                             Pmode));
  index = simplify_gen_binary (PLUS, Pmode, index,
                               gen_rtx_LABEL_REF (Pmode, table_label));

#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
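/* Illustrative example (not from the original source): with a HImode
   CASE_VECTOR_MODE each dispatch-table entry is 2 bytes, so for INDEX = 3
   the code above computes the entry address as

     table_label + 3 * GET_MODE_SIZE (HImode)  ==  table_label + 6

   loads that entry into a register with convert_move, and emits the
   indirect jump through it via gen_tablejump.  */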
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label, int default_probability)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
                            fold_convert (index_type, index_expr),
                            fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_normal (range),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label, default_probability);
  return 1;
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  unsigned i, units;
  tree elt;
  machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else if (TREE_CODE (elt) == FIXED_CST)
        RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}
/* Build a decl for a personality function given a language prefix.  */

tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
                                   long_long_unsigned_type_node,
                                   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
                     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
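/* Illustrative example (not from the original source): for LANG == "gxx"
   with DWARF2 unwinding the ACONCAT above yields the declaration
   "__gxx_personality_v0", and with setjmp/longjmp exceptions
   "__gxx_personality_sj0", matching the personality routines exported by
   the C++ runtime.  */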
/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}
/* Returns a tree for the size of EXP in bytes.  */

static tree
tree_expr_size (const_tree exp)
{
  if (DECL_P (exp)
      && DECL_SIZE_UNIT (exp) != 0)
    return DECL_SIZE_UNIT (exp);
  else
    return size_in_bytes (TREE_TYPE (exp));
}
/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}
/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

static HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    size = tree_expr_size (exp);

  if (size == 0 || !tree_fits_shwi_p (size))
    return -1;

  return tree_to_shwi (size);
}

#include "gt-expr.h"