/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "insn-codes.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple-ssa.h"
#include "plugin-api.h"
#include "tree-ssanames.h"
#include "common/common-target.h"
#include "diagnostic.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "target-globals.h"
#include "tree-ssa-address.h"
#include "cfgexpand.h"
#include "tree-chkp.h"
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces_d
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces_d
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
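
/* Illustrative sketch, not part of the original file: a minimal callback
   of the shape CONSTFUN above expects.  It ignores its arguments and
   returns a zero of the requested mode, which is essentially what a
   clear-by-pieces helper supplies.  The name is made up.  */
static rtx ATTRIBUTE_UNUSED
example_zero_constfun (void *data ATTRIBUTE_UNUSED,
                       HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                       machine_mode mode)
{
  return CONST0_RTX (mode);
}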
static void move_by_pieces_1 (insn_gen_fn, machine_mode,
                              struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned,
                                        HOST_WIDE_INT,
                                        unsigned HOST_WIDE_INT,
                                        unsigned HOST_WIDE_INT,
                                        unsigned HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (insn_gen_fn, machine_mode,
                               struct store_by_pieces_d *);
static tree clear_storage_libcall_fn (int);
static rtx_insn *compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, machine_mode,
                                     tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
                        unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
                        machine_mode, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, machine_mode);
static void emit_single_push_insn (machine_mode, rtx, tree);
static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
static rtx const_vector_from_tree (tree);
static tree tree_expr_size (const_tree);
static HOST_WIDE_INT int_expr_size (tree);
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

init_expr_target (void)

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (machine_mode) ((int) mode + 1))

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);

            if (! HARD_REGNO_MODE_OK (regno, mode))

            set_mode_and_regno (reg, mode, regno);

            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

  mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))

      machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)

          PUT_MODE (mem, srcmode);

          if (insn_operand_matches (ic, 1, mem))
            float_extend_from_mem[mode][srcmode] = true;
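
/* Illustrative note, not part of the original file: the direct_load /
   direct_store tables filled in above are consulted later in this file;
   e.g. convert_move and convert_modes only leave a MEM operand in place
   when something along the lines of

     MEM_P (from) && ! MEM_VOLATILE_P (from) && direct_load[(int) to_mode]

   holds, and otherwise first force the operand into a register.  */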
/* This is run at the start of compiling a function.  */

  memset (&crtl->expr, 0, sizeof (crtl->expr));
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
277 convert_move (rtx to
, rtx from
, int unsignedp
)
279 machine_mode to_mode
= GET_MODE (to
);
280 machine_mode from_mode
= GET_MODE (from
);
281 int to_real
= SCALAR_FLOAT_MODE_P (to_mode
);
282 int from_real
= SCALAR_FLOAT_MODE_P (from_mode
);
286 /* rtx code for making an equivalent value. */
287 enum rtx_code equiv_code
= (unsignedp
< 0 ? UNKNOWN
288 : (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
));
291 gcc_assert (to_real
== from_real
);
292 gcc_assert (to_mode
!= BLKmode
);
293 gcc_assert (from_mode
!= BLKmode
);
  /* If the source and destination are already the same, then there's
     nothing to do.  */

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */
304 if (GET_CODE (from
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (from
)
305 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from
)))
306 >= GET_MODE_PRECISION (to_mode
))
307 && SUBREG_CHECK_PROMOTED_SIGN (from
, unsignedp
))
308 from
= gen_lowpart (to_mode
, from
), from_mode
= to_mode
;
310 gcc_assert (GET_CODE (to
) != SUBREG
|| !SUBREG_PROMOTED_VAR_P (to
));
312 if (to_mode
== from_mode
313 || (from_mode
== VOIDmode
&& CONSTANT_P (from
)))
315 emit_move_insn (to
, from
);
319 if (VECTOR_MODE_P (to_mode
) || VECTOR_MODE_P (from_mode
))
321 gcc_assert (GET_MODE_BITSIZE (from_mode
) == GET_MODE_BITSIZE (to_mode
));
323 if (VECTOR_MODE_P (to_mode
))
324 from
= simplify_gen_subreg (to_mode
, from
, GET_MODE (from
), 0);
326 to
= simplify_gen_subreg (from_mode
, to
, GET_MODE (to
), 0);
328 emit_move_insn (to
, from
);
332 if (GET_CODE (to
) == CONCAT
&& GET_CODE (from
) == CONCAT
)
334 convert_move (XEXP (to
, 0), XEXP (from
, 0), unsignedp
);
335 convert_move (XEXP (to
, 1), XEXP (from
, 1), unsignedp
);
345 gcc_assert ((GET_MODE_PRECISION (from_mode
)
346 != GET_MODE_PRECISION (to_mode
))
347 || (DECIMAL_FLOAT_MODE_P (from_mode
)
348 != DECIMAL_FLOAT_MODE_P (to_mode
)));
350 if (GET_MODE_PRECISION (from_mode
) == GET_MODE_PRECISION (to_mode
))
351 /* Conversion between decimal float and binary float, same size. */
352 tab
= DECIMAL_FLOAT_MODE_P (from_mode
) ? trunc_optab
: sext_optab
;
353 else if (GET_MODE_PRECISION (from_mode
) < GET_MODE_PRECISION (to_mode
))
358 /* Try converting directly if the insn is supported. */
360 code
= convert_optab_handler (tab
, to_mode
, from_mode
);
361 if (code
!= CODE_FOR_nothing
)
363 emit_unop_insn (code
, to
, from
,
364 tab
== sext_optab
? FLOAT_EXTEND
: FLOAT_TRUNCATE
);
368 /* Otherwise use a libcall. */
369 libcall
= convert_optab_libfunc (tab
, to_mode
, from_mode
);
371 /* Is this conversion implemented yet? */
372 gcc_assert (libcall
);
375 value
= emit_library_call_value (libcall
, NULL_RTX
, LCT_CONST
, to_mode
,
377 insns
= get_insns ();
379 emit_libcall_block (insns
, to
, value
,
380 tab
== trunc_optab
? gen_rtx_FLOAT_TRUNCATE (to_mode
,
382 : gen_rtx_FLOAT_EXTEND (to_mode
, from
));
386 /* Handle pointer conversion. */ /* SPEE 900220. */
387 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
391 if (GET_MODE_PRECISION (from_mode
) > GET_MODE_PRECISION (to_mode
))
398 if (convert_optab_handler (ctab
, to_mode
, from_mode
)
401 emit_unop_insn (convert_optab_handler (ctab
, to_mode
, from_mode
),
407 /* Targets are expected to provide conversion insns between PxImode and
408 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
409 if (GET_MODE_CLASS (to_mode
) == MODE_PARTIAL_INT
)
411 machine_mode full_mode
412 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode
), MODE_INT
);
414 gcc_assert (convert_optab_handler (trunc_optab
, to_mode
, full_mode
)
415 != CODE_FOR_nothing
);
417 if (full_mode
!= from_mode
)
418 from
= convert_to_mode (full_mode
, from
, unsignedp
);
419 emit_unop_insn (convert_optab_handler (trunc_optab
, to_mode
, full_mode
),
423 if (GET_MODE_CLASS (from_mode
) == MODE_PARTIAL_INT
)
426 machine_mode full_mode
427 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode
), MODE_INT
);
428 convert_optab ctab
= unsignedp
? zext_optab
: sext_optab
;
429 enum insn_code icode
;
431 icode
= convert_optab_handler (ctab
, full_mode
, from_mode
);
432 gcc_assert (icode
!= CODE_FOR_nothing
);
434 if (to_mode
== full_mode
)
436 emit_unop_insn (icode
, to
, from
, UNKNOWN
);
440 new_from
= gen_reg_rtx (full_mode
);
441 emit_unop_insn (icode
, new_from
, from
, UNKNOWN
);
443 /* else proceed to integer conversions below. */
444 from_mode
= full_mode
;
448 /* Make sure both are fixed-point modes or both are not. */
449 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode
) ==
450 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode
));
451 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode
))
453 /* If we widen from_mode to to_mode and they are in the same class,
454 we won't saturate the result.
455 Otherwise, always saturate the result to play safe. */
456 if (GET_MODE_CLASS (from_mode
) == GET_MODE_CLASS (to_mode
)
457 && GET_MODE_SIZE (from_mode
) < GET_MODE_SIZE (to_mode
))
458 expand_fixed_convert (to
, from
, 0, 0);
460 expand_fixed_convert (to
, from
, 0, 1);
464 /* Now both modes are integers. */
466 /* Handle expanding beyond a word. */
467 if (GET_MODE_PRECISION (from_mode
) < GET_MODE_PRECISION (to_mode
)
468 && GET_MODE_PRECISION (to_mode
) > BITS_PER_WORD
)
475 machine_mode lowpart_mode
;
476 int nwords
= CEIL (GET_MODE_SIZE (to_mode
), UNITS_PER_WORD
);
478 /* Try converting directly if the insn is supported. */
479 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
482 /* If FROM is a SUBREG, put it into a register. Do this
483 so that we always generate the same set of insns for
484 better cse'ing; if an intermediate assignment occurred,
485 we won't be doing the operation directly on the SUBREG. */
486 if (optimize
> 0 && GET_CODE (from
) == SUBREG
)
487 from
= force_reg (from_mode
, from
);
488 emit_unop_insn (code
, to
, from
, equiv_code
);
491 /* Next, try converting via full word. */
492 else if (GET_MODE_PRECISION (from_mode
) < BITS_PER_WORD
493 && ((code
= can_extend_p (to_mode
, word_mode
, unsignedp
))
494 != CODE_FOR_nothing
))
496 rtx word_to
= gen_reg_rtx (word_mode
);
499 if (reg_overlap_mentioned_p (to
, from
))
500 from
= force_reg (from_mode
, from
);
503 convert_move (word_to
, from
, unsignedp
);
504 emit_unop_insn (code
, to
, word_to
, equiv_code
);
508 /* No special multiword conversion insn; do it by hand. */
      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target, so force it into an
         isolated register when it might.  Likewise for any MEM input, since
         the conversion sequence might require several references to it and
         we must ensure we're getting the same value every time.  */
517 if (MEM_P (from
) || reg_overlap_mentioned_p (to
, from
))
518 from
= force_reg (from_mode
, from
);
520 /* Get a copy of FROM widened to a word, if necessary. */
521 if (GET_MODE_PRECISION (from_mode
) < BITS_PER_WORD
)
522 lowpart_mode
= word_mode
;
524 lowpart_mode
= from_mode
;
526 lowfrom
= convert_to_mode (lowpart_mode
, from
, unsignedp
);
528 lowpart
= gen_lowpart (lowpart_mode
, to
);
529 emit_move_insn (lowpart
, lowfrom
);
531 /* Compute the value to put in each remaining word. */
533 fill_value
= const0_rtx
;
535 fill_value
= emit_store_flag_force (gen_reg_rtx (word_mode
),
536 LT
, lowfrom
, const0_rtx
,
537 lowpart_mode
, 0, -1);
539 /* Fill the remaining words. */
540 for (i
= GET_MODE_SIZE (lowpart_mode
) / UNITS_PER_WORD
; i
< nwords
; i
++)
542 int index
= (WORDS_BIG_ENDIAN
? nwords
- i
- 1 : i
);
543 rtx subword
= operand_subword (to
, index
, 1, to_mode
);
545 gcc_assert (subword
);
547 if (fill_value
!= subword
)
548 emit_move_insn (subword
, fill_value
);
551 insns
= get_insns ();
558 /* Truncating multi-word to a word or less. */
559 if (GET_MODE_PRECISION (from_mode
) > BITS_PER_WORD
560 && GET_MODE_PRECISION (to_mode
) <= BITS_PER_WORD
)
563 && ! MEM_VOLATILE_P (from
)
564 && direct_load
[(int) to_mode
]
565 && ! mode_dependent_address_p (XEXP (from
, 0),
566 MEM_ADDR_SPACE (from
)))
568 || GET_CODE (from
) == SUBREG
))
569 from
= force_reg (from_mode
, from
);
570 convert_move (to
, gen_lowpart (word_mode
, from
), 0);
574 /* Now follow all the conversions between integers
575 no more than a word long. */
577 /* For truncation, usually we can just refer to FROM in a narrower mode. */
578 if (GET_MODE_BITSIZE (to_mode
) < GET_MODE_BITSIZE (from_mode
)
579 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode
, from_mode
))
582 && ! MEM_VOLATILE_P (from
)
583 && direct_load
[(int) to_mode
]
584 && ! mode_dependent_address_p (XEXP (from
, 0),
585 MEM_ADDR_SPACE (from
)))
587 || GET_CODE (from
) == SUBREG
))
588 from
= force_reg (from_mode
, from
);
589 if (REG_P (from
) && REGNO (from
) < FIRST_PSEUDO_REGISTER
590 && ! HARD_REGNO_MODE_OK (REGNO (from
), to_mode
))
591 from
= copy_to_reg (from
);
592 emit_move_insn (to
, gen_lowpart (to_mode
, from
));
596 /* Handle extension. */
597 if (GET_MODE_PRECISION (to_mode
) > GET_MODE_PRECISION (from_mode
))
599 /* Convert directly if that works. */
600 if ((code
= can_extend_p (to_mode
, from_mode
, unsignedp
))
603 emit_unop_insn (code
, to
, from
, equiv_code
);
608 machine_mode intermediate
;
612 /* Search for a mode to convert via. */
613 for (intermediate
= from_mode
; intermediate
!= VOIDmode
;
614 intermediate
= GET_MODE_WIDER_MODE (intermediate
))
615 if (((can_extend_p (to_mode
, intermediate
, unsignedp
)
617 || (GET_MODE_SIZE (to_mode
) < GET_MODE_SIZE (intermediate
)
618 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode
, intermediate
)))
619 && (can_extend_p (intermediate
, from_mode
, unsignedp
)
620 != CODE_FOR_nothing
))
622 convert_move (to
, convert_to_mode (intermediate
, from
,
623 unsignedp
), unsignedp
);
627 /* No suitable intermediate mode.
628 Generate what we need with shifts. */
629 shift_amount
= (GET_MODE_PRECISION (to_mode
)
630 - GET_MODE_PRECISION (from_mode
));
631 from
= gen_lowpart (to_mode
, force_reg (from_mode
, from
));
632 tmp
= expand_shift (LSHIFT_EXPR
, to_mode
, from
, shift_amount
,
634 tmp
= expand_shift (RSHIFT_EXPR
, to_mode
, tmp
, shift_amount
,
637 emit_move_insn (to
, tmp
);
642 /* Support special truncate insns for certain modes. */
643 if (convert_optab_handler (trunc_optab
, to_mode
,
644 from_mode
) != CODE_FOR_nothing
)
646 emit_unop_insn (convert_optab_handler (trunc_optab
, to_mode
, from_mode
),
651 /* Handle truncation of volatile memrefs, and so on;
652 the things that couldn't be truncated directly,
653 and for which there was no special instruction.
655 ??? Code above formerly short-circuited this, for most integer
656 mode pairs, with a force_reg in from_mode followed by a recursive
657 call to this routine. Appears always to have been wrong. */
658 if (GET_MODE_PRECISION (to_mode
) < GET_MODE_PRECISION (from_mode
))
660 rtx temp
= force_reg (to_mode
, gen_lowpart (to_mode
, from
));
661 emit_move_insn (to
, temp
);
665 /* Mode combination is not recognized. */
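
/* Illustrative sketch, not part of the original file: a typical use of
   convert_move -- widening a QImode value into a fresh SImode pseudo.
   UNSIGNEDP selects zero-extension rather than sign-extension.  The
   helper name is made up.  */
static rtx ATTRIBUTE_UNUSED
example_widen_qi_to_si (rtx narrow_qi)
{
  rtx wide = gen_reg_rtx (SImode);
  convert_move (wide, narrow_qi, /*unsignedp=*/1);
  return wide;
}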
669 /* Return an rtx for a value that would result
670 from converting X to mode MODE.
671 Both X and MODE may be floating, or both integer.
672 UNSIGNEDP is nonzero if X is an unsigned value.
673 This can be done by referring to a part of X in place
674 or by copying to a new temporary with conversion. */
677 convert_to_mode (machine_mode mode
, rtx x
, int unsignedp
)
679 return convert_modes (mode
, VOIDmode
, x
, unsignedp
);
682 /* Return an rtx for a value that would result
683 from converting X from mode OLDMODE to mode MODE.
684 Both modes may be floating, or both integer.
685 UNSIGNEDP is nonzero if X is an unsigned value.
687 This can be done by referring to a part of X in place
688 or by copying to a new temporary with conversion.
690 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
693 convert_modes (machine_mode mode
, machine_mode oldmode
, rtx x
, int unsignedp
)
697 /* If FROM is a SUBREG that indicates that we have already done at least
698 the required extension, strip it. */
700 if (GET_CODE (x
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (x
)
701 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) >= GET_MODE_SIZE (mode
)
702 && SUBREG_CHECK_PROMOTED_SIGN (x
, unsignedp
))
703 x
= gen_lowpart (mode
, SUBREG_REG (x
));
705 if (GET_MODE (x
) != VOIDmode
)
706 oldmode
= GET_MODE (x
);
711 if (CONST_SCALAR_INT_P (x
) && GET_MODE_CLASS (mode
) == MODE_INT
)
713 /* If the caller did not tell us the old mode, then there is not
714 much to do with respect to canonicalization. We have to
715 assume that all the bits are significant. */
716 if (GET_MODE_CLASS (oldmode
) != MODE_INT
)
717 oldmode
= MAX_MODE_INT
;
718 wide_int w
= wide_int::from (std::make_pair (x
, oldmode
),
719 GET_MODE_PRECISION (mode
),
720 unsignedp
? UNSIGNED
: SIGNED
);
721 return immed_wide_int_const (w
, mode
);
724 /* We can do this with a gen_lowpart if both desired and current modes
725 are integer, and this is either a constant integer, a register, or a
727 if (GET_MODE_CLASS (mode
) == MODE_INT
728 && GET_MODE_CLASS (oldmode
) == MODE_INT
729 && GET_MODE_PRECISION (mode
) <= GET_MODE_PRECISION (oldmode
)
730 && ((MEM_P (x
) && !MEM_VOLATILE_P (x
) && direct_load
[(int) mode
])
732 && (!HARD_REGISTER_P (x
)
733 || HARD_REGNO_MODE_OK (REGNO (x
), mode
))
734 && TRULY_NOOP_TRUNCATION_MODES_P (mode
, GET_MODE (x
)))))
736 return gen_lowpart (mode
, x
);
738 /* Converting from integer constant into mode is always equivalent to an
740 if (VECTOR_MODE_P (mode
) && GET_MODE (x
) == VOIDmode
)
742 gcc_assert (GET_MODE_BITSIZE (mode
) == GET_MODE_BITSIZE (oldmode
));
743 return simplify_gen_subreg (mode
, x
, oldmode
, 0);
746 temp
= gen_reg_rtx (mode
);
747 convert_move (temp
, x
, unsignedp
);
751 /* Return the largest alignment we can use for doing a move (or store)
752 of MAX_PIECES. ALIGN is the largest alignment we could use. */
755 alignment_for_piecewise_move (unsigned int max_pieces
, unsigned int align
)
759 tmode
= mode_for_size (max_pieces
* BITS_PER_UNIT
, MODE_INT
, 1);
760 if (align
>= GET_MODE_ALIGNMENT (tmode
))
761 align
= GET_MODE_ALIGNMENT (tmode
);
764 machine_mode tmode
, xmode
;
766 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
), xmode
= tmode
;
768 xmode
= tmode
, tmode
= GET_MODE_WIDER_MODE (tmode
))
769 if (GET_MODE_SIZE (tmode
) > max_pieces
770 || SLOW_UNALIGNED_ACCESS (tmode
, align
))
773 align
= MAX (align
, GET_MODE_ALIGNMENT (xmode
));
779 /* Return the widest integer mode no wider than SIZE. If no such mode
780 can be found, return VOIDmode. */
783 widest_int_mode_for_size (unsigned int size
)
785 machine_mode tmode
, mode
= VOIDmode
;
787 for (tmode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
788 tmode
!= VOIDmode
; tmode
= GET_MODE_WIDER_MODE (tmode
))
789 if (GET_MODE_SIZE (tmode
) < size
)
795 /* Determine whether the LEN bytes can be moved by using several move
796 instructions. Return nonzero if a call to move_by_pieces should
800 can_move_by_pieces (unsigned HOST_WIDE_INT len
,
803 return targetm
.use_by_pieces_infrastructure_p (len
, align
, MOVE_BY_PIECES
,
804 optimize_insn_for_speed_p ());
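
/* Illustrative note, not part of the original file: the typical caller
   pattern, as used by emit_block_move_hints further down, is

     if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);

   i.e. the by-pieces path is only taken for small constant sizes that the
   target considers profitable.  */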
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */
820 move_by_pieces (rtx to
, rtx from
, unsigned HOST_WIDE_INT len
,
821 unsigned int align
, int endp
)
823 struct move_by_pieces_d data
;
824 machine_mode to_addr_mode
;
825 machine_mode from_addr_mode
= get_address_mode (from
);
826 rtx to_addr
, from_addr
= XEXP (from
, 0);
827 unsigned int max_size
= MOVE_MAX_PIECES
+ 1;
828 enum insn_code icode
;
830 align
= MIN (to
? MEM_ALIGN (to
) : align
, MEM_ALIGN (from
));
833 data
.from_addr
= from_addr
;
836 to_addr_mode
= get_address_mode (to
);
837 to_addr
= XEXP (to
, 0);
840 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
841 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
843 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
847 to_addr_mode
= VOIDmode
;
851 if (STACK_GROWS_DOWNWARD
)
856 data
.to_addr
= to_addr
;
859 = (GET_CODE (from_addr
) == PRE_INC
|| GET_CODE (from_addr
) == PRE_DEC
860 || GET_CODE (from_addr
) == POST_INC
861 || GET_CODE (from_addr
) == POST_DEC
);
863 data
.explicit_inc_from
= 0;
864 data
.explicit_inc_to
= 0;
865 if (data
.reverse
) data
.offset
= len
;
868 /* If copying requires more than two move insns,
869 copy addresses to registers (to make displacements shorter)
870 and use post-increment if available. */
871 if (!(data
.autinc_from
&& data
.autinc_to
)
872 && move_by_pieces_ninsns (len
, align
, max_size
) > 2)
874 /* Find the mode of the largest move...
875 MODE might not be used depending on the definitions of the
876 USE_* macros below. */
877 machine_mode mode ATTRIBUTE_UNUSED
878 = widest_int_mode_for_size (max_size
);
880 if (USE_LOAD_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_from
)
882 data
.from_addr
= copy_to_mode_reg (from_addr_mode
,
883 plus_constant (from_addr_mode
,
885 data
.autinc_from
= 1;
886 data
.explicit_inc_from
= -1;
888 if (USE_LOAD_POST_INCREMENT (mode
) && ! data
.autinc_from
)
890 data
.from_addr
= copy_to_mode_reg (from_addr_mode
, from_addr
);
891 data
.autinc_from
= 1;
892 data
.explicit_inc_from
= 1;
894 if (!data
.autinc_from
&& CONSTANT_P (from_addr
))
895 data
.from_addr
= copy_to_mode_reg (from_addr_mode
, from_addr
);
896 if (USE_STORE_PRE_DECREMENT (mode
) && data
.reverse
&& ! data
.autinc_to
)
898 data
.to_addr
= copy_to_mode_reg (to_addr_mode
,
899 plus_constant (to_addr_mode
,
902 data
.explicit_inc_to
= -1;
904 if (USE_STORE_POST_INCREMENT (mode
) && ! data
.reverse
&& ! data
.autinc_to
)
906 data
.to_addr
= copy_to_mode_reg (to_addr_mode
, to_addr
);
908 data
.explicit_inc_to
= 1;
910 if (!data
.autinc_to
&& CONSTANT_P (to_addr
))
911 data
.to_addr
= copy_to_mode_reg (to_addr_mode
, to_addr
);
914 align
= alignment_for_piecewise_move (MOVE_MAX_PIECES
, align
);
916 /* First move what we can in the largest integer mode, then go to
917 successively smaller modes. */
919 while (max_size
> 1 && data
.len
> 0)
921 machine_mode mode
= widest_int_mode_for_size (max_size
);
923 if (mode
== VOIDmode
)
926 icode
= optab_handler (mov_optab
, mode
);
927 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
928 move_by_pieces_1 (GEN_FCN (icode
), mode
, &data
);
930 max_size
= GET_MODE_SIZE (mode
);
933 /* The code above should have handled everything. */
934 gcc_assert (!data
.len
);
940 gcc_assert (!data
.reverse
);
945 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
946 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
948 data
.to_addr
= copy_to_mode_reg (to_addr_mode
,
949 plus_constant (to_addr_mode
,
953 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
960 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
968 /* Return number of insns required to move L bytes by pieces.
969 ALIGN (in bits) is maximum alignment we can assume. */
971 unsigned HOST_WIDE_INT
972 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l
, unsigned int align
,
973 unsigned int max_size
)
975 unsigned HOST_WIDE_INT n_insns
= 0;
977 align
= alignment_for_piecewise_move (MOVE_MAX_PIECES
, align
);
979 while (max_size
> 1 && l
> 0)
982 enum insn_code icode
;
984 mode
= widest_int_mode_for_size (max_size
);
986 if (mode
== VOIDmode
)
989 icode
= optab_handler (mov_optab
, mode
);
990 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
991 n_insns
+= l
/ GET_MODE_SIZE (mode
), l
%= GET_MODE_SIZE (mode
);
993 max_size
= GET_MODE_SIZE (mode
);
1000 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1001 with move instructions for mode MODE. GENFUN is the gen_... function
1002 to make a move insn for that mode. DATA has all the other info. */
1005 move_by_pieces_1 (insn_gen_fn genfun
, machine_mode mode
,
1006 struct move_by_pieces_d
*data
)
1008 unsigned int size
= GET_MODE_SIZE (mode
);
1009 rtx to1
= NULL_RTX
, from1
;
1011 while (data
->len
>= size
)
1014 data
->offset
-= size
;
1018 if (data
->autinc_to
)
1019 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
1022 to1
= adjust_address (data
->to
, mode
, data
->offset
);
1025 if (data
->autinc_from
)
1026 from1
= adjust_automodify_address (data
->from
, mode
, data
->from_addr
,
1029 from1
= adjust_address (data
->from
, mode
, data
->offset
);
1031 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
1032 emit_insn (gen_add2_insn (data
->to_addr
,
1033 gen_int_mode (-(HOST_WIDE_INT
) size
,
1034 GET_MODE (data
->to_addr
))));
1035 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_from
< 0)
1036 emit_insn (gen_add2_insn (data
->from_addr
,
1037 gen_int_mode (-(HOST_WIDE_INT
) size
,
1038 GET_MODE (data
->from_addr
))));
1041 emit_insn ((*genfun
) (to1
, from1
));
1044 #ifdef PUSH_ROUNDING
1045 emit_single_push_insn (mode
, from1
, NULL
);
1051 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
1052 emit_insn (gen_add2_insn (data
->to_addr
,
1054 GET_MODE (data
->to_addr
))));
1055 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_from
> 0)
1056 emit_insn (gen_add2_insn (data
->from_addr
,
1058 GET_MODE (data
->from_addr
))));
1060 if (! data
->reverse
)
1061 data
->offset
+= size
;
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.
   MIN_SIZE is the minimal size of the block to move.
   MAX_SIZE is the maximal size of the block to move; if it cannot be
   represented in unsigned HOST_WIDE_INT, then it is the mask of all ones.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
1083 emit_block_move_hints (rtx x
, rtx y
, rtx size
, enum block_op_methods method
,
1084 unsigned int expected_align
, HOST_WIDE_INT expected_size
,
1085 unsigned HOST_WIDE_INT min_size
,
1086 unsigned HOST_WIDE_INT max_size
,
1087 unsigned HOST_WIDE_INT probable_max_size
)
1094 if (CONST_INT_P (size
)
1095 && INTVAL (size
) == 0)
1100 case BLOCK_OP_NORMAL
:
1101 case BLOCK_OP_TAILCALL
:
1102 may_use_call
= true;
1105 case BLOCK_OP_CALL_PARM
:
1106 may_use_call
= block_move_libcall_safe_for_call_parm ();
1108 /* Make inhibit_defer_pop nonzero around the library call
1109 to force it to pop the arguments right away. */
1113 case BLOCK_OP_NO_LIBCALL
:
1114 may_use_call
= false;
1121 gcc_assert (MEM_P (x
) && MEM_P (y
));
1122 align
= MIN (MEM_ALIGN (x
), MEM_ALIGN (y
));
1123 gcc_assert (align
>= BITS_PER_UNIT
);
1125 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1126 block copy is more efficient for other large modes, e.g. DCmode. */
1127 x
= adjust_address (x
, BLKmode
, 0);
1128 y
= adjust_address (y
, BLKmode
, 0);
1130 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1131 can be incorrect is coming from __builtin_memcpy. */
1132 if (CONST_INT_P (size
))
1134 x
= shallow_copy_rtx (x
);
1135 y
= shallow_copy_rtx (y
);
1136 set_mem_size (x
, INTVAL (size
));
1137 set_mem_size (y
, INTVAL (size
));
1140 if (CONST_INT_P (size
) && can_move_by_pieces (INTVAL (size
), align
))
1141 move_by_pieces (x
, y
, INTVAL (size
), align
, 0);
1142 else if (emit_block_move_via_movmem (x
, y
, size
, align
,
1143 expected_align
, expected_size
,
1144 min_size
, max_size
, probable_max_size
))
1146 else if (may_use_call
1147 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x
))
1148 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y
)))
1150 /* Since x and y are passed to a libcall, mark the corresponding
1151 tree EXPR as addressable. */
1152 tree y_expr
= MEM_EXPR (y
);
1153 tree x_expr
= MEM_EXPR (x
);
1155 mark_addressable (y_expr
);
1157 mark_addressable (x_expr
);
1158 retval
= emit_block_move_via_libcall (x
, y
, size
,
1159 method
== BLOCK_OP_TAILCALL
);
1163 emit_block_move_via_loop (x
, y
, size
, align
);
1165 if (method
== BLOCK_OP_CALL_PARM
)
1172 emit_block_move (rtx x
, rtx y
, rtx size
, enum block_op_methods method
)
1174 unsigned HOST_WIDE_INT max
, min
= 0;
1175 if (GET_CODE (size
) == CONST_INT
)
1176 min
= max
= UINTVAL (size
);
1178 max
= GET_MODE_MASK (GET_MODE (size
));
1179 return emit_block_move_hints (x
, y
, size
, method
, 0, -1,
1183 /* A subroutine of emit_block_move. Returns true if calling the
1184 block move libcall will not clobber any parameters which may have
1185 already been placed on the stack. */
1188 block_move_libcall_safe_for_call_parm (void)
1190 #if defined (REG_PARM_STACK_SPACE)
1194 /* If arguments are pushed on the stack, then they're safe. */
1198 /* If registers go on the stack anyway, any argument is sure to clobber
1199 an outgoing argument. */
1200 #if defined (REG_PARM_STACK_SPACE)
1201 fn
= emit_block_move_libcall_fn (false);
1202 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1203 depend on its argument. */
1205 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn
? NULL_TREE
: TREE_TYPE (fn
)))
1206 && REG_PARM_STACK_SPACE (fn
) != 0)
1210 /* If any argument goes in memory, then it might clobber an outgoing
1213 CUMULATIVE_ARGS args_so_far_v
;
1214 cumulative_args_t args_so_far
;
1217 fn
= emit_block_move_libcall_fn (false);
1218 INIT_CUMULATIVE_ARGS (args_so_far_v
, TREE_TYPE (fn
), NULL_RTX
, 0, 3);
1219 args_so_far
= pack_cumulative_args (&args_so_far_v
);
1221 arg
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
1222 for ( ; arg
!= void_list_node
; arg
= TREE_CHAIN (arg
))
1224 machine_mode mode
= TYPE_MODE (TREE_VALUE (arg
));
1225 rtx tmp
= targetm
.calls
.function_arg (args_so_far
, mode
,
1227 if (!tmp
|| !REG_P (tmp
))
1229 if (targetm
.calls
.arg_partial_bytes (args_so_far
, mode
, NULL
, 1))
1231 targetm
.calls
.function_arg_advance (args_so_far
, mode
,
1238 /* A subroutine of emit_block_move. Expand a movmem pattern;
1239 return true if successful. */
1242 emit_block_move_via_movmem (rtx x
, rtx y
, rtx size
, unsigned int align
,
1243 unsigned int expected_align
, HOST_WIDE_INT expected_size
,
1244 unsigned HOST_WIDE_INT min_size
,
1245 unsigned HOST_WIDE_INT max_size
,
1246 unsigned HOST_WIDE_INT probable_max_size
)
1248 int save_volatile_ok
= volatile_ok
;
1251 if (expected_align
< align
)
1252 expected_align
= align
;
1253 if (expected_size
!= -1)
1255 if ((unsigned HOST_WIDE_INT
)expected_size
> probable_max_size
)
1256 expected_size
= probable_max_size
;
1257 if ((unsigned HOST_WIDE_INT
)expected_size
< min_size
)
1258 expected_size
= min_size
;
1261 /* Since this is a move insn, we don't care about volatility. */
1264 /* Try the most limited insn first, because there's no point
1265 including more than one in the machine description unless
1266 the more limited one has some advantage. */
1268 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
1269 mode
= GET_MODE_WIDER_MODE (mode
))
1271 enum insn_code code
= direct_optab_handler (movmem_optab
, mode
);
1273 if (code
!= CODE_FOR_nothing
1274 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1275 here because if SIZE is less than the mode mask, as it is
1276 returned by the macro, it will definitely be less than the
1277 actual mode mask. Since SIZE is within the Pmode address
1278 space, we limit MODE to Pmode. */
1279 && ((CONST_INT_P (size
)
1280 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
1281 <= (GET_MODE_MASK (mode
) >> 1)))
1282 || max_size
<= (GET_MODE_MASK (mode
) >> 1)
1283 || GET_MODE_BITSIZE (mode
) >= GET_MODE_BITSIZE (Pmode
)))
1285 struct expand_operand ops
[9];
1288 /* ??? When called via emit_block_move_for_call, it'd be
1289 nice if there were some way to inform the backend, so
1290 that it doesn't fail the expansion because it thinks
1291 emitting the libcall would be more efficient. */
1292 nops
= insn_data
[(int) code
].n_generator_args
;
1293 gcc_assert (nops
== 4 || nops
== 6 || nops
== 8 || nops
== 9);
1295 create_fixed_operand (&ops
[0], x
);
1296 create_fixed_operand (&ops
[1], y
);
1297 /* The check above guarantees that this size conversion is valid. */
1298 create_convert_operand_to (&ops
[2], size
, mode
, true);
1299 create_integer_operand (&ops
[3], align
/ BITS_PER_UNIT
);
1302 create_integer_operand (&ops
[4], expected_align
/ BITS_PER_UNIT
);
1303 create_integer_operand (&ops
[5], expected_size
);
1307 create_integer_operand (&ops
[6], min_size
);
	  /* If we cannot represent the maximal size,
	     make the parameter NULL.  */
1310 if ((HOST_WIDE_INT
) max_size
!= -1)
1311 create_integer_operand (&ops
[7], max_size
);
1313 create_fixed_operand (&ops
[7], NULL
);
	  /* If we cannot represent the maximal size,
	     make the parameter NULL.  */
1319 if ((HOST_WIDE_INT
) probable_max_size
!= -1)
1320 create_integer_operand (&ops
[8], probable_max_size
);
1322 create_fixed_operand (&ops
[8], NULL
);
1324 if (maybe_expand_insn (code
, nops
, ops
))
1326 volatile_ok
= save_volatile_ok
;
1332 volatile_ok
= save_volatile_ok
;
1336 /* A subroutine of emit_block_move. Expand a call to memcpy.
1337 Return the return value from memcpy, 0 otherwise. */
1340 emit_block_move_via_libcall (rtx dst
, rtx src
, rtx size
, bool tailcall
)
1342 rtx dst_addr
, src_addr
;
1343 tree call_expr
, fn
, src_tree
, dst_tree
, size_tree
;
1344 machine_mode size_mode
;
1347 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1348 pseudos. We can then place those new pseudos into a VAR_DECL and
1351 dst_addr
= copy_addr_to_reg (XEXP (dst
, 0));
1352 src_addr
= copy_addr_to_reg (XEXP (src
, 0));
1354 dst_addr
= convert_memory_address (ptr_mode
, dst_addr
);
1355 src_addr
= convert_memory_address (ptr_mode
, src_addr
);
1357 dst_tree
= make_tree (ptr_type_node
, dst_addr
);
1358 src_tree
= make_tree (ptr_type_node
, src_addr
);
1360 size_mode
= TYPE_MODE (sizetype
);
1362 size
= convert_to_mode (size_mode
, size
, 1);
1363 size
= copy_to_mode_reg (size_mode
, size
);
1365 /* It is incorrect to use the libcall calling conventions to call
1366 memcpy in this context. This could be a user call to memcpy and
1367 the user may wish to examine the return value from memcpy. For
1368 targets where libcalls and normal calls have different conventions
1369 for returning pointers, we could end up generating incorrect code. */
1371 size_tree
= make_tree (sizetype
, size
);
1373 fn
= emit_block_move_libcall_fn (true);
1374 call_expr
= build_call_expr (fn
, 3, dst_tree
, src_tree
, size_tree
);
1375 CALL_EXPR_TAILCALL (call_expr
) = tailcall
;
1377 retval
= expand_normal (call_expr
);
1382 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1383 for the function we use for block copies. */
1385 static GTY(()) tree block_move_fn
;
1388 init_block_move_fn (const char *asmspec
)
1392 tree args
, fn
, attrs
, attr_args
;
1394 fn
= get_identifier ("memcpy");
1395 args
= build_function_type_list (ptr_type_node
, ptr_type_node
,
1396 const_ptr_type_node
, sizetype
,
1399 fn
= build_decl (UNKNOWN_LOCATION
, FUNCTION_DECL
, fn
, args
);
1400 DECL_EXTERNAL (fn
) = 1;
1401 TREE_PUBLIC (fn
) = 1;
1402 DECL_ARTIFICIAL (fn
) = 1;
1403 TREE_NOTHROW (fn
) = 1;
1404 DECL_VISIBILITY (fn
) = VISIBILITY_DEFAULT
;
1405 DECL_VISIBILITY_SPECIFIED (fn
) = 1;
1407 attr_args
= build_tree_list (NULL_TREE
, build_string (1, "1"));
1408 attrs
= tree_cons (get_identifier ("fn spec"), attr_args
, NULL
);
1410 decl_attributes (&fn
, attrs
, ATTR_FLAG_BUILT_IN
);
1416 set_user_assembler_name (block_move_fn
, asmspec
);
1420 emit_block_move_libcall_fn (int for_call
)
1422 static bool emitted_extern
;
1425 init_block_move_fn (NULL
);
1427 if (for_call
&& !emitted_extern
)
1429 emitted_extern
= true;
1430 make_decl_rtl (block_move_fn
);
1433 return block_move_fn
;
1436 /* A subroutine of emit_block_move. Copy the data via an explicit
1437 loop. This is used only when libcalls are forbidden. */
1438 /* ??? It'd be nice to copy in hunks larger than QImode. */
1441 emit_block_move_via_loop (rtx x
, rtx y
, rtx size
,
1442 unsigned int align ATTRIBUTE_UNUSED
)
1444 rtx_code_label
*cmp_label
, *top_label
;
1445 rtx iter
, x_addr
, y_addr
, tmp
;
1446 machine_mode x_addr_mode
= get_address_mode (x
);
1447 machine_mode y_addr_mode
= get_address_mode (y
);
1448 machine_mode iter_mode
;
1450 iter_mode
= GET_MODE (size
);
1451 if (iter_mode
== VOIDmode
)
1452 iter_mode
= word_mode
;
1454 top_label
= gen_label_rtx ();
1455 cmp_label
= gen_label_rtx ();
1456 iter
= gen_reg_rtx (iter_mode
);
1458 emit_move_insn (iter
, const0_rtx
);
1460 x_addr
= force_operand (XEXP (x
, 0), NULL_RTX
);
1461 y_addr
= force_operand (XEXP (y
, 0), NULL_RTX
);
1462 do_pending_stack_adjust ();
1464 emit_jump (cmp_label
);
1465 emit_label (top_label
);
1467 tmp
= convert_modes (x_addr_mode
, iter_mode
, iter
, true);
1468 x_addr
= simplify_gen_binary (PLUS
, x_addr_mode
, x_addr
, tmp
);
1470 if (x_addr_mode
!= y_addr_mode
)
1471 tmp
= convert_modes (y_addr_mode
, iter_mode
, iter
, true);
1472 y_addr
= simplify_gen_binary (PLUS
, y_addr_mode
, y_addr
, tmp
);
1474 x
= change_address (x
, QImode
, x_addr
);
1475 y
= change_address (y
, QImode
, y_addr
);
1477 emit_move_insn (x
, y
);
1479 tmp
= expand_simple_binop (iter_mode
, PLUS
, iter
, const1_rtx
, iter
,
1480 true, OPTAB_LIB_WIDEN
);
1482 emit_move_insn (iter
, tmp
);
1484 emit_label (cmp_label
);
1486 emit_cmp_and_jump_insns (iter
, size
, LT
, NULL_RTX
, iter_mode
,
1487 true, top_label
, REG_BR_PROB_BASE
* 90 / 100);
1490 /* Copy all or part of a value X into registers starting at REGNO.
1491 The number of registers to be filled is NREGS. */
1494 move_block_to_reg (int regno
, rtx x
, int nregs
, machine_mode mode
)
1503 if (CONSTANT_P (x
) && !targetm
.legitimate_constant_p (mode
, x
))
1504 x
= validize_mem (force_const_mem (mode
, x
));
1506 /* See if the machine can do this with a load multiple insn. */
1507 if (HAVE_load_multiple
)
1509 last
= get_last_insn ();
1510 pat
= gen_load_multiple (gen_rtx_REG (word_mode
, regno
), x
,
1518 delete_insns_since (last
);
1521 for (i
= 0; i
< nregs
; i
++)
1522 emit_move_insn (gen_rtx_REG (word_mode
, regno
+ i
),
1523 operand_subword_force (x
, i
, mode
));
1526 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1527 The number of registers to be filled is NREGS. */
1530 move_block_from_reg (int regno
, rtx x
, int nregs
)
1537 /* See if the machine can do this with a store multiple insn. */
1538 if (HAVE_store_multiple
)
1540 rtx_insn
*last
= get_last_insn ();
1541 rtx pat
= gen_store_multiple (x
, gen_rtx_REG (word_mode
, regno
),
1549 delete_insns_since (last
);
1552 for (i
= 0; i
< nregs
; i
++)
1554 rtx tem
= operand_subword (x
, i
, 1, BLKmode
);
1558 emit_move_insn (tem
, gen_rtx_REG (word_mode
, regno
+ i
));
1562 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1563 ORIG, where ORIG is a non-consecutive group of registers represented by
1564 a PARALLEL. The clone is identical to the original except in that the
1565 original set of registers is replaced by a new set of pseudo registers.
1566 The new set has the same modes as the original set. */
1569 gen_group_rtx (rtx orig
)
1574 gcc_assert (GET_CODE (orig
) == PARALLEL
);
1576 length
= XVECLEN (orig
, 0);
1577 tmps
= XALLOCAVEC (rtx
, length
);
1579 /* Skip a NULL entry in first slot. */
1580 i
= XEXP (XVECEXP (orig
, 0, 0), 0) ? 0 : 1;
1585 for (; i
< length
; i
++)
1587 machine_mode mode
= GET_MODE (XEXP (XVECEXP (orig
, 0, i
), 0));
1588 rtx offset
= XEXP (XVECEXP (orig
, 0, i
), 1);
1590 tmps
[i
] = gen_rtx_EXPR_LIST (VOIDmode
, gen_reg_rtx (mode
), offset
);
1593 return gen_rtx_PARALLEL (GET_MODE (orig
), gen_rtvec_v (length
, tmps
));
1596 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1597 except that values are placed in TMPS[i], and must later be moved
1598 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1601 emit_group_load_1 (rtx
*tmps
, rtx dst
, rtx orig_src
, tree type
, int ssize
)
1605 machine_mode m
= GET_MODE (orig_src
);
1607 gcc_assert (GET_CODE (dst
) == PARALLEL
);
1610 && !SCALAR_INT_MODE_P (m
)
1611 && !MEM_P (orig_src
)
1612 && GET_CODE (orig_src
) != CONCAT
)
1614 machine_mode imode
= int_mode_for_mode (GET_MODE (orig_src
));
1615 if (imode
== BLKmode
)
1616 src
= assign_stack_temp (GET_MODE (orig_src
), ssize
);
1618 src
= gen_reg_rtx (imode
);
1619 if (imode
!= BLKmode
)
1620 src
= gen_lowpart (GET_MODE (orig_src
), src
);
1621 emit_move_insn (src
, orig_src
);
1622 /* ...and back again. */
1623 if (imode
!= BLKmode
)
1624 src
= gen_lowpart (imode
, src
);
1625 emit_group_load_1 (tmps
, dst
, src
, type
, ssize
);
1629 /* Check for a NULL entry, used to indicate that the parameter goes
1630 both on the stack and in registers. */
1631 if (XEXP (XVECEXP (dst
, 0, 0), 0))
1636 /* Process the pieces. */
1637 for (i
= start
; i
< XVECLEN (dst
, 0); i
++)
1639 machine_mode mode
= GET_MODE (XEXP (XVECEXP (dst
, 0, i
), 0));
1640 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (dst
, 0, i
), 1));
1641 unsigned int bytelen
= GET_MODE_SIZE (mode
);
1644 /* Handle trailing fragments that run over the size of the struct. */
1645 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
1647 /* Arrange to shift the fragment to where it belongs.
1648 extract_bit_field loads to the lsb of the reg. */
1650 #ifdef BLOCK_REG_PADDING
1651 BLOCK_REG_PADDING (GET_MODE (orig_src
), type
, i
== start
)
1652 == (BYTES_BIG_ENDIAN
? upward
: downward
)
1657 shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
1658 bytelen
= ssize
- bytepos
;
1659 gcc_assert (bytelen
> 0);
1662 /* If we won't be loading directly from memory, protect the real source
1663 from strange tricks we might play; but make sure that the source can
1664 be loaded directly into the destination. */
1666 if (!MEM_P (orig_src
)
1667 && (!CONSTANT_P (orig_src
)
1668 || (GET_MODE (orig_src
) != mode
1669 && GET_MODE (orig_src
) != VOIDmode
)))
1671 if (GET_MODE (orig_src
) == VOIDmode
)
1672 src
= gen_reg_rtx (mode
);
1674 src
= gen_reg_rtx (GET_MODE (orig_src
));
1676 emit_move_insn (src
, orig_src
);
1679 /* Optimize the access just a bit. */
1681 && (! SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (src
))
1682 || MEM_ALIGN (src
) >= GET_MODE_ALIGNMENT (mode
))
1683 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
1684 && bytelen
== GET_MODE_SIZE (mode
))
1686 tmps
[i
] = gen_reg_rtx (mode
);
1687 emit_move_insn (tmps
[i
], adjust_address (src
, mode
, bytepos
));
1689 else if (COMPLEX_MODE_P (mode
)
1690 && GET_MODE (src
) == mode
1691 && bytelen
== GET_MODE_SIZE (mode
))
1692 /* Let emit_move_complex do the bulk of the work. */
1694 else if (GET_CODE (src
) == CONCAT
)
1696 unsigned int slen
= GET_MODE_SIZE (GET_MODE (src
));
1697 unsigned int slen0
= GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)));
1699 if ((bytepos
== 0 && bytelen
== slen0
)
1700 || (bytepos
!= 0 && bytepos
+ bytelen
<= slen
))
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
1706 tmps
[i
] = XEXP (src
, bytepos
/ slen0
);
1707 if (! CONSTANT_P (tmps
[i
])
1708 && (!REG_P (tmps
[i
]) || GET_MODE (tmps
[i
]) != mode
))
1709 tmps
[i
] = extract_bit_field (tmps
[i
], bytelen
* BITS_PER_UNIT
,
1710 (bytepos
% slen0
) * BITS_PER_UNIT
,
1711 1, NULL_RTX
, mode
, mode
);
1717 gcc_assert (!bytepos
);
1718 mem
= assign_stack_temp (GET_MODE (src
), slen
);
1719 emit_move_insn (mem
, src
);
1720 tmps
[i
] = extract_bit_field (mem
, bytelen
* BITS_PER_UNIT
,
1721 0, 1, NULL_RTX
, mode
, mode
);
1724 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1725 SIMD register, which is currently broken. While we get GCC
1726 to emit proper RTL for these cases, let's dump to memory. */
1727 else if (VECTOR_MODE_P (GET_MODE (dst
))
1730 int slen
= GET_MODE_SIZE (GET_MODE (src
));
1733 mem
= assign_stack_temp (GET_MODE (src
), slen
);
1734 emit_move_insn (mem
, src
);
1735 tmps
[i
] = adjust_address (mem
, mode
, (int) bytepos
);
1737 else if (CONSTANT_P (src
) && GET_MODE (dst
) != BLKmode
1738 && XVECLEN (dst
, 0) > 1)
1739 tmps
[i
] = simplify_gen_subreg (mode
, src
, GET_MODE (dst
), bytepos
);
1740 else if (CONSTANT_P (src
))
1742 HOST_WIDE_INT len
= (HOST_WIDE_INT
) bytelen
;
1750 /* TODO: const_wide_int can have sizes other than this... */
1751 gcc_assert (2 * len
== ssize
);
1752 split_double (src
, &first
, &second
);
1759 else if (REG_P (src
) && GET_MODE (src
) == mode
)
1762 tmps
[i
] = extract_bit_field (src
, bytelen
* BITS_PER_UNIT
,
1763 bytepos
* BITS_PER_UNIT
, 1, NULL_RTX
,
1767 tmps
[i
] = expand_shift (LSHIFT_EXPR
, mode
, tmps
[i
],
1772 /* Emit code to move a block SRC of type TYPE to a block DST,
1773 where DST is non-consecutive registers represented by a PARALLEL.
1774 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1778 emit_group_load (rtx dst
, rtx src
, tree type
, int ssize
)
1783 tmps
= XALLOCAVEC (rtx
, XVECLEN (dst
, 0));
1784 emit_group_load_1 (tmps
, dst
, src
, type
, ssize
);
1786 /* Copy the extracted pieces into the proper (probable) hard regs. */
1787 for (i
= 0; i
< XVECLEN (dst
, 0); i
++)
1789 rtx d
= XEXP (XVECEXP (dst
, 0, i
), 0);
1792 emit_move_insn (d
, tmps
[i
]);
1796 /* Similar, but load SRC into new pseudos in a format that looks like
1797 PARALLEL. This can later be fed to emit_group_move to get things
1798 in the right place. */
1801 emit_group_load_into_temps (rtx parallel
, rtx src
, tree type
, int ssize
)
1806 vec
= rtvec_alloc (XVECLEN (parallel
, 0));
1807 emit_group_load_1 (&RTVEC_ELT (vec
, 0), parallel
, src
, type
, ssize
);
1809 /* Convert the vector to look just like the original PARALLEL, except
1810 with the computed values. */
1811 for (i
= 0; i
< XVECLEN (parallel
, 0); i
++)
1813 rtx e
= XVECEXP (parallel
, 0, i
);
1814 rtx d
= XEXP (e
, 0);
1818 d
= force_reg (GET_MODE (d
), RTVEC_ELT (vec
, i
));
1819 e
= alloc_EXPR_LIST (REG_NOTE_KIND (e
), d
, XEXP (e
, 1));
1821 RTVEC_ELT (vec
, i
) = e
;
1824 return gen_rtx_PARALLEL (GET_MODE (parallel
), vec
);
1827 /* Emit code to move a block SRC to block DST, where SRC and DST are
1828 non-consecutive groups of registers, each represented by a PARALLEL. */
1831 emit_group_move (rtx dst
, rtx src
)
1835 gcc_assert (GET_CODE (src
) == PARALLEL
1836 && GET_CODE (dst
) == PARALLEL
1837 && XVECLEN (src
, 0) == XVECLEN (dst
, 0));
1839 /* Skip first entry if NULL. */
1840 for (i
= XEXP (XVECEXP (src
, 0, 0), 0) ? 0 : 1; i
< XVECLEN (src
, 0); i
++)
1841 emit_move_insn (XEXP (XVECEXP (dst
, 0, i
), 0),
1842 XEXP (XVECEXP (src
, 0, i
), 0));
1845 /* Move a group of registers represented by a PARALLEL into pseudos. */
1848 emit_group_move_into_temps (rtx src
)
1850 rtvec vec
= rtvec_alloc (XVECLEN (src
, 0));
1853 for (i
= 0; i
< XVECLEN (src
, 0); i
++)
1855 rtx e
= XVECEXP (src
, 0, i
);
1856 rtx d
= XEXP (e
, 0);
1859 e
= alloc_EXPR_LIST (REG_NOTE_KIND (e
), copy_to_reg (d
), XEXP (e
, 1));
1860 RTVEC_ELT (vec
, i
) = e
;
1863 return gen_rtx_PARALLEL (GET_MODE (src
), vec
);
1866 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1867 where SRC is non-consecutive registers represented by a PARALLEL.
1868 SSIZE represents the total size of block ORIG_DST, or -1 if not
1872 emit_group_store (rtx orig_dst
, rtx src
, tree type ATTRIBUTE_UNUSED
, int ssize
)
1875 int start
, finish
, i
;
1876 machine_mode m
= GET_MODE (orig_dst
);
1878 gcc_assert (GET_CODE (src
) == PARALLEL
);
1880 if (!SCALAR_INT_MODE_P (m
)
1881 && !MEM_P (orig_dst
) && GET_CODE (orig_dst
) != CONCAT
)
1883 machine_mode imode
= int_mode_for_mode (GET_MODE (orig_dst
));
1884 if (imode
== BLKmode
)
1885 dst
= assign_stack_temp (GET_MODE (orig_dst
), ssize
);
1887 dst
= gen_reg_rtx (imode
);
1888 emit_group_store (dst
, src
, type
, ssize
);
1889 if (imode
!= BLKmode
)
1890 dst
= gen_lowpart (GET_MODE (orig_dst
), dst
);
1891 emit_move_insn (orig_dst
, dst
);
1895 /* Check for a NULL entry, used to indicate that the parameter goes
1896 both on the stack and in registers. */
1897 if (XEXP (XVECEXP (src
, 0, 0), 0))
1901 finish
= XVECLEN (src
, 0);
1903 tmps
= XALLOCAVEC (rtx
, finish
);
1905 /* Copy the (probable) hard regs into pseudos. */
1906 for (i
= start
; i
< finish
; i
++)
1908 rtx reg
= XEXP (XVECEXP (src
, 0, i
), 0);
1909 if (!REG_P (reg
) || REGNO (reg
) < FIRST_PSEUDO_REGISTER
)
1911 tmps
[i
] = gen_reg_rtx (GET_MODE (reg
));
1912 emit_move_insn (tmps
[i
], reg
);
1918 /* If we won't be storing directly into memory, protect the real destination
1919 from strange tricks we might play. */
1921 if (GET_CODE (dst
) == PARALLEL
)
1925 /* We can get a PARALLEL dst if there is a conditional expression in
1926 a return statement. In that case, the dst and src are the same,
1927 so no action is necessary. */
1928 if (rtx_equal_p (dst
, src
))
1931 /* It is unclear if we can ever reach here, but we may as well handle
1932 it. Allocate a temporary, and split this into a store/load to/from
1934 temp
= assign_stack_temp (GET_MODE (dst
), ssize
);
1935 emit_group_store (temp
, src
, type
, ssize
);
1936 emit_group_load (dst
, temp
, type
, ssize
);
1939 else if (!MEM_P (dst
) && GET_CODE (dst
) != CONCAT
)
1941 machine_mode outer
= GET_MODE (dst
);
1943 HOST_WIDE_INT bytepos
;
1947 if (!REG_P (dst
) || REGNO (dst
) < FIRST_PSEUDO_REGISTER
)
1948 dst
= gen_reg_rtx (outer
);
1950 /* Make life a bit easier for combine. */
1951 /* If the first element of the vector is the low part
1952 of the destination mode, use a paradoxical subreg to
1953 initialize the destination. */
1956 inner
= GET_MODE (tmps
[start
]);
1957 bytepos
= subreg_lowpart_offset (inner
, outer
);
1958 if (INTVAL (XEXP (XVECEXP (src
, 0, start
), 1)) == bytepos
)
1960 temp
= simplify_gen_subreg (outer
, tmps
[start
],
1964 emit_move_insn (dst
, temp
);
1971 /* If the first element wasn't the low part, try the last. */
1973 && start
< finish
- 1)
1975 inner
= GET_MODE (tmps
[finish
- 1]);
1976 bytepos
= subreg_lowpart_offset (inner
, outer
);
1977 if (INTVAL (XEXP (XVECEXP (src
, 0, finish
- 1), 1)) == bytepos
)
1979 temp
= simplify_gen_subreg (outer
, tmps
[finish
- 1],
1983 emit_move_insn (dst
, temp
);
1990 /* Otherwise, simply initialize the result to zero. */
1992 emit_move_insn (dst
, CONST0_RTX (outer
));
1995 /* Process the pieces. */
1996 for (i
= start
; i
< finish
; i
++)
1998 HOST_WIDE_INT bytepos
= INTVAL (XEXP (XVECEXP (src
, 0, i
), 1));
1999 machine_mode mode
= GET_MODE (tmps
[i
]);
2000 unsigned int bytelen
= GET_MODE_SIZE (mode
);
2001 unsigned int adj_bytelen
;
2004 /* Handle trailing fragments that run over the size of the struct. */
2005 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
2006 adj_bytelen
= ssize
- bytepos
;
2008 adj_bytelen
= bytelen
;
2010 if (GET_CODE (dst
) == CONCAT
)
2012 if (bytepos
+ adj_bytelen
2013 <= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
2014 dest
= XEXP (dst
, 0);
2015 else if (bytepos
>= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0))))
2017 bytepos
-= GET_MODE_SIZE (GET_MODE (XEXP (dst
, 0)));
2018 dest
= XEXP (dst
, 1);
2022 machine_mode dest_mode
= GET_MODE (dest
);
2023 machine_mode tmp_mode
= GET_MODE (tmps
[i
]);
2025 gcc_assert (bytepos
== 0 && XVECLEN (src
, 0));
2027 if (GET_MODE_ALIGNMENT (dest_mode
)
2028 >= GET_MODE_ALIGNMENT (tmp_mode
))
2030 dest
= assign_stack_temp (dest_mode
,
2031 GET_MODE_SIZE (dest_mode
));
2032 emit_move_insn (adjust_address (dest
,
2040 dest
= assign_stack_temp (tmp_mode
,
2041 GET_MODE_SIZE (tmp_mode
));
2042 emit_move_insn (dest
, tmps
[i
]);
2043 dst
= adjust_address (dest
, dest_mode
, bytepos
);
2049 /* Handle trailing fragments that run over the size of the struct. */
2050 if (ssize
>= 0 && bytepos
+ (HOST_WIDE_INT
) bytelen
> ssize
)
2052 /* store_bit_field always takes its value from the lsb.
2053 Move the fragment to the lsb if it's not already there. */
2055 #ifdef BLOCK_REG_PADDING
2056 BLOCK_REG_PADDING (GET_MODE (orig_dst
), type
, i
== start
)
2057 == (BYTES_BIG_ENDIAN
? upward
: downward
)
2063 int shift
= (bytelen
- (ssize
- bytepos
)) * BITS_PER_UNIT
;
2064 tmps
[i
] = expand_shift (RSHIFT_EXPR
, mode
, tmps
[i
],
2068 /* Make sure not to write past the end of the struct. */
2069 store_bit_field (dest
,
2070 adj_bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
2071 bytepos
* BITS_PER_UNIT
, ssize
* BITS_PER_UNIT
- 1,
2075 /* Optimize the access just a bit. */
2076 else if (MEM_P (dest
)
2077 && (!SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (dest
))
2078 || MEM_ALIGN (dest
) >= GET_MODE_ALIGNMENT (mode
))
2079 && bytepos
* BITS_PER_UNIT
% GET_MODE_ALIGNMENT (mode
) == 0
2080 && bytelen
== GET_MODE_SIZE (mode
))
2081 emit_move_insn (adjust_address (dest
, mode
, bytepos
), tmps
[i
]);
2084 store_bit_field (dest
, bytelen
* BITS_PER_UNIT
, bytepos
* BITS_PER_UNIT
,
2085 0, 0, mode
, tmps
[i
]);
2088 /* Copy from the pseudo into the (probable) hard reg. */
2089 if (orig_dst
!= dst
)
2090 emit_move_insn (orig_dst
, dst
);
2093 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2094 of the value stored in X. */
2097 maybe_emit_group_store (rtx x
, tree type
)
2099 machine_mode mode
= TYPE_MODE (type
);
2100 gcc_checking_assert (GET_MODE (x
) == VOIDmode
|| GET_MODE (x
) == mode
);
2101 if (GET_CODE (x
) == PARALLEL
)
2103 rtx result
= gen_reg_rtx (mode
);
2104 emit_group_store (result
, x
, type
, int_size_in_bytes (type
));
2110 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2112 This is used on targets that return BLKmode values in registers. */
2115 copy_blkmode_from_reg (rtx target
, rtx srcreg
, tree type
)
2117 unsigned HOST_WIDE_INT bytes
= int_size_in_bytes (type
);
2118 rtx src
= NULL
, dst
= NULL
;
2119 unsigned HOST_WIDE_INT bitsize
= MIN (TYPE_ALIGN (type
), BITS_PER_WORD
);
2120 unsigned HOST_WIDE_INT bitpos
, xbitpos
, padding_correction
= 0;
2121 machine_mode mode
= GET_MODE (srcreg
);
2122 machine_mode tmode
= GET_MODE (target
);
2123 machine_mode copy_mode
;
2125 /* BLKmode registers created in the back-end shouldn't have survived. */
2126 gcc_assert (mode
!= BLKmode
);
2128 /* If the structure doesn't take up a whole number of words, see whether
2129 SRCREG is padded on the left or on the right. If it's on the left,
2130 set PADDING_CORRECTION to the number of bits to skip.
2132 In most ABIs, the structure will be returned at the least end of
2133 the register, which translates to right padding on little-endian
2134 targets and left padding on big-endian targets. The opposite
2135 holds if the structure is returned at the most significant
2136 end of the register. */
2137 if (bytes
% UNITS_PER_WORD
!= 0
2138 && (targetm
.calls
.return_in_msb (type
)
2140 : BYTES_BIG_ENDIAN
))
2142 = (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
) * BITS_PER_UNIT
));
2144 /* We can use a single move if we have an exact mode for the size. */
2145 else if (MEM_P (target
)
2146 && (!SLOW_UNALIGNED_ACCESS (mode
, MEM_ALIGN (target
))
2147 || MEM_ALIGN (target
) >= GET_MODE_ALIGNMENT (mode
))
2148 && bytes
== GET_MODE_SIZE (mode
))
2150 emit_move_insn (adjust_address (target
, mode
, 0), srcreg
);
2154 /* And if we additionally have the same mode for a register. */
2155 else if (REG_P (target
)
2156 && GET_MODE (target
) == mode
2157 && bytes
== GET_MODE_SIZE (mode
))
2159 emit_move_insn (target
, srcreg
);
2163 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2164 into a new pseudo which is a full word. */
2165 if (GET_MODE_SIZE (mode
) < UNITS_PER_WORD
)
2167 srcreg
= convert_to_mode (word_mode
, srcreg
, TYPE_UNSIGNED (type
));
2171 /* Copy the structure BITSIZE bits at a time. If the target lives in
2172 memory, take care of not reading/writing past its end by selecting
2173 a copy mode suited to BITSIZE. This should always be possible given
2176 If the target lives in register, make sure not to select a copy mode
2177 larger than the mode of the register.
2179 We could probably emit more efficient code for machines which do not use
2180 strict alignment, but it doesn't seem worth the effort at the current
2183 copy_mode
= word_mode
;
2186 machine_mode mem_mode
= mode_for_size (bitsize
, MODE_INT
, 1);
2187 if (mem_mode
!= BLKmode
)
2188 copy_mode
= mem_mode
;
2190 else if (REG_P (target
) && GET_MODE_BITSIZE (tmode
) < BITS_PER_WORD
)
2193 for (bitpos
= 0, xbitpos
= padding_correction
;
2194 bitpos
< bytes
* BITS_PER_UNIT
;
2195 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2197 /* We need a new source operand each time xbitpos is on a
2198 word boundary and when xbitpos == padding_correction
2199 (the first time through). */
2200 if (xbitpos
% BITS_PER_WORD
== 0 || xbitpos
== padding_correction
)
2201 src
= operand_subword_force (srcreg
, xbitpos
/ BITS_PER_WORD
, mode
);
2203 /* We need a new destination operand each time bitpos is on
2205 if (REG_P (target
) && GET_MODE_BITSIZE (tmode
) < BITS_PER_WORD
)
2207 else if (bitpos
% BITS_PER_WORD
== 0)
2208 dst
= operand_subword (target
, bitpos
/ BITS_PER_WORD
, 1, tmode
);
2210 /* Use xbitpos for the source extraction (right justified) and
2211 bitpos for the destination store (left justified). */
2212 store_bit_field (dst
, bitsize
, bitpos
% BITS_PER_WORD
, 0, 0, copy_mode
,
2213 extract_bit_field (src
, bitsize
,
2214 xbitpos
% BITS_PER_WORD
, 1,
2215 NULL_RTX
, copy_mode
, copy_mode
));
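/* Illustrative sketch (hypothetical caller, not from the original sources):
   after expanding a call whose BLKmode aggregate return value arrived in
   hard register VALREG, the value can be spilled into the stack slot TARGET
   with copy_blkmode_from_reg.  All three operands here are hypothetical.  */
#if 0
static void
example_copy_blk_return_value (rtx target, rtx valreg, tree type)
{
  copy_blkmode_from_reg (target, valreg, type);
}
#endif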
2219 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2220 register if it contains any data, otherwise return null.
2222 This is used on targets that return BLKmode values in registers. */
2225 copy_blkmode_to_reg (machine_mode mode
, tree src
)
2228 unsigned HOST_WIDE_INT bitpos
, xbitpos
, padding_correction
= 0, bytes
;
2229 unsigned int bitsize
;
2230 rtx
*dst_words
, dst
, x
, src_word
= NULL_RTX
, dst_word
= NULL_RTX
;
2231 machine_mode dst_mode
;
2233 gcc_assert (TYPE_MODE (TREE_TYPE (src
)) == BLKmode
);
2235 x
= expand_normal (src
);
2237 bytes
= int_size_in_bytes (TREE_TYPE (src
));
2241 /* If the structure doesn't take up a whole number of words, see
2242 whether the register value should be padded on the left or on
2243 the right. Set PADDING_CORRECTION to the number of padding
2244 bits needed on the left side.
2246 In most ABIs, the structure will be returned at the least end of
2247 the register, which translates to right padding on little-endian
2248 targets and left padding on big-endian targets. The opposite
2249 holds if the structure is returned at the most significant
2250 end of the register. */
2251 if (bytes
% UNITS_PER_WORD
!= 0
2252 && (targetm
.calls
.return_in_msb (TREE_TYPE (src
))
2254 : BYTES_BIG_ENDIAN
))
2255 padding_correction
= (BITS_PER_WORD
- ((bytes
% UNITS_PER_WORD
)
2258 n_regs
= (bytes
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
;
2259 dst_words
= XALLOCAVEC (rtx
, n_regs
);
2260 bitsize
= MIN (TYPE_ALIGN (TREE_TYPE (src
)), BITS_PER_WORD
);
2262 /* Copy the structure BITSIZE bits at a time. */
2263 for (bitpos
= 0, xbitpos
= padding_correction
;
2264 bitpos
< bytes
* BITS_PER_UNIT
;
2265 bitpos
+= bitsize
, xbitpos
+= bitsize
)
2267 /* We need a new destination pseudo each time xbitpos is
2268 on a word boundary and when xbitpos == padding_correction
2269 (the first time through). */
2270 if (xbitpos
% BITS_PER_WORD
== 0
2271 || xbitpos
== padding_correction
)
2273 /* Generate an appropriate register. */
2274 dst_word
= gen_reg_rtx (word_mode
);
2275 dst_words
[xbitpos
/ BITS_PER_WORD
] = dst_word
;
2277 /* Clear the destination before we move anything into it. */
2278 emit_move_insn (dst_word
, CONST0_RTX (word_mode
));
2281 /* We need a new source operand each time bitpos is on a word
2283 if (bitpos
% BITS_PER_WORD
== 0)
2284 src_word
= operand_subword_force (x
, bitpos
/ BITS_PER_WORD
, BLKmode
);
2286 /* Use bitpos for the source extraction (left justified) and
2287 xbitpos for the destination store (right justified). */
2288 store_bit_field (dst_word
, bitsize
, xbitpos
% BITS_PER_WORD
,
2290 extract_bit_field (src_word
, bitsize
,
2291 bitpos
% BITS_PER_WORD
, 1,
2292 NULL_RTX
, word_mode
, word_mode
));
2295 if (mode
== BLKmode
)
2297 /* Find the smallest integer mode large enough to hold the
2298 entire structure. */
2299 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
);
2301 mode
= GET_MODE_WIDER_MODE (mode
))
2302 /* Have we found a large enough mode? */
2303 if (GET_MODE_SIZE (mode
) >= bytes
)
2306 /* A suitable mode should have been found. */
2307 gcc_assert (mode
!= VOIDmode
);
2310 if (GET_MODE_SIZE (mode
) < GET_MODE_SIZE (word_mode
))
2311 dst_mode
= word_mode
;
2314 dst
= gen_reg_rtx (dst_mode
);
2316 for (i
= 0; i
< n_regs
; i
++)
2317 emit_move_insn (operand_subword (dst
, i
, 0, dst_mode
), dst_words
[i
]);
2319 if (mode
!= dst_mode
)
2320 dst
= gen_lowpart (mode
, dst
);
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
  gcc_assert (REG_P (reg));

  if (!HARD_REGISTER_P (reg))
    return;

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add a CLOBBER expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
}
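/* Illustrative sketch (hypothetical register numbers, not from the original
   sources): building a CALL_INSN_FUNCTION_USAGE list that records two hard
   argument registers as used by a call.  */
#if 0
static rtx
example_build_call_fusage (void)
{
  rtx fusage = NULL_RTX;
  use_reg (&fusage, gen_rtx_REG (SImode, 0));  /* hard reg 0, hypothetical  */
  use_reg (&fusage, gen_rtx_REG (SImode, 1));  /* hard reg 1, hypothetical  */
  return fusage;
}
#endif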
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
	use_reg (call_fusage, reg);
    }
}
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the code of the expression on the RHS is CODE.  Return
   NULL otherwise.  */

static gimple
get_def_for_expr (tree name, enum tree_code code)
{
  gimple def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || gimple_assign_rhs_code (def_stmt) != code)
    return NULL;

  return def_stmt;
}

/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the class of the expression on the RHS is TCLASS.  Return
   NULL otherwise.  */

static gimple
get_def_for_expr_class (tree name, enum tree_code_class tclass)
{
  gimple def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
    return NULL;

  return def_stmt;
}
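/* Illustrative sketch (not from the original sources): expansion code often
   peeks at the SSA definition of an operand to detect patterns such as a
   multiplication feeding another operation.  NAME is a hypothetical
   SSA_NAME.  */
#if 0
static bool
example_defined_by_mult (tree name)
{
  gimple def = get_def_for_expr (name, MULT_EXPR);
  return def != NULL;
}
#endif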
2428 /* Determine whether the LEN bytes generated by CONSTFUN can be
2429 stored to memory using several move instructions. CONSTFUNDATA is
2430 a pointer which will be passed as argument in every CONSTFUN call.
2431 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2432 a memset operation and false if it's a copy of a constant string.
2433 Return nonzero if a call to store_by_pieces should succeed. */
2436 can_store_by_pieces (unsigned HOST_WIDE_INT len
,
2437 rtx (*constfun
) (void *, HOST_WIDE_INT
, machine_mode
),
2438 void *constfundata
, unsigned int align
, bool memsetp
)
2440 unsigned HOST_WIDE_INT l
;
2441 unsigned int max_size
;
2442 HOST_WIDE_INT offset
= 0;
2444 enum insn_code icode
;
2446 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2447 rtx cst ATTRIBUTE_UNUSED
;
2452 if (!targetm
.use_by_pieces_infrastructure_p (len
, align
,
2456 optimize_insn_for_speed_p ()))
2459 align
= alignment_for_piecewise_move (STORE_MAX_PIECES
, align
);
2461 /* We would first store what we can in the largest integer mode, then go to
2462 successively smaller modes. */
2465 reverse
<= (HAVE_PRE_DECREMENT
|| HAVE_POST_DECREMENT
);
2469 max_size
= STORE_MAX_PIECES
+ 1;
2470 while (max_size
> 1 && l
> 0)
2472 mode
= widest_int_mode_for_size (max_size
);
2474 if (mode
== VOIDmode
)
2477 icode
= optab_handler (mov_optab
, mode
);
2478 if (icode
!= CODE_FOR_nothing
2479 && align
>= GET_MODE_ALIGNMENT (mode
))
2481 unsigned int size
= GET_MODE_SIZE (mode
);
2488 cst
= (*constfun
) (constfundata
, offset
, mode
);
2489 if (!targetm
.legitimate_constant_p (mode
, cst
))
2499 max_size
= GET_MODE_SIZE (mode
);
2502 /* The code above should have handled everything. */
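/* Illustrative sketch (hypothetical helpers, not from the original sources):
   the usual pairing of can_store_by_pieces with store_by_pieces, as done
   when expanding memset-like builtins.  The constfun must return the RTL
   constant to store for each piece at OFFSET in mode MODE; here it simply
   returns zero in the requested mode.  */
#if 0
static rtx
example_zero_constfun (void *data ATTRIBUTE_UNUSED,
		       HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		       machine_mode mode)
{
  return CONST0_RTX (mode);
}

static void
example_zero_fill (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  if (can_store_by_pieces (len, example_zero_constfun, NULL, align, true))
    store_by_pieces (to, len, example_zero_constfun, NULL, align, true, 0);
}
#endif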
2509 /* Generate several move instructions to store LEN bytes generated by
2510 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2511 pointer which will be passed as argument in every CONSTFUN call.
2512 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2513 a memset operation and false if it's a copy of a constant string.
2514 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2515 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2519 store_by_pieces (rtx to
, unsigned HOST_WIDE_INT len
,
2520 rtx (*constfun
) (void *, HOST_WIDE_INT
, machine_mode
),
2521 void *constfundata
, unsigned int align
, bool memsetp
, int endp
)
2523 machine_mode to_addr_mode
= get_address_mode (to
);
2524 struct store_by_pieces_d data
;
2528 gcc_assert (endp
!= 2);
2532 gcc_assert (targetm
.use_by_pieces_infrastructure_p
2537 optimize_insn_for_speed_p ()));
2539 data
.constfun
= constfun
;
2540 data
.constfundata
= constfundata
;
2543 store_by_pieces_1 (&data
, align
);
2548 gcc_assert (!data
.reverse
);
2553 if (HAVE_POST_INCREMENT
&& data
.explicit_inc_to
> 0)
2554 emit_insn (gen_add2_insn (data
.to_addr
, constm1_rtx
));
2556 data
.to_addr
= copy_to_mode_reg (to_addr_mode
,
2557 plus_constant (to_addr_mode
,
2561 to1
= adjust_automodify_address (data
.to
, QImode
, data
.to_addr
,
2568 to1
= adjust_address (data
.to
, QImode
, data
.offset
);
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces_d data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		   machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
2605 /* Subroutine of clear_by_pieces and store_by_pieces.
2606 Generate several move instructions to store LEN bytes of block TO. (A MEM
2607 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2610 store_by_pieces_1 (struct store_by_pieces_d
*data ATTRIBUTE_UNUSED
,
2611 unsigned int align ATTRIBUTE_UNUSED
)
2613 machine_mode to_addr_mode
= get_address_mode (data
->to
);
2614 rtx to_addr
= XEXP (data
->to
, 0);
2615 unsigned int max_size
= STORE_MAX_PIECES
+ 1;
2616 enum insn_code icode
;
2619 data
->to_addr
= to_addr
;
2621 = (GET_CODE (to_addr
) == PRE_INC
|| GET_CODE (to_addr
) == PRE_DEC
2622 || GET_CODE (to_addr
) == POST_INC
|| GET_CODE (to_addr
) == POST_DEC
);
2624 data
->explicit_inc_to
= 0;
2626 = (GET_CODE (to_addr
) == PRE_DEC
|| GET_CODE (to_addr
) == POST_DEC
);
2628 data
->offset
= data
->len
;
2630 /* If storing requires more than two move insns,
2631 copy addresses to registers (to make displacements shorter)
2632 and use post-increment if available. */
2633 if (!data
->autinc_to
2634 && move_by_pieces_ninsns (data
->len
, align
, max_size
) > 2)
2636 /* Determine the main mode we'll be using.
2637 MODE might not be used depending on the definitions of the
2638 USE_* macros below. */
2639 machine_mode mode ATTRIBUTE_UNUSED
2640 = widest_int_mode_for_size (max_size
);
2642 if (USE_STORE_PRE_DECREMENT (mode
) && data
->reverse
&& ! data
->autinc_to
)
2644 data
->to_addr
= copy_to_mode_reg (to_addr_mode
,
2645 plus_constant (to_addr_mode
,
2648 data
->autinc_to
= 1;
2649 data
->explicit_inc_to
= -1;
2652 if (USE_STORE_POST_INCREMENT (mode
) && ! data
->reverse
2653 && ! data
->autinc_to
)
2655 data
->to_addr
= copy_to_mode_reg (to_addr_mode
, to_addr
);
2656 data
->autinc_to
= 1;
2657 data
->explicit_inc_to
= 1;
2660 if ( !data
->autinc_to
&& CONSTANT_P (to_addr
))
2661 data
->to_addr
= copy_to_mode_reg (to_addr_mode
, to_addr
);
2664 align
= alignment_for_piecewise_move (STORE_MAX_PIECES
, align
);
2666 /* First store what we can in the largest integer mode, then go to
2667 successively smaller modes. */
2669 while (max_size
> 1 && data
->len
> 0)
2671 machine_mode mode
= widest_int_mode_for_size (max_size
);
2673 if (mode
== VOIDmode
)
2676 icode
= optab_handler (mov_optab
, mode
);
2677 if (icode
!= CODE_FOR_nothing
&& align
>= GET_MODE_ALIGNMENT (mode
))
2678 store_by_pieces_2 (GEN_FCN (icode
), mode
, data
);
2680 max_size
= GET_MODE_SIZE (mode
);
2683 /* The code above should have handled everything. */
2684 gcc_assert (!data
->len
);
2687 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2688 with move instructions for mode MODE. GENFUN is the gen_... function
2689 to make a move insn for that mode. DATA has all the other info. */
2692 store_by_pieces_2 (insn_gen_fn genfun
, machine_mode mode
,
2693 struct store_by_pieces_d
*data
)
2695 unsigned int size
= GET_MODE_SIZE (mode
);
2698 while (data
->len
>= size
)
2701 data
->offset
-= size
;
2703 if (data
->autinc_to
)
2704 to1
= adjust_automodify_address (data
->to
, mode
, data
->to_addr
,
2707 to1
= adjust_address (data
->to
, mode
, data
->offset
);
2709 if (HAVE_PRE_DECREMENT
&& data
->explicit_inc_to
< 0)
2710 emit_insn (gen_add2_insn (data
->to_addr
,
2711 gen_int_mode (-(HOST_WIDE_INT
) size
,
2712 GET_MODE (data
->to_addr
))));
2714 cst
= (*data
->constfun
) (data
->constfundata
, data
->offset
, mode
);
2715 emit_insn ((*genfun
) (to1
, cst
));
2717 if (HAVE_POST_INCREMENT
&& data
->explicit_inc_to
> 0)
2718 emit_insn (gen_add2_insn (data
->to_addr
,
2720 GET_MODE (data
->to_addr
))));
2722 if (! data
->reverse
)
2723 data
->offset
+= size
;
2729 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2730 its length in bytes. */
2733 clear_storage_hints (rtx object
, rtx size
, enum block_op_methods method
,
2734 unsigned int expected_align
, HOST_WIDE_INT expected_size
,
2735 unsigned HOST_WIDE_INT min_size
,
2736 unsigned HOST_WIDE_INT max_size
,
2737 unsigned HOST_WIDE_INT probable_max_size
)
2739 machine_mode mode
= GET_MODE (object
);
2742 gcc_assert (method
== BLOCK_OP_NORMAL
|| method
== BLOCK_OP_TAILCALL
);
2744 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2745 just move a zero. Otherwise, do this a piece at a time. */
2747 && CONST_INT_P (size
)
2748 && INTVAL (size
) == (HOST_WIDE_INT
) GET_MODE_SIZE (mode
))
2750 rtx zero
= CONST0_RTX (mode
);
2753 emit_move_insn (object
, zero
);
2757 if (COMPLEX_MODE_P (mode
))
2759 zero
= CONST0_RTX (GET_MODE_INNER (mode
));
2762 write_complex_part (object
, zero
, 0);
2763 write_complex_part (object
, zero
, 1);
2769 if (size
== const0_rtx
)
2772 align
= MEM_ALIGN (object
);
2774 if (CONST_INT_P (size
)
2775 && targetm
.use_by_pieces_infrastructure_p (INTVAL (size
), align
,
2777 optimize_insn_for_speed_p ()))
2778 clear_by_pieces (object
, INTVAL (size
), align
);
2779 else if (set_storage_via_setmem (object
, size
, const0_rtx
, align
,
2780 expected_align
, expected_size
,
2781 min_size
, max_size
, probable_max_size
))
2783 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object
)))
2784 return set_storage_via_libcall (object
, size
, const0_rtx
,
2785 method
== BLOCK_OP_TAILCALL
);
rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return clear_storage_hints (object, size, method, 0, -1, min, max, max);
}
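/* Illustrative sketch (hypothetical MEM, not from the original sources):
   zero a 64-byte BLKmode object in memory.  */
#if 0
static void
example_clear_block (rtx blk_mem)
{
  clear_storage (blk_mem, GEN_INT (64), BLOCK_OP_NORMAL);
}
#endif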
2804 /* A subroutine of clear_storage. Expand a call to memset.
2805 Return the return value of memset, 0 otherwise. */
2808 set_storage_via_libcall (rtx object
, rtx size
, rtx val
, bool tailcall
)
2810 tree call_expr
, fn
, object_tree
, size_tree
, val_tree
;
2811 machine_mode size_mode
;
2814 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2815 place those into new pseudos into a VAR_DECL and use them later. */
2817 object
= copy_addr_to_reg (XEXP (object
, 0));
2819 size_mode
= TYPE_MODE (sizetype
);
2820 size
= convert_to_mode (size_mode
, size
, 1);
2821 size
= copy_to_mode_reg (size_mode
, size
);
2823 /* It is incorrect to use the libcall calling conventions to call
2824 memset in this context. This could be a user call to memset and
2825 the user may wish to examine the return value from memset. For
2826 targets where libcalls and normal calls have different conventions
2827 for returning pointers, we could end up generating incorrect code. */
2829 object_tree
= make_tree (ptr_type_node
, object
);
2830 if (!CONST_INT_P (val
))
2831 val
= convert_to_mode (TYPE_MODE (integer_type_node
), val
, 1);
2832 size_tree
= make_tree (sizetype
, size
);
2833 val_tree
= make_tree (integer_type_node
, val
);
2835 fn
= clear_storage_libcall_fn (true);
2836 call_expr
= build_call_expr (fn
, 3, object_tree
, val_tree
, size_tree
);
2837 CALL_EXPR_TAILCALL (call_expr
) = tailcall
;
2839 retval
= expand_normal (call_expr
);
/* A subroutine of set_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  */

tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      fn = get_identifier ("memset");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       integer_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_clear_fn, asmspec);
}

static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn);
    }

  return block_clear_fn;
}
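/* Illustrative sketch (hypothetical assembler name, not from the original
   sources): a user assembler name for memset can be propagated to the
   block-clear helper so that subsequent libcalls use it.  */
#if 0
static void
example_rename_block_clear (void)
{
  init_block_clear_fn ("__my_memset");
}
#endif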
2893 /* Expand a setmem pattern; return true if successful. */
2896 set_storage_via_setmem (rtx object
, rtx size
, rtx val
, unsigned int align
,
2897 unsigned int expected_align
, HOST_WIDE_INT expected_size
,
2898 unsigned HOST_WIDE_INT min_size
,
2899 unsigned HOST_WIDE_INT max_size
,
2900 unsigned HOST_WIDE_INT probable_max_size
)
2902 /* Try the most limited insn first, because there's no point
2903 including more than one in the machine description unless
2904 the more limited one has some advantage. */
2908 if (expected_align
< align
)
2909 expected_align
= align
;
2910 if (expected_size
!= -1)
2912 if ((unsigned HOST_WIDE_INT
)expected_size
> max_size
)
2913 expected_size
= max_size
;
2914 if ((unsigned HOST_WIDE_INT
)expected_size
< min_size
)
2915 expected_size
= min_size
;
2918 for (mode
= GET_CLASS_NARROWEST_MODE (MODE_INT
); mode
!= VOIDmode
;
2919 mode
= GET_MODE_WIDER_MODE (mode
))
2921 enum insn_code code
= direct_optab_handler (setmem_optab
, mode
);
2923 if (code
!= CODE_FOR_nothing
2924 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2925 here because if SIZE is less than the mode mask, as it is
2926 returned by the macro, it will definitely be less than the
2927 actual mode mask. Since SIZE is within the Pmode address
2928 space, we limit MODE to Pmode. */
2929 && ((CONST_INT_P (size
)
2930 && ((unsigned HOST_WIDE_INT
) INTVAL (size
)
2931 <= (GET_MODE_MASK (mode
) >> 1)))
2932 || max_size
<= (GET_MODE_MASK (mode
) >> 1)
2933 || GET_MODE_BITSIZE (mode
) >= GET_MODE_BITSIZE (Pmode
)))
2935 struct expand_operand ops
[9];
2938 nops
= insn_data
[(int) code
].n_generator_args
;
2939 gcc_assert (nops
== 4 || nops
== 6 || nops
== 8 || nops
== 9);
2941 create_fixed_operand (&ops
[0], object
);
2942 /* The check above guarantees that this size conversion is valid. */
2943 create_convert_operand_to (&ops
[1], size
, mode
, true);
2944 create_convert_operand_from (&ops
[2], val
, byte_mode
, true);
2945 create_integer_operand (&ops
[3], align
/ BITS_PER_UNIT
);
2948 create_integer_operand (&ops
[4], expected_align
/ BITS_PER_UNIT
);
2949 create_integer_operand (&ops
[5], expected_size
);
2953 create_integer_operand (&ops
[6], min_size
);
2954 /* If we can not represent the maximal size,
2955 make parameter NULL. */
2956 if ((HOST_WIDE_INT
) max_size
!= -1)
2957 create_integer_operand (&ops
[7], max_size
);
2959 create_fixed_operand (&ops
[7], NULL
);
2963 /* If we can not represent the maximal size,
2964 make parameter NULL. */
2965 if ((HOST_WIDE_INT
) probable_max_size
!= -1)
2966 create_integer_operand (&ops
[8], probable_max_size
);
2968 create_fixed_operand (&ops
[8], NULL
);
2970 if (maybe_expand_insn (code
, nops
, ops
))
2979 /* Write to one of the components of the complex value CPLX. Write VAL to
2980 the real part if IMAG_P is false, and the imaginary part if its true. */
2983 write_complex_part (rtx cplx
, rtx val
, bool imag_p
)
2989 if (GET_CODE (cplx
) == CONCAT
)
2991 emit_move_insn (XEXP (cplx
, imag_p
), val
);
2995 cmode
= GET_MODE (cplx
);
2996 imode
= GET_MODE_INNER (cmode
);
2997 ibitsize
= GET_MODE_BITSIZE (imode
);
2999 /* For MEMs simplify_gen_subreg may generate an invalid new address
3000 because, e.g., the original address is considered mode-dependent
3001 by the target, which restricts simplify_subreg from invoking
3002 adjust_address_nv. Instead of preparing fallback support for an
3003 invalid address, we call adjust_address_nv directly. */
3006 emit_move_insn (adjust_address_nv (cplx
, imode
,
3007 imag_p
? GET_MODE_SIZE (imode
) : 0),
3012 /* If the sub-object is at least word sized, then we know that subregging
3013 will work. This special case is important, since store_bit_field
3014 wants to operate on integer modes, and there's rarely an OImode to
3015 correspond to TCmode. */
3016 if (ibitsize
>= BITS_PER_WORD
3017 /* For hard regs we have exact predicates. Assume we can split
3018 the original object if it spans an even number of hard regs.
3019 This special case is important for SCmode on 64-bit platforms
3020 where the natural size of floating-point regs is 32-bit. */
3022 && REGNO (cplx
) < FIRST_PSEUDO_REGISTER
3023 && REG_NREGS (cplx
) % 2 == 0))
3025 rtx part
= simplify_gen_subreg (imode
, cplx
, cmode
,
3026 imag_p
? GET_MODE_SIZE (imode
) : 0);
3029 emit_move_insn (part
, val
);
3033 /* simplify_gen_subreg may fail for sub-word MEMs. */
3034 gcc_assert (MEM_P (cplx
) && ibitsize
< BITS_PER_WORD
);
3037 store_bit_field (cplx
, ibitsize
, imag_p
? ibitsize
: 0, 0, 0, imode
, val
);
3040 /* Extract one of the components of the complex value CPLX. Extract the
3041 real part if IMAG_P is false, and the imaginary part if it's true. */
3044 read_complex_part (rtx cplx
, bool imag_p
)
3046 machine_mode cmode
, imode
;
3049 if (GET_CODE (cplx
) == CONCAT
)
3050 return XEXP (cplx
, imag_p
);
3052 cmode
= GET_MODE (cplx
);
3053 imode
= GET_MODE_INNER (cmode
);
3054 ibitsize
= GET_MODE_BITSIZE (imode
);
3056 /* Special case reads from complex constants that got spilled to memory. */
3057 if (MEM_P (cplx
) && GET_CODE (XEXP (cplx
, 0)) == SYMBOL_REF
)
3059 tree decl
= SYMBOL_REF_DECL (XEXP (cplx
, 0));
3060 if (decl
&& TREE_CODE (decl
) == COMPLEX_CST
)
3062 tree part
= imag_p
? TREE_IMAGPART (decl
) : TREE_REALPART (decl
);
3063 if (CONSTANT_CLASS_P (part
))
3064 return expand_expr (part
, NULL_RTX
, imode
, EXPAND_NORMAL
);
3068 /* For MEMs simplify_gen_subreg may generate an invalid new address
3069 because, e.g., the original address is considered mode-dependent
3070 by the target, which restricts simplify_subreg from invoking
3071 adjust_address_nv. Instead of preparing fallback support for an
3072 invalid address, we call adjust_address_nv directly. */
3074 return adjust_address_nv (cplx
, imode
,
3075 imag_p
? GET_MODE_SIZE (imode
) : 0);
3077 /* If the sub-object is at least word sized, then we know that subregging
3078 will work. This special case is important, since extract_bit_field
3079 wants to operate on integer modes, and there's rarely an OImode to
3080 correspond to TCmode. */
3081 if (ibitsize
>= BITS_PER_WORD
3082 /* For hard regs we have exact predicates. Assume we can split
3083 the original object if it spans an even number of hard regs.
3084 This special case is important for SCmode on 64-bit platforms
3085 where the natural size of floating-point regs is 32-bit. */
3087 && REGNO (cplx
) < FIRST_PSEUDO_REGISTER
3088 && REG_NREGS (cplx
) % 2 == 0))
3090 rtx ret
= simplify_gen_subreg (imode
, cplx
, cmode
,
3091 imag_p
? GET_MODE_SIZE (imode
) : 0);
3095 /* simplify_gen_subreg may fail for sub-word MEMs. */
3096 gcc_assert (MEM_P (cplx
) && ibitsize
< BITS_PER_WORD
);
3099 return extract_bit_field (cplx
, ibitsize
, imag_p
? ibitsize
: 0,
3100 true, NULL_RTX
, imode
, imode
);
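/* Illustrative sketch (hypothetical rtxes, not from the original sources):
   assembling a complex value from two scalar parts and reading one back.  */
#if 0
static rtx
example_build_complex (rtx cplx, rtx re, rtx im)
{
  write_complex_part (cplx, re, false);    /* real part  */
  write_complex_part (cplx, im, true);     /* imaginary part  */
  return read_complex_part (cplx, true);   /* fetch the imaginary part  */
}
#endif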
3103 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3104 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3105 represented in NEW_MODE. If FORCE is true, this will never happen, as
3106 we'll force-create a SUBREG if needed. */
3109 emit_move_change_mode (machine_mode new_mode
,
3110 machine_mode old_mode
, rtx x
, bool force
)
3114 if (push_operand (x
, GET_MODE (x
)))
3116 ret
= gen_rtx_MEM (new_mode
, XEXP (x
, 0));
3117 MEM_COPY_ATTRIBUTES (ret
, x
);
3121 /* We don't have to worry about changing the address since the
3122 size in bytes is supposed to be the same. */
3123 if (reload_in_progress
)
3125 /* Copy the MEM to change the mode and move any
3126 substitutions from the old MEM to the new one. */
3127 ret
= adjust_address_nv (x
, new_mode
, 0);
3128 copy_replacements (x
, ret
);
3131 ret
= adjust_address (x
, new_mode
, 0);
3135 /* Note that we do want simplify_subreg's behavior of validating
3136 that the new mode is ok for a hard register. If we were to use
3137 simplify_gen_subreg, we would create the subreg, but would
3138 probably run into the target not being able to implement it. */
3139 /* Except, of course, when FORCE is true, when this is exactly what
3140 we want. Which is needed for CCmodes on some targets. */
3142 ret
= simplify_gen_subreg (new_mode
, x
, old_mode
, 0);
3144 ret
= simplify_subreg (new_mode
, x
, old_mode
, 0);
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx_insn *
emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
{
  machine_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  imode = int_mode_for_mode (mode);
  if (imode == BLKmode)
    return NULL;

  /* The target must support moves in this mode.  */
  code = optab_handler (mov_optab, imode);
  if (code == CODE_FOR_nothing)
    return NULL;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL;
  return emit_insn (GEN_FCN (code) (x, y));
}
3179 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3180 Return an equivalent MEM that does not use an auto-increment. */
3183 emit_move_resolve_push (machine_mode mode
, rtx x
)
3185 enum rtx_code code
= GET_CODE (XEXP (x
, 0));
3186 HOST_WIDE_INT adjust
;
3189 adjust
= GET_MODE_SIZE (mode
);
3190 #ifdef PUSH_ROUNDING
3191 adjust
= PUSH_ROUNDING (adjust
);
3193 if (code
== PRE_DEC
|| code
== POST_DEC
)
3195 else if (code
== PRE_MODIFY
|| code
== POST_MODIFY
)
3197 rtx expr
= XEXP (XEXP (x
, 0), 1);
3200 gcc_assert (GET_CODE (expr
) == PLUS
|| GET_CODE (expr
) == MINUS
);
3201 gcc_assert (CONST_INT_P (XEXP (expr
, 1)));
3202 val
= INTVAL (XEXP (expr
, 1));
3203 if (GET_CODE (expr
) == MINUS
)
3205 gcc_assert (adjust
== val
|| adjust
== -val
);
3209 /* Do not use anti_adjust_stack, since we don't want to update
3210 stack_pointer_delta. */
3211 temp
= expand_simple_binop (Pmode
, PLUS
, stack_pointer_rtx
,
3212 gen_int_mode (adjust
, Pmode
), stack_pointer_rtx
,
3213 0, OPTAB_LIB_WIDEN
);
3214 if (temp
!= stack_pointer_rtx
)
3215 emit_move_insn (stack_pointer_rtx
, temp
);
3222 temp
= stack_pointer_rtx
;
3227 temp
= plus_constant (Pmode
, stack_pointer_rtx
, -adjust
);
3233 return replace_equiv_address (x
, temp
);
3236 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3237 X is known to satisfy push_operand, and MODE is known to be complex.
3238 Returns the last instruction emitted. */
3241 emit_move_complex_push (machine_mode mode
, rtx x
, rtx y
)
3243 machine_mode submode
= GET_MODE_INNER (mode
);
3246 #ifdef PUSH_ROUNDING
3247 unsigned int submodesize
= GET_MODE_SIZE (submode
);
3249 /* In case we output to the stack, but the size is smaller than the
3250 machine can push exactly, we need to use move instructions. */
3251 if (PUSH_ROUNDING (submodesize
) != submodesize
)
3253 x
= emit_move_resolve_push (mode
, x
);
3254 return emit_move_insn (x
, y
);
3258 /* Note that the real part always precedes the imag part in memory
3259 regardless of machine's endianness. */
3260 switch (GET_CODE (XEXP (x
, 0)))
3274 emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3275 read_complex_part (y
, imag_first
));
3276 return emit_move_insn (gen_rtx_MEM (submode
, XEXP (x
, 0)),
3277 read_complex_part (y
, !imag_first
));
3280 /* A subroutine of emit_move_complex. Perform the move from Y to X
3281 via two moves of the parts. Returns the last instruction emitted. */
3284 emit_move_complex_parts (rtx x
, rtx y
)
3286 /* Show the output dies here. This is necessary for SUBREGs
3287 of pseudos since we cannot track their lifetimes correctly;
3288 hard regs shouldn't appear here except as return values. */
3289 if (!reload_completed
&& !reload_in_progress
3290 && REG_P (x
) && !reg_overlap_mentioned_p (x
, y
))
3293 write_complex_part (x
, read_complex_part (y
, false), false);
3294 write_complex_part (x
, read_complex_part (y
, true), true);
3296 return get_last_insn ();
3299 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3300 MODE is known to be complex. Returns the last instruction emitted. */
3303 emit_move_complex (machine_mode mode
, rtx x
, rtx y
)
3307 /* Need to take special care for pushes, to maintain proper ordering
3308 of the data, and possibly extra padding. */
3309 if (push_operand (x
, mode
))
3310 return emit_move_complex_push (mode
, x
, y
);
3312 /* See if we can coerce the target into moving both values at once, except
3313 for floating point where we favor moving as parts if this is easy. */
3314 if (GET_MODE_CLASS (mode
) == MODE_COMPLEX_FLOAT
3315 && optab_handler (mov_optab
, GET_MODE_INNER (mode
)) != CODE_FOR_nothing
3317 && HARD_REGISTER_P (x
)
3318 && REG_NREGS (x
) == 1)
3320 && HARD_REGISTER_P (y
)
3321 && REG_NREGS (y
) == 1))
3323 /* Not possible if the values are inherently not adjacent. */
3324 else if (GET_CODE (x
) == CONCAT
|| GET_CODE (y
) == CONCAT
)
3326 /* Is possible if both are registers (or subregs of registers). */
3327 else if (register_operand (x
, mode
) && register_operand (y
, mode
))
3329 /* If one of the operands is a memory, and alignment constraints
3330 are friendly enough, we may be able to do combined memory operations.
3331 We do not attempt this if Y is a constant because that combination is
3332 usually better with the by-parts thing below. */
3333 else if ((MEM_P (x
) ? !CONSTANT_P (y
) : MEM_P (y
))
3334 && (!STRICT_ALIGNMENT
3335 || get_mode_alignment (mode
) == BIGGEST_ALIGNMENT
))
3344 /* For memory to memory moves, optimal behavior can be had with the
3345 existing block move logic. */
3346 if (MEM_P (x
) && MEM_P (y
))
3348 emit_block_move (x
, y
, GEN_INT (GET_MODE_SIZE (mode
)),
3349 BLOCK_OP_NO_LIBCALL
);
3350 return get_last_insn ();
3353 ret
= emit_move_via_integer (mode
, x
, y
, true);
3358 return emit_move_complex_parts (x
, y
);
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx_insn *
emit_move_ccmode (machine_mode mode, rtx x, rtx y)
{
  rtx_insn *ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = optab_handler (mov_optab, CCmode);
      if (code != CODE_FOR_nothing)
	{
	  x = emit_move_change_mode (CCmode, mode, x, true);
	  y = emit_move_change_mode (CCmode, mode, y, true);
	  return emit_insn (GEN_FCN (code) (x, y));
	}
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}
/* Return true if word I of OP lies entirely in the
   undefined bits of a paradoxical subreg.  */

static bool
undefined_operand_subword_p (const_rtx op, int i)
{
  machine_mode innermode, innermostmode;
  int offset;
  if (GET_CODE (op) != SUBREG)
    return false;
  innermode = GET_MODE (op);
  innermostmode = GET_MODE (SUBREG_REG (op));
  offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
  /* The SUBREG_BYTE represents offset, as if the value were stored in
     memory, except for a paradoxical subreg where we define
     SUBREG_BYTE to be 0; undo this exception as in
     simplify_subreg.  */
  if (SUBREG_BYTE (op) == 0
      && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
    {
      int difference = (GET_MODE_SIZE (innermostmode)
			- GET_MODE_SIZE (innermode));
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  if (offset >= GET_MODE_SIZE (innermostmode)
      || offset <= -GET_MODE_SIZE (word_mode))
    return true;

  return false;
}
3419 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3420 MODE is any multi-word or full-word mode that lacks a move_insn
3421 pattern. Note that you will get better code if you define such
3422 patterns, even if they must turn into multiple assembler instructions. */
3425 emit_move_multi_word (machine_mode mode
, rtx x
, rtx y
)
3427 rtx_insn
*last_insn
= 0;
3433 gcc_assert (GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
);
3435 /* If X is a push on the stack, do the push now and replace
3436 X with a reference to the stack pointer. */
3437 if (push_operand (x
, mode
))
3438 x
= emit_move_resolve_push (mode
, x
);
3440 /* If we are in reload, see if either operand is a MEM whose address
3441 is scheduled for replacement. */
3442 if (reload_in_progress
&& MEM_P (x
)
3443 && (inner
= find_replacement (&XEXP (x
, 0))) != XEXP (x
, 0))
3444 x
= replace_equiv_address_nv (x
, inner
);
3445 if (reload_in_progress
&& MEM_P (y
)
3446 && (inner
= find_replacement (&XEXP (y
, 0))) != XEXP (y
, 0))
3447 y
= replace_equiv_address_nv (y
, inner
);
3451 need_clobber
= false;
3453 i
< (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
3456 rtx xpart
= operand_subword (x
, i
, 1, mode
);
3459 /* Do not generate code for a move if it would come entirely
3460 from the undefined bits of a paradoxical subreg. */
3461 if (undefined_operand_subword_p (y
, i
))
3464 ypart
= operand_subword (y
, i
, 1, mode
);
3466 /* If we can't get a part of Y, put Y into memory if it is a
3467 constant. Otherwise, force it into a register. Then we must
3468 be able to get a part of Y. */
3469 if (ypart
== 0 && CONSTANT_P (y
))
3471 y
= use_anchored_address (force_const_mem (mode
, y
));
3472 ypart
= operand_subword (y
, i
, 1, mode
);
3474 else if (ypart
== 0)
3475 ypart
= operand_subword_force (y
, i
, mode
);
3477 gcc_assert (xpart
&& ypart
);
3479 need_clobber
|= (GET_CODE (xpart
) == SUBREG
);
3481 last_insn
= emit_move_insn (xpart
, ypart
);
3487 /* Show the output dies here. This is necessary for SUBREGs
3488 of pseudos since we cannot track their lifetimes correctly;
3489 hard regs shouldn't appear here except as return values.
3490 We never want to emit such a clobber after reload. */
3492 && ! (reload_in_progress
|| reload_completed
)
3493 && need_clobber
!= 0)
3501 /* Low level part of emit_move_insn.
3502 Called just like emit_move_insn, but assumes X and Y
3503 are basically valid. */
3506 emit_move_insn_1 (rtx x
, rtx y
)
3508 machine_mode mode
= GET_MODE (x
);
3509 enum insn_code code
;
3511 gcc_assert ((unsigned int) mode
< (unsigned int) MAX_MACHINE_MODE
);
3513 code
= optab_handler (mov_optab
, mode
);
3514 if (code
!= CODE_FOR_nothing
)
3515 return emit_insn (GEN_FCN (code
) (x
, y
));
3517 /* Expand complex moves by moving real part and imag part. */
3518 if (COMPLEX_MODE_P (mode
))
3519 return emit_move_complex (mode
, x
, y
);
3521 if (GET_MODE_CLASS (mode
) == MODE_DECIMAL_FLOAT
3522 || ALL_FIXED_POINT_MODE_P (mode
))
3524 rtx_insn
*result
= emit_move_via_integer (mode
, x
, y
, true);
3526 /* If we can't find an integer mode, use multi words. */
3530 return emit_move_multi_word (mode
, x
, y
);
3533 if (GET_MODE_CLASS (mode
) == MODE_CC
)
3534 return emit_move_ccmode (mode
, x
, y
);
3536 /* Try using a move pattern for the corresponding integer mode. This is
3537 only safe when simplify_subreg can convert MODE constants into integer
3538 constants. At present, it can only do this reliably if the value
3539 fits within a HOST_WIDE_INT. */
3540 if (!CONSTANT_P (y
) || GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
3542 rtx_insn
*ret
= emit_move_via_integer (mode
, x
, y
, lra_in_progress
);
3546 if (! lra_in_progress
|| recog (PATTERN (ret
), ret
, 0) >= 0)
3551 return emit_move_multi_word (mode
, x
, y
);
3554 /* Generate code to copy Y into X.
3555 Both Y and X must have the same mode, except that
3556 Y can be a constant with VOIDmode.
3557 This mode cannot be BLKmode; use emit_block_move for that.
3559 Return the last instruction emitted. */
3562 emit_move_insn (rtx x
, rtx y
)
3564 machine_mode mode
= GET_MODE (x
);
3565 rtx y_cst
= NULL_RTX
;
3566 rtx_insn
*last_insn
;
3569 gcc_assert (mode
!= BLKmode
3570 && (GET_MODE (y
) == mode
|| GET_MODE (y
) == VOIDmode
));
3575 && SCALAR_FLOAT_MODE_P (GET_MODE (x
))
3576 && (last_insn
= compress_float_constant (x
, y
)))
3581 if (!targetm
.legitimate_constant_p (mode
, y
))
3583 y
= force_const_mem (mode
, y
);
3585 /* If the target's cannot_force_const_mem prevented the spill,
3586 assume that the target's move expanders will also take care
3587 of the non-legitimate constant. */
3591 y
= use_anchored_address (y
);
3595 /* If X or Y are memory references, verify that their addresses are valid
3598 && (! memory_address_addr_space_p (GET_MODE (x
), XEXP (x
, 0),
3600 && ! push_operand (x
, GET_MODE (x
))))
3601 x
= validize_mem (x
);
3604 && ! memory_address_addr_space_p (GET_MODE (y
), XEXP (y
, 0),
3605 MEM_ADDR_SPACE (y
)))
3606 y
= validize_mem (y
);
3608 gcc_assert (mode
!= BLKmode
);
3610 last_insn
= emit_move_insn_1 (x
, y
);
3612 if (y_cst
&& REG_P (x
)
3613 && (set
= single_set (last_insn
)) != NULL_RTX
3614 && SET_DEST (set
) == x
3615 && ! rtx_equal_p (y_cst
, SET_SRC (set
)))
3616 set_unique_reg_note (last_insn
, REG_EQUAL
, copy_rtx (y_cst
));
/* Generate the body of an instruction to copy Y into X.
   It may be a list of insns, if one insn isn't enough.  */

rtx_insn *
gen_move_insn (rtx x, rtx y)
{
  rtx_insn *seq;

  start_sequence ();
  emit_move_insn_1 (x, y);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Same as above, but return rtx (used as a callback, which must have
   prototype compatible with other functions returning rtx).  */

rtx
gen_move_insn_uncast (rtx x, rtx y)
{
  return gen_move_insn (x, y);
}
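/* Illustrative sketch (not from the original sources): the canonical way
   expansion code copies a value into a fresh pseudo of the same mode with
   emit_move_insn.  MODE and SRC are hypothetical.  */
#if 0
static rtx
example_copy_to_pseudo (machine_mode mode, rtx src)
{
  rtx tmp = gen_reg_rtx (mode);
  emit_move_insn (tmp, src);
  return tmp;
}
#endif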
3645 /* If Y is representable exactly in a narrower mode, and the target can
3646 perform the extension directly from constant or memory, then emit the
3647 move as an extension. */
3650 compress_float_constant (rtx x
, rtx y
)
3652 machine_mode dstmode
= GET_MODE (x
);
3653 machine_mode orig_srcmode
= GET_MODE (y
);
3654 machine_mode srcmode
;
3656 int oldcost
, newcost
;
3657 bool speed
= optimize_insn_for_speed_p ();
3659 REAL_VALUE_FROM_CONST_DOUBLE (r
, y
);
3661 if (targetm
.legitimate_constant_p (dstmode
, y
))
3662 oldcost
= set_src_cost (y
, speed
);
3664 oldcost
= set_src_cost (force_const_mem (dstmode
, y
), speed
);
3666 for (srcmode
= GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode
));
3667 srcmode
!= orig_srcmode
;
3668 srcmode
= GET_MODE_WIDER_MODE (srcmode
))
3672 rtx_insn
*last_insn
;
3674 /* Skip if the target can't extend this way. */
3675 ic
= can_extend_p (dstmode
, srcmode
, 0);
3676 if (ic
== CODE_FOR_nothing
)
3679 /* Skip if the narrowed value isn't exact. */
3680 if (! exact_real_truncate (srcmode
, &r
))
3683 trunc_y
= CONST_DOUBLE_FROM_REAL_VALUE (r
, srcmode
);
3685 if (targetm
.legitimate_constant_p (srcmode
, trunc_y
))
3687 /* Skip if the target needs extra instructions to perform
3689 if (!insn_operand_matches (ic
, 1, trunc_y
))
3691 /* This is valid, but may not be cheaper than the original. */
3692 newcost
= set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode
, trunc_y
),
3694 if (oldcost
< newcost
)
3697 else if (float_extend_from_mem
[dstmode
][srcmode
])
3699 trunc_y
= force_const_mem (srcmode
, trunc_y
);
3700 /* This is valid, but may not be cheaper than the original. */
3701 newcost
= set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode
, trunc_y
),
3703 if (oldcost
< newcost
)
3705 trunc_y
= validize_mem (trunc_y
);
3710 /* For CSE's benefit, force the compressed constant pool entry
3711 into a new pseudo. This constant may be used in different modes,
3712 and if not, combine will put things back together for us. */
3713 trunc_y
= force_reg (srcmode
, trunc_y
);
3715 /* If x is a hard register, perform the extension into a pseudo,
3716 so that e.g. stack realignment code is aware of it. */
3718 if (REG_P (x
) && HARD_REGISTER_P (x
))
3719 target
= gen_reg_rtx (dstmode
);
3721 emit_unop_insn (ic
, target
, trunc_y
, UNKNOWN
);
3722 last_insn
= get_last_insn ();
3725 set_unique_reg_note (last_insn
, REG_EQUAL
, y
);
3728 return emit_move_insn (x
, target
);
3735 /* Pushing data onto the stack. */
3737 /* Push a block of length SIZE (perhaps variable)
3738 and return an rtx to address the beginning of the block.
3739 The value may be virtual_outgoing_args_rtx.
3741 EXTRA is the number of bytes of padding to push in addition to SIZE.
3742 BELOW nonzero means this padding comes at low addresses;
3743 otherwise, the padding comes at high addresses. */
3746 push_block (rtx size
, int extra
, int below
)
3750 size
= convert_modes (Pmode
, ptr_mode
, size
, 1);
3751 if (CONSTANT_P (size
))
3752 anti_adjust_stack (plus_constant (Pmode
, size
, extra
));
3753 else if (REG_P (size
) && extra
== 0)
3754 anti_adjust_stack (size
);
3757 temp
= copy_to_mode_reg (Pmode
, size
);
3759 temp
= expand_binop (Pmode
, add_optab
, temp
,
3760 gen_int_mode (extra
, Pmode
),
3761 temp
, 0, OPTAB_LIB_WIDEN
);
3762 anti_adjust_stack (temp
);
3765 if (STACK_GROWS_DOWNWARD
)
3767 temp
= virtual_outgoing_args_rtx
;
3768 if (extra
!= 0 && below
)
3769 temp
= plus_constant (Pmode
, temp
, extra
);
3773 if (CONST_INT_P (size
))
3774 temp
= plus_constant (Pmode
, virtual_outgoing_args_rtx
,
3775 -INTVAL (size
) - (below
? 0 : extra
));
3776 else if (extra
!= 0 && !below
)
3777 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3778 negate_rtx (Pmode
, plus_constant (Pmode
, size
,
3781 temp
= gen_rtx_PLUS (Pmode
, virtual_outgoing_args_rtx
,
3782 negate_rtx (Pmode
, size
));
3785 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT
), temp
);
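/* Illustrative sketch (hypothetical SIZE, not from the original sources):
   reserve SIZE bytes of outgoing-argument space with no extra padding and
   obtain the address of the new block.  */
#if 0
static rtx
example_reserve_arg_block (rtx size)
{
  return push_block (size, 0, 0);
}
#endif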
/* A utility routine that returns the base of an auto-inc memory, or NULL.  */

static rtx
mem_autoinc_base (rtx mem)
{
  if (MEM_P (mem))
    {
      rtx addr = XEXP (mem, 0);
      if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
	return XEXP (addr, 0);
    }
  return NULL;
}
3802 /* A utility routine used here, in reload, and in try_split. The insns
3803 after PREV up to and including LAST are known to adjust the stack,
3804 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3805 placing notes as appropriate. PREV may be NULL, indicating the
3806 entire insn sequence prior to LAST should be scanned.
3808 The set of allowed stack pointer modifications is small:
3809 (1) One or more auto-inc style memory references (aka pushes),
3810 (2) One or more addition/subtraction with the SP as destination,
3811 (3) A single move insn with the SP as destination,
3812 (4) A call_pop insn,
3813 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3815 Insns in the sequence that do not modify the SP are ignored,
3816 except for noreturn calls.
3818 The return value is the amount of adjustment that can be trivially
3819 verified, via immediate operand or auto-inc. If the adjustment
3820 cannot be trivially extracted, the return value is INT_MIN. */
3823 find_args_size_adjust (rtx_insn
*insn
)
3828 pat
= PATTERN (insn
);
3831 /* Look for a call_pop pattern. */
3834 /* We have to allow non-call_pop patterns for the case
3835 of emit_single_push_insn of a TLS address. */
3836 if (GET_CODE (pat
) != PARALLEL
)
3839 /* All call_pop have a stack pointer adjust in the parallel.
3840 The call itself is always first, and the stack adjust is
3841 usually last, so search from the end. */
3842 for (i
= XVECLEN (pat
, 0) - 1; i
> 0; --i
)
3844 set
= XVECEXP (pat
, 0, i
);
3845 if (GET_CODE (set
) != SET
)
3847 dest
= SET_DEST (set
);
3848 if (dest
== stack_pointer_rtx
)
3851 /* We'd better have found the stack pointer adjust. */
3854 /* Fall through to process the extracted SET and DEST
3855 as if it was a standalone insn. */
3857 else if (GET_CODE (pat
) == SET
)
3859 else if ((set
= single_set (insn
)) != NULL
)
3861 else if (GET_CODE (pat
) == PARALLEL
)
3863 /* ??? Some older ports use a parallel with a stack adjust
3864 and a store for a PUSH_ROUNDING pattern, rather than a
3865 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3866 /* ??? See h8300 and m68k, pushqi1. */
3867 for (i
= XVECLEN (pat
, 0) - 1; i
>= 0; --i
)
3869 set
= XVECEXP (pat
, 0, i
);
3870 if (GET_CODE (set
) != SET
)
3872 dest
= SET_DEST (set
);
3873 if (dest
== stack_pointer_rtx
)
3876 /* We do not expect an auto-inc of the sp in the parallel. */
3877 gcc_checking_assert (mem_autoinc_base (dest
) != stack_pointer_rtx
);
3878 gcc_checking_assert (mem_autoinc_base (SET_SRC (set
))
3879 != stack_pointer_rtx
);
3887 dest
= SET_DEST (set
);
3889 /* Look for direct modifications of the stack pointer. */
3890 if (REG_P (dest
) && REGNO (dest
) == STACK_POINTER_REGNUM
)
3892 /* Look for a trivial adjustment, otherwise assume nothing. */
3893 /* Note that the SPU restore_stack_block pattern refers to
3894 the stack pointer in V4SImode. Consider that non-trivial. */
3895 if (SCALAR_INT_MODE_P (GET_MODE (dest
))
3896 && GET_CODE (SET_SRC (set
)) == PLUS
3897 && XEXP (SET_SRC (set
), 0) == stack_pointer_rtx
3898 && CONST_INT_P (XEXP (SET_SRC (set
), 1)))
3899 return INTVAL (XEXP (SET_SRC (set
), 1));
3900 /* ??? Reload can generate no-op moves, which will be cleaned
3901 up later. Recognize it and continue searching. */
3902 else if (rtx_equal_p (dest
, SET_SRC (set
)))
3905 return HOST_WIDE_INT_MIN
;
3911 /* Otherwise only think about autoinc patterns. */
3912 if (mem_autoinc_base (dest
) == stack_pointer_rtx
)
3915 gcc_checking_assert (mem_autoinc_base (SET_SRC (set
))
3916 != stack_pointer_rtx
);
3918 else if (mem_autoinc_base (SET_SRC (set
)) == stack_pointer_rtx
)
3919 mem
= SET_SRC (set
);
3923 addr
= XEXP (mem
, 0);
3924 switch (GET_CODE (addr
))
3928 return GET_MODE_SIZE (GET_MODE (mem
));
3931 return -GET_MODE_SIZE (GET_MODE (mem
));
3934 addr
= XEXP (addr
, 1);
3935 gcc_assert (GET_CODE (addr
) == PLUS
);
3936 gcc_assert (XEXP (addr
, 0) == stack_pointer_rtx
);
3937 gcc_assert (CONST_INT_P (XEXP (addr
, 1)));
3938 return INTVAL (XEXP (addr
, 1));
3946 fixup_args_size_notes (rtx_insn
*prev
, rtx_insn
*last
, int end_args_size
)
3948 int args_size
= end_args_size
;
3949 bool saw_unknown
= false;
3952 for (insn
= last
; insn
!= prev
; insn
= PREV_INSN (insn
))
3954 HOST_WIDE_INT this_delta
;
3956 if (!NONDEBUG_INSN_P (insn
))
3959 this_delta
= find_args_size_adjust (insn
);
3960 if (this_delta
== 0)
3963 || ACCUMULATE_OUTGOING_ARGS
3964 || find_reg_note (insn
, REG_NORETURN
, NULL_RTX
) == NULL_RTX
)
3968 gcc_assert (!saw_unknown
);
3969 if (this_delta
== HOST_WIDE_INT_MIN
)
3972 add_reg_note (insn
, REG_ARGS_SIZE
, GEN_INT (args_size
));
3973 if (STACK_GROWS_DOWNWARD
)
3974 this_delta
= -(unsigned HOST_WIDE_INT
) this_delta
;
3976 args_size
-= this_delta
;
3979 return saw_unknown
? INT_MIN
: args_size
;
3982 #ifdef PUSH_ROUNDING
3983 /* Emit single push insn. */
3986 emit_single_push_insn_1 (machine_mode mode
, rtx x
, tree type
)
3989 unsigned rounded_size
= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3991 enum insn_code icode
;
3993 stack_pointer_delta
+= PUSH_ROUNDING (GET_MODE_SIZE (mode
));
3994 /* If there is push pattern, use it. Otherwise try old way of throwing
3995 MEM representing push operation to move expander. */
3996 icode
= optab_handler (push_optab
, mode
);
3997 if (icode
!= CODE_FOR_nothing
)
3999 struct expand_operand ops
[1];
4001 create_input_operand (&ops
[0], x
, mode
);
4002 if (maybe_expand_insn (icode
, 1, ops
))
4005 if (GET_MODE_SIZE (mode
) == rounded_size
)
4006 dest_addr
= gen_rtx_fmt_e (STACK_PUSH_CODE
, Pmode
, stack_pointer_rtx
);
4007 /* If we are to pad downward, adjust the stack pointer first and
4008 then store X into the stack location using an offset. This is
4009 because emit_move_insn does not know how to pad; it does not have
4011 else if (FUNCTION_ARG_PADDING (mode
, type
) == downward
)
4013 unsigned padding_size
= rounded_size
- GET_MODE_SIZE (mode
);
4014 HOST_WIDE_INT offset
;
4016 emit_move_insn (stack_pointer_rtx
,
4017 expand_binop (Pmode
,
4018 STACK_GROWS_DOWNWARD
? sub_optab
4021 gen_int_mode (rounded_size
, Pmode
),
4022 NULL_RTX
, 0, OPTAB_LIB_WIDEN
));
4024 offset
= (HOST_WIDE_INT
) padding_size
;
4025 if (STACK_GROWS_DOWNWARD
&& STACK_PUSH_CODE
== POST_DEC
)
4026 /* We have already decremented the stack pointer, so get the
4028 offset
+= (HOST_WIDE_INT
) rounded_size
;
4030 if (!STACK_GROWS_DOWNWARD
&& STACK_PUSH_CODE
== POST_INC
)
4031 /* We have already incremented the stack pointer, so get the
4033 offset
-= (HOST_WIDE_INT
) rounded_size
;
4035 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
4036 gen_int_mode (offset
, Pmode
));
4040 if (STACK_GROWS_DOWNWARD
)
4041 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4042 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
4043 gen_int_mode (-(HOST_WIDE_INT
) rounded_size
,
4046 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4047 dest_addr
= gen_rtx_PLUS (Pmode
, stack_pointer_rtx
,
4048 gen_int_mode (rounded_size
, Pmode
));
4050 dest_addr
= gen_rtx_PRE_MODIFY (Pmode
, stack_pointer_rtx
, dest_addr
);
4053 dest
= gen_rtx_MEM (mode
, dest_addr
);
4057 set_mem_attributes (dest
, type
, 1);
4059 if (cfun
->tail_call_marked
)
4060 /* Function incoming arguments may overlap with sibling call
4061 outgoing arguments and we cannot allow reordering of reads
4062 from function arguments with stores to outgoing arguments
4063 of sibling calls. */
4064 set_mem_alias_set (dest
, 0);
4066 emit_move_insn (dest
, x
);
/* Emit and annotate a single push insn.  */

static void
emit_single_push_insn (machine_mode mode, rtx x, tree type)
{
  int delta, old_delta = stack_pointer_delta;
  rtx_insn *prev = get_last_insn ();
  rtx_insn *last;

  emit_single_push_insn_1 (mode, x, type);

  last = get_last_insn ();

  /* Notice the common case where we emitted exactly one insn.  */
  if (PREV_INSN (last) == prev)
    {
      add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
      return;
    }

  delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
  gcc_assert (delta == INT_MIN || delta == old_delta);
}
/* If reading SIZE bytes from X will end up reading from
   Y return the number of bytes that overlap.  Return -1
   if there is no overlap or -2 if we can't determine
   (for example when X and Y have different base registers).  */

static int
memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
{
  rtx tmp = plus_constant (Pmode, x, size);
  rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);

  if (!CONST_INT_P (sub))
    return -2;

  HOST_WIDE_INT val = INTVAL (sub);

  return IN_RANGE (val, 1, size) ? val : -1;
}
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   Return true if successful.  May return false if asked to push a
   partial argument during a sibcall optimization (as specified by
   SIBCALL_P) and the incoming and outgoing pointers cannot be shown
   not to overlap.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
bool
emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
		unsigned int align, int partial, rtx reg, int extra,
		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
		rtx alignment_pad, bool sibcall_p)
{
  rtx xinner;
  enum direction stack_direction = STACK_GROWS_DOWNWARD ? downward : upward;
  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);
  xinner = x;

  int nregs = partial / UNITS_PER_WORD;
  rtx *tmp_regs = NULL;
  int overlapping = 0;

  if (mode == BLKmode
      || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used;
      int offset;
      int skip;

      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      used = partial - offset;
4187 if (mode
!= BLKmode
)
4189 /* A value is to be stored in an insufficiently aligned
4190 stack slot; copy via a suitably aligned slot if
4192 size
= GEN_INT (GET_MODE_SIZE (mode
));
4193 if (!MEM_P (xinner
))
4195 temp
= assign_temp (type
, 1, 1);
4196 emit_move_insn (temp
, xinner
);
4203 /* USED is now the # of bytes we need not copy to the stack
4204 because registers will take care of them. */
4207 xinner
= adjust_address (xinner
, BLKmode
, used
);
4209 /* If the partial register-part of the arg counts in its stack size,
4210 skip the part of stack space corresponding to the registers.
4211 Otherwise, start copying to the beginning of the stack space,
4212 by setting SKIP to 0. */
4213 skip
= (reg_parm_stack_space
== 0) ? 0 : used
;
4215 #ifdef PUSH_ROUNDING
4216 /* Do it with several push insns if that doesn't take lots of insns
4217 and if there is no difficulty with push insns that skip bytes
4218 on the stack for alignment purposes. */
4221 && CONST_INT_P (size
)
4223 && MEM_ALIGN (xinner
) >= align
4224 && can_move_by_pieces ((unsigned) INTVAL (size
) - used
, align
)
4225 /* Here we avoid the case of a structure whose weak alignment
4226 forces many pushes of a small amount of data,
4227 and such small pushes do rounding that causes trouble. */
4228 && ((! SLOW_UNALIGNED_ACCESS (word_mode
, align
))
4229 || align
>= BIGGEST_ALIGNMENT
4230 || (PUSH_ROUNDING (align
/ BITS_PER_UNIT
)
4231 == (align
/ BITS_PER_UNIT
)))
4232 && (HOST_WIDE_INT
) PUSH_ROUNDING (INTVAL (size
)) == INTVAL (size
))
4234 /* Push padding now if padding above and stack grows down,
4235 or if padding below and stack grows up.
4236 But if space already allocated, this has already been done. */
4237 if (extra
&& args_addr
== 0
4238 && where_pad
!= none
&& where_pad
!= stack_direction
)
4239 anti_adjust_stack (GEN_INT (extra
));
4241 move_by_pieces (NULL
, xinner
, INTVAL (size
) - used
, align
, 0);
4244 #endif /* PUSH_ROUNDING */
4248 /* Otherwise make space on the stack and copy the data
4249 to the address of that space. */
4251 /* Deduct words put into registers from the size we must copy. */
4254 if (CONST_INT_P (size
))
4255 size
= GEN_INT (INTVAL (size
) - used
);
4257 size
= expand_binop (GET_MODE (size
), sub_optab
, size
,
4258 gen_int_mode (used
, GET_MODE (size
)),
4259 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
4262 /* Get the address of the stack space.
4263 In this case, we do not deal with EXTRA separately.
4264 A single stack adjust will do. */
4267 temp
= push_block (size
, extra
, where_pad
== downward
);
4270 else if (CONST_INT_P (args_so_far
))
4271 temp
= memory_address (BLKmode
,
4272 plus_constant (Pmode
, args_addr
,
4273 skip
+ INTVAL (args_so_far
)));
4275 temp
= memory_address (BLKmode
,
4276 plus_constant (Pmode
,
4277 gen_rtx_PLUS (Pmode
,
4282 if (!ACCUMULATE_OUTGOING_ARGS
)
4284 /* If the source is referenced relative to the stack pointer,
4285 copy it to another register to stabilize it. We do not need
4286 to do this if we know that we won't be changing sp. */
4288 if (reg_mentioned_p (virtual_stack_dynamic_rtx
, temp
)
4289 || reg_mentioned_p (virtual_outgoing_args_rtx
, temp
))
4290 temp
= copy_to_reg (temp
);
4293 target
= gen_rtx_MEM (BLKmode
, temp
);
4295 /* We do *not* set_mem_attributes here, because incoming arguments
4296 may overlap with sibling call outgoing arguments and we cannot
4297 allow reordering of reads from function arguments with stores
4298 to outgoing arguments of sibling calls. We do, however, want
4299 to record the alignment of the stack slot. */
4300 /* ALIGN may well be better aligned than TYPE, e.g. due to
4301 PARM_BOUNDARY. Assume the caller isn't lying. */
4302 set_mem_align (target
, align
);
4304 /* If part should go in registers and pushing to that part would
4305 overwrite some of the values that need to go into regs, load the
4306 overlapping values into temporary pseudos to be moved into the hard
4307 regs at the end after the stack pushing has completed.
4308 We cannot load them directly into the hard regs here because
4309 they can be clobbered by the block move expansions.
4312 if (partial
> 0 && reg
!= 0 && mode
== BLKmode
4313 && GET_CODE (reg
) != PARALLEL
)
4315 overlapping
= memory_load_overlap (XEXP (x
, 0), temp
, partial
);
4316 if (overlapping
> 0)
4318 gcc_assert (overlapping
% UNITS_PER_WORD
== 0);
4319 overlapping
/= UNITS_PER_WORD
;
4321 tmp_regs
= XALLOCAVEC (rtx
, overlapping
);
4323 for (int i
= 0; i
< overlapping
; i
++)
4324 tmp_regs
[i
] = gen_reg_rtx (word_mode
);
4326 for (int i
= 0; i
< overlapping
; i
++)
4327 emit_move_insn (tmp_regs
[i
],
4328 operand_subword_force (target
, i
, mode
));
4330 else if (overlapping
== -1)
4332 /* Could not determine whether there is overlap.
4333 Fail the sibcall. */
4341 emit_block_move (target
, xinner
, size
, BLOCK_OP_CALL_PARM
);
4344 else if (partial
> 0)
4346 /* Scalar partly in registers. */
4348 int size
= GET_MODE_SIZE (mode
) / UNITS_PER_WORD
;
4351 /* # bytes of start of argument
4352 that we must make space for but need not store. */
4353 int offset
= partial
% (PARM_BOUNDARY
/ BITS_PER_UNIT
);
4354 int args_offset
= INTVAL (args_so_far
);
4357 /* Push padding now if padding above and stack grows down,
4358 or if padding below and stack grows up.
4359 But if space already allocated, this has already been done. */
4360 if (extra
&& args_addr
== 0
4361 && where_pad
!= none
&& where_pad
!= stack_direction
)
4362 anti_adjust_stack (GEN_INT (extra
));
      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
      offset /= UNITS_PER_WORD;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4381 if (CONSTANT_P (x
) && !targetm
.legitimate_constant_p (mode
, x
))
4382 x
= validize_mem (force_const_mem (mode
, x
));
4384 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4385 SUBREGs of such registers are not allowed. */
4386 if ((REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
4387 && GET_MODE_CLASS (GET_MODE (x
)) != MODE_INT
))
4388 x
= copy_to_reg (x
);
4390 /* Loop over all the words allocated on the stack for this arg. */
4391 /* We can do it by words, because any scalar bigger than a word
4392 has a size a multiple of a word. */
4393 for (i
= size
- 1; i
>= not_stack
; i
--)
4394 if (i
>= not_stack
+ offset
)
4395 if (!emit_push_insn (operand_subword_force (x
, i
, mode
),
4396 word_mode
, NULL_TREE
, NULL_RTX
, align
, 0, NULL_RTX
,
4398 GEN_INT (args_offset
+ ((i
- not_stack
+ skip
)
4400 reg_parm_stack_space
, alignment_pad
, sibcall_p
))
4408 /* Push padding now if padding above and stack grows down,
4409 or if padding below and stack grows up.
4410 But if space already allocated, this has already been done. */
4411 if (extra
&& args_addr
== 0
4412 && where_pad
!= none
&& where_pad
!= stack_direction
)
4413 anti_adjust_stack (GEN_INT (extra
));
4415 #ifdef PUSH_ROUNDING
4416 if (args_addr
== 0 && PUSH_ARGS
)
4417 emit_single_push_insn (mode
, x
, type
);
4421 if (CONST_INT_P (args_so_far
))
4423 = memory_address (mode
,
4424 plus_constant (Pmode
, args_addr
,
4425 INTVAL (args_so_far
)));
4427 addr
= memory_address (mode
, gen_rtx_PLUS (Pmode
, args_addr
,
4429 dest
= gen_rtx_MEM (mode
, addr
);
4431 /* We do *not* set_mem_attributes here, because incoming arguments
4432 may overlap with sibling call outgoing arguments and we cannot
4433 allow reordering of reads from function arguments with stores
4434 to outgoing arguments of sibling calls. We do, however, want
4435 to record the alignment of the stack slot. */
4436 /* ALIGN may well be better aligned than TYPE, e.g. due to
4437 PARM_BOUNDARY. Assume the caller isn't lying. */
4438 set_mem_align (dest
, align
);
4440 emit_move_insn (dest
, x
);
4444 /* Move the partial arguments into the registers and any overlapping
4445 values that we moved into the pseudos in tmp_regs. */
4446 if (partial
> 0 && reg
!= 0)
4448 /* Handle calls that pass values in multiple non-contiguous locations.
4449 The Irix 6 ABI has examples of this. */
4450 if (GET_CODE (reg
) == PARALLEL
)
4451 emit_group_load (reg
, x
, type
, -1);
4454 gcc_assert (partial
% UNITS_PER_WORD
== 0);
4455 move_block_to_reg (REGNO (reg
), x
, nregs
- overlapping
, mode
);
4457 for (int i
= 0; i
< overlapping
; i
++)
4458 emit_move_insn (gen_rtx_REG (word_mode
, REGNO (reg
)
4459 + nregs
- overlapping
+ i
),
4465 if (extra
&& args_addr
== 0 && where_pad
== stack_direction
)
4466 anti_adjust_stack (GEN_INT (extra
));
4468 if (alignment_pad
&& args_addr
== 0)
4469 anti_adjust_stack (alignment_pad
);
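
/* Illustrative sketch (not part of GCC; names are local to this example):
   the word bookkeeping used by the "scalar partly in registers" path of
   emit_push_insn above.  PARTIAL bytes of the argument travel in
   registers; OFFSET is the slack at the start of the argument that must
   be skipped but not stored, NOT_STACK is the number of words that never
   reach the stack, and the push loop then walks from the last word down
   to NOT_STACK.  WORD_BYTES stands in for UNITS_PER_WORD and
   PARM_BOUNDARY_BYTES for PARM_BOUNDARY / BITS_PER_UNIT.  */

static void
partial_arg_split_example (int partial, int parm_boundary_bytes,
			   int word_bytes, int *offset_words,
			   int *not_stack_words)
{
  int offset = partial % parm_boundary_bytes;	/* bytes to skip, not store  */

  *not_stack_words = (partial - offset) / word_bytes;
  *offset_words = offset / word_bytes;
}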
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
	  || x == 0
	  /* Only registers can be subtargets.  */
	  || !REG_P (x)
	  /* Don't use hard regs to avoid extending their life.  */
	  || REGNO (x) < FIRST_PSEUDO_REGISTER
	  ? 0 : x);
}
/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */

static bool
optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
				 unsigned HOST_WIDE_INT bitpos,
				 unsigned HOST_WIDE_INT bitregion_start,
				 unsigned HOST_WIDE_INT bitregion_end,
				 machine_mode mode1, rtx str_rtx,
				 tree to, tree src)
{
);
4502 unsigned int str_bitsize
= GET_MODE_BITSIZE (str_mode
);
4507 enum tree_code code
;
4509 if (mode1
!= VOIDmode
4510 || bitsize
>= BITS_PER_WORD
4511 || str_bitsize
> BITS_PER_WORD
4512 || TREE_SIDE_EFFECTS (to
)
4513 || TREE_THIS_VOLATILE (to
))
4517 if (TREE_CODE (src
) != SSA_NAME
)
4519 if (TREE_CODE (TREE_TYPE (src
)) != INTEGER_TYPE
)
4522 srcstmt
= get_gimple_for_ssa_name (src
);
4524 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt
)) != tcc_binary
)
4527 code
= gimple_assign_rhs_code (srcstmt
);
4529 op0
= gimple_assign_rhs1 (srcstmt
);
4531 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4532 to find its initialization. Hopefully the initialization will
4533 be from a bitfield load. */
4534 if (TREE_CODE (op0
) == SSA_NAME
)
4536 gimple op0stmt
= get_gimple_for_ssa_name (op0
);
4538 /* We want to eventually have OP0 be the same as TO, which
4539 should be a bitfield. */
4541 || !is_gimple_assign (op0stmt
)
4542 || gimple_assign_rhs_code (op0stmt
) != TREE_CODE (to
))
4544 op0
= gimple_assign_rhs1 (op0stmt
);
4547 op1
= gimple_assign_rhs2 (srcstmt
);
4549 if (!operand_equal_p (to
, op0
, 0))
4552 if (MEM_P (str_rtx
))
4554 unsigned HOST_WIDE_INT offset1
;
4556 if (str_bitsize
== 0 || str_bitsize
> BITS_PER_WORD
)
4557 str_mode
= word_mode
;
4558 str_mode
= get_best_mode (bitsize
, bitpos
,
4559 bitregion_start
, bitregion_end
,
4560 MEM_ALIGN (str_rtx
), str_mode
, 0);
4561 if (str_mode
== VOIDmode
)
4563 str_bitsize
= GET_MODE_BITSIZE (str_mode
);
4566 bitpos
%= str_bitsize
;
4567 offset1
= (offset1
- bitpos
) / BITS_PER_UNIT
;
4568 str_rtx
= adjust_address (str_rtx
, str_mode
, offset1
);
4570 else if (!REG_P (str_rtx
) && GET_CODE (str_rtx
) != SUBREG
)
4573 /* If the bit field covers the whole REG/MEM, store_field
4574 will likely generate better code. */
4575 if (bitsize
>= str_bitsize
)
4578 /* We can't handle fields split across multiple entities. */
4579 if (bitpos
+ bitsize
> str_bitsize
)
4582 if (BYTES_BIG_ENDIAN
)
4583 bitpos
= str_bitsize
- bitpos
- bitsize
;
4589 /* For now, just optimize the case of the topmost bitfield
4590 where we don't need to do any masking and also
4591 1 bit bitfields where xor can be used.
4592 We might win by one instruction for the other bitfields
4593 too if insv/extv instructions aren't used, so that
4594 can be added later. */
4595 if (bitpos
+ bitsize
!= str_bitsize
4596 && (bitsize
!= 1 || TREE_CODE (op1
) != INTEGER_CST
))
4599 value
= expand_expr (op1
, NULL_RTX
, str_mode
, EXPAND_NORMAL
);
4600 value
= convert_modes (str_mode
,
4601 TYPE_MODE (TREE_TYPE (op1
)), value
,
4602 TYPE_UNSIGNED (TREE_TYPE (op1
)));
4604 /* We may be accessing data outside the field, which means
4605 we can alias adjacent data. */
4606 if (MEM_P (str_rtx
))
4608 str_rtx
= shallow_copy_rtx (str_rtx
);
4609 set_mem_alias_set (str_rtx
, 0);
4610 set_mem_expr (str_rtx
, 0);
4613 binop
= code
== PLUS_EXPR
? add_optab
: sub_optab
;
4614 if (bitsize
== 1 && bitpos
+ bitsize
!= str_bitsize
)
4616 value
= expand_and (str_mode
, value
, const1_rtx
, NULL
);
4619 value
= expand_shift (LSHIFT_EXPR
, str_mode
, value
, bitpos
, NULL_RTX
, 1);
4620 result
= expand_binop (str_mode
, binop
, str_rtx
,
4621 value
, str_rtx
, 1, OPTAB_WIDEN
);
4622 if (result
!= str_rtx
)
4623 emit_move_insn (str_rtx
, result
);
4628 if (TREE_CODE (op1
) != INTEGER_CST
)
4630 value
= expand_expr (op1
, NULL_RTX
, str_mode
, EXPAND_NORMAL
);
4631 value
= convert_modes (str_mode
,
4632 TYPE_MODE (TREE_TYPE (op1
)), value
,
4633 TYPE_UNSIGNED (TREE_TYPE (op1
)));
4635 /* We may be accessing data outside the field, which means
4636 we can alias adjacent data. */
4637 if (MEM_P (str_rtx
))
4639 str_rtx
= shallow_copy_rtx (str_rtx
);
4640 set_mem_alias_set (str_rtx
, 0);
4641 set_mem_expr (str_rtx
, 0);
4644 binop
= code
== BIT_IOR_EXPR
? ior_optab
: xor_optab
;
4645 if (bitpos
+ bitsize
!= str_bitsize
)
4647 rtx mask
= gen_int_mode (((unsigned HOST_WIDE_INT
) 1 << bitsize
) - 1,
4649 value
= expand_and (str_mode
, value
, mask
, NULL_RTX
);
4651 value
= expand_shift (LSHIFT_EXPR
, str_mode
, value
, bitpos
, NULL_RTX
, 1);
4652 result
= expand_binop (str_mode
, binop
, str_rtx
,
4653 value
, str_rtx
, 1, OPTAB_WIDEN
);
4654 if (result
!= str_rtx
)
4655 emit_move_insn (str_rtx
, result
);
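
/* Illustrative sketch (not part of GCC; plain integers instead of RTL):
   the mask-shift-combine sequence that the BIT_IOR_EXPR / BIT_XOR_EXPR
   branch above expands via expand_and, expand_shift and expand_binop.
   BITPOS is assumed to be the little-endian bit position; the code above
   flips it first when BYTES_BIG_ENDIAN.  */

static unsigned long
bitfield_or_into_word_example (unsigned long word, unsigned long value,
			       unsigned int bitpos, unsigned int bitsize)
{
  unsigned long mask = (bitsize >= 8 * sizeof (unsigned long))
		       ? ~0UL : ((1UL << bitsize) - 1);

  /* Mask VALUE to BITSIZE bits, place it at BITPOS, OR it into the word.  */
  return word | ((value & mask) << bitpos);
}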
/* In the C++ memory model, consecutive bit fields in a structure are
   considered one memory location.

   Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
   returns the bit range of consecutive bits in which this COMPONENT_REF
   belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
   and *OFFSET may be adjusted in the process.

   If the access does not need to be restricted, 0 is returned in both
   *BITSTART and *BITEND.  */
static void
get_bit_range (unsigned HOST_WIDE_INT *bitstart,
	       unsigned HOST_WIDE_INT *bitend,
	       tree exp,
	       HOST_WIDE_INT *bitpos,
	       tree *offset)
{
  HOST_WIDE_INT bitoffset;
  tree field, repr;
4686 gcc_assert (TREE_CODE (exp
) == COMPONENT_REF
);
4688 field
= TREE_OPERAND (exp
, 1);
4689 repr
= DECL_BIT_FIELD_REPRESENTATIVE (field
);
4690 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4691 need to limit the range we can access. */
4694 *bitstart
= *bitend
= 0;
4698 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4699 part of a larger bit field, then the representative does not serve any
4700 useful purpose. This can occur in Ada. */
4701 if (handled_component_p (TREE_OPERAND (exp
, 0)))
4704 HOST_WIDE_INT rbitsize
, rbitpos
;
4708 get_inner_reference (TREE_OPERAND (exp
, 0), &rbitsize
, &rbitpos
,
4709 &roffset
, &rmode
, &unsignedp
, &volatilep
, false);
4710 if ((rbitpos
% BITS_PER_UNIT
) != 0)
4712 *bitstart
= *bitend
= 0;
4717 /* Compute the adjustment to bitpos from the offset of the field
4718 relative to the representative. DECL_FIELD_OFFSET of field and
4719 repr are the same by construction if they are not constants,
4720 see finish_bitfield_layout. */
4721 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field
))
4722 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr
)))
4723 bitoffset
= (tree_to_uhwi (DECL_FIELD_OFFSET (field
))
4724 - tree_to_uhwi (DECL_FIELD_OFFSET (repr
))) * BITS_PER_UNIT
;
4727 bitoffset
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
))
4728 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr
)));
4730 /* If the adjustment is larger than bitpos, we would have a negative bit
4731 position for the lower bound and this may wreak havoc later. Adjust
4732 offset and bitpos to make the lower bound non-negative in that case. */
4733 if (bitoffset
> *bitpos
)
4735 HOST_WIDE_INT adjust
= bitoffset
- *bitpos
;
4736 gcc_assert ((adjust
% BITS_PER_UNIT
) == 0);
4739 if (*offset
== NULL_TREE
)
4740 *offset
= size_int (-adjust
/ BITS_PER_UNIT
);
4743 = size_binop (MINUS_EXPR
, *offset
, size_int (adjust
/ BITS_PER_UNIT
));
4747 *bitstart
= *bitpos
- bitoffset
;
4749 *bitend
= *bitstart
+ tree_to_uhwi (DECL_SIZE (repr
)) - 1;
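
/* Illustrative sketch (not part of GCC; plain integers instead of trees):
   the arithmetic performed by get_bit_range above, ignoring the
   negative-bitpos adjustment.  FIELD_OFF and REPR_OFF are the
   DECL_FIELD_OFFSETs in bytes of the field and of its bit-field
   representative, FIELD_BIT and REPR_BIT their DECL_FIELD_BIT_OFFSETs in
   bits, and REPR_BITS the representative's DECL_SIZE in bits; 8 stands
   in for BITS_PER_UNIT.  */

static void
bit_range_example (long field_off, long field_bit,
		   long repr_off, long repr_bit,
		   long repr_bits, long bitpos,
		   long *bitstart, long *bitend)
{
  long bitoffset = (field_off - repr_off) * 8 + (field_bit - repr_bit);

  *bitstart = bitpos - bitoffset;	 /* first bit of the representative  */
  *bitend = *bitstart + repr_bits - 1;	 /* last bit the store may touch     */
}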
/* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
   in memory and has non-BLKmode.  DECL_RTL must not be a MEM; if
   DECL_RTL was not set yet, return NORTL.  */

static inline bool
addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
{
  if (TREE_CODE (addr) != ADDR_EXPR)
    return false;

  tree base = TREE_OPERAND (addr, 0);

  if (!DECL_P (base)
      || TREE_ADDRESSABLE (base)
      || DECL_MODE (base) == BLKmode)
    return false;

  if (!DECL_RTL_SET_P (base))
    return nortl;

  return (!MEM_P (DECL_RTL (base)));
}

/* Returns true if the MEM_REF REF refers to an object that does not
   reside in memory and has non-BLKmode.  */

static inline bool
mem_ref_refers_to_non_mem_p (tree ref)
{
  tree base = TREE_OPERAND (ref, 0);
  return addr_expr_of_non_mem_decl_p_1 (base, false);
}
/* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
   is true, try generating a nontemporal store.  */

void
expand_assignment (tree to, tree from, bool nontemporal)
{
  rtx to_rtx = 0;
  rtx result;
  machine_mode mode;
  unsigned int align;
  enum insn_code icode;

  /* Don't crash if the lhs of the assignment was erroneous.  */
  if (TREE_CODE (to) == ERROR_MARK)
    {
      expand_normal (from);
      return;
    }

  /* Optimize away no-op moves without side-effects.  */
  if (operand_equal_p (to, from, 0))
    return;
4808 /* Handle misaligned stores. */
4809 mode
= TYPE_MODE (TREE_TYPE (to
));
4810 if ((TREE_CODE (to
) == MEM_REF
4811 || TREE_CODE (to
) == TARGET_MEM_REF
)
4813 && !mem_ref_refers_to_non_mem_p (to
)
4814 && ((align
= get_object_alignment (to
))
4815 < GET_MODE_ALIGNMENT (mode
))
4816 && (((icode
= optab_handler (movmisalign_optab
, mode
))
4817 != CODE_FOR_nothing
)
4818 || SLOW_UNALIGNED_ACCESS (mode
, align
)))
4822 reg
= expand_expr (from
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4823 reg
= force_not_mem (reg
);
4824 mem
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4826 if (icode
!= CODE_FOR_nothing
)
4828 struct expand_operand ops
[2];
4830 create_fixed_operand (&ops
[0], mem
);
4831 create_input_operand (&ops
[1], reg
, mode
);
4832 /* The movmisalign<mode> pattern cannot fail, else the assignment
4833 would silently be omitted. */
4834 expand_insn (icode
, 2, ops
);
4837 store_bit_field (mem
, GET_MODE_BITSIZE (mode
), 0, 0, 0, mode
, reg
);
4841 /* Assignment of a structure component needs special treatment
4842 if the structure component's rtx is not simply a MEM.
4843 Assignment of an array element at a constant index, and assignment of
4844 an array element in an unaligned packed structure field, has the same
4845 problem. Same for (partially) storing into a non-memory object. */
4846 if (handled_component_p (to
)
4847 || (TREE_CODE (to
) == MEM_REF
4848 && mem_ref_refers_to_non_mem_p (to
))
4849 || TREE_CODE (TREE_TYPE (to
)) == ARRAY_TYPE
)
4852 HOST_WIDE_INT bitsize
, bitpos
;
4853 unsigned HOST_WIDE_INT bitregion_start
= 0;
4854 unsigned HOST_WIDE_INT bitregion_end
= 0;
4861 tem
= get_inner_reference (to
, &bitsize
, &bitpos
, &offset
, &mode1
,
4862 &unsignedp
, &volatilep
, true);
4864 /* Make sure bitpos is not negative, it can wreak havoc later. */
4867 gcc_assert (offset
== NULL_TREE
);
4868 offset
= size_int (bitpos
>> (BITS_PER_UNIT
== 8
4869 ? 3 : exact_log2 (BITS_PER_UNIT
)));
4870 bitpos
&= BITS_PER_UNIT
- 1;
4873 if (TREE_CODE (to
) == COMPONENT_REF
4874 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to
, 1)))
4875 get_bit_range (&bitregion_start
, &bitregion_end
, to
, &bitpos
, &offset
);
4876 /* The C++ memory model naturally applies to byte-aligned fields.
4877 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4878 BITSIZE are not byte-aligned, there is no need to limit the range
4879 we can access. This can occur with packed structures in Ada. */
4880 else if (bitsize
> 0
4881 && bitsize
% BITS_PER_UNIT
== 0
4882 && bitpos
% BITS_PER_UNIT
== 0)
4884 bitregion_start
= bitpos
;
4885 bitregion_end
= bitpos
+ bitsize
- 1;
4888 to_rtx
= expand_expr (tem
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4890 /* If the field has a mode, we want to access it in the
4891 field's mode, not the computed mode.
4892 If a MEM has VOIDmode (external with incomplete type),
4893 use BLKmode for it instead. */
4896 if (mode1
!= VOIDmode
)
4897 to_rtx
= adjust_address (to_rtx
, mode1
, 0);
4898 else if (GET_MODE (to_rtx
) == VOIDmode
)
4899 to_rtx
= adjust_address (to_rtx
, BLKmode
, 0);
4904 machine_mode address_mode
;
4907 if (!MEM_P (to_rtx
))
4909 /* We can get constant negative offsets into arrays with broken
4910 user code. Translate this to a trap instead of ICEing. */
4911 gcc_assert (TREE_CODE (offset
) == INTEGER_CST
);
4912 expand_builtin_trap ();
4913 to_rtx
= gen_rtx_MEM (BLKmode
, const0_rtx
);
4916 offset_rtx
= expand_expr (offset
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
4917 address_mode
= get_address_mode (to_rtx
);
4918 if (GET_MODE (offset_rtx
) != address_mode
)
4920 /* We cannot be sure that the RTL in offset_rtx is valid outside
4921 of a memory address context, so force it into a register
4922 before attempting to convert it to the desired mode. */
4923 offset_rtx
= force_operand (offset_rtx
, NULL_RTX
);
4924 offset_rtx
= convert_to_mode (address_mode
, offset_rtx
, 0);
4927 /* If we have an expression in OFFSET_RTX and a non-zero
4928 byte offset in BITPOS, adding the byte offset before the
4929 OFFSET_RTX results in better intermediate code, which makes
4930 later rtl optimization passes perform better.
4932 We prefer intermediate code like this:
4934 r124:DI=r123:DI+0x18
4939 r124:DI=r123:DI+0x10
4940 [r124:DI+0x8]=r121:DI
4942 This is only done for aligned data values, as these can
4943 be expected to result in single move instructions. */
4944 if (mode1
!= VOIDmode
4947 && (bitpos
% bitsize
) == 0
4948 && (bitsize
% GET_MODE_ALIGNMENT (mode1
)) == 0
4949 && MEM_ALIGN (to_rtx
) >= GET_MODE_ALIGNMENT (mode1
))
4951 to_rtx
= adjust_address (to_rtx
, mode1
, bitpos
/ BITS_PER_UNIT
);
4952 bitregion_start
= 0;
4953 if (bitregion_end
>= (unsigned HOST_WIDE_INT
) bitpos
)
4954 bitregion_end
-= bitpos
;
4958 to_rtx
= offset_address (to_rtx
, offset_rtx
,
4959 highest_pow2_factor_for_target (to
,
4963 /* No action is needed if the target is not a memory and the field
4964 lies completely outside that target. This can occur if the source
4965 code contains an out-of-bounds access to a small array. */
4967 && GET_MODE (to_rtx
) != BLKmode
4968 && (unsigned HOST_WIDE_INT
) bitpos
4969 >= GET_MODE_PRECISION (GET_MODE (to_rtx
)))
4971 expand_normal (from
);
4974 /* Handle expand_expr of a complex value returning a CONCAT. */
4975 else if (GET_CODE (to_rtx
) == CONCAT
)
4977 unsigned short mode_bitsize
= GET_MODE_BITSIZE (GET_MODE (to_rtx
));
4978 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from
)))
4980 && bitsize
== mode_bitsize
)
4981 result
= store_expr (from
, to_rtx
, false, nontemporal
);
4982 else if (bitsize
== mode_bitsize
/ 2
4983 && (bitpos
== 0 || bitpos
== mode_bitsize
/ 2))
4984 result
= store_expr (from
, XEXP (to_rtx
, bitpos
!= 0), false,
4986 else if (bitpos
+ bitsize
<= mode_bitsize
/ 2)
4987 result
= store_field (XEXP (to_rtx
, 0), bitsize
, bitpos
,
4988 bitregion_start
, bitregion_end
,
4990 get_alias_set (to
), nontemporal
);
4991 else if (bitpos
>= mode_bitsize
/ 2)
4992 result
= store_field (XEXP (to_rtx
, 1), bitsize
,
4993 bitpos
- mode_bitsize
/ 2,
4994 bitregion_start
, bitregion_end
,
4996 get_alias_set (to
), nontemporal
);
4997 else if (bitpos
== 0 && bitsize
== mode_bitsize
)
5000 result
= expand_normal (from
);
5001 from_rtx
= simplify_gen_subreg (GET_MODE (to_rtx
), result
,
5002 TYPE_MODE (TREE_TYPE (from
)), 0);
5003 emit_move_insn (XEXP (to_rtx
, 0),
5004 read_complex_part (from_rtx
, false));
5005 emit_move_insn (XEXP (to_rtx
, 1),
5006 read_complex_part (from_rtx
, true));
5010 rtx temp
= assign_stack_temp (GET_MODE (to_rtx
),
5011 GET_MODE_SIZE (GET_MODE (to_rtx
)));
5012 write_complex_part (temp
, XEXP (to_rtx
, 0), false);
5013 write_complex_part (temp
, XEXP (to_rtx
, 1), true);
5014 result
= store_field (temp
, bitsize
, bitpos
,
5015 bitregion_start
, bitregion_end
,
5017 get_alias_set (to
), nontemporal
);
5018 emit_move_insn (XEXP (to_rtx
, 0), read_complex_part (temp
, false));
5019 emit_move_insn (XEXP (to_rtx
, 1), read_complex_part (temp
, true));
5026 /* If the field is at offset zero, we could have been given the
5027 DECL_RTX of the parent struct. Don't munge it. */
5028 to_rtx
= shallow_copy_rtx (to_rtx
);
5029 set_mem_attributes_minus_bitpos (to_rtx
, to
, 0, bitpos
);
5031 MEM_VOLATILE_P (to_rtx
) = 1;
5034 if (optimize_bitfield_assignment_op (bitsize
, bitpos
,
5035 bitregion_start
, bitregion_end
,
5040 result
= store_field (to_rtx
, bitsize
, bitpos
,
5041 bitregion_start
, bitregion_end
,
5043 get_alias_set (to
), nontemporal
);
5047 preserve_temp_slots (result
);
5052 /* If the rhs is a function call and its value is not an aggregate,
5053 call the function before we start to compute the lhs.
5054 This is needed for correct code for cases such as
5055 val = setjmp (buf) on machines where reference to val
5056 requires loading up part of an address in a separate insn.
5058 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5059 since it might be a promoted variable where the zero- or sign- extension
5060 needs to be done. Handling this in the normal way is safe because no
5061 computation is done before the call. The same is true for SSA names. */
5062 if (TREE_CODE (from
) == CALL_EXPR
&& ! aggregate_value_p (from
, from
)
5063 && COMPLETE_TYPE_P (TREE_TYPE (from
))
5064 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from
))) == INTEGER_CST
5065 && ! (((TREE_CODE (to
) == VAR_DECL
5066 || TREE_CODE (to
) == PARM_DECL
5067 || TREE_CODE (to
) == RESULT_DECL
)
5068 && REG_P (DECL_RTL (to
)))
5069 || TREE_CODE (to
) == SSA_NAME
))
5075 value
= expand_normal (from
);
5077 /* Split value and bounds to store them separately. */
5078 chkp_split_slot (value
, &value
, &bounds
);
5081 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5083 /* Handle calls that return values in multiple non-contiguous locations.
5084 The Irix 6 ABI has examples of this. */
5085 if (GET_CODE (to_rtx
) == PARALLEL
)
5087 if (GET_CODE (value
) == PARALLEL
)
5088 emit_group_move (to_rtx
, value
);
5090 emit_group_load (to_rtx
, value
, TREE_TYPE (from
),
5091 int_size_in_bytes (TREE_TYPE (from
)));
5093 else if (GET_CODE (value
) == PARALLEL
)
5094 emit_group_store (to_rtx
, value
, TREE_TYPE (from
),
5095 int_size_in_bytes (TREE_TYPE (from
)));
5096 else if (GET_MODE (to_rtx
) == BLKmode
)
5098 /* Handle calls that return BLKmode values in registers. */
5100 copy_blkmode_from_reg (to_rtx
, value
, TREE_TYPE (from
));
5102 emit_block_move (to_rtx
, value
, expr_size (from
), BLOCK_OP_NORMAL
);
5106 if (POINTER_TYPE_P (TREE_TYPE (to
)))
5107 value
= convert_memory_address_addr_space
5108 (GET_MODE (to_rtx
), value
,
5109 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to
))));
5111 emit_move_insn (to_rtx
, value
);
5114 /* Store bounds if required. */
5116 && (BOUNDED_P (to
) || chkp_type_has_pointer (TREE_TYPE (to
))))
5118 gcc_assert (MEM_P (to_rtx
));
5119 chkp_emit_bounds_store (bounds
, value
, to_rtx
);
5122 preserve_temp_slots (to_rtx
);
5127 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5128 to_rtx
= expand_expr (to
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5130 /* Don't move directly into a return register. */
5131 if (TREE_CODE (to
) == RESULT_DECL
5132 && (REG_P (to_rtx
) || GET_CODE (to_rtx
) == PARALLEL
))
5138 /* If the source is itself a return value, it still is in a pseudo at
5139 this point so we can move it back to the return register directly. */
5141 && TYPE_MODE (TREE_TYPE (from
)) == BLKmode
5142 && TREE_CODE (from
) != CALL_EXPR
)
5143 temp
= copy_blkmode_to_reg (GET_MODE (to_rtx
), from
);
5145 temp
= expand_expr (from
, NULL_RTX
, GET_MODE (to_rtx
), EXPAND_NORMAL
);
5147 /* Handle calls that return values in multiple non-contiguous locations.
5148 The Irix 6 ABI has examples of this. */
5149 if (GET_CODE (to_rtx
) == PARALLEL
)
5151 if (GET_CODE (temp
) == PARALLEL
)
5152 emit_group_move (to_rtx
, temp
);
5154 emit_group_load (to_rtx
, temp
, TREE_TYPE (from
),
5155 int_size_in_bytes (TREE_TYPE (from
)));
5158 emit_move_insn (to_rtx
, temp
);
5160 preserve_temp_slots (to_rtx
);
5165 /* In case we are returning the contents of an object which overlaps
5166 the place the value is being stored, use a safe function when copying
5167 a value through a pointer into a structure value return block. */
5168 if (TREE_CODE (to
) == RESULT_DECL
5169 && TREE_CODE (from
) == INDIRECT_REF
5170 && ADDR_SPACE_GENERIC_P
5171 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from
, 0)))))
5172 && refs_may_alias_p (to
, from
)
5173 && cfun
->returns_struct
5174 && !cfun
->returns_pcc_struct
)
5179 size
= expr_size (from
);
5180 from_rtx
= expand_normal (from
);
5182 emit_library_call (memmove_libfunc
, LCT_NORMAL
,
5183 VOIDmode
, 3, XEXP (to_rtx
, 0), Pmode
,
5184 XEXP (from_rtx
, 0), Pmode
,
5185 convert_to_mode (TYPE_MODE (sizetype
),
5186 size
, TYPE_UNSIGNED (sizetype
)),
5187 TYPE_MODE (sizetype
));
5189 preserve_temp_slots (to_rtx
);
5194 /* Compute FROM and store the value in the rtx we got. */
5197 result
= store_expr_with_bounds (from
, to_rtx
, 0, nontemporal
, to
);
5198 preserve_temp_slots (result
);
/* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
   succeeded, false otherwise.  */

bool
emit_storent_insn (rtx to, rtx from)
{
  struct expand_operand ops[2];
  machine_mode mode = GET_MODE (to);
  enum insn_code code = optab_handler (storent_optab, mode);

  if (code == CODE_FOR_nothing)
    return false;

  create_fixed_operand (&ops[0], to);
  create_input_operand (&ops[1], from, mode);
  return maybe_expand_insn (code, 2, ops);
}
5221 /* Generate code for computing expression EXP,
5222 and storing the value into TARGET.
5224 If the mode is BLKmode then we may return TARGET itself.
5225 It turns out that in BLKmode it doesn't cause a problem.
5226 because C has no operators that could combine two different
5227 assignments into the same BLKmode object with different values
5228 with no sequence point. Will other languages need this to
5231 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5232 stack, and block moves may need to be treated specially.
5234 If NONTEMPORAL is true, try using a nontemporal store instruction.
5236 If BTARGET is not NULL then computed bounds of EXP are
5237 associated with BTARGET. */
5240 store_expr_with_bounds (tree exp
, rtx target
, int call_param_p
,
5241 bool nontemporal
, tree btarget
)
5244 rtx alt_rtl
= NULL_RTX
;
5245 location_t loc
= curr_insn_location ();
5247 if (VOID_TYPE_P (TREE_TYPE (exp
)))
5249 /* C++ can generate ?: expressions with a throw expression in one
5250 branch and an rvalue in the other. Here, we resolve attempts to
5251 store the throw expression's nonexistent result. */
5252 gcc_assert (!call_param_p
);
5253 expand_expr (exp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5256 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
5258 /* Perform first part of compound expression, then assign from second
5260 expand_expr (TREE_OPERAND (exp
, 0), const0_rtx
, VOIDmode
,
5261 call_param_p
? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
5262 return store_expr_with_bounds (TREE_OPERAND (exp
, 1), target
,
5263 call_param_p
, nontemporal
, btarget
);
5265 else if (TREE_CODE (exp
) == COND_EXPR
&& GET_MODE (target
) == BLKmode
)
5267 /* For conditional expression, get safe form of the target. Then
5268 test the condition, doing the appropriate assignment on either
5269 side. This avoids the creation of unnecessary temporaries.
5270 For non-BLKmode, it is more efficient not to do this. */
5272 rtx_code_label
*lab1
= gen_label_rtx (), *lab2
= gen_label_rtx ();
5274 do_pending_stack_adjust ();
5276 jumpifnot (TREE_OPERAND (exp
, 0), lab1
, -1);
5277 store_expr_with_bounds (TREE_OPERAND (exp
, 1), target
, call_param_p
,
5278 nontemporal
, btarget
);
5279 emit_jump_insn (gen_jump (lab2
));
5282 store_expr_with_bounds (TREE_OPERAND (exp
, 2), target
, call_param_p
,
5283 nontemporal
, btarget
);
5289 else if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
5290 /* If this is a scalar in a register that is stored in a wider mode
5291 than the declared mode, compute the result into its declared mode
5292 and then convert to the wider mode. Our value is the computed
5295 rtx inner_target
= 0;
5297 /* We can do the conversion inside EXP, which will often result
5298 in some optimizations. Do the conversion in two steps: first
5299 change the signedness, if needed, then the extend. But don't
5300 do this if the type of EXP is a subtype of something else
5301 since then the conversion might involve more than just
5302 converting modes. */
5303 if (INTEGRAL_TYPE_P (TREE_TYPE (exp
))
5304 && TREE_TYPE (TREE_TYPE (exp
)) == 0
5305 && GET_MODE_PRECISION (GET_MODE (target
))
5306 == TYPE_PRECISION (TREE_TYPE (exp
)))
5308 if (!SUBREG_CHECK_PROMOTED_SIGN (target
,
5309 TYPE_UNSIGNED (TREE_TYPE (exp
))))
5311 /* Some types, e.g. Fortran's logical*4, won't have a signed
5312 version, so use the mode instead. */
5314 = (signed_or_unsigned_type_for
5315 (SUBREG_PROMOTED_SIGN (target
), TREE_TYPE (exp
)));
5317 ntype
= lang_hooks
.types
.type_for_mode
5318 (TYPE_MODE (TREE_TYPE (exp
)),
5319 SUBREG_PROMOTED_SIGN (target
));
5321 exp
= fold_convert_loc (loc
, ntype
, exp
);
5324 exp
= fold_convert_loc (loc
, lang_hooks
.types
.type_for_mode
5325 (GET_MODE (SUBREG_REG (target
)),
5326 SUBREG_PROMOTED_SIGN (target
)),
5329 inner_target
= SUBREG_REG (target
);
5332 temp
= expand_expr (exp
, inner_target
, VOIDmode
,
5333 call_param_p
? EXPAND_STACK_PARM
: EXPAND_NORMAL
);
5335 /* Handle bounds returned by call. */
5336 if (TREE_CODE (exp
) == CALL_EXPR
)
5339 chkp_split_slot (temp
, &temp
, &bounds
);
5340 if (bounds
&& btarget
)
5342 gcc_assert (TREE_CODE (btarget
) == SSA_NAME
);
5343 rtx tmp
= targetm
.calls
.load_returned_bounds (bounds
);
5344 chkp_set_rtl_bounds (btarget
, tmp
);
5348 /* If TEMP is a VOIDmode constant, use convert_modes to make
5349 sure that we properly convert it. */
5350 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
5352 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
5353 temp
, SUBREG_PROMOTED_SIGN (target
));
5354 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
5355 GET_MODE (target
), temp
,
5356 SUBREG_PROMOTED_SIGN (target
));
5359 convert_move (SUBREG_REG (target
), temp
,
5360 SUBREG_PROMOTED_SIGN (target
));
5364 else if ((TREE_CODE (exp
) == STRING_CST
5365 || (TREE_CODE (exp
) == MEM_REF
5366 && TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
5367 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
5369 && integer_zerop (TREE_OPERAND (exp
, 1))))
5370 && !nontemporal
&& !call_param_p
5373 /* Optimize initialization of an array with a STRING_CST. */
5374 HOST_WIDE_INT exp_len
, str_copy_len
;
5376 tree str
= TREE_CODE (exp
) == STRING_CST
5377 ? exp
: TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
5379 exp_len
= int_expr_size (exp
);
5383 if (TREE_STRING_LENGTH (str
) <= 0)
5386 str_copy_len
= strlen (TREE_STRING_POINTER (str
));
5387 if (str_copy_len
< TREE_STRING_LENGTH (str
) - 1)
5390 str_copy_len
= TREE_STRING_LENGTH (str
);
5391 if ((STORE_MAX_PIECES
& (STORE_MAX_PIECES
- 1)) == 0
5392 && TREE_STRING_POINTER (str
)[TREE_STRING_LENGTH (str
) - 1] == '\0')
5394 str_copy_len
+= STORE_MAX_PIECES
- 1;
5395 str_copy_len
&= ~(STORE_MAX_PIECES
- 1);
5397 str_copy_len
= MIN (str_copy_len
, exp_len
);
5398 if (!can_store_by_pieces (str_copy_len
, builtin_strncpy_read_str
,
5399 CONST_CAST (char *, TREE_STRING_POINTER (str
)),
5400 MEM_ALIGN (target
), false))
5405 dest_mem
= store_by_pieces (dest_mem
,
5406 str_copy_len
, builtin_strncpy_read_str
,
5408 TREE_STRING_POINTER (str
)),
5409 MEM_ALIGN (target
), false,
5410 exp_len
> str_copy_len
? 1 : 0);
5411 if (exp_len
> str_copy_len
)
5412 clear_storage (adjust_address (dest_mem
, BLKmode
, 0),
5413 GEN_INT (exp_len
- str_copy_len
),
5422 /* If we want to use a nontemporal store, force the value to
5424 tmp_target
= nontemporal
? NULL_RTX
: target
;
5425 temp
= expand_expr_real (exp
, tmp_target
, GET_MODE (target
),
5427 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
),
5430 /* Handle bounds returned by call. */
5431 if (TREE_CODE (exp
) == CALL_EXPR
)
5434 chkp_split_slot (temp
, &temp
, &bounds
);
5435 if (bounds
&& btarget
)
5437 gcc_assert (TREE_CODE (btarget
) == SSA_NAME
);
5438 rtx tmp
= targetm
.calls
.load_returned_bounds (bounds
);
5439 chkp_set_rtl_bounds (btarget
, tmp
);
5444 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5445 the same as that of TARGET, adjust the constant. This is needed, for
5446 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5447 only a word-sized value. */
5448 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
5449 && TREE_CODE (exp
) != ERROR_MARK
5450 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
5451 temp
= convert_modes (GET_MODE (target
), TYPE_MODE (TREE_TYPE (exp
)),
5452 temp
, TYPE_UNSIGNED (TREE_TYPE (exp
)));
5454 /* If value was not generated in the target, store it there.
5455 Convert the value to TARGET's type first if necessary and emit the
5456 pending incrementations that have been queued when expanding EXP.
5457 Note that we cannot emit the whole queue blindly because this will
5458 effectively disable the POST_INC optimization later.
5460 If TEMP and TARGET compare equal according to rtx_equal_p, but
5461 one or both of them are volatile memory refs, we have to distinguish
5463 - expand_expr has used TARGET. In this case, we must not generate
5464 another copy. This can be detected by TARGET being equal according
5466 - expand_expr has not used TARGET - that means that the source just
5467 happens to have the same RTX form. Since temp will have been created
5468 by expand_expr, it will compare unequal according to == .
5469 We must generate a copy in this case, to reach the correct number
5470 of volatile memory references. */
5472 if ((! rtx_equal_p (temp
, target
)
5473 || (temp
!= target
&& (side_effects_p (temp
)
5474 || side_effects_p (target
))))
5475 && TREE_CODE (exp
) != ERROR_MARK
5476 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5477 but TARGET is not valid memory reference, TEMP will differ
5478 from TARGET although it is really the same location. */
5480 && rtx_equal_p (alt_rtl
, target
)
5481 && !side_effects_p (alt_rtl
)
5482 && !side_effects_p (target
))
5483 /* If there's nothing to copy, don't bother. Don't call
5484 expr_size unless necessary, because some front-ends (C++)
5485 expr_size-hook must not be given objects that are not
5486 supposed to be bit-copied or bit-initialized. */
5487 && expr_size (exp
) != const0_rtx
)
5489 if (GET_MODE (temp
) != GET_MODE (target
) && GET_MODE (temp
) != VOIDmode
)
5491 if (GET_MODE (target
) == BLKmode
)
5493 /* Handle calls that return BLKmode values in registers. */
5494 if (REG_P (temp
) && TREE_CODE (exp
) == CALL_EXPR
)
5495 copy_blkmode_from_reg (target
, temp
, TREE_TYPE (exp
));
5497 store_bit_field (target
,
5498 INTVAL (expr_size (exp
)) * BITS_PER_UNIT
,
5499 0, 0, 0, GET_MODE (temp
), temp
);
5502 convert_move (target
, temp
, TYPE_UNSIGNED (TREE_TYPE (exp
)));
5505 else if (GET_MODE (temp
) == BLKmode
&& TREE_CODE (exp
) == STRING_CST
)
5507 /* Handle copying a string constant into an array. The string
5508 constant may be shorter than the array. So copy just the string's
5509 actual length, and clear the rest. First get the size of the data
5510 type of the string, which is actually the size of the target. */
5511 rtx size
= expr_size (exp
);
5513 if (CONST_INT_P (size
)
5514 && INTVAL (size
) < TREE_STRING_LENGTH (exp
))
5515 emit_block_move (target
, temp
, size
,
5517 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
5520 machine_mode pointer_mode
5521 = targetm
.addr_space
.pointer_mode (MEM_ADDR_SPACE (target
));
5522 machine_mode address_mode
= get_address_mode (target
);
5524 /* Compute the size of the data to copy from the string. */
5526 = size_binop_loc (loc
, MIN_EXPR
,
5527 make_tree (sizetype
, size
),
5528 size_int (TREE_STRING_LENGTH (exp
)));
5530 = expand_expr (copy_size
, NULL_RTX
, VOIDmode
,
5532 ? EXPAND_STACK_PARM
: EXPAND_NORMAL
));
5533 rtx_code_label
*label
= 0;
5535 /* Copy that much. */
5536 copy_size_rtx
= convert_to_mode (pointer_mode
, copy_size_rtx
,
5537 TYPE_UNSIGNED (sizetype
));
5538 emit_block_move (target
, temp
, copy_size_rtx
,
5540 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
5542 /* Figure out how much is left in TARGET that we have to clear.
5543 Do all calculations in pointer_mode. */
5544 if (CONST_INT_P (copy_size_rtx
))
5546 size
= plus_constant (address_mode
, size
,
5547 -INTVAL (copy_size_rtx
));
5548 target
= adjust_address (target
, BLKmode
,
5549 INTVAL (copy_size_rtx
));
5553 size
= expand_binop (TYPE_MODE (sizetype
), sub_optab
, size
,
5554 copy_size_rtx
, NULL_RTX
, 0,
5557 if (GET_MODE (copy_size_rtx
) != address_mode
)
5558 copy_size_rtx
= convert_to_mode (address_mode
,
5560 TYPE_UNSIGNED (sizetype
));
5562 target
= offset_address (target
, copy_size_rtx
,
5563 highest_pow2_factor (copy_size
));
5564 label
= gen_label_rtx ();
5565 emit_cmp_and_jump_insns (size
, const0_rtx
, LT
, NULL_RTX
,
5566 GET_MODE (size
), 0, label
);
5569 if (size
!= const0_rtx
)
5570 clear_storage (target
, size
, BLOCK_OP_NORMAL
);
5576 /* Handle calls that return values in multiple non-contiguous locations.
5577 The Irix 6 ABI has examples of this. */
5578 else if (GET_CODE (target
) == PARALLEL
)
5580 if (GET_CODE (temp
) == PARALLEL
)
5581 emit_group_move (target
, temp
);
5583 emit_group_load (target
, temp
, TREE_TYPE (exp
),
5584 int_size_in_bytes (TREE_TYPE (exp
)));
5586 else if (GET_CODE (temp
) == PARALLEL
)
5587 emit_group_store (target
, temp
, TREE_TYPE (exp
),
5588 int_size_in_bytes (TREE_TYPE (exp
)));
5589 else if (GET_MODE (temp
) == BLKmode
)
5590 emit_block_move (target
, temp
, expr_size (exp
),
5592 ? BLOCK_OP_CALL_PARM
: BLOCK_OP_NORMAL
));
5593 /* If we emit a nontemporal store, there is nothing else to do. */
5594 else if (nontemporal
&& emit_storent_insn (target
, temp
))
5598 temp
= force_operand (temp
, target
);
5600 emit_move_insn (target
, temp
);
/* Same as store_expr_with_bounds but ignoring bounds of EXP.  */

rtx
store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
{
  return store_expr_with_bounds (exp, target, call_param_p, nontemporal, NULL);
}

/* Return true if field F of structure TYPE is a flexible array.  */

static bool
flexible_array_member_p (const_tree f, const_tree type)
{
  const_tree tf;

  tf = TREE_TYPE (f);
  return (DECL_CHAIN (f) == NULL
	  && TREE_CODE (tf) == ARRAY_TYPE
	  && TYPE_DOMAIN (tf)
	  && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
	  && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
	  && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
	  && int_size_in_bytes (type) >= 0);
}
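
/* Illustrative sketch (not part of GCC): the kind of C type that
   flexible_array_member_p above recognises -- a trailing array with a
   zero lower bound and no upper bound, in an otherwise complete struct.
   Such a member is deliberately not counted by count_type_elements,
   since a constructor is not expected to initialize it.  */

struct flexible_array_example
{
  int len;
  char data[];		/* flexible array member  */
};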
/* If FOR_CTOR_P, return the number of top-level elements that a constructor
   must have in order for it to completely initialize a value of type TYPE.
   Return -1 if the number isn't known.

   If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */

static HOST_WIDE_INT
count_type_elements (const_tree type, bool for_ctor_p)
{
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
	tree nelts;
5646 nelts
= array_type_nelts (type
);
5647 if (nelts
&& tree_fits_uhwi_p (nelts
))
5649 unsigned HOST_WIDE_INT n
;
5651 n
= tree_to_uhwi (nelts
) + 1;
5652 if (n
== 0 || for_ctor_p
)
5655 return n
* count_type_elements (TREE_TYPE (type
), false);
5657 return for_ctor_p
? -1 : 1;
5662 unsigned HOST_WIDE_INT n
;
5666 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
5667 if (TREE_CODE (f
) == FIELD_DECL
)
5670 n
+= count_type_elements (TREE_TYPE (f
), false);
5671 else if (!flexible_array_member_p (f
, type
))
5672 /* Don't count flexible arrays, which are not supposed
5673 to be initialized. */
5681 case QUAL_UNION_TYPE
:
5686 gcc_assert (!for_ctor_p
);
5687 /* Estimate the number of scalars in each field and pick the
5688 maximum. Other estimates would do instead; the idea is simply
5689 to make sure that the estimate is not sensitive to the ordering
5692 for (f
= TYPE_FIELDS (type
); f
; f
= DECL_CHAIN (f
))
5693 if (TREE_CODE (f
) == FIELD_DECL
)
5695 m
= count_type_elements (TREE_TYPE (f
), false);
5696 /* If the field doesn't span the whole union, add an extra
5697 scalar for the rest. */
5698 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f
)),
5699 TYPE_SIZE (type
)) != 1)
5711 return TYPE_VECTOR_SUBPARTS (type
);
5715 case FIXED_POINT_TYPE
:
5720 case REFERENCE_TYPE
:
5736 /* Helper for categorize_ctor_elements. Identical interface. */
5739 categorize_ctor_elements_1 (const_tree ctor
, HOST_WIDE_INT
*p_nz_elts
,
5740 HOST_WIDE_INT
*p_init_elts
, bool *p_complete
)
5742 unsigned HOST_WIDE_INT idx
;
5743 HOST_WIDE_INT nz_elts
, init_elts
, num_fields
;
5744 tree value
, purpose
, elt_type
;
5746 /* Whether CTOR is a valid constant initializer, in accordance with what
5747 initializer_constant_valid_p does. If inferred from the constructor
5748 elements, true until proven otherwise. */
5749 bool const_from_elts_p
= constructor_static_from_elts_p (ctor
);
5750 bool const_p
= const_from_elts_p
? true : TREE_STATIC (ctor
);
5755 elt_type
= NULL_TREE
;
5757 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), idx
, purpose
, value
)
5759 HOST_WIDE_INT mult
= 1;
5761 if (purpose
&& TREE_CODE (purpose
) == RANGE_EXPR
)
5763 tree lo_index
= TREE_OPERAND (purpose
, 0);
5764 tree hi_index
= TREE_OPERAND (purpose
, 1);
5766 if (tree_fits_uhwi_p (lo_index
) && tree_fits_uhwi_p (hi_index
))
5767 mult
= (tree_to_uhwi (hi_index
)
5768 - tree_to_uhwi (lo_index
) + 1);
5771 elt_type
= TREE_TYPE (value
);
5773 switch (TREE_CODE (value
))
5777 HOST_WIDE_INT nz
= 0, ic
= 0;
5779 bool const_elt_p
= categorize_ctor_elements_1 (value
, &nz
, &ic
,
5782 nz_elts
+= mult
* nz
;
5783 init_elts
+= mult
* ic
;
5785 if (const_from_elts_p
&& const_p
)
5786 const_p
= const_elt_p
;
5793 if (!initializer_zerop (value
))
5799 nz_elts
+= mult
* TREE_STRING_LENGTH (value
);
5800 init_elts
+= mult
* TREE_STRING_LENGTH (value
);
5804 if (!initializer_zerop (TREE_REALPART (value
)))
5806 if (!initializer_zerop (TREE_IMAGPART (value
)))
5814 for (i
= 0; i
< VECTOR_CST_NELTS (value
); ++i
)
5816 tree v
= VECTOR_CST_ELT (value
, i
);
5817 if (!initializer_zerop (v
))
5826 HOST_WIDE_INT tc
= count_type_elements (elt_type
, false);
5827 nz_elts
+= mult
* tc
;
5828 init_elts
+= mult
* tc
;
5830 if (const_from_elts_p
&& const_p
)
5831 const_p
= initializer_constant_valid_p (value
, elt_type
)
5838 if (*p_complete
&& !complete_ctor_at_level_p (TREE_TYPE (ctor
),
5839 num_fields
, elt_type
))
5840 *p_complete
= false;
5842 *p_nz_elts
+= nz_elts
;
5843 *p_init_elts
+= init_elts
;
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * how many scalar fields in total are in CTOR,
     and place it in *P_INIT_ELTS.
   * whether the constructor is complete -- in the sense that every
     meaningful byte is explicitly given a value --
     and place it in *P_COMPLETE.

   Return whether or not CTOR is a valid static constant initializer, the same
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */

bool
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  *p_nz_elts = 0;
  *p_init_elts = 0;
  *p_complete = true;

  return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
}
/* TYPE is initialized by a constructor with NUM_ELTS elements, the last
   of which had type LAST_TYPE.  Each element was itself a complete
   initializer, in the sense that every meaningful byte was explicitly
   given a value.  Return true if the same is true for the constructor
   as a whole.  */

bool
complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
			  const_tree last_type)
{
  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      if (num_elts == 0)
	return false;

      gcc_assert (num_elts == 1 && last_type);

      /* ??? We could look at each element of the union, and find the
	 largest element.  Which would avoid comparing the size of the
	 initialized element against any tail padding in the union.
	 Doesn't seem worth the effort...  */
      return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
    }

  return count_type_elements (type, true) == num_elts;
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return !complete_p || nz_elts < init_elts / 4;
    }

  return initializer_zerop (exp);
}
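
/* Illustrative sketch (not part of GCC): the heuristic mostly_zeros_p
   applies to a CONSTRUCTOR.  An initializer counts as "mostly zeros"
   when it is incomplete (the tail is implicitly zero) or when fewer than
   a quarter of its initialized scalars are nonzero; store_constructor
   then prefers to clear the whole object first and store only the
   nonzero elements.  */

static int
mostly_zeros_example (int complete_p, long nz_elts, long init_elts)
{
  return !complete_p || nz_elts < init_elts / 4;
}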
/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
			 HOST_WIDE_INT bitpos, machine_mode mode,
			 tree exp, int cleared, alias_set_type alias_set)
{
5948 if (TREE_CODE (exp
) == CONSTRUCTOR
5949 /* We can only call store_constructor recursively if the size and
5950 bit position are on a byte boundary. */
5951 && bitpos
% BITS_PER_UNIT
== 0
5952 && (bitsize
> 0 && bitsize
% BITS_PER_UNIT
== 0)
5953 /* If we have a nonzero bitpos for a register target, then we just
5954 let store_field do the bitfield handling. This is unlikely to
5955 generate unnecessary clear instructions anyways. */
5956 && (bitpos
== 0 || MEM_P (target
)))
5960 = adjust_address (target
,
5961 GET_MODE (target
) == BLKmode
5963 % GET_MODE_ALIGNMENT (GET_MODE (target
)))
5964 ? BLKmode
: VOIDmode
, bitpos
/ BITS_PER_UNIT
);
5967 /* Update the alias set, if required. */
5968 if (MEM_P (target
) && ! MEM_KEEP_ALIAS_SET_P (target
)
5969 && MEM_ALIAS_SET (target
) != 0)
5971 target
= copy_rtx (target
);
5972 set_mem_alias_set (target
, alias_set
);
5975 store_constructor (exp
, target
, cleared
, bitsize
/ BITS_PER_UNIT
);
5978 store_field (target
, bitsize
, bitpos
, 0, 0, mode
, exp
, alias_set
, false);
5982 /* Returns the number of FIELD_DECLs in TYPE. */
5985 fields_length (const_tree type
)
5987 tree t
= TYPE_FIELDS (type
);
5990 for (; t
; t
= DECL_CHAIN (t
))
5991 if (TREE_CODE (t
) == FIELD_DECL
)
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */
static void
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
6018 case QUAL_UNION_TYPE
:
6020 unsigned HOST_WIDE_INT idx
;
6023 /* If size is zero or the target is already cleared, do nothing. */
6024 if (size
== 0 || cleared
)
6026 /* We either clear the aggregate or indicate the value is dead. */
6027 else if ((TREE_CODE (type
) == UNION_TYPE
6028 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
6029 && ! CONSTRUCTOR_ELTS (exp
))
6030 /* If the constructor is empty, clear the union. */
6032 clear_storage (target
, expr_size (exp
), BLOCK_OP_NORMAL
);
6036 /* If we are building a static constructor into a register,
6037 set the initial value as zero so we can fold the value into
6038 a constant. But if more than one register is involved,
6039 this probably loses. */
6040 else if (REG_P (target
) && TREE_STATIC (exp
)
6041 && GET_MODE_SIZE (GET_MODE (target
)) <= UNITS_PER_WORD
)
6043 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
6047 /* If the constructor has fewer fields than the structure or
6048 if we are initializing the structure to mostly zeros, clear
6049 the whole structure first. Don't do this if TARGET is a
6050 register whose mode size isn't equal to SIZE since
6051 clear_storage can't handle this case. */
6053 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp
))
6054 != fields_length (type
))
6055 || mostly_zeros_p (exp
))
6057 || ((HOST_WIDE_INT
) GET_MODE_SIZE (GET_MODE (target
))
6060 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
6064 if (REG_P (target
) && !cleared
)
6065 emit_clobber (target
);
6067 /* Store each element of the constructor into the
6068 corresponding field of TARGET. */
6069 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), idx
, field
, value
)
6072 HOST_WIDE_INT bitsize
;
6073 HOST_WIDE_INT bitpos
= 0;
6075 rtx to_rtx
= target
;
6077 /* Just ignore missing fields. We cleared the whole
6078 structure, above, if any fields are missing. */
6082 if (cleared
&& initializer_zerop (value
))
6085 if (tree_fits_uhwi_p (DECL_SIZE (field
)))
6086 bitsize
= tree_to_uhwi (DECL_SIZE (field
));
6090 mode
= DECL_MODE (field
);
6091 if (DECL_BIT_FIELD (field
))
6094 offset
= DECL_FIELD_OFFSET (field
);
6095 if (tree_fits_shwi_p (offset
)
6096 && tree_fits_shwi_p (bit_position (field
)))
6098 bitpos
= int_bit_position (field
);
6102 bitpos
= tree_to_shwi (DECL_FIELD_BIT_OFFSET (field
));
6106 machine_mode address_mode
;
6110 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset
,
6111 make_tree (TREE_TYPE (exp
),
6114 offset_rtx
= expand_normal (offset
);
6115 gcc_assert (MEM_P (to_rtx
));
6117 address_mode
= get_address_mode (to_rtx
);
6118 if (GET_MODE (offset_rtx
) != address_mode
)
6119 offset_rtx
= convert_to_mode (address_mode
, offset_rtx
, 0);
6121 to_rtx
= offset_address (to_rtx
, offset_rtx
,
6122 highest_pow2_factor (offset
));
6125 #ifdef WORD_REGISTER_OPERATIONS
6126 /* If this initializes a field that is smaller than a
6127 word, at the start of a word, try to widen it to a full
6128 word. This special case allows us to output C++ member
6129 function initializations in a form that the optimizers
6132 && bitsize
< BITS_PER_WORD
6133 && bitpos
% BITS_PER_WORD
== 0
6134 && GET_MODE_CLASS (mode
) == MODE_INT
6135 && TREE_CODE (value
) == INTEGER_CST
6137 && bitpos
+ BITS_PER_WORD
<= exp_size
* BITS_PER_UNIT
)
6139 tree type
= TREE_TYPE (value
);
6141 if (TYPE_PRECISION (type
) < BITS_PER_WORD
)
6143 type
= lang_hooks
.types
.type_for_mode
6144 (word_mode
, TYPE_UNSIGNED (type
));
6145 value
= fold_convert (type
, value
);
6148 if (BYTES_BIG_ENDIAN
)
6150 = fold_build2 (LSHIFT_EXPR
, type
, value
,
6151 build_int_cst (type
,
6152 BITS_PER_WORD
- bitsize
));
6153 bitsize
= BITS_PER_WORD
;
6158 if (MEM_P (to_rtx
) && !MEM_KEEP_ALIAS_SET_P (to_rtx
)
6159 && DECL_NONADDRESSABLE_P (field
))
6161 to_rtx
= copy_rtx (to_rtx
);
6162 MEM_KEEP_ALIAS_SET_P (to_rtx
) = 1;
6165 store_constructor_field (to_rtx
, bitsize
, bitpos
, mode
,
6167 get_alias_set (TREE_TYPE (field
)));
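    /* For illustration only (a sketch, not part of the expansion logic
       above): assuming a typical C front end, a partially bracketed
       aggregate such as

	 struct point { int x, y, z; };
	 struct point p = { .x = 1 };

       reaches the RECORD_TYPE case with fewer constructor elements than
       fields, so the whole object is cleared first (clear_storage) and only
       the nonzero field is then written with store_constructor_field.  */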
    case ARRAY_TYPE:
      {
	tree value, index;
	unsigned HOST_WIDE_INT i;
	int need_to_clear;
	tree domain;
	tree elttype = TREE_TYPE (type);
	int const_bounds_p;
	HOST_WIDE_INT minelt = 0;
	HOST_WIDE_INT maxelt = 0;

	domain = TYPE_DOMAIN (type);
	const_bounds_p = (TYPE_MIN_VALUE (domain)
			  && TYPE_MAX_VALUE (domain)
			  && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
			  && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));

	/* If we have constant bounds for the range of the type, get them.  */
	if (const_bounds_p)
	  {
	    minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
	    maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
	  }

	/* If the constructor has fewer elements than the array, clear
	   the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT idx;
	    tree index, value;
	    HOST_WIDE_INT count = 0, zero_count = 0;
	    need_to_clear = ! const_bounds_p;

	    /* This loop is a more accurate version of the loop in
	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
	       is also needed to check for missing elements.  */
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
	      {
		HOST_WIDE_INT this_node_count;

		if (need_to_clear)
		  break;

		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		  {
		    tree lo_index = TREE_OPERAND (index, 0);
		    tree hi_index = TREE_OPERAND (index, 1);

		    if (! tree_fits_uhwi_p (lo_index)
			|| ! tree_fits_uhwi_p (hi_index))
		      {
			need_to_clear = 1;
			break;
		      }

		    this_node_count = (tree_to_uhwi (hi_index)
				       - tree_to_uhwi (lo_index) + 1);
		  }
		else
		  this_node_count = 1;

		count += this_node_count;
		if (mostly_zeros_p (value))
		  zero_count += this_node_count;
	      }

	    /* Clear the entire array first if there are any missing
	       elements, or if the incidence of zero elements is >=
	       75%.  */
	    if (! need_to_clear
		&& (count < maxelt - minelt + 1
		    || 4 * zero_count >= 3 * count))
	      need_to_clear = 1;
	  }

	if (need_to_clear && size > 0)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (!cleared && REG_P (target))
	  /* Inform later passes that the old value is dead.  */
	  emit_clobber (target);

	/* Store each element of the constructor into the
	   corresponding element of TARGET, determined by counting the
	   elements.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
	  {
	    machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos;
	    rtx xtarget = target;

	    if (cleared && initializer_zerop (value))
	      continue;

	    mode = TYPE_MODE (elttype);
	    if (mode == BLKmode)
	      bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
			 ? tree_to_uhwi (TYPE_SIZE (elttype))
			 : -1);
	    else
	      bitsize = GET_MODE_BITSIZE (mode);

	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	      {
		tree lo_index = TREE_OPERAND (index, 0);
		tree hi_index = TREE_OPERAND (index, 1);
		rtx index_r, pos_rtx;
		HOST_WIDE_INT lo, hi, count;
		tree position;

		/* If the range is constant and "small", unroll the loop.  */
		if (const_bounds_p
		    && tree_fits_shwi_p (lo_index)
		    && tree_fits_shwi_p (hi_index)
		    && (lo = tree_to_shwi (lo_index),
			hi = tree_to_shwi (hi_index),
			count = hi - lo + 1,
			(!MEM_P (target)
			 || count <= 2
			 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
			     && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
				 <= 40 * 8)))))
		  {
		    lo -= minelt;  hi -= minelt;
		    for (; lo <= hi; lo++)
		      {
			bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));

			if (MEM_P (target)
			    && !MEM_KEEP_ALIAS_SET_P (target)
			    && TREE_CODE (type) == ARRAY_TYPE
			    && TYPE_NONALIASED_COMPONENT (type))
			  {
			    target = copy_rtx (target);
			    MEM_KEEP_ALIAS_SET_P (target) = 1;
			  }

			store_constructor_field
			  (target, bitsize, bitpos, mode, value, cleared,
			   get_alias_set (elttype));
		      }
		  }
		else
		  {
		    rtx_code_label *loop_start = gen_label_rtx ();
		    rtx_code_label *loop_end = gen_label_rtx ();
		    tree exit_cond;

		    expand_normal (hi_index);

		    index = build_decl (EXPR_LOCATION (exp),
					VAR_DECL, NULL_TREE, domain);
		    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
		    SET_DECL_RTL (index, index_r);
		    store_expr (lo_index, index_r, 0, false);

		    /* Build the head of the loop.  */
		    do_pending_stack_adjust ();
		    emit_label (loop_start);

		    /* Assign value to element index.  */
		    position =
		      fold_convert (ssizetype,
				    fold_build2 (MINUS_EXPR,
						 TREE_TYPE (index),
						 index,
						 TYPE_MIN_VALUE (domain)));

		    position =
		      size_binop (MULT_EXPR, position,
				  fold_convert (ssizetype,
						TYPE_SIZE_UNIT (elttype)));

		    pos_rtx = expand_normal (position);
		    xtarget = offset_address (target, pos_rtx,
					      highest_pow2_factor (position));
		    xtarget = adjust_address (xtarget, mode, 0);
		    if (TREE_CODE (value) == CONSTRUCTOR)
		      store_constructor (value, xtarget, cleared,
					 bitsize / BITS_PER_UNIT);
		    else
		      store_expr (value, xtarget, 0, false);

		    /* Generate a conditional jump to exit the loop.  */
		    exit_cond = build2 (LT_EXPR, integer_type_node,
					index, hi_index);
		    jumpif (exit_cond, loop_end, -1);

		    /* Update the loop counter, and jump to the head of
		       the loop.  */
		    expand_assignment (index,
				       build2 (PLUS_EXPR, TREE_TYPE (index),
					       index, integer_one_node),
				       false);

		    emit_jump (loop_start);

		    /* Build the end of the loop.  */
		    emit_label (loop_end);
		  }
	      }
	    else if ((index != 0 && ! tree_fits_shwi_p (index))
		     || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
	      {
		tree position;

		if (index == 0)
		  index = ssize_int (1);

		if (minelt)
		  index = fold_convert (ssizetype,
					fold_build2 (MINUS_EXPR,
						     TREE_TYPE (index),
						     index,
						     TYPE_MIN_VALUE (domain)));

		position =
		  size_binop (MULT_EXPR, index,
			      fold_convert (ssizetype,
					    TYPE_SIZE_UNIT (elttype)));
		xtarget = offset_address (target,
					  expand_normal (position),
					  highest_pow2_factor (position));
		xtarget = adjust_address (xtarget, mode, 0);
		store_expr (value, xtarget, 0, false);
	      }
	    else
	      {
		if (index != 0)
		  bitpos = ((tree_to_shwi (index) - minelt)
			    * tree_to_uhwi (TYPE_SIZE (elttype)));
		else
		  bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));

		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
		    && TREE_CODE (type) == ARRAY_TYPE
		    && TYPE_NONALIASED_COMPONENT (type))
		  {
		    target = copy_rtx (target);
		    MEM_KEEP_ALIAS_SET_P (target) = 1;
		  }
		store_constructor_field (target, bitsize, bitpos, mode, value,
					 cleared, get_alias_set (elttype));
	      }
	  }
	break;
      }
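    /* For illustration (a sketch): the RANGE_EXPR indices handled above come
       from GNU C designated range initializers, e.g.

	 int t[100] = { [10 ... 19] = 42 };

       A small constant range like this one is unrolled into individual
       store_constructor_field calls; a large or non-constant range is
       expanded as the explicit loop built with loop_start/loop_end above.  */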
    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;
	int i;
	int need_to_clear;
	int icode = CODE_FOR_nothing;
	tree elttype = TREE_TYPE (type);
	int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
	machine_mode eltmode = TYPE_MODE (elttype);
	HOST_WIDE_INT bitsize;
	HOST_WIDE_INT bitpos;
	rtvec vector = NULL;
	unsigned n_elts;
	alias_set_type alias;

	gcc_assert (eltmode != BLKmode);

	n_elts = TYPE_VECTOR_SUBPARTS (type);
	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
	  {
	    machine_mode mode = GET_MODE (target);

	    icode = (int) optab_handler (vec_init_optab, mode);
	    /* Don't use vec_init<mode> if some elements have VECTOR_TYPE.  */
	    if (icode != CODE_FOR_nothing)
	      {
		tree value;

		FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
		  if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
		    {
		      icode = CODE_FOR_nothing;
		      break;
		    }
	      }
	    if (icode != CODE_FOR_nothing)
	      {
		unsigned int i;

		vector = rtvec_alloc (n_elts);
		for (i = 0; i < n_elts; i++)
		  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
	      }
	  }

	/* If the constructor has fewer elements than the vector,
	   clear the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
	    tree value;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	      {
		int n_elts_here = tree_to_uhwi
		  (int_const_binop (TRUNC_DIV_EXPR,
				    TYPE_SIZE (TREE_TYPE (value)),
				    TYPE_SIZE (elttype)));

		count += n_elts_here;
		if (mostly_zeros_p (value))
		  zero_count += n_elts_here;
	      }

	    /* Clear the entire vector first if there are any missing elements,
	       or if the incidence of zero elements is >= 75%.  */
	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
	  }

	if (need_to_clear && size > 0 && !vector)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* Inform later passes that the old value is dead.  */
	if (!cleared && !vector && REG_P (target))
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));

	if (MEM_P (target))
	  alias = MEM_ALIAS_SET (target);
	else
	  alias = get_alias_set (elttype);

	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements.  */
	for (idx = 0, i = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
	     idx++, i += bitsize / elt_size)
	  {
	    HOST_WIDE_INT eltpos;
	    tree value = ce->value;

	    bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
	    if (cleared && initializer_zerop (value))
	      continue;

	    if (ce->index)
	      eltpos = tree_to_uhwi (ce->index);
	    else
	      eltpos = i;

	    if (vector)
	      {
		/* vec_init<mode> should not be used if there are VECTOR_TYPE
		   elements.  */
		gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
		RTVEC_ELT (vector, eltpos)
		  = expand_normal (value);
	      }
	    else
	      {
		machine_mode value_mode =
		  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
		  ? TYPE_MODE (TREE_TYPE (value))
		  : eltmode;
		bitpos = eltpos * elt_size;
		store_constructor_field (target, bitsize, bitpos, value_mode,
					 value, cleared, alias);
	      }
	  }

	if (vector)
	  emit_insn (GEN_FCN (icode)
		     (target,
		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
	break;
      }

    default:
      gcc_unreachable ();
    }
}
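/* For illustration (a sketch): a vector constructor that can take the
   vec_init optab path above, assuming the GNU vector extension:

     typedef int v4si __attribute__ ((vector_size (16)));
     v4si v = { a, b, c, 0 };

   When the target provides a vec_init pattern for V4SImode, the element
   values are collected into a PARALLEL and typically emitted as a single
   insn; otherwise each element is stored separately through
   store_constructor_field.  */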
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   BITREGION_START is bitpos of the first bitfield in this region.
   BITREGION_END is the bitpos of the ending bitfield in this region.
   These two fields are 0, if the C++ memory model does not apply,
   or we are not interested in keeping track of bitfield regions.

   Always return const0_rtx unless we have something particular to
   return.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.

   If NONTEMPORAL is true, try generating a nontemporal store.  */

static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
	     unsigned HOST_WIDE_INT bitregion_start,
	     unsigned HOST_WIDE_INT bitregion_end,
	     machine_mode mode, tree exp,
	     alias_set_type alias_set, bool nontemporal)
{
  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */
      gcc_assert (!bitpos);
      return store_expr (exp, target, 0, nontemporal);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode
	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
		|| bitpos % GET_MODE_ALIGNMENT (mode))
	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
	      || (bitpos % BITS_PER_UNIT != 0)))
      || (bitsize >= 0 && mode != BLKmode
	  && GET_MODE_BITSIZE (mode) > bitsize)
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
      /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
	 decl we must use bitfield operations.  */
      || (bitsize >= 0
	  && TREE_CODE (exp) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
    {
      rtx temp;
      gimple nop_def;

      /* If EXP is a NOP_EXPR of precision less than its mode, then that
	 implies a mask operation.  If the precision is the same size as
	 the field we're storing into, that mask is redundant.  This is
	 particularly common with bit field assignments generated by the
	 C front end.  */
      nop_def = get_def_for_expr (exp, NOP_EXPR);
      if (nop_def)
	{
	  tree type = TREE_TYPE (exp);
	  if (INTEGRAL_TYPE_P (type)
	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
	      && bitsize == TYPE_PRECISION (type))
	    {
	      tree op = gimple_assign_rhs1 (nop_def);
	      type = TREE_TYPE (op);
	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
		exp = op;
	    }
	}

      temp = expand_normal (exp);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
			     NULL_RTX, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If TEMP is not a PARALLEL (see below) and its mode and that of TARGET
	 are both BLKmode, both must be in memory and BITPOS must be aligned
	 on a byte boundary.  If so, we simply do a block copy.  Likewise for
	 a BLKmode-like TARGET.  */
      if (GET_CODE (temp) != PARALLEL
	  && GET_MODE (temp) == BLKmode
	  && (GET_MODE (target) == BLKmode
	      || (MEM_P (target)
		  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
		  && (bitpos % BITS_PER_UNIT) == 0
		  && (bitsize % BITS_PER_UNIT) == 0)))
	{
	  gcc_assert (MEM_P (target) && MEM_P (temp)
		      && (bitpos % BITS_PER_UNIT) == 0);

	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   BLOCK_OP_NORMAL);

	  return const0_rtx;
	}

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (temp) == PARALLEL)
	{
	  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
	  rtx temp_target;
	  if (mode == BLKmode || mode == VOIDmode)
	    mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
	  temp_target = gen_reg_rtx (mode);
	  emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
	  temp = temp_target;
	}
      else if (mode == BLKmode)
	{
	  /* Handle calls that return BLKmode values in registers.  */
	  if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
	    {
	      rtx temp_target = gen_reg_rtx (GET_MODE (temp));
	      copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
	      temp = temp_target;
	    }
	  else
	    {
	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
	      rtx temp_target;
	      mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
	      temp_target = gen_reg_rtx (mode);
	      temp_target
		= extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
				     temp_target, mode, mode);
	      temp = temp_target;
	    }
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos,
		       bitregion_start, bitregion_end,
		       mode, temp);

      return const0_rtx;
    }
  else
    {
      /* Now build a reference to just the desired component.  */
      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, 0, nontemporal);
    }
}
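/* For illustration (a sketch): an assignment that takes the bit-field
   branch of store_field above, assuming a typical C layout:

     struct s { unsigned a : 3; unsigned b : 5; } x;
     x.b = v;

   Here MODE is VOIDmode and the value is written with store_bit_field; an
   ordinary aligned field would instead take the adjust_address/store_expr
   path at the end of the function.  */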
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
   Otherwise, it is a mode that can be used to access the field.

   If the field describes a variable-sized object, *PMODE is set to
   BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.

   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
   look through nodes that serve as markers of a greater alignment than
   the one that can be deduced from the expression.  These nodes make it
   possible for front-ends to prevent temporaries from being created by
   the middle-end on alignment considerations.  For that purpose, the
   normal operating mode at high-level is to always pass FALSE so that
   the ultimate containing object is really returned; moreover, the
   associated predicate handled_component_p will always return TRUE
   on these nodes, thus indicating that they are essentially handled
   by get_inner_reference.  TRUE should only be passed when the caller
   is scanning the expression in order to build another representation
   and specifically knows how to handle these nodes; as such, this is
   the normal operating mode in the RTL expanders.  */

tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
		     HOST_WIDE_INT *pbitpos, tree *poffset,
		     machine_mode *pmode, int *punsignedp,
		     int *pvolatilep, bool keep_aligning)
{
  tree size_tree = 0;
  machine_mode mode = VOIDmode;
  bool blkmode_bitfield = false;
  tree offset = size_zero_node;
  offset_int bit_offset = 0;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  *pbitsize = -1;
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (exp, 1);
      size_tree = DECL_SIZE (field);
      if (flag_strict_volatile_bitfields > 0
	  && TREE_THIS_VOLATILE (exp)
	  && DECL_BIT_FIELD_TYPE (field)
	  && DECL_MODE (field) != BLKmode)
	/* Volatile bitfields should be accessed in the mode of the
	   field's type, not the mode computed based on the bit
	   size.  */
	mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
      else if (!DECL_BIT_FIELD (field))
	mode = DECL_MODE (field);
      else if (DECL_MODE (field) == BLKmode)
	blkmode_bitfield = true;

      *punsignedp = DECL_UNSIGNED (field);
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
		     || TYPE_UNSIGNED (TREE_TYPE (exp)));

      /* For vector types, with the correct size of access, use the mode of
	 inner type.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
	  && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
	mode = TYPE_MODE (TREE_TYPE (exp));
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! tree_fits_uhwi_p (size_tree))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_to_uhwi (size_tree);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    /* If this field hasn't been filled in yet, don't go past it.
	       This should only happen when folding expressions made during
	       type construction.  */
	    if (this_offset == 0)
	      break;

	    offset = size_binop (PLUS_EXPR, offset, this_offset);
	    bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));

	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound = array_ref_low_bound (exp);
	    tree unit_size = array_ref_element_size (exp);

	    /* We assume all arrays have sizes that are a multiple of a byte.
	       First subtract the lower bound, if any, in the type of the
	       index, then convert to sizetype and multiply by the size of
	       the array element.  */
	    if (! integer_zerop (low_bound))
	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				   index, low_bound);

	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (MULT_EXPR,
					     fold_convert (sizetype, index),
					     unit_size));
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  bit_offset += *pbitsize;
	  break;

	case VIEW_CONVERT_EXPR:
	  if (keep_aligning && STRICT_ALIGNMENT
	      && (TYPE_ALIGN (TREE_TYPE (exp))
		  > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		  < BIGGEST_ALIGNMENT)
	      && (TYPE_ALIGN_OK (TREE_TYPE (exp))
		  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	    goto done;
	  break;

	case MEM_REF:
	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      tree off = TREE_OPERAND (exp, 1);
	      if (!integer_zerop (off))
		{
		  offset_int boff, coff = mem_ref_offset (exp);
		  boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
		  bit_offset += boff;
		}
	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	    }
	  goto done;

	default:
	  goto done;
	}

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Make sure to handle overflow during
     this conversion.  */
  if (TREE_CODE (offset) == INTEGER_CST)
    {
      offset_int tem = wi::sext (wi::to_offset (offset),
				 TYPE_PRECISION (sizetype));
      tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
      tem += bit_offset;
      if (wi::fits_shwi_p (tem))
	{
	  *pbitpos = tem.to_shwi ();
	  *poffset = offset = NULL_TREE;
	}
    }

  /* Otherwise, split it up.  */
  if (offset)
    {
      /* Avoid returning a negative bitpos as this may wreak havoc later.  */
      if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
	{
	  offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
	  offset_int tem = bit_offset.and_not (mask);
	  /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
	     Subtract it to BIT_OFFSET and add it (scaled) to OFFSET.  */
	  bit_offset -= tem;
	  tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
	  offset = size_binop (PLUS_EXPR, offset,
			       wide_int_to_tree (sizetype, tem));
	}

      *pbitpos = bit_offset.to_shwi ();
      *poffset = offset;
    }

  /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
  if (mode == VOIDmode
      && blkmode_bitfield
      && (*pbitpos % BITS_PER_UNIT) == 0
      && (*pbitsize % BITS_PER_UNIT) == 0)
    *pmode = BLKmode;
  else
    *pmode = mode;

  return exp;
}
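/* For illustration (a sketch of the decomposition, not additional logic):
   for a reference such as

     struct s { int pad; int a[10]; } *p;
     ... p->a[i] ...

   get_inner_reference hands back the dereference of p as the base object,
   sets *PBITSIZE to the width of int, puts the constant part of the
   displacement (here the byte offset of member a, scaled to bits) in
   *PBITPOS, and returns the variable byte offset built from i, roughly
   i * sizeof (int), in *POFFSET.  */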
/* Alignment in bits the TARGET of an assignment may be assumed to have.  */

static unsigned HOST_WIDE_INT
target_align (const_tree target)
{
  /* We might have a chain of nested references with intermediate misaligning
     bitfields components, so need to recurse to find out.  */

  unsigned HOST_WIDE_INT this_align, outer_align;

  switch (TREE_CODE (target))
    {
    case BIT_FIELD_REF:
      return 1;

    case COMPONENT_REF:
      this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MAX (this_align, outer_align);

    default:
      return TYPE_ALIGN (TREE_TYPE (target));
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value
	= simplify_gen_subreg (GET_MODE (value),
			       force_reg (GET_MODE (SUBREG_REG (value)),
					  force_operand (SUBREG_REG (value),
							 NULL_RTX)),
			       GET_MODE (SUBREG_REG (value)),
			       SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && CONST_INT_P (op2))
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (code == PLUS && CONST_INT_P (op2)
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && REG_P (XEXP (XEXP (value, 0), 0))
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1), 0),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);
	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }
  if (UNARY_P (value))
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      switch (code)
	{
	case ZERO_EXTEND:
	case SIGN_EXTEND:
	case TRUNCATE:
	case FLOAT_EXTEND:
	case FLOAT_TRUNCATE:
	  convert_move (target, op1, code == ZERO_EXTEND);
	  return target;

	case FIX:
	case UNSIGNED_FIX:
	  expand_fix (target, op1, code == UNSIGNED_FIX);
	  return target;

	case FLOAT:
	case UNSIGNED_FLOAT:
	  expand_float (target, op1, code == UNSIGNED_FLOAT);
	  return target;

	default:
	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
	}
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory reference to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
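/* For illustration (a sketch): force_operand is what turns a compound
   address or arithmetic rtx such as

     (plus:SI (mult:SI (reg:SI 101) (const_int 4)) (reg:SI 100))

   into real insns, returning a pseudo register holding the computed value;
   an rtx that is already a REG, MEM or constant is returned unchanged.  */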
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

static int
safe_from_p (const_rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
		 != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (MEM_P (x)
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_declaration:
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case tcc_constant:
      return 1;

    case tcc_exceptional:
      if (TREE_CODE (exp) == TREE_LIST)
	{
	  while (1)
	    {
	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
		return 0;
	      exp = TREE_CHAIN (exp);
	      if (!exp)
		return 1;
	      if (TREE_CODE (exp) != TREE_LIST)
		return safe_from_p (x, exp, 0);
	    }
	}
      else if (TREE_CODE (exp) == CONSTRUCTOR)
	{
	  constructor_elt *ce;
	  unsigned HOST_WIDE_INT idx;

	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
		|| !safe_from_p (x, ce->value, 0))
	      return 0;
	  return 1;
	}
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case tcc_statement:
      /* The only case we look at here is the DECL_INITIAL inside a
	 DECL_EXPR.  */
      return (TREE_CODE (exp) != DECL_EXPR
	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));

    case tcc_binary:
    case tcc_comparison:
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
	return 0;
      /* Fall through.  */

    case tcc_unary:
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL whose address is part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || !MEM_P (DECL_RTL (exp)))
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case MEM_REF:
	  if (MEM_P (x)
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || MEM_P (x))
	    return 0;
	  break;

	case WITH_CLEANUP_EXPR:
	case CLEANUP_POINT_EXPR:
	  /* Lowered by gimplify.c.  */
	  gcc_unreachable ();

	case SAVE_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = TREE_OPERAND_LENGTH (exp);
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      break;

    case tcc_type:
      /* Should never get a type here.  */
      gcc_unreachable ();
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (REG_P (exp_rtl)
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (MEM_P (x) && MEM_P (exp_rtl)
		    && true_dependence (exp_rtl, VOIDmode, x)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (const_tree exp)
{
  unsigned HOST_WIDE_INT ret;
  int trailing_zeros = tree_ctz (exp);
  if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
    return BIGGEST_ALIGNMENT;
  ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
  if (ret > BIGGEST_ALIGNMENT)
    return BIGGEST_ALIGNMENT;
  return ret;
}
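/* For illustration (a worked example, not additional logic): the result is
   derived from the known trailing zero bits of EXP, so for an offset
   expression like i * 12 it is 4 (12 = 4 * 3), and for a literal such as 96
   it is 32, in both cases capped at BIGGEST_ALIGNMENT.  */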
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree target, const_tree exp)
{
  unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
  unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);

  return MAX (factor, talign);
}
/* Convert the tree comparison code TCODE to the rtl one where the
   signedness is UNSIGNEDP.  */

static enum rtx_code
convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
{
  enum rtx_code code;
  switch (tcode)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;
    default:
      gcc_unreachable ();
    }
  return code;
}
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
	 temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
	exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
/* Return a MEM that contains constant EXP.  DEFER is as for
   output_constant_def and MODIFIER is as for expand_expr.  */

static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
  rtx mem;

  mem = output_constant_def (exp, defer);
  if (modifier != EXPAND_INITIALIZER)
    mem = use_anchored_address (mem);

  return mem;
}
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
			 enum expand_modifier modifier, addr_space_t as)
{
  rtx result, subtarget;
  tree inner, offset;
  HOST_WIDE_INT bitsize, bitpos;
  int volatilep, unsignedp;
  machine_mode mode1;

  /* If we are taking the address of a constant and are at the top level,
     we have to use output_constant_def since we can't call force_const_mem
     yet.  */
  /* ??? This should be considered a front-end bug.  We should not be
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
     exception here is STRING_CST.  */
  if (CONSTANT_CLASS_P (exp))
    {
      result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
      return result;
    }

  /* Everything must be something allowed by is_gimple_addressable.  */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);

    case MEM_REF:
      {
	tree tem = TREE_OPERAND (exp, 0);
	if (!integer_zerop (TREE_OPERAND (exp, 1)))
	  tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
	return expand_expr (tem, target, tmode, modifier);
      }

    case CONST_DECL:
      /* Expand the initializer like constants above.  */
      result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
					   0, modifier), 0);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
      return result;

    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
	 the address is the same as the address of the parent object.  */
      offset = 0;
      bitpos = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
	 The expression is therefore always offset by the size of the
	 scalar type.  */
      offset = 0;
      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;

    case COMPOUND_LITERAL_EXPR:
      /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
	 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
	 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
	 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
	 the initializers aren't gimplified.  */
      if (COMPOUND_LITERAL_EXPR_DECL (exp)
	  && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
	return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
					target, tmode, modifier, as);
      /* FALLTHRU */
    default:
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
	 expand_expr, as that can have various side effects; LABEL_DECLs for
	 example, may not have their DECL_RTL set yet.  Expand the rtl of
	 CONSTRUCTORs too, which should yield a memory reference for the
	 constructor's contents.  Assume language specific tree nodes can
	 be expanded in some interesting way.  */
      gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
      if (DECL_P (exp)
	  || TREE_CODE (exp) == CONSTRUCTOR
	  || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
	{
	  result = expand_expr (exp, target, tmode,
				modifier == EXPAND_INITIALIZER
				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);

	  /* If the DECL isn't in memory, then the DECL wasn't properly
	     marked TREE_ADDRESSABLE, which will be either a front-end
	     or a tree optimizer bug.  */

	  if (TREE_ADDRESSABLE (exp)
	      && ! MEM_P (result)
	      && ! targetm.calls.allocate_stack_slots_for_args ())
	    {
	      error ("local frame unavailable (naked function?)");
	      return result;
	    }
	  else
	    gcc_assert (MEM_P (result));
	  result = XEXP (result, 0);

	  /* ??? Is this needed anymore?  */
	  if (DECL_P (exp))
	    TREE_USED (exp) = 1;

	  if (modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_SUM)
	    result = force_operand (result, target);
	  return result;
	}

      /* Pass FALSE as the last argument to get_inner_reference although
	 we are expanding to RTL.  The rationale is that we know how to
	 handle "aligning nodes" here: we can just bypass them because
	 they won't change the final object whose address will be returned
	 (they actually exist only for that purpose).  */
      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				   &mode1, &unsignedp, &volatilep, false);
      break;
    }

  /* We must have made progress.  */
  gcc_assert (inner != exp);

  subtarget = offset || bitpos ? NULL_RTX : target;
  /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
     inner alignment, force the inner to be sufficiently aligned.  */
  if (CONSTANT_CLASS_P (inner)
      && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
    {
      inner = copy_node (inner);
      TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
      TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
      TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
    }
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);

  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
	result = force_operand (result, NULL);
      tmp = expand_expr (offset, NULL_RTX, tmode,
			 modifier == EXPAND_INITIALIZER
			 ? EXPAND_INITIALIZER : EXPAND_NORMAL);

      /* expand_expr is allowed to return an object in a mode other
	 than TMODE.  If it did, we need to convert.  */
      if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
	tmp = convert_modes (tmode, GET_MODE (tmp),
			     tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
      result = convert_memory_address_addr_space (tmode, result, as);
      tmp = convert_memory_address_addr_space (tmode, tmp, as);

      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	result = simplify_gen_binary (PLUS, tmode, result, tmp);
      else
	{
	  subtarget = bitpos ? NULL_RTX : target;
	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
					1, OPTAB_LIB_WIDEN);
	}
    }

  if (bitpos)
    {
      /* Someone beforehand should have rejected taking the address
	 of such an object.  */
      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);

      result = convert_memory_address_addr_space (tmode, result, as);
      result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
    }

  return result;
}
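/* For illustration (a sketch): for an address such as &p->a[i] the recursion
   above first expands the address of the base object, then adds the variable
   part (the expansion of OFFSET, converted to the address mode) and finally
   the constant part BITPOS / BITS_PER_UNIT via plus_constant.  */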
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
		       enum expand_modifier modifier)
{
  addr_space_t as = ADDR_SPACE_GENERIC;
  machine_mode address_mode = Pmode;
  machine_mode pointer_mode = ptr_mode;
  machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  if (POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      address_mode = targetm.addr_space.address_mode (as);
      pointer_mode = targetm.addr_space.pointer_mode (as);
    }

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  if (tmode != address_mode && tmode != pointer_mode)
    tmode = address_mode;

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
				    tmode, modifier, as);

  /* Despite expand_expr claims concerning ignoring TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = tmode;
  if (rmode != tmode)
    result = convert_memory_address_addr_space (tmode, result, as);

  return result;
}
/* Generate code for computing CONSTRUCTOR EXP.
   An rtx for the computed value is returned.  If AVOID_TEMP_MEM
   is TRUE, instead of creating a temporary variable in memory
   NULL is returned and the caller needs to handle it differently.  */

static rtx
expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
		    bool avoid_temp_mem)
{
  tree type = TREE_TYPE (exp);
  machine_mode mode = TYPE_MODE (type);

  /* Try to avoid creating a temporary at all.  This is possible
     if all of the initializer is zero.
     FIXME: try to handle all [0..255] initializers we can handle
     with memset.  */
  if (TREE_STATIC (exp)
      && !TREE_ADDRESSABLE (exp)
      && target != 0 && mode == BLKmode
      && all_zeros_p (exp))
    {
      clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
      return target;
    }

  /* All elts simple constants => refer to a constant in memory.  But
     if this is a non-BLKmode mode, let it store a field at a time
     since that should make a CONST_INT, CONST_WIDE_INT or
     CONST_DOUBLE when we fold.  Likewise, if we have a target we can
     use, it is best to store directly into the target unless the type
     is large enough that memcpy will be used.  If we are making an
     initializer and all operands are constant, put it in memory as
     well.

     FIXME: Avoid trying to fill vector constructors piece-meal.
     Output them with output_constant_def below unless we're sure
     they're zeros.  This should go away when vector initializers
     are treated like VECTOR_CST instead of arrays.  */
  if ((TREE_STATIC (exp)
       && ((mode == BLKmode
	    && ! (target != 0 && safe_from_p (target, exp, 1)))
	   || TREE_ADDRESSABLE (exp)
	   || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
	       && (! can_move_by_pieces
		   (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
		    TYPE_ALIGN (type)))
	       && ! mostly_zeros_p (exp))))
      || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
	  && TREE_CONSTANT (exp)))
    {
      rtx constructor;

      if (avoid_temp_mem)
	return NULL_RTX;

      constructor = expand_expr_constant (exp, 1, modifier);

      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM)
	constructor = validize_mem (constructor);

      return constructor;
    }

  /* Handle calls that pass values in multiple non-contiguous
     locations.  The Irix 6 ABI has examples of this.  */
  if (target == 0 || ! safe_from_p (target, exp, 1)
      || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
    {
      if (avoid_temp_mem)
	return NULL_RTX;

      target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
    }

  store_constructor (exp, target, 0, int_expr_size (exp));
  return target;
}
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.

   If INNER_REFERENCE_P is true, we are expanding an inner reference.
   In this case, we don't adjust a returned MEM rtx that wouldn't be
   sufficiently aligned for its mode; instead, it's up to the caller
   to deal with it afterwards.  This is used to make sure that unaligned
   base objects for which out-of-bounds accesses are supported, for
   example record types with trailing arrays, aren't realigned behind
   the back of the caller.
   The normal operating mode is to pass FALSE for this parameter.  */
rtx
expand_expr_real (tree exp, rtx target, machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl,
		  bool inner_reference_p)
{
  rtx ret;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
			    inner_reference_p);
  return ret;
}
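/* For illustration (a sketch): most callers go through the wrappers in
   expr.h rather than calling expand_expr_real directly, e.g.

     rtx x = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
     rtx y = expand_normal (exp);

   both of which must tolerate the returned rtx not matching the suggested
   TARGET or TMODE, as described in the comment above.  */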
/* Try to expand the conditional expression which is represented by
   TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds,
   return the rtl reg which represents the result.  Otherwise return
   NULL_RTX.  */

static rtx
expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
			      tree treeop1 ATTRIBUTE_UNUSED,
			      tree treeop2 ATTRIBUTE_UNUSED)
{
#ifdef HAVE_conditional_move
  rtx insn;
  rtx op00, op01, op1, op2;
  enum rtx_code comparison_code;
  machine_mode comparison_mode;
  gimple srcstmt;
  rtx temp;
  tree type = TREE_TYPE (treeop1);
  int unsignedp = TYPE_UNSIGNED (type);
  machine_mode mode = TYPE_MODE (type);
  machine_mode orig_mode = mode;

  /* If we cannot do a conditional move on the mode, try doing it
     with the promoted mode.  */
  if (!can_conditionally_move_p (mode))
    {
      mode = promote_mode (type, mode, &unsignedp);
      if (!can_conditionally_move_p (mode))
	return NULL_RTX;
      temp = assign_temp (type, 0, 0); /* Use promoted mode for temp.  */
    }
  else
    temp = assign_temp (type, 0, 1);

  start_sequence ();
  expand_operands (treeop1, treeop2,
		   temp, &op1, &op2, EXPAND_NORMAL);

  if (TREE_CODE (treeop0) == SSA_NAME
      && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
      enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
      op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
      op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
      comparison_mode = TYPE_MODE (type);
      unsignedp = TYPE_UNSIGNED (type);
      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
    }
  else if (COMPARISON_CLASS_P (treeop0))
    {
      tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
      enum tree_code cmpcode = TREE_CODE (treeop0);
      op00 = expand_normal (TREE_OPERAND (treeop0, 0));
      op01 = expand_normal (TREE_OPERAND (treeop0, 1));
      unsignedp = TYPE_UNSIGNED (type);
      comparison_mode = TYPE_MODE (type);
      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
    }
  else
    {
      op00 = expand_normal (treeop0);
      op01 = const0_rtx;
      comparison_code = NE;
      comparison_mode = GET_MODE (op00);
      if (comparison_mode == VOIDmode)
	comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
    }

  if (GET_MODE (op1) != mode)
    op1 = gen_lowpart (mode, op1);

  if (GET_MODE (op2) != mode)
    op2 = gen_lowpart (mode, op2);

  /* Try to emit the conditional move.  */
  insn = emit_conditional_move (temp, comparison_code,
				op00, op01, comparison_mode,
				op1, op2, mode,
				unsignedp);

  /* If we could do the conditional move, emit the sequence,
     and return.  */
  if (insn)
    {
      rtx_insn *seq = get_insns ();
      end_sequence ();
      emit_insn (seq);
      return convert_modes (orig_mode, mode, temp, 0);
    }

  /* Otherwise discard the sequence and fall back to code with
     branches.  */
  end_sequence ();
#endif
  return NULL_RTX;
}
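/* For illustration (a sketch): a gimple conditional such as

     x = a < b ? c : d;

   can be expanded here into a compare followed by a conditional move on
   targets that provide one (HAVE_conditional_move); when
   emit_conditional_move fails, the generated sequence is discarded and the
   ordinary branching COND_EXPR expansion is used instead.  */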
8027 expand_expr_real_2 (sepops ops
, rtx target
, machine_mode tmode
,
8028 enum expand_modifier modifier
)
8030 rtx op0
, op1
, op2
, temp
;
8031 rtx_code_label
*lab
;
8035 enum tree_code code
= ops
->code
;
8037 rtx subtarget
, original_target
;
8039 bool reduce_bit_field
;
8040 location_t loc
= ops
->location
;
8041 tree treeop0
, treeop1
, treeop2
;
8042 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8043 ? reduce_to_bit_field_precision ((expr), \
8049 mode
= TYPE_MODE (type
);
8050 unsignedp
= TYPE_UNSIGNED (type
);
8056 /* We should be called only on simple (binary or unary) expressions,
8057 exactly those that are valid in gimple expressions that aren't
8058 GIMPLE_SINGLE_RHS (or invalid). */
8059 gcc_assert (get_gimple_rhs_class (code
) == GIMPLE_UNARY_RHS
8060 || get_gimple_rhs_class (code
) == GIMPLE_BINARY_RHS
8061 || get_gimple_rhs_class (code
) == GIMPLE_TERNARY_RHS
);
8063 ignore
= (target
== const0_rtx
8064 || ((CONVERT_EXPR_CODE_P (code
)
8065 || code
== COND_EXPR
|| code
== VIEW_CONVERT_EXPR
)
8066 && TREE_CODE (type
) == VOID_TYPE
));
8068 /* We should be called only if we need the result. */
8069 gcc_assert (!ignore
);
8071 /* An operation in what may be a bit-field type needs the
8072 result to be reduced to the precision of the bit-field type,
8073 which is narrower than that of the type's mode. */
8074 reduce_bit_field
= (INTEGRAL_TYPE_P (type
)
8075 && GET_MODE_PRECISION (mode
) > TYPE_PRECISION (type
));
8077 if (reduce_bit_field
&& modifier
== EXPAND_STACK_PARM
)
8080 /* Use subtarget as the target for operand 0 of a binary operation. */
8081 subtarget
= get_subtarget (target
);
8082 original_target
= target
;
8086 case NON_LVALUE_EXPR
:
8089 if (treeop0
== error_mark_node
)
8092 if (TREE_CODE (type
) == UNION_TYPE
)
8094 tree valtype
= TREE_TYPE (treeop0
);
8096 /* If both input and output are BLKmode, this conversion isn't doing
8097 anything except possibly changing memory attribute. */
8098 if (mode
== BLKmode
&& TYPE_MODE (valtype
) == BLKmode
)
8100 rtx result
= expand_expr (treeop0
, target
, tmode
,
8103 result
= copy_rtx (result
);
8104 set_mem_attributes (result
, type
, 0);
8110 if (TYPE_MODE (type
) != BLKmode
)
8111 target
= gen_reg_rtx (TYPE_MODE (type
));
8113 target
= assign_temp (type
, 1, 1);
8117 /* Store data into beginning of memory target. */
8118 store_expr (treeop0
,
8119 adjust_address (target
, TYPE_MODE (valtype
), 0),
8120 modifier
== EXPAND_STACK_PARM
,
8125 gcc_assert (REG_P (target
));
8127 /* Store this field into a union of the proper type. */
8128 store_field (target
,
8129 MIN ((int_size_in_bytes (TREE_TYPE
8132 (HOST_WIDE_INT
) GET_MODE_BITSIZE (mode
)),
8133 0, 0, 0, TYPE_MODE (valtype
), treeop0
, 0, false);
8136 /* Return the entire union. */
8140 if (mode
== TYPE_MODE (TREE_TYPE (treeop0
)))
8142 op0
= expand_expr (treeop0
, target
, VOIDmode
,
8145 /* If the signedness of the conversion differs and OP0 is
8146 a promoted SUBREG, clear that indication since we now
8147 have to do the proper extension. */
8148 if (TYPE_UNSIGNED (TREE_TYPE (treeop0
)) != unsignedp
8149 && GET_CODE (op0
) == SUBREG
)
8150 SUBREG_PROMOTED_VAR_P (op0
) = 0;
8152 return REDUCE_BIT_FIELD (op0
);
8155 op0
= expand_expr (treeop0
, NULL_RTX
, mode
,
8156 modifier
== EXPAND_SUM
? EXPAND_NORMAL
: modifier
);
8157 if (GET_MODE (op0
) == mode
)
8160 /* If OP0 is a constant, just convert it into the proper mode. */
8161 else if (CONSTANT_P (op0
))
8163 tree inner_type
= TREE_TYPE (treeop0
);
8164 machine_mode inner_mode
= GET_MODE (op0
);
8166 if (inner_mode
== VOIDmode
)
8167 inner_mode
= TYPE_MODE (inner_type
);
8169 if (modifier
== EXPAND_INITIALIZER
)
8170 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
,
8171 subreg_lowpart_offset (mode
,
8174 op0
= convert_modes (mode
, inner_mode
, op0
,
8175 TYPE_UNSIGNED (inner_type
));
8178 else if (modifier
== EXPAND_INITIALIZER
)
8179 op0
= gen_rtx_fmt_e (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
);
8181 else if (target
== 0)
8182 op0
= convert_to_mode (mode
, op0
,
8183 TYPE_UNSIGNED (TREE_TYPE
8187 convert_move (target
, op0
,
8188 TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
8192 return REDUCE_BIT_FIELD (op0
);
    case ADDR_SPACE_CONVERT_EXPR:
      {
	tree treeop0_type = TREE_TYPE (treeop0);
	addr_space_t as_to;
	addr_space_t as_from;

	gcc_assert (POINTER_TYPE_P (type));
	gcc_assert (POINTER_TYPE_P (treeop0_type));

	as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
	as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));

	/* Conversions between pointers to the same address space should
	   have been implemented via CONVERT_EXPR / NOP_EXPR.  */
	gcc_assert (as_to != as_from);

	/* Ask target code to handle conversion between pointers
	   to overlapping address spaces.  */
	if (targetm.addr_space.subset_p (as_to, as_from)
	    || targetm.addr_space.subset_p (as_from, as_to))
	  {
	    op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
	    op0 = targetm.addr_space.convert (op0, treeop0_type, type);
	    gcc_assert (op0);
	    return op0;
	  }

	/* For disjoint address spaces, converting anything but
	   a null pointer invokes undefined behaviour.  We simply
	   always return a null pointer here.  */
	return CONST0_RTX (mode);
      }
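      /* For illustration: on targets with named address spaces (say a
	 separate flash space and a generic RAM space), a non-null pointer
	 into one disjoint space has no meaningful counterpart in the other,
	 which is why only the null-pointer case is given a defined value
	 above.  */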
    case POINTER_PLUS_EXPR:
      /* Even though the sizetype mode and the pointer's mode can be different
	 expand is able to handle this correctly and get the correct result out
	 of the PLUS_EXPR code.  */
      /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
	 if sizetype precision is smaller than pointer precision.  */
      if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
	treeop1 = fold_convert_loc (loc, type,
				    fold_convert_loc (loc, ssizetype,
						      treeop1));
      /* If sizetype precision is larger than pointer precision, truncate the
	 offset to have matching modes.  */
      else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
	treeop1 = fold_convert_loc (loc, type, treeop1);

    case PLUS_EXPR:
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (treeop0) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
	  && TREE_CODE (treeop1) == VAR_DECL
	  && (DECL_RTL (treeop1) == frame_pointer_rtx
	      || DECL_RTL (treeop1) == stack_pointer_rtx
	      || DECL_RTL (treeop1) == arg_pointer_rtx))
	{
	  gcc_unreachable ();
	}
      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  if (TREE_CODE (treeop0) == INTEGER_CST
	      && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (treeop1))
	    {
	      rtx constant_part;
	      HOST_WIDE_INT wc;
	      machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));

	      op1 = expand_expr (treeop1, subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use wi::shwi to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      wc = TREE_INT_CST_LOW (treeop0);
	      constant_part =
		immed_wide_int_const (wi::shwi (wc, wmode), wmode);
	      op1 = plus_constant (mode, op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return REDUCE_BIT_FIELD (op1);
	    }
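	  /* Concretely: CONST_INTs are stored sign-extended in a full
	     HOST_WIDE_INT, so with a 64-bit host and a 32-bit wmode the
	     unsigned constant 0xfffffff0 must be represented as -16 rather
	     than 0x00000000fffffff0.  wi::shwi plus immed_wide_int_const
	     produce that canonical form before plus_constant folds it
	     into the sum.  */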
8300 else if (TREE_CODE (treeop1
) == INTEGER_CST
8301 && GET_MODE_PRECISION (mode
) <= HOST_BITS_PER_WIDE_INT
8302 && TREE_CONSTANT (treeop0
))
8306 machine_mode wmode
= TYPE_MODE (TREE_TYPE (treeop0
));
8308 op0
= expand_expr (treeop0
, subtarget
, VOIDmode
,
8309 (modifier
== EXPAND_INITIALIZER
8310 ? EXPAND_INITIALIZER
: EXPAND_SUM
));
8311 if (! CONSTANT_P (op0
))
8313 op1
= expand_expr (treeop1
, NULL_RTX
,
8314 VOIDmode
, modifier
);
8315 /* Return a PLUS if modifier says it's OK. */
8316 if (modifier
== EXPAND_SUM
8317 || modifier
== EXPAND_INITIALIZER
)
8318 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
8321 /* Use wi::shwi to ensure that the constant is
8322 truncated according to the mode of OP1, then sign extended
8323 to a HOST_WIDE_INT. Using the constant directly can result
8324 in non-canonical RTL in a 64x32 cross compile. */
8325 wc
= TREE_INT_CST_LOW (treeop1
);
8327 = immed_wide_int_const (wi::shwi (wc
, wmode
), wmode
);
8328 op0
= plus_constant (mode
, op0
, INTVAL (constant_part
));
8329 if (modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8330 op0
= force_operand (op0
, target
);
8331 return REDUCE_BIT_FIELD (op0
);
8335 /* Use TER to expand pointer addition of a negated value
8336 as pointer subtraction. */
8337 if ((POINTER_TYPE_P (TREE_TYPE (treeop0
))
8338 || (TREE_CODE (TREE_TYPE (treeop0
)) == VECTOR_TYPE
8339 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0
)))))
8340 && TREE_CODE (treeop1
) == SSA_NAME
8341 && TYPE_MODE (TREE_TYPE (treeop0
))
8342 == TYPE_MODE (TREE_TYPE (treeop1
)))
8344 gimple def
= get_def_for_expr (treeop1
, NEGATE_EXPR
);
8347 treeop1
= gimple_assign_rhs1 (def
);
8353 /* No sense saving up arithmetic to be done
8354 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
8357 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8358 || mode
!= ptr_mode
)
8360 expand_operands (treeop0
, treeop1
,
8361 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8362 if (op0
== const0_rtx
)
8364 if (op1
== const0_rtx
)
8369 expand_operands (treeop0
, treeop1
,
8370 subtarget
, &op0
, &op1
, modifier
);
8371 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS
, mode
, op0
, op1
));
8375 /* For initializers, we are allowed to return a MINUS of two
8376 symbolic constants. Here we handle all cases when both operands
8378 /* Handle difference of two symbolic constants,
8379 for the sake of an initializer. */
8380 if ((modifier
== EXPAND_SUM
|| modifier
== EXPAND_INITIALIZER
)
8381 && really_constant_p (treeop0
)
8382 && really_constant_p (treeop1
))
8384 expand_operands (treeop0
, treeop1
,
8385 NULL_RTX
, &op0
, &op1
, modifier
);
8387 /* If the last operand is a CONST_INT, use plus_constant of
8388 the negated constant. Else make the MINUS. */
8389 if (CONST_INT_P (op1
))
8390 return REDUCE_BIT_FIELD (plus_constant (mode
, op0
,
8393 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode
, op0
, op1
));
8396 /* No sense saving up arithmetic to be done
8397 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
8400 if ((modifier
!= EXPAND_SUM
&& modifier
!= EXPAND_INITIALIZER
)
8401 || mode
!= ptr_mode
)
8404 expand_operands (treeop0
, treeop1
,
8405 subtarget
, &op0
, &op1
, modifier
);
8407 /* Convert A - const to A + (-const). */
8408 if (CONST_INT_P (op1
))
8410 op1
= negate_rtx (mode
, op1
);
8411 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS
, mode
, op0
, op1
));
8416 case WIDEN_MULT_PLUS_EXPR
:
8417 case WIDEN_MULT_MINUS_EXPR
:
8418 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
8419 op2
= expand_normal (treeop2
);
8420 target
= expand_widen_pattern_expr (ops
, op0
, op1
, op2
,
8424 case WIDEN_MULT_EXPR
:
8425 /* If first operand is constant, swap them.
8426 Thus the following special case checks need only
8427 check the second operand. */
8428 if (TREE_CODE (treeop0
) == INTEGER_CST
)
8429 std::swap (treeop0
, treeop1
);
8431 /* First, check if we have a multiplication of one signed and one
8432 unsigned operand. */
8433 if (TREE_CODE (treeop1
) != INTEGER_CST
8434 && (TYPE_UNSIGNED (TREE_TYPE (treeop0
))
8435 != TYPE_UNSIGNED (TREE_TYPE (treeop1
))))
8437 machine_mode innermode
= TYPE_MODE (TREE_TYPE (treeop0
));
8438 this_optab
= usmul_widen_optab
;
8439 if (find_widening_optab_handler (this_optab
, mode
, innermode
, 0)
8440 != CODE_FOR_nothing
)
8442 if (TYPE_UNSIGNED (TREE_TYPE (treeop0
)))
8443 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
,
8446 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op1
, &op0
,
8448 /* op0 and op1 might still be constant, despite the above
8449 != INTEGER_CST check. Handle it. */
8450 if (GET_MODE (op0
) == VOIDmode
&& GET_MODE (op1
) == VOIDmode
)
8452 op0
= convert_modes (innermode
, mode
, op0
, true);
8453 op1
= convert_modes (innermode
, mode
, op1
, false);
8454 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
,
8455 target
, unsignedp
));
8460 /* Check for a multiplication with matching signedness. */
8461 else if ((TREE_CODE (treeop1
) == INTEGER_CST
8462 && int_fits_type_p (treeop1
, TREE_TYPE (treeop0
)))
8463 || (TYPE_UNSIGNED (TREE_TYPE (treeop1
))
8464 == TYPE_UNSIGNED (TREE_TYPE (treeop0
))))
8466 tree op0type
= TREE_TYPE (treeop0
);
8467 machine_mode innermode
= TYPE_MODE (op0type
);
8468 bool zextend_p
= TYPE_UNSIGNED (op0type
);
8469 optab other_optab
= zextend_p
? smul_widen_optab
: umul_widen_optab
;
8470 this_optab
= zextend_p
? umul_widen_optab
: smul_widen_optab
;
8472 if (TREE_CODE (treeop0
) != INTEGER_CST
)
8474 if (find_widening_optab_handler (this_optab
, mode
, innermode
, 0)
8475 != CODE_FOR_nothing
)
8477 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
,
8479 /* op0 and op1 might still be constant, despite the above
8480 != INTEGER_CST check. Handle it. */
8481 if (GET_MODE (op0
) == VOIDmode
&& GET_MODE (op1
) == VOIDmode
)
8484 op0
= convert_modes (innermode
, mode
, op0
, zextend_p
);
8486 = convert_modes (innermode
, mode
, op1
,
8487 TYPE_UNSIGNED (TREE_TYPE (treeop1
)));
8488 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
,
8492 temp
= expand_widening_mult (mode
, op0
, op1
, target
,
8493 unsignedp
, this_optab
);
8494 return REDUCE_BIT_FIELD (temp
);
8496 if (find_widening_optab_handler (other_optab
, mode
, innermode
, 0)
8498 && innermode
== word_mode
)
8501 op0
= expand_normal (treeop0
);
8502 if (TREE_CODE (treeop1
) == INTEGER_CST
)
8503 op1
= convert_modes (innermode
, mode
,
8504 expand_normal (treeop1
),
8505 TYPE_UNSIGNED (TREE_TYPE (treeop1
)));
8507 op1
= expand_normal (treeop1
);
8508 /* op0 and op1 might still be constant, despite the above
8509 != INTEGER_CST check. Handle it. */
8510 if (GET_MODE (op0
) == VOIDmode
&& GET_MODE (op1
) == VOIDmode
)
8511 goto widen_mult_const
;
8512 temp
= expand_binop (mode
, other_optab
, op0
, op1
, target
,
8513 unsignedp
, OPTAB_LIB_WIDEN
);
8514 hipart
= gen_highpart (innermode
, temp
);
8515 htem
= expand_mult_highpart_adjust (innermode
, hipart
,
8519 emit_move_insn (hipart
, htem
);
8520 return REDUCE_BIT_FIELD (temp
);
8524 treeop0
= fold_build1 (CONVERT_EXPR
, type
, treeop0
);
8525 treeop1
= fold_build1 (CONVERT_EXPR
, type
, treeop1
);
8526 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8527 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
, target
, unsignedp
));
8531 optab opt
= fma_optab
;
8534 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8536 if (optab_handler (fma_optab
, mode
) == CODE_FOR_nothing
)
8538 tree fn
= mathfn_built_in (TREE_TYPE (treeop0
), BUILT_IN_FMA
);
8541 gcc_assert (fn
!= NULL_TREE
);
8542 call_expr
= build_call_expr (fn
, 3, treeop0
, treeop1
, treeop2
);
8543 return expand_builtin (call_expr
, target
, subtarget
, mode
, false);
8546 def0
= get_def_for_expr (treeop0
, NEGATE_EXPR
);
8547 /* The multiplication is commutative - look at its 2nd operand
8548 if the first isn't fed by a negate. */
8551 def0
= get_def_for_expr (treeop1
, NEGATE_EXPR
);
8552 /* Swap operands if the 2nd operand is fed by a negate. */
8554 std::swap (treeop0
, treeop1
);
8556 def2
= get_def_for_expr (treeop2
, NEGATE_EXPR
);
8561 && optab_handler (fnms_optab
, mode
) != CODE_FOR_nothing
)
8564 op0
= expand_normal (gimple_assign_rhs1 (def0
));
8565 op2
= expand_normal (gimple_assign_rhs1 (def2
));
8568 && optab_handler (fnma_optab
, mode
) != CODE_FOR_nothing
)
8571 op0
= expand_normal (gimple_assign_rhs1 (def0
));
8574 && optab_handler (fms_optab
, mode
) != CODE_FOR_nothing
)
8577 op2
= expand_normal (gimple_assign_rhs1 (def2
));
8581 op0
= expand_expr (treeop0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
8583 op2
= expand_normal (treeop2
);
8584 op1
= expand_normal (treeop1
);
8586 return expand_ternary_op (TYPE_MODE (type
), opt
,
8587 op0
, op1
, op2
, target
, 0);
8591 /* If this is a fixed-point operation, then we cannot use the code
8592 below because "expand_mult" doesn't support sat/no-sat fixed-point
8594 if (ALL_FIXED_POINT_MODE_P (mode
))
8597 /* If first operand is constant, swap them.
8598 Thus the following special case checks need only
8599 check the second operand. */
8600 if (TREE_CODE (treeop0
) == INTEGER_CST
)
8601 std::swap (treeop0
, treeop1
);
8603 /* Attempt to return something suitable for generating an
8604 indexed address, for machines that support that. */
8606 if (modifier
== EXPAND_SUM
&& mode
== ptr_mode
8607 && tree_fits_shwi_p (treeop1
))
8609 tree exp1
= treeop1
;
8611 op0
= expand_expr (treeop0
, subtarget
, VOIDmode
,
8615 op0
= force_operand (op0
, NULL_RTX
);
8617 op0
= copy_to_mode_reg (mode
, op0
);
8619 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode
, op0
,
8620 gen_int_mode (tree_to_shwi (exp1
),
8621 TYPE_MODE (TREE_TYPE (exp1
)))));
8624 if (modifier
== EXPAND_STACK_PARM
)
8627 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8628 return REDUCE_BIT_FIELD (expand_mult (mode
, op0
, op1
, target
, unsignedp
));
8630 case TRUNC_DIV_EXPR
:
8631 case FLOOR_DIV_EXPR
:
8633 case ROUND_DIV_EXPR
:
8634 case EXACT_DIV_EXPR
:
8635 /* If this is a fixed-point operation, then we cannot use the code
8636 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8638 if (ALL_FIXED_POINT_MODE_P (mode
))
8641 if (modifier
== EXPAND_STACK_PARM
)
8643 /* Possible optimization: compute the dividend with EXPAND_SUM
8644 then if the divisor is constant can optimize the case
8645 where some terms of the dividend have coeffs divisible by it. */
8646 expand_operands (treeop0
, treeop1
,
8647 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8648 return expand_divmod (0, code
, mode
, op0
, op1
, target
, unsignedp
);
8653 case MULT_HIGHPART_EXPR
:
8654 expand_operands (treeop0
, treeop1
, subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8655 temp
= expand_mult_highpart (mode
, op0
, op1
, target
, unsignedp
);
8659 case TRUNC_MOD_EXPR
:
8660 case FLOOR_MOD_EXPR
:
8662 case ROUND_MOD_EXPR
:
8663 if (modifier
== EXPAND_STACK_PARM
)
8665 expand_operands (treeop0
, treeop1
,
8666 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
8667 return expand_divmod (1, code
, mode
, op0
, op1
, target
, unsignedp
);
8669 case FIXED_CONVERT_EXPR
:
8670 op0
= expand_normal (treeop0
);
8671 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8672 target
= gen_reg_rtx (mode
);
8674 if ((TREE_CODE (TREE_TYPE (treeop0
)) == INTEGER_TYPE
8675 && TYPE_UNSIGNED (TREE_TYPE (treeop0
)))
8676 || (TREE_CODE (type
) == INTEGER_TYPE
&& TYPE_UNSIGNED (type
)))
8677 expand_fixed_convert (target
, op0
, 1, TYPE_SATURATING (type
));
8679 expand_fixed_convert (target
, op0
, 0, TYPE_SATURATING (type
));
8682 case FIX_TRUNC_EXPR
:
8683 op0
= expand_normal (treeop0
);
8684 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8685 target
= gen_reg_rtx (mode
);
8686 expand_fix (target
, op0
, unsignedp
);
8690 op0
= expand_normal (treeop0
);
8691 if (target
== 0 || modifier
== EXPAND_STACK_PARM
)
8692 target
= gen_reg_rtx (mode
);
8693 /* expand_float can't figure out what to do if FROM has VOIDmode.
8694 So give it the correct mode. With -O, cse will optimize this. */
8695 if (GET_MODE (op0
) == VOIDmode
)
8696 op0
= copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0
)),
8698 expand_float (target
, op0
,
8699 TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
8703 op0
= expand_expr (treeop0
, subtarget
,
8704 VOIDmode
, EXPAND_NORMAL
);
8705 if (modifier
== EXPAND_STACK_PARM
)
8707 temp
= expand_unop (mode
,
8708 optab_for_tree_code (NEGATE_EXPR
, type
,
8712 return REDUCE_BIT_FIELD (temp
);
8715 op0
= expand_expr (treeop0
, subtarget
,
8716 VOIDmode
, EXPAND_NORMAL
);
8717 if (modifier
== EXPAND_STACK_PARM
)
8720 /* ABS_EXPR is not valid for complex arguments. */
8721 gcc_assert (GET_MODE_CLASS (mode
) != MODE_COMPLEX_INT
8722 && GET_MODE_CLASS (mode
) != MODE_COMPLEX_FLOAT
);
8724 /* Unsigned abs is simply the operand. Testing here means we don't
8725 risk generating incorrect code below. */
8726 if (TYPE_UNSIGNED (type
))
8729 return expand_abs (mode
, op0
, target
, unsignedp
,
8730 safe_from_p (target
, treeop0
, 1));
8734 target
= original_target
;
8736 || modifier
== EXPAND_STACK_PARM
8737 || (MEM_P (target
) && MEM_VOLATILE_P (target
))
8738 || GET_MODE (target
) != mode
8740 && REGNO (target
) < FIRST_PSEUDO_REGISTER
))
8741 target
= gen_reg_rtx (mode
);
8742 expand_operands (treeop0
, treeop1
,
8743 target
, &op0
, &op1
, EXPAND_NORMAL
);
8745 /* First try to do it with a special MIN or MAX instruction.
8746 If that does not win, use a conditional jump to select the proper
8748 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
8749 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
, unsignedp
,
8754 /* At this point, a MEM target is no longer useful; we will get better
8757 if (! REG_P (target
))
8758 target
= gen_reg_rtx (mode
);
8760 /* If op1 was placed in target, swap op0 and op1. */
8761 if (target
!= op0
&& target
== op1
)
8762 std::swap (op0
, op1
);
8764 /* We generate better code and avoid problems with op1 mentioning
8765 target by forcing op1 into a pseudo if it isn't a constant. */
8766 if (! CONSTANT_P (op1
))
8767 op1
= force_reg (mode
, op1
);
	enum rtx_code comparison_code;
	rtx cmpop1 = op1;

	if (code == MAX_EXPR)
	  comparison_code = unsignedp ? GEU : GE;
	else
	  comparison_code = unsignedp ? LEU : LE;

	/* Canonicalize to comparisons against 0.  */
	if (op1 == const1_rtx)
	  {
	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
	       or (a != 0 ? a : 1) for unsigned.
	       For MIN we are safe converting (a <= 1 ? a : 1)
	       into (a <= 0 ? a : 1)  */
	    cmpop1 = const0_rtx;
	    if (code == MAX_EXPR)
	      comparison_code = unsignedp ? NE : GT;
	  }
	if (op1 == constm1_rtx && !unsignedp)
	  {
	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
	    cmpop1 = const0_rtx;
	    if (code == MIN_EXPR)
	      comparison_code = LT;
	  }
8798 /* Use a conditional move if possible. */
8799 if (can_conditionally_move_p (mode
))
8805 /* Try to emit the conditional move. */
8806 insn
= emit_conditional_move (target
, comparison_code
,
8811 /* If we could do the conditional move, emit the sequence,
8815 rtx_insn
*seq
= get_insns ();
8821 /* Otherwise discard the sequence and fall back to code with
8827 emit_move_insn (target
, op0
);
8829 lab
= gen_label_rtx ();
8830 do_compare_rtx_and_jump (target
, cmpop1
, comparison_code
,
8831 unsignedp
, mode
, NULL_RTX
, NULL
, lab
,
8834 emit_move_insn (target
, op1
);
8839 op0
= expand_expr (treeop0
, subtarget
,
8840 VOIDmode
, EXPAND_NORMAL
);
8841 if (modifier
== EXPAND_STACK_PARM
)
8843 /* In case we have to reduce the result to bitfield precision
8844 for unsigned bitfield expand this as XOR with a proper constant
8846 if (reduce_bit_field
&& TYPE_UNSIGNED (type
))
8848 wide_int mask
= wi::mask (TYPE_PRECISION (type
),
8849 false, GET_MODE_PRECISION (mode
));
8851 temp
= expand_binop (mode
, xor_optab
, op0
,
8852 immed_wide_int_const (mask
, mode
),
8853 target
, 1, OPTAB_LIB_WIDEN
);
8856 temp
= expand_unop (mode
, one_cmpl_optab
, op0
, target
, 1);
8860 /* ??? Can optimize bitwise operations with one arg constant.
8861 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8862 and (a bitwise1 b) bitwise2 b (etc)
8863 but that is probably not worth while. */
8872 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type
))
8873 || (GET_MODE_PRECISION (TYPE_MODE (type
))
8874 == TYPE_PRECISION (type
)));
8879 /* If this is a fixed-point operation, then we cannot use the code
8880 below because "expand_shift" doesn't support sat/no-sat fixed-point
8882 if (ALL_FIXED_POINT_MODE_P (mode
))
8885 if (! safe_from_p (subtarget
, treeop1
, 1))
8887 if (modifier
== EXPAND_STACK_PARM
)
8889 op0
= expand_expr (treeop0
, subtarget
,
8890 VOIDmode
, EXPAND_NORMAL
);
8891 temp
= expand_variable_shift (code
, mode
, op0
, treeop1
, target
,
8893 if (code
== LSHIFT_EXPR
)
8894 temp
= REDUCE_BIT_FIELD (temp
);
8897 /* Could determine the answer when only additive constants differ. Also,
8898 the addition of one can be handled by changing the condition. */
8905 case UNORDERED_EXPR
:
8914 temp
= do_store_flag (ops
,
8915 modifier
!= EXPAND_STACK_PARM
? target
: NULL_RTX
,
8916 tmode
!= VOIDmode
? tmode
: mode
);
8920 /* Use a compare and a jump for BLKmode comparisons, or for function
8921 type comparisons is HAVE_canonicalize_funcptr_for_compare. */
8924 || modifier
== EXPAND_STACK_PARM
8925 || ! safe_from_p (target
, treeop0
, 1)
8926 || ! safe_from_p (target
, treeop1
, 1)
8927 /* Make sure we don't have a hard reg (such as function's return
8928 value) live across basic blocks, if not optimizing. */
8929 || (!optimize
&& REG_P (target
)
8930 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)))
8931 target
= gen_reg_rtx (tmode
!= VOIDmode
? tmode
: mode
);
8933 emit_move_insn (target
, const0_rtx
);
8935 rtx_code_label
*lab1
= gen_label_rtx ();
8936 jumpifnot_1 (code
, treeop0
, treeop1
, lab1
, -1);
8938 if (TYPE_PRECISION (type
) == 1 && !TYPE_UNSIGNED (type
))
8939 emit_move_insn (target
, constm1_rtx
);
8941 emit_move_insn (target
, const1_rtx
);
8947 /* Get the rtx code of the operands. */
8948 op0
= expand_normal (treeop0
);
8949 op1
= expand_normal (treeop1
);
8952 target
= gen_reg_rtx (TYPE_MODE (type
));
8954 /* If target overlaps with op1, then either we need to force
8955 op1 into a pseudo (if target also overlaps with op0),
8956 or write the complex parts in reverse order. */
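      /* For illustration: if OP1 (the imaginary value) lives in the half of
	 TARGET that holds the real part, storing OP0 into that half first
	 would clobber OP1 before it is read.  The cases below therefore
	 either copy OP1 into a fresh pseudo (when OP0 overlaps the other
	 half too) or simply write the imaginary part before the real
	 part.  */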
8957 switch (GET_CODE (target
))
8960 if (reg_overlap_mentioned_p (XEXP (target
, 0), op1
))
8962 if (reg_overlap_mentioned_p (XEXP (target
, 1), op0
))
8964 complex_expr_force_op1
:
8965 temp
= gen_reg_rtx (GET_MODE_INNER (GET_MODE (target
)));
8966 emit_move_insn (temp
, op1
);
8970 complex_expr_swap_order
:
8971 /* Move the imaginary (op1) and real (op0) parts to their
8973 write_complex_part (target
, op1
, true);
8974 write_complex_part (target
, op0
, false);
8980 temp
= adjust_address_nv (target
,
8981 GET_MODE_INNER (GET_MODE (target
)), 0);
8982 if (reg_overlap_mentioned_p (temp
, op1
))
8984 machine_mode imode
= GET_MODE_INNER (GET_MODE (target
));
8985 temp
= adjust_address_nv (target
, imode
,
8986 GET_MODE_SIZE (imode
));
8987 if (reg_overlap_mentioned_p (temp
, op0
))
8988 goto complex_expr_force_op1
;
8989 goto complex_expr_swap_order
;
8993 if (reg_overlap_mentioned_p (target
, op1
))
8995 if (reg_overlap_mentioned_p (target
, op0
))
8996 goto complex_expr_force_op1
;
8997 goto complex_expr_swap_order
;
9002 /* Move the real (op0) and imaginary (op1) parts to their location. */
9003 write_complex_part (target
, op0
, false);
9004 write_complex_part (target
, op1
, true);
9008 case WIDEN_SUM_EXPR
:
9010 tree oprnd0
= treeop0
;
9011 tree oprnd1
= treeop1
;
9013 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9014 target
= expand_widen_pattern_expr (ops
, op0
, NULL_RTX
, op1
,
9019 case REDUC_MAX_EXPR
:
9020 case REDUC_MIN_EXPR
:
9021 case REDUC_PLUS_EXPR
:
9023 op0
= expand_normal (treeop0
);
9024 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
9025 machine_mode vec_mode
= TYPE_MODE (TREE_TYPE (treeop0
));
9027 if (optab_handler (this_optab
, vec_mode
) != CODE_FOR_nothing
)
9029 struct expand_operand ops
[2];
9030 enum insn_code icode
= optab_handler (this_optab
, vec_mode
);
9032 create_output_operand (&ops
[0], target
, mode
);
9033 create_input_operand (&ops
[1], op0
, vec_mode
);
9034 if (maybe_expand_insn (icode
, 2, ops
))
9036 target
= ops
[0].value
;
9037 if (GET_MODE (target
) != mode
)
9038 return gen_lowpart (tmode
, target
);
9042 /* Fall back to optab with vector result, and then extract scalar. */
9043 this_optab
= scalar_reduc_to_vector (this_optab
, type
);
9044 temp
= expand_unop (vec_mode
, this_optab
, op0
, NULL_RTX
, unsignedp
);
	/* The tree code produces a scalar result, but (somewhat by convention)
	   the optab produces a vector with the result in element 0 if
	   little-endian, or element N-1 if big-endian.  So pull the scalar
	   result out of that element.  */
	int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
	int bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (vec_mode));
	temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
				  target, mode, mode);
9058 case VEC_UNPACK_HI_EXPR
:
9059 case VEC_UNPACK_LO_EXPR
:
9061 op0
= expand_normal (treeop0
);
9062 temp
= expand_widen_pattern_expr (ops
, op0
, NULL_RTX
, NULL_RTX
,
9068 case VEC_UNPACK_FLOAT_HI_EXPR
:
9069 case VEC_UNPACK_FLOAT_LO_EXPR
:
9071 op0
= expand_normal (treeop0
);
9072 /* The signedness is determined from input operand. */
9073 temp
= expand_widen_pattern_expr
9074 (ops
, op0
, NULL_RTX
, NULL_RTX
,
9075 target
, TYPE_UNSIGNED (TREE_TYPE (treeop0
)));
9081 case VEC_WIDEN_MULT_HI_EXPR
:
9082 case VEC_WIDEN_MULT_LO_EXPR
:
9083 case VEC_WIDEN_MULT_EVEN_EXPR
:
9084 case VEC_WIDEN_MULT_ODD_EXPR
:
9085 case VEC_WIDEN_LSHIFT_HI_EXPR
:
9086 case VEC_WIDEN_LSHIFT_LO_EXPR
:
9087 expand_operands (treeop0
, treeop1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9088 target
= expand_widen_pattern_expr (ops
, op0
, op1
, NULL_RTX
,
9090 gcc_assert (target
);
9093 case VEC_PACK_TRUNC_EXPR
:
9094 case VEC_PACK_SAT_EXPR
:
9095 case VEC_PACK_FIX_TRUNC_EXPR
:
9096 mode
= TYPE_MODE (TREE_TYPE (treeop0
));
9100 expand_operands (treeop0
, treeop1
, target
, &op0
, &op1
, EXPAND_NORMAL
);
9101 op2
= expand_normal (treeop2
);
9103 /* Careful here: if the target doesn't support integral vector modes,
9104 a constant selection vector could wind up smooshed into a normal
9105 integral constant. */
9106 if (CONSTANT_P (op2
) && GET_CODE (op2
) != CONST_VECTOR
)
9108 tree sel_type
= TREE_TYPE (treeop2
);
9110 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type
)),
9111 TYPE_VECTOR_SUBPARTS (sel_type
));
9112 gcc_assert (GET_MODE_CLASS (vmode
) == MODE_VECTOR_INT
);
9113 op2
= simplify_subreg (vmode
, op2
, TYPE_MODE (sel_type
), 0);
9114 gcc_assert (op2
&& GET_CODE (op2
) == CONST_VECTOR
);
9117 gcc_assert (GET_MODE_CLASS (GET_MODE (op2
)) == MODE_VECTOR_INT
);
9119 temp
= expand_vec_perm (mode
, op0
, op1
, op2
, target
);
9125 tree oprnd0
= treeop0
;
9126 tree oprnd1
= treeop1
;
9127 tree oprnd2
= treeop2
;
9130 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9131 op2
= expand_normal (oprnd2
);
9132 target
= expand_widen_pattern_expr (ops
, op0
, op1
, op2
,
9139 tree oprnd0
= treeop0
;
9140 tree oprnd1
= treeop1
;
9141 tree oprnd2
= treeop2
;
9144 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9145 op2
= expand_normal (oprnd2
);
9146 target
= expand_widen_pattern_expr (ops
, op0
, op1
, op2
,
9151 case REALIGN_LOAD_EXPR
:
9153 tree oprnd0
= treeop0
;
9154 tree oprnd1
= treeop1
;
9155 tree oprnd2
= treeop2
;
9158 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
9159 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9160 op2
= expand_normal (oprnd2
);
9161 temp
= expand_ternary_op (mode
, this_optab
, op0
, op1
, op2
,
9169 /* A COND_EXPR with its type being VOID_TYPE represents a
9170 conditional jump and is handled in
9171 expand_gimple_cond_expr. */
9172 gcc_assert (!VOID_TYPE_P (type
));
9174 /* Note that COND_EXPRs whose type is a structure or union
9175 are required to be constructed to contain assignments of
9176 a temporary variable, so that we can evaluate them here
9177 for side effect only. If type is void, we must do likewise. */
9179 gcc_assert (!TREE_ADDRESSABLE (type
)
9181 && TREE_TYPE (treeop1
) != void_type_node
9182 && TREE_TYPE (treeop2
) != void_type_node
);
9184 temp
= expand_cond_expr_using_cmove (treeop0
, treeop1
, treeop2
);
9188 /* If we are not to produce a result, we have no target. Otherwise,
9189 if a target was specified use it; it will not be used as an
9190 intermediate target unless it is safe. If no target, use a
9193 if (modifier
!= EXPAND_STACK_PARM
9195 && safe_from_p (original_target
, treeop0
, 1)
9196 && GET_MODE (original_target
) == mode
9197 && !MEM_P (original_target
))
9198 temp
= original_target
;
9200 temp
= assign_temp (type
, 0, 1);
9202 do_pending_stack_adjust ();
9204 rtx_code_label
*lab0
= gen_label_rtx ();
9205 rtx_code_label
*lab1
= gen_label_rtx ();
9206 jumpifnot (treeop0
, lab0
, -1);
9207 store_expr (treeop1
, temp
,
9208 modifier
== EXPAND_STACK_PARM
,
9211 emit_jump_insn (gen_jump (lab1
));
9214 store_expr (treeop2
, temp
,
9215 modifier
== EXPAND_STACK_PARM
,
9224 target
= expand_vec_cond_expr (type
, treeop0
, treeop1
, treeop2
, target
);
9231 /* Here to do an ordinary binary operator. */
9233 expand_operands (treeop0
, treeop1
,
9234 subtarget
, &op0
, &op1
, EXPAND_NORMAL
);
9236 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
9238 if (modifier
== EXPAND_STACK_PARM
)
9240 temp
= expand_binop (mode
, this_optab
, op0
, op1
, target
,
9241 unsignedp
, OPTAB_LIB_WIDEN
);
  gcc_assert (temp);
  /* Bitwise operations do not need bitfield reduction as we expect their
     operands being properly truncated.  */
  if (code == BIT_XOR_EXPR
      || code == BIT_AND_EXPR
      || code == BIT_IOR_EXPR)
    return temp;
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD


/* Return TRUE if expression STMT is suitable for replacement.
   Never consider memory loads as replaceable, because those don't ever lead
   into constant expressions.  */

static bool
stmt_is_replaceable_p (gimple stmt)
{
  if (ssa_is_replaceable_p (stmt))
    {
      /* Don't move around loads.  */
      if (!gimple_assign_single_p (stmt)
	  || is_gimple_val (gimple_assign_rhs1 (stmt)))
	return true;
    }

  return false;
}
rtx
expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl,
		    bool inner_reference_p)
{
  rtx op0, op1, temp, decl_rtl;
  tree type;
  int unsignedp;
  machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  rtx subtarget, original_target;
  int ignore;
  tree context;
  bool reduce_bit_field;
  location_t loc = EXPR_LOCATION (exp);
  struct separate_ops ops;
  tree treeop0, treeop1, treeop2;
  tree ssa_name = NULL_TREE;
  gimple g;

  type = TREE_TYPE (exp);
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = treeop1 = treeop2 = NULL_TREE;
  if (!VL_EXP_CLASS_P (exp))
    switch (TREE_CODE_LENGTH (code))
      {
	default: gcc_unreachable ();
	case 3: treeop2 = TREE_OPERAND (exp, 2);
	case 2: treeop1 = TREE_OPERAND (exp, 1);
	case 1: treeop0 = TREE_OPERAND (exp, 0);
	case 0: break;
      }
9312 ignore
= (target
== const0_rtx
9313 || ((CONVERT_EXPR_CODE_P (code
)
9314 || code
== COND_EXPR
|| code
== VIEW_CONVERT_EXPR
)
9315 && TREE_CODE (type
) == VOID_TYPE
));
9317 /* An operation in what may be a bit-field type needs the
9318 result to be reduced to the precision of the bit-field type,
9319 which is narrower than that of the type's mode. */
9320 reduce_bit_field
= (!ignore
9321 && INTEGRAL_TYPE_P (type
)
9322 && GET_MODE_PRECISION (mode
) > TYPE_PRECISION (type
));
9324 /* If we are going to ignore this result, we need only do something
9325 if there is a side-effect somewhere in the expression. If there
9326 is, short-circuit the most common cases here. Note that we must
9327 not call expand_expr with anything but const0_rtx in case this
9328 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9332 if (! TREE_SIDE_EFFECTS (exp
))
9335 /* Ensure we reference a volatile object even if value is ignored, but
9336 don't do this if all we are doing is taking its address. */
9337 if (TREE_THIS_VOLATILE (exp
)
9338 && TREE_CODE (exp
) != FUNCTION_DECL
9339 && mode
!= VOIDmode
&& mode
!= BLKmode
9340 && modifier
!= EXPAND_CONST_ADDRESS
)
9342 temp
= expand_expr (exp
, NULL_RTX
, VOIDmode
, modifier
);
9348 if (TREE_CODE_CLASS (code
) == tcc_unary
9349 || code
== BIT_FIELD_REF
9350 || code
== COMPONENT_REF
9351 || code
== INDIRECT_REF
)
9352 return expand_expr (treeop0
, const0_rtx
, VOIDmode
,
9355 else if (TREE_CODE_CLASS (code
) == tcc_binary
9356 || TREE_CODE_CLASS (code
) == tcc_comparison
9357 || code
== ARRAY_REF
|| code
== ARRAY_RANGE_REF
)
9359 expand_expr (treeop0
, const0_rtx
, VOIDmode
, modifier
);
9360 expand_expr (treeop1
, const0_rtx
, VOIDmode
, modifier
);
9367 if (reduce_bit_field
&& modifier
== EXPAND_STACK_PARM
)
9370 /* Use subtarget as the target for operand 0 of a binary operation. */
9371 subtarget
= get_subtarget (target
);
9372 original_target
= target
;
9378 tree function
= decl_function_context (exp
);
9380 temp
= label_rtx (exp
);
9381 temp
= gen_rtx_LABEL_REF (Pmode
, temp
);
9383 if (function
!= current_function_decl
9385 LABEL_REF_NONLOCAL_P (temp
) = 1;
9387 temp
= gen_rtx_MEM (FUNCTION_MODE
, temp
);
9392 /* ??? ivopts calls expander, without any preparation from
9393 out-of-ssa. So fake instructions as if this was an access to the
9394 base variable. This unnecessarily allocates a pseudo, see how we can
9395 reuse it, if partition base vars have it set already. */
9396 if (!currently_expanding_to_rtl
)
9398 tree var
= SSA_NAME_VAR (exp
);
9399 if (var
&& DECL_RTL_SET_P (var
))
9400 return DECL_RTL (var
);
9401 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp
)),
9402 LAST_VIRTUAL_REGISTER
+ 1);
9405 g
= get_gimple_for_ssa_name (exp
);
9406 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9408 && modifier
== EXPAND_INITIALIZER
9409 && !SSA_NAME_IS_DEFAULT_DEF (exp
)
9410 && (optimize
|| DECL_IGNORED_P (SSA_NAME_VAR (exp
)))
9411 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp
)))
9412 g
= SSA_NAME_DEF_STMT (exp
);
9416 ops
.code
= gimple_assign_rhs_code (g
);
9417 switch (get_gimple_rhs_class (ops
.code
))
9419 case GIMPLE_TERNARY_RHS
:
9420 ops
.op2
= gimple_assign_rhs3 (g
);
9422 case GIMPLE_BINARY_RHS
:
9423 ops
.op1
= gimple_assign_rhs2 (g
);
	  /* Try to expand conditional compare.  */
9426 if (targetm
.gen_ccmp_first
)
9428 gcc_checking_assert (targetm
.gen_ccmp_next
!= NULL
);
9429 r
= expand_ccmp_expr (g
);
9434 case GIMPLE_UNARY_RHS
:
9435 ops
.op0
= gimple_assign_rhs1 (g
);
9436 ops
.type
= TREE_TYPE (gimple_assign_lhs (g
));
9437 ops
.location
= gimple_location (g
);
9438 r
= expand_expr_real_2 (&ops
, target
, tmode
, modifier
);
9440 case GIMPLE_SINGLE_RHS
:
9442 location_t saved_loc
= curr_insn_location ();
9443 set_curr_insn_location (gimple_location (g
));
9444 r
= expand_expr_real (gimple_assign_rhs1 (g
), target
,
9445 tmode
, modifier
, NULL
, inner_reference_p
);
9446 set_curr_insn_location (saved_loc
);
9452 if (REG_P (r
) && !REG_EXPR (r
))
9453 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp
), r
);
9458 decl_rtl
= get_rtx_for_ssa_name (ssa_name
);
9459 exp
= SSA_NAME_VAR (ssa_name
);
9460 goto expand_decl_rtl
;
9464 /* If a static var's type was incomplete when the decl was written,
9465 but the type is complete now, lay out the decl now. */
9466 if (DECL_SIZE (exp
) == 0
9467 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp
))
9468 && (TREE_STATIC (exp
) || DECL_EXTERNAL (exp
)))
9469 layout_decl (exp
, 0);
9471 /* ... fall through ... */
9475 decl_rtl
= DECL_RTL (exp
);
9477 gcc_assert (decl_rtl
);
9478 decl_rtl
= copy_rtx (decl_rtl
);
9479 /* Record writes to register variables. */
9480 if (modifier
== EXPAND_WRITE
9482 && HARD_REGISTER_P (decl_rtl
))
9483 add_to_hard_reg_set (&crtl
->asm_clobbers
,
9484 GET_MODE (decl_rtl
), REGNO (decl_rtl
));
      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      TREE_USED (exp) = 1;
9491 /* Show we haven't gotten RTL for this yet. */
9494 /* Variables inherited from containing functions should have
9495 been lowered by this point. */
9496 context
= decl_function_context (exp
);
9497 gcc_assert (SCOPE_FILE_SCOPE_P (context
)
9498 || context
== current_function_decl
9499 || TREE_STATIC (exp
)
9500 || DECL_EXTERNAL (exp
)
9501 /* ??? C++ creates functions that are not TREE_STATIC. */
9502 || TREE_CODE (exp
) == FUNCTION_DECL
);
9504 /* This is the case of an array whose size is to be determined
9505 from its initializer, while the initializer is still being parsed.
9506 ??? We aren't parsing while expanding anymore. */
9508 if (MEM_P (decl_rtl
) && REG_P (XEXP (decl_rtl
, 0)))
9509 temp
= validize_mem (decl_rtl
);
9511 /* If DECL_RTL is memory, we are in the normal case and the
9512 address is not valid, get the address into a register. */
9514 else if (MEM_P (decl_rtl
) && modifier
!= EXPAND_INITIALIZER
)
9517 *alt_rtl
= decl_rtl
;
9518 decl_rtl
= use_anchored_address (decl_rtl
);
9519 if (modifier
!= EXPAND_CONST_ADDRESS
9520 && modifier
!= EXPAND_SUM
9521 && !memory_address_addr_space_p (DECL_MODE (exp
),
9523 MEM_ADDR_SPACE (decl_rtl
)))
9524 temp
= replace_equiv_address (decl_rtl
,
9525 copy_rtx (XEXP (decl_rtl
, 0)));
9528 /* If we got something, return it. But first, set the alignment
9529 if the address is a register. */
9532 if (MEM_P (temp
) && REG_P (XEXP (temp
, 0)))
9533 mark_reg_pointer (XEXP (temp
, 0), DECL_ALIGN (exp
));
9538 /* If the mode of DECL_RTL does not match that of the decl,
9539 there are two cases: we are dealing with a BLKmode value
9540 that is returned in a register, or we are dealing with
9541 a promoted value. In the latter case, return a SUBREG
9542 of the wanted mode, but mark it so that we know that it
9543 was already extended. */
9544 if (REG_P (decl_rtl
)
9545 && DECL_MODE (exp
) != BLKmode
9546 && GET_MODE (decl_rtl
) != DECL_MODE (exp
))
9550 /* Get the signedness to be used for this variable. Ensure we get
9551 the same mode we got when the variable was declared. */
9552 if (code
== SSA_NAME
9553 && (g
= SSA_NAME_DEF_STMT (ssa_name
))
9554 && gimple_code (g
) == GIMPLE_CALL
9555 && !gimple_call_internal_p (g
))
9556 pmode
= promote_function_mode (type
, mode
, &unsignedp
,
9557 gimple_call_fntype (g
),
9560 pmode
= promote_decl_mode (exp
, &unsignedp
);
9561 gcc_assert (GET_MODE (decl_rtl
) == pmode
);
9563 temp
= gen_lowpart_SUBREG (mode
, decl_rtl
);
9564 SUBREG_PROMOTED_VAR_P (temp
) = 1;
9565 SUBREG_PROMOTED_SET (temp
, unsignedp
);
9572 /* Given that TYPE_PRECISION (type) is not always equal to
9573 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9574 the former to the latter according to the signedness of the
9576 temp
= immed_wide_int_const (wide_int::from
9578 GET_MODE_PRECISION (TYPE_MODE (type
)),
9585 tree tmp
= NULL_TREE
;
9586 if (GET_MODE_CLASS (mode
) == MODE_VECTOR_INT
9587 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FLOAT
9588 || GET_MODE_CLASS (mode
) == MODE_VECTOR_FRACT
9589 || GET_MODE_CLASS (mode
) == MODE_VECTOR_UFRACT
9590 || GET_MODE_CLASS (mode
) == MODE_VECTOR_ACCUM
9591 || GET_MODE_CLASS (mode
) == MODE_VECTOR_UACCUM
)
9592 return const_vector_from_tree (exp
);
9593 if (GET_MODE_CLASS (mode
) == MODE_INT
)
9595 tree type_for_mode
= lang_hooks
.types
.type_for_mode (mode
, 1);
9597 tmp
= fold_unary_loc (loc
, VIEW_CONVERT_EXPR
, type_for_mode
, exp
);
9601 vec
<constructor_elt
, va_gc
> *v
;
9603 vec_alloc (v
, VECTOR_CST_NELTS (exp
));
9604 for (i
= 0; i
< VECTOR_CST_NELTS (exp
); ++i
)
9605 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, VECTOR_CST_ELT (exp
, i
));
9606 tmp
= build_constructor (type
, v
);
9608 return expand_expr (tmp
, ignore
? const0_rtx
: target
,
9613 return expand_expr (DECL_INITIAL (exp
), target
, VOIDmode
, modifier
);
9616 /* If optimized, generate immediate CONST_DOUBLE
9617 which will be turned into memory by reload if necessary.
9619 We used to force a register so that loop.c could see it. But
9620 this does not allow gen_* patterns to perform optimizations with
9621 the constants. It also produces two insns in cases like "x = 1.0;".
9622 On most machines, floating-point constants are not permitted in
9623 many insns, so we'd end up copying it to a register in any case.
9625 Now, we do the copying in expand_binop, if appropriate. */
9626 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp
),
9627 TYPE_MODE (TREE_TYPE (exp
)));
9630 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp
),
9631 TYPE_MODE (TREE_TYPE (exp
)));
9634 /* Handle evaluating a complex constant in a CONCAT target. */
9635 if (original_target
&& GET_CODE (original_target
) == CONCAT
)
9637 machine_mode mode
= TYPE_MODE (TREE_TYPE (TREE_TYPE (exp
)));
9640 rtarg
= XEXP (original_target
, 0);
9641 itarg
= XEXP (original_target
, 1);
9643 /* Move the real and imaginary parts separately. */
9644 op0
= expand_expr (TREE_REALPART (exp
), rtarg
, mode
, EXPAND_NORMAL
);
9645 op1
= expand_expr (TREE_IMAGPART (exp
), itarg
, mode
, EXPAND_NORMAL
);
9648 emit_move_insn (rtarg
, op0
);
9650 emit_move_insn (itarg
, op1
);
9652 return original_target
;
9655 /* ... fall through ... */
9658 temp
= expand_expr_constant (exp
, 1, modifier
);
9660 /* temp contains a constant address.
9661 On RISC machines where a constant address isn't valid,
9662 make some insns to get that address into a register. */
9663 if (modifier
!= EXPAND_CONST_ADDRESS
9664 && modifier
!= EXPAND_INITIALIZER
9665 && modifier
!= EXPAND_SUM
9666 && ! memory_address_addr_space_p (mode
, XEXP (temp
, 0),
9667 MEM_ADDR_SPACE (temp
)))
9668 return replace_equiv_address (temp
,
9669 copy_rtx (XEXP (temp
, 0)));
9675 rtx ret
= expand_expr_real_1 (val
, target
, tmode
, modifier
, alt_rtl
,
9678 if (!SAVE_EXPR_RESOLVED_P (exp
))
9680 /* We can indeed still hit this case, typically via builtin
9681 expanders calling save_expr immediately before expanding
9682 something. Assume this means that we only have to deal
9683 with non-BLKmode values. */
9684 gcc_assert (GET_MODE (ret
) != BLKmode
);
9686 val
= build_decl (curr_insn_location (),
9687 VAR_DECL
, NULL
, TREE_TYPE (exp
));
9688 DECL_ARTIFICIAL (val
) = 1;
9689 DECL_IGNORED_P (val
) = 1;
9691 TREE_OPERAND (exp
, 0) = treeop0
;
9692 SAVE_EXPR_RESOLVED_P (exp
) = 1;
9694 if (!CONSTANT_P (ret
))
9695 ret
= copy_to_reg (ret
);
9696 SET_DECL_RTL (val
, ret
);
9704 /* If we don't need the result, just ensure we evaluate any
9708 unsigned HOST_WIDE_INT idx
;
9711 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), idx
, value
)
9712 expand_expr (value
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
9717 return expand_constructor (exp
, target
, modifier
, false);
9719 case TARGET_MEM_REF
:
9722 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
9723 enum insn_code icode
;
9726 op0
= addr_for_mem_ref (exp
, as
, true);
9727 op0
= memory_address_addr_space (mode
, op0
, as
);
9728 temp
= gen_rtx_MEM (mode
, op0
);
9729 set_mem_attributes (temp
, exp
, 0);
9730 set_mem_addr_space (temp
, as
);
9731 align
= get_object_alignment (exp
);
9732 if (modifier
!= EXPAND_WRITE
9733 && modifier
!= EXPAND_MEMORY
9735 && align
< GET_MODE_ALIGNMENT (mode
)
9736 /* If the target does not have special handling for unaligned
9737 loads of mode then it can use regular moves for them. */
9738 && ((icode
= optab_handler (movmisalign_optab
, mode
))
9739 != CODE_FOR_nothing
))
9741 struct expand_operand ops
[2];
9743 /* We've already validated the memory, and we're creating a
9744 new pseudo destination. The predicates really can't fail,
9745 nor can the generator. */
9746 create_output_operand (&ops
[0], NULL_RTX
, mode
);
9747 create_fixed_operand (&ops
[1], temp
);
9748 expand_insn (icode
, 2, ops
);
9749 temp
= ops
[0].value
;
9757 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
9758 machine_mode address_mode
;
9759 tree base
= TREE_OPERAND (exp
, 0);
9761 enum insn_code icode
;
9763 /* Handle expansion of non-aliased memory with non-BLKmode. That
9764 might end up in a register. */
9765 if (mem_ref_refers_to_non_mem_p (exp
))
9767 HOST_WIDE_INT offset
= mem_ref_offset (exp
).to_short_addr ();
9768 base
= TREE_OPERAND (base
, 0);
9770 && tree_fits_uhwi_p (TYPE_SIZE (type
))
9771 && (GET_MODE_BITSIZE (DECL_MODE (base
))
9772 == tree_to_uhwi (TYPE_SIZE (type
))))
9773 return expand_expr (build1 (VIEW_CONVERT_EXPR
, type
, base
),
9774 target
, tmode
, modifier
);
9775 if (TYPE_MODE (type
) == BLKmode
)
9777 temp
= assign_stack_temp (DECL_MODE (base
),
9778 GET_MODE_SIZE (DECL_MODE (base
)));
9779 store_expr (base
, temp
, 0, false);
9780 temp
= adjust_address (temp
, BLKmode
, offset
);
9781 set_mem_size (temp
, int_size_in_bytes (type
));
9784 exp
= build3 (BIT_FIELD_REF
, type
, base
, TYPE_SIZE (type
),
9785 bitsize_int (offset
* BITS_PER_UNIT
));
9786 return expand_expr (exp
, target
, tmode
, modifier
);
9788 address_mode
= targetm
.addr_space
.address_mode (as
);
9789 base
= TREE_OPERAND (exp
, 0);
9790 if ((def_stmt
= get_def_for_expr (base
, BIT_AND_EXPR
)))
9792 tree mask
= gimple_assign_rhs2 (def_stmt
);
9793 base
= build2 (BIT_AND_EXPR
, TREE_TYPE (base
),
9794 gimple_assign_rhs1 (def_stmt
), mask
);
9795 TREE_OPERAND (exp
, 0) = base
;
9797 align
= get_object_alignment (exp
);
9798 op0
= expand_expr (base
, NULL_RTX
, VOIDmode
, EXPAND_SUM
);
9799 op0
= memory_address_addr_space (mode
, op0
, as
);
9800 if (!integer_zerop (TREE_OPERAND (exp
, 1)))
9802 rtx off
= immed_wide_int_const (mem_ref_offset (exp
), address_mode
);
9803 op0
= simplify_gen_binary (PLUS
, address_mode
, op0
, off
);
9804 op0
= memory_address_addr_space (mode
, op0
, as
);
9806 temp
= gen_rtx_MEM (mode
, op0
);
9807 set_mem_attributes (temp
, exp
, 0);
9808 set_mem_addr_space (temp
, as
);
9809 if (TREE_THIS_VOLATILE (exp
))
9810 MEM_VOLATILE_P (temp
) = 1;
9811 if (modifier
!= EXPAND_WRITE
9812 && modifier
!= EXPAND_MEMORY
9813 && !inner_reference_p
9815 && align
< GET_MODE_ALIGNMENT (mode
))
9817 if ((icode
= optab_handler (movmisalign_optab
, mode
))
9818 != CODE_FOR_nothing
)
9820 struct expand_operand ops
[2];
9822 /* We've already validated the memory, and we're creating a
9823 new pseudo destination. The predicates really can't fail,
9824 nor can the generator. */
9825 create_output_operand (&ops
[0], NULL_RTX
, mode
);
9826 create_fixed_operand (&ops
[1], temp
);
9827 expand_insn (icode
, 2, ops
);
9828 temp
= ops
[0].value
;
9830 else if (SLOW_UNALIGNED_ACCESS (mode
, align
))
9831 temp
= extract_bit_field (temp
, GET_MODE_BITSIZE (mode
),
9832 0, TYPE_UNSIGNED (TREE_TYPE (exp
)),
9833 (modifier
== EXPAND_STACK_PARM
9834 ? NULL_RTX
: target
),
9843 tree array
= treeop0
;
9844 tree index
= treeop1
;
9847 /* Fold an expression like: "foo"[2].
9848 This is not done in fold so it won't happen inside &.
9849 Don't fold if this is for wide characters since it's too
9850 difficult to do correctly and this is a very rare case. */
9852 if (modifier
!= EXPAND_CONST_ADDRESS
9853 && modifier
!= EXPAND_INITIALIZER
9854 && modifier
!= EXPAND_MEMORY
)
9856 tree t
= fold_read_from_constant_string (exp
);
9859 return expand_expr (t
, target
, tmode
, modifier
);
9862 /* If this is a constant index into a constant array,
9863 just get the value from the array. Handle both the cases when
9864 we have an explicit constructor and when our operand is a variable
9865 that was declared const. */
9867 if (modifier
!= EXPAND_CONST_ADDRESS
9868 && modifier
!= EXPAND_INITIALIZER
9869 && modifier
!= EXPAND_MEMORY
9870 && TREE_CODE (array
) == CONSTRUCTOR
9871 && ! TREE_SIDE_EFFECTS (array
)
9872 && TREE_CODE (index
) == INTEGER_CST
)
9874 unsigned HOST_WIDE_INT ix
;
9877 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array
), ix
,
9879 if (tree_int_cst_equal (field
, index
))
9881 if (!TREE_SIDE_EFFECTS (value
))
9882 return expand_expr (fold (value
), target
, tmode
, modifier
);
9887 else if (optimize
>= 1
9888 && modifier
!= EXPAND_CONST_ADDRESS
9889 && modifier
!= EXPAND_INITIALIZER
9890 && modifier
!= EXPAND_MEMORY
9891 && TREE_READONLY (array
) && ! TREE_SIDE_EFFECTS (array
)
9892 && TREE_CODE (index
) == INTEGER_CST
9893 && (TREE_CODE (array
) == VAR_DECL
9894 || TREE_CODE (array
) == CONST_DECL
)
9895 && (init
= ctor_for_folding (array
)) != error_mark_node
)
9897 if (init
== NULL_TREE
)
9899 tree value
= build_zero_cst (type
);
9900 if (TREE_CODE (value
) == CONSTRUCTOR
)
9902 /* If VALUE is a CONSTRUCTOR, this optimization is only
9903 useful if this doesn't store the CONSTRUCTOR into
9904 memory. If it does, it is more efficient to just
9905 load the data from the array directly. */
9906 rtx ret
= expand_constructor (value
, target
,
9908 if (ret
== NULL_RTX
)
9913 return expand_expr (value
, target
, tmode
, modifier
);
9915 else if (TREE_CODE (init
) == CONSTRUCTOR
)
9917 unsigned HOST_WIDE_INT ix
;
9920 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init
), ix
,
9922 if (tree_int_cst_equal (field
, index
))
9924 if (TREE_SIDE_EFFECTS (value
))
9927 if (TREE_CODE (value
) == CONSTRUCTOR
)
9929 /* If VALUE is a CONSTRUCTOR, this
9930 optimization is only useful if
9931 this doesn't store the CONSTRUCTOR
9932 into memory. If it does, it is more
9933 efficient to just load the data from
9934 the array directly. */
9935 rtx ret
= expand_constructor (value
, target
,
9937 if (ret
== NULL_RTX
)
9942 expand_expr (fold (value
), target
, tmode
, modifier
);
9945 else if (TREE_CODE (init
) == STRING_CST
)
9947 tree low_bound
= array_ref_low_bound (exp
);
9948 tree index1
= fold_convert_loc (loc
, sizetype
, treeop1
);
		/* Optimize the special case of a zero lower bound.

		   We convert the lower bound to sizetype to avoid problems
		   with constant folding.  E.g. suppose the lower bound is
		   1 and its mode is QI.  Without the conversion
		      (ARRAY + (INDEX - (unsigned char)1))
		   becomes
		      (ARRAY + (-(unsigned char)1) + INDEX)
		   which becomes
		      (ARRAY + 255 + INDEX).  Oops!  */
9960 if (!integer_zerop (low_bound
))
9961 index1
= size_diffop_loc (loc
, index1
,
9962 fold_convert_loc (loc
, sizetype
,
9965 if (compare_tree_int (index1
, TREE_STRING_LENGTH (init
)) < 0)
9967 tree type
= TREE_TYPE (TREE_TYPE (init
));
9968 machine_mode mode
= TYPE_MODE (type
);
9970 if (GET_MODE_CLASS (mode
) == MODE_INT
9971 && GET_MODE_SIZE (mode
) == 1)
9972 return gen_int_mode (TREE_STRING_POINTER (init
)
9973 [TREE_INT_CST_LOW (index1
)],
9979 goto normal_inner_ref
;
9982 /* If the operand is a CONSTRUCTOR, we can just extract the
9983 appropriate field if it is present. */
9984 if (TREE_CODE (treeop0
) == CONSTRUCTOR
)
9986 unsigned HOST_WIDE_INT idx
;
9989 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0
),
9991 if (field
== treeop1
9992 /* We can normally use the value of the field in the
9993 CONSTRUCTOR. However, if this is a bitfield in
9994 an integral mode that we can fit in a HOST_WIDE_INT,
9995 we must mask only the number of bits in the bitfield,
9996 since this is done implicitly by the constructor. If
9997 the bitfield does not meet either of those conditions,
9998 we can't do this optimization. */
9999 && (! DECL_BIT_FIELD (field
)
10000 || ((GET_MODE_CLASS (DECL_MODE (field
)) == MODE_INT
)
10001 && (GET_MODE_PRECISION (DECL_MODE (field
))
10002 <= HOST_BITS_PER_WIDE_INT
))))
10004 if (DECL_BIT_FIELD (field
)
10005 && modifier
== EXPAND_STACK_PARM
)
10007 op0
= expand_expr (value
, target
, tmode
, modifier
);
10008 if (DECL_BIT_FIELD (field
))
10010 HOST_WIDE_INT bitsize
= TREE_INT_CST_LOW (DECL_SIZE (field
));
10011 machine_mode imode
= TYPE_MODE (TREE_TYPE (field
));
10013 if (TYPE_UNSIGNED (TREE_TYPE (field
)))
10015 op1
= gen_int_mode (((HOST_WIDE_INT
) 1 << bitsize
) - 1,
10017 op0
= expand_and (imode
, op0
, op1
, target
);
10021 int count
= GET_MODE_PRECISION (imode
) - bitsize
;
10023 op0
= expand_shift (LSHIFT_EXPR
, imode
, op0
, count
,
10025 op0
= expand_shift (RSHIFT_EXPR
, imode
, op0
, count
,
10033 goto normal_inner_ref
;
10035 case BIT_FIELD_REF
:
10036 case ARRAY_RANGE_REF
:
10039 machine_mode mode1
, mode2
;
10040 HOST_WIDE_INT bitsize
, bitpos
;
10042 int volatilep
= 0, must_force_mem
;
10043 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
10044 &mode1
, &unsignedp
, &volatilep
, true);
10045 rtx orig_op0
, memloc
;
10046 bool clear_mem_expr
= false;
10048 /* If we got back the original object, something is wrong. Perhaps
10049 we are evaluating an expression too early. In any event, don't
10050 infinitely recurse. */
10051 gcc_assert (tem
!= exp
);
10053 /* If TEM's type is a union of variable size, pass TARGET to the inner
10054 computation, since it will need a temporary and TARGET is known
10055 to have to do. This occurs in unchecked conversion in Ada. */
10057 = expand_expr_real (tem
,
10058 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
10059 && COMPLETE_TYPE_P (TREE_TYPE (tem
))
10060 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
10062 && modifier
!= EXPAND_STACK_PARM
10063 ? target
: NULL_RTX
),
10065 modifier
== EXPAND_SUM
? EXPAND_NORMAL
: modifier
,
10068 /* If the field has a mode, we want to access it in the
10069 field's mode, not the computed mode.
10070 If a MEM has VOIDmode (external with incomplete type),
10071 use BLKmode for it instead. */
10074 if (mode1
!= VOIDmode
)
10075 op0
= adjust_address (op0
, mode1
, 0);
10076 else if (GET_MODE (op0
) == VOIDmode
)
10077 op0
= adjust_address (op0
, BLKmode
, 0);
10081 = CONSTANT_P (op0
) ? TYPE_MODE (TREE_TYPE (tem
)) : GET_MODE (op0
);
10083 /* If we have either an offset, a BLKmode result, or a reference
10084 outside the underlying object, we must force it to memory.
10085 Such a case can occur in Ada if we have unchecked conversion
10086 of an expression from a scalar type to an aggregate type or
10087 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10088 passed a partially uninitialized object or a view-conversion
10089 to a larger size. */
10090 must_force_mem
= (offset
10091 || mode1
== BLKmode
10092 || bitpos
+ bitsize
> GET_MODE_BITSIZE (mode2
));
10094 /* Handle CONCAT first. */
10095 if (GET_CODE (op0
) == CONCAT
&& !must_force_mem
)
10098 && bitsize
== GET_MODE_BITSIZE (GET_MODE (op0
)))
10101 && bitsize
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0)))
10104 op0
= XEXP (op0
, 0);
10105 mode2
= GET_MODE (op0
);
10107 else if (bitpos
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0)))
10108 && bitsize
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 1)))
10112 op0
= XEXP (op0
, 1);
10114 mode2
= GET_MODE (op0
);
10117 /* Otherwise force into memory. */
10118 must_force_mem
= 1;
10121 /* If this is a constant, put it in a register if it is a legitimate
10122 constant and we don't need a memory reference. */
10123 if (CONSTANT_P (op0
)
10124 && mode2
!= BLKmode
10125 && targetm
.legitimate_constant_p (mode2
, op0
)
10126 && !must_force_mem
)
10127 op0
= force_reg (mode2
, op0
);
10129 /* Otherwise, if this is a constant, try to force it to the constant
10130 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10131 is a legitimate constant. */
10132 else if (CONSTANT_P (op0
) && (memloc
= force_const_mem (mode2
, op0
)))
10133 op0
= validize_mem (memloc
);
10135 /* Otherwise, if this is a constant or the object is not in memory
10136 and need be, put it there. */
10137 else if (CONSTANT_P (op0
) || (!MEM_P (op0
) && must_force_mem
))
10139 memloc
= assign_temp (TREE_TYPE (tem
), 1, 1);
10140 emit_move_insn (memloc
, op0
);
10142 clear_mem_expr
= true;
	if (offset)
	  {
	    machine_mode address_mode;
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
					  EXPAND_SUM);

	    gcc_assert (MEM_P (op0));

	    address_mode = get_address_mode (op0);
	    if (GET_MODE (offset_rtx) != address_mode)
	      {
		/* We cannot be sure that the RTL in offset_rtx is valid outside
		   of a memory address context, so force it into a register
		   before attempting to convert it to the desired mode.  */
		offset_rtx = force_operand (offset_rtx, NULL_RTX);
		offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
	      }

	    /* See the comment in expand_assignment for the rationale.  */
	    if (mode1 != VOIDmode
		&& bitpos != 0
		&& bitsize > 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos = 0;
	      }

	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }
	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
	   record its alignment as BIGGEST_ALIGNMENT.  */
	if (MEM_P (op0) && bitpos == 0 && offset != 0
	    && is_aligning_offset (offset, tem))
	  set_mem_align (op0, BIGGEST_ALIGNMENT);

	/* Don't forget about volatility even if this is a bitfield.  */
	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }
	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || REG_P (op0) || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER
		&& modifier != EXPAND_MEMORY)
	    /* If the bitfield is volatile and the bitsize
	       is narrower than the access size of the bitfield,
	       we need to extract bitfields from the access.  */
	    || (volatilep && TREE_CODE (exp) == COMPONENT_REF
		&& DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
		&& mode1 != BLKmode
		&& bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
		      || (MEM_P (op0)
			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
		     && modifier != EXPAND_MEMORY
		     && ((modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_INITIALIZER)
			 ? STRICT_ALIGNMENT
			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
		    || (bitpos % BITS_PER_UNIT != 0)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
	    || (bitsize >= 0
		&& TYPE_SIZE (TREE_TYPE (exp))
		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
	  {
	    machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && MEM_P (op0)
		      && MEM_P (target)
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		if (target == 0)
		  target = assign_temp (type, 1, 1);

		/* ??? Unlike the similar test a few lines below, this one is
		   very likely obsolete.  */
		if (bitsize == 0)
		  return target;

		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		gcc_assert (MEM_P (op0)
			    && (!target || MEM_P (target))
			    && !(bitpos % BITS_PER_UNIT));

		emit_block_move (target,
				 adjust_address (op0, VOIDmode,
						 bitpos / BITS_PER_UNIT),
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

		return target;
	      }
	    /* If we have nothing to extract, the result will be 0 for targets
	       with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise.  Always
	       return 0 for the sake of consistency, as reading a zero-sized
	       bitfield is valid in Ada and the value is fully specified.  */
	    if (bitsize == 0)
	      return const0_rtx;

	    op0 = validize_mem (op0);

	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
				     (modifier == EXPAND_STACK_PARM
				      ? NULL_RTX : target),
				     ext_mode, ext_mode);
	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  GET_MODE_BITSIZE (GET_MODE (op0))
				  - bitsize, op0, 1);
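	    /* Informal illustration: on a 32-bit big-endian target, a 24-bit
	       record value extracted into an SImode register initially sits
	       in the low-order bits; the left shift by
	       GET_MODE_BITSIZE - bitsize (here 8) moves it to the high-order
	       end, which is where the bytes of a record are expected to
	       start in memory order.  */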
	    /* If the result type is BLKmode, store the data into a temporary
	       of the appropriate type, but with the mode corresponding to the
	       mode for the data we have (op0's mode).  */
	    if (mode == BLKmode)
	      {
		rtx new_rtx
		  = assign_stack_temp_for_type (ext_mode,
						GET_MODE_BITSIZE (ext_mode),
						type);
		emit_move_insn (new_rtx, op0);
		op0 = copy_rtx (new_rtx);
		PUT_MODE (op0, BLKmode);
	      }

	    return op0;
	  }
	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	if (op0 == orig_op0)
	  op0 = copy_rtx (op0);

	set_mem_attributes (op0, exp, 0);

	if (REG_P (XEXP (op0, 0)))
	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	/* If op0 is a temporary because the original expression was forced
	   to memory, clear MEM_EXPR so that the original expression cannot
	   be marked as addressable through MEM_EXPR of the temporary.  */
	if (clear_mem_expr)
	  set_mem_expr (op0, NULL_TREE);

	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }
    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);

    case CALL_EXPR:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      if (CALL_EXPR_VA_ARG_PACK (exp))
	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      {
	tree fndecl = get_callee_fndecl (exp), attr;

	if (fndecl
	    && (attr = lookup_attribute ("error",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  error ("%Kcall to %qs declared with attribute error: %s",
		 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
		 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
	if (fndecl
	    && (attr = lookup_attribute ("warning",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %qs declared with attribute warning: %s",
		      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
		      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

	/* Check for a built-in function.  */
	if (fndecl && DECL_BUILT_IN (fndecl))
	  {
	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
	    if (CALL_WITH_BOUNDS_P (exp))
	      return expand_builtin_with_bounds (exp, target, subtarget,
						 tmode, ignore);
	    else
	      return expand_builtin (exp, target, subtarget, tmode, ignore);
	  }
      }
      return expand_call (exp, target, ignore);
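      /* For reference, the attributes diagnosed above are the user-level
	 "error" and "warning" function attributes, e.g.

	   extern void bad (void) __attribute__ ((error ("do not call")));

	 Any call to such a function that survives optimization and reaches
	 expansion is reported here with the attribute's message.  */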
    case VIEW_CONVERT_EXPR:
      op0 = NULL_RTX;

      /* If we are converting to BLKmode, try to avoid an intermediate
	 temporary by fetching an inner memory reference.  */
      if (mode == BLKmode
	  && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
	  && handled_component_p (treeop0))
	{
	  machine_mode mode1;
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  int unsignedp;
	  int volatilep = 0;
	  tree tem
	    = get_inner_reference (treeop0, &bitsize, &bitpos,
				   &offset, &mode1, &unsignedp, &volatilep,
				   true);
	  rtx orig_op0;

	  /* ??? We should work harder and deal with non-zero offsets.  */
	  if (!offset
	      && (bitpos % BITS_PER_UNIT) == 0
	      && bitsize >= 0
	      && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
	    {
	      /* See the normal_inner_ref case for the rationale.  */
	      orig_op0
		= expand_expr_real (tem,
				    (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
				     && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
					 != INTEGER_CST)
				     && modifier != EXPAND_STACK_PARM
				     ? target : NULL_RTX),
				    VOIDmode,
				    modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
				    NULL, true);

	      if (MEM_P (orig_op0))
		{
		  op0 = orig_op0;

		  /* Get a reference to just this component.  */
		  if (modifier == EXPAND_CONST_ADDRESS
		      || modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
		  else
		    op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);

		  if (op0 == orig_op0)
		    op0 = copy_rtx (op0);

		  set_mem_attributes (op0, treeop0, 0);
		  if (REG_P (XEXP (op0, 0)))
		    mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

		  MEM_VOLATILE_P (op0) |= volatilep;
		}
	    }
	}

      if (!op0)
	op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
				NULL, inner_reference_p);
      /* If the input and output modes are both the same, we are done.  */
      if (mode == GET_MODE (op0))
	;
      /* If neither mode is BLKmode, and both modes are the same size
	 then we can use gen_lowpart.  */
      else if (mode != BLKmode && GET_MODE (op0) != BLKmode
	       && (GET_MODE_PRECISION (mode)
		   == GET_MODE_PRECISION (GET_MODE (op0)))
	       && !COMPLEX_MODE_P (GET_MODE (op0)))
	{
	  if (GET_CODE (op0) == SUBREG)
	    op0 = force_reg (GET_MODE (op0), op0);
	  temp = gen_lowpart_common (mode, op0);
	  if (temp)
	    op0 = temp;
	  else
	    {
	      if (!REG_P (op0) && !MEM_P (op0))
		op0 = force_reg (GET_MODE (op0), op0);
	      op0 = gen_lowpart (mode, op0);
	    }
	}
      /* If both types are integral, convert from one mode to the other.  */
      else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
	op0 = convert_modes (mode, GET_MODE (op0), op0,
			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      /* If the output type is a bit-field type, do an extraction.  */
      else if (reduce_bit_field)
	return extract_bit_field (op0, TYPE_PRECISION (type), 0,
				  TYPE_UNSIGNED (type), NULL_RTX,
				  mode, mode);
      /* As a last resort, spill op0 to memory, and reload it in a
	 different mode.  */
      else if (!MEM_P (op0))
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (treeop0);

	  gcc_assert (!TREE_ADDRESSABLE (exp));

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}
      /* If OP0 is (now) a MEM, we need to deal with alignment issues.  If the
	 output type is such that the operand is known to be aligned, indicate
	 that it is.  Otherwise, we need only be concerned about alignment for
	 non-BLKmode results.  */
      if (MEM_P (op0))
	{
	  enum insn_code icode;

	  if (TYPE_ALIGN_OK (type))
	    {
	      /* ??? Copying the MEM without substantially changing it might
		 run afoul of the code handling volatile memory references in
		 store_expr, which assumes that TARGET is returned unmodified
		 if it has been used.  */
	      op0 = copy_rtx (op0);
	      set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	    }
	  else if (modifier != EXPAND_WRITE
		   && modifier != EXPAND_MEMORY
		   && !inner_reference_p
		   && mode != BLKmode
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
	    {
	      /* If the target does have special handling for unaligned
		 loads of mode then use them.  */
	      if ((icode = optab_handler (movmisalign_optab, mode))
		  != CODE_FOR_nothing)
		{
		  rtx reg;

		  op0 = adjust_address (op0, mode, 0);
		  /* We've already validated the memory, and we're creating a
		     new pseudo destination.  The predicates really can't
		     fail.  */
		  reg = gen_reg_rtx (mode);

		  /* Nor can the insn generator.  */
		  rtx_insn *insn = GEN_FCN (icode) (reg, op0);
		  emit_insn (insn);
		  return reg;
		}
	      else if (STRICT_ALIGNMENT)
		{
		  tree inner_type = TREE_TYPE (treeop0);
		  HOST_WIDE_INT temp_size
		    = MAX (int_size_in_bytes (inner_type),
			   (HOST_WIDE_INT) GET_MODE_SIZE (mode));
		  rtx new_rtx
		    = assign_stack_temp_for_type (mode, temp_size, type);
		  rtx new_with_op0_mode
		    = adjust_address (new_rtx, GET_MODE (op0), 0);

		  gcc_assert (!TREE_ADDRESSABLE (exp));

		  if (GET_MODE (op0) == BLKmode)
		    emit_block_move (new_with_op0_mode, op0,
				     GEN_INT (GET_MODE_SIZE (mode)),
				     (modifier == EXPAND_STACK_PARM
				      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
		  else
		    emit_move_insn (new_with_op0_mode, op0);

		  op0 = new_rtx;
		}
	    }

	  op0 = adjust_address (op0, mode, 0);
	}

      return op0;

    case MODIFY_EXPR:
      {
	tree lhs = treeop0;
	tree rhs = treeop1;
	gcc_assert (ignore);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx_code_label *label = gen_label_rtx ();
	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
	    do_jump (TREE_OPERAND (rhs, 1),
		     value ? label : 0,
		     value ? 0 : label, -1);
	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
			       false);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	expand_assignment (lhs, rhs, false);
	return const0_rtx;
      }
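      /* The single-bit case in MODIFY_EXPR above covers source such as

	   struct { unsigned a : 1, b : 1; } s;
	   ...
	   s.a |= s.b;

	 which is expanded as a conditional jump on s.b followed by a store
	 of the constant 0 or 1 into s.a, rather than as a read-modify-write
	 of the containing word.  */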
    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);
    case REALPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, true);
    case RETURN_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case SWITCH_EXPR:
    case ASM_EXPR:
      /* Expanded in cfgexpand.c.  */
      gcc_unreachable ();

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case COMPOUND_LITERAL_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();
    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (treeop0, original_target, tmode,
			       modifier, alt_rtl, inner_reference_p);

    default:
      return expand_expr_real_2 (&ops, target, tmode, modifier);
    }
}
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type.  */
  if (CONST_INT_P (exp))
    {
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      machine_mode mode = GET_MODE (exp);
      rtx mask = immed_wide_int_const
	(wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
      return expand_and (mode, exp, mask, target);
    }
  else
    {
      int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
			  exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
			   exp, count, target, 0);
    }
}
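/* Worked example (informal, assuming a 32-bit SImode value): reducing to a
   signed 5-bit precision shifts left by 32 - 5 = 27 and then arithmetically
   right by 27, so the bit pattern 0x1f comes back as -1; for an unsigned
   5-bit type the value is instead masked with 0x1f.  */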
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (const_tree offset, const_tree exp)
{
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
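/* Illustration: an offset tree of the form

     (- (sizetype) &exp) & (ALIGN - 1)

   where ALIGN is a power of two larger than BIGGEST_ALIGNMENT matches the
   pattern above; adding such an offset to &exp rounds the address up to an
   ALIGN boundary, so the resulting reference may safely be treated as
   BIGGEST_ALIGNMENT-aligned.  */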
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset, lower_bound;

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;

	  /* Check if the array has a nonzero lower bound.  */
	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
	  if (!integer_zerop (lower_bound))
	    {
	      /* If the offset and base aren't both constants, return 0.  */
	      if (TREE_CODE (lower_bound) != INTEGER_CST)
		return 0;
	      if (TREE_CODE (offset) != INTEGER_CST)
		return 0;
	      /* Adjust offset by the lower bound.  */
	      offset = size_diffop (fold_convert (sizetype, offset),
				    fold_convert (sizetype, lower_bound));
	    }
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != ADDR_EXPR)
	    return 0;
	  array = TREE_OPERAND (array, 0);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL
	   || TREE_CODE (array) == CONST_DECL)
    {
      int length;
      tree init = ctor_for_folding (array);

      /* Variables initialized to string literals can be handled too.  */
      if (init == error_mark_node
	  || !init
	  || TREE_CODE (init) != STRING_CST)
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (init)) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If variable is bigger than the string literal, OFFSET must be constant
	 and inside of the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! tree_fits_uhwi_p (offset)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return init;
    }

  return 0;
}
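/* Example: for ARG written as &"hello"[2], or as "hello" + 2 via a
   POINTER_PLUS_EXPR, this returns the STRING_CST "hello" with *PTR_OFFSET
   set to (sizetype) 2.  A VAR_DECL whose initializer is a string literal,
   e.g.  static const char msg[] = "hello";  is accepted as well, in which
   case the initializer is what gets returned.  */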
/* Generate code to calculate OPS, an exploded comparison expression,
   using a store-flag instruction and return an rtx for the result.
   OPS reflects a comparison.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (sepops ops, rtx target, machine_mode mode)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  machine_mode operand_mode;
  int unsignedp;
  rtx op0, op1;
  rtx subtarget = target;
  location_t loc = ops->location;

  arg0 = ops->op0;
  arg1 = ops->op1;

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* For vector typed comparisons emit code to generate the desired
     all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
     expander for this.  */
  if (TREE_CODE (ops->type) == VECTOR_TYPE)
    {
      tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
      tree if_true = constant_boolean_node (true, ops->type);
      tree if_false = constant_boolean_node (false, ops->type);
      return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
    }

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (ops->code)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      std::swap (arg0, arg1);
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && integer_zerop (arg1)
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
    {
      gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
      if (srcstmt
	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
	{
	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
				       gimple_assign_rhs1 (srcstmt),
				       gimple_assign_rhs2 (srcstmt));
	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
	  if (temp)
	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
	}
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
				operand_mode, unsignedp,
				(TYPE_PRECISION (ops->type) == 1
				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
}
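/* Example of the paths above: for  r = (x >= 1)  with unsigned X, the GE
   comparison against 1 is rewritten into a GTU comparison against zero
   before emit_store_flag_force materialises 0 or 1 in the target register,
   while  r = ((x & 4) != 0)  is routed through fold_single_bit_test and
   expanded as a shift-and-mask sequence instead of a set-flag insn.  */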
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   DEFAULT_PROBABILITY is the probability of jumping to the default
   label.  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label, rtx default_label, rtx fallback_label,
	    int default_probability)
{
  struct expand_operand ops[5];
  machine_mode index_mode = SImode;
  rtx op1, op2, index;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
	emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
				 omode, 1, default_label,
				 default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_type = lang_hooks.types.type_for_mode (index_mode, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
				  ? default_label
				  : fallback_label));
  expand_jump_insn (CODE_FOR_casesi, 5, ops);
  return 1;
}
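/* Sketch of the setup above: if the switch index type is wider than SImode,
   the index is first biased by MINVAL and range-checked in its original
   mode, and only then truncated to SImode, because the casesi pattern takes
   an SImode index; a narrower index is simply converted.  */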
/* Attempt to generate a tablejump instruction; same concept.  */
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */

static void
do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
	      rtx default_label, int default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			     default_label, default_probability);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = simplify_gen_binary (MULT, Pmode, index,
			       gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
					     Pmode));
  index = simplify_gen_binary (PLUS, Pmode, index,
			       gen_rtx_LABEL_REF (Pmode, table_label));

#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
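/* Roughly, the address arithmetic above loads

     entry = *(table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE))

   and jumps to ENTRY (for PIC code, to the value produced by
   PIC_CASE_VECTOR_ADDRESS), after the bounds check has sent out-of-range
   indexes to DEFAULT_LABEL.  */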
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label, int default_probability)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label, default_probability);
  return 1;
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  unsigned i;
  int units;
  tree elt;
  machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else if (TREE_CODE (elt) == FIXED_CST)
	RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}
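/* Example: a VECTOR_CST for the GCC vector-extension constant
   (v4si) {1, 2, 3, 4} becomes (const_vector:V4SI [1 2 3 4]), while an
   all-zero constant is returned directly as CONST0_RTX for the vector's
   mode without building an rtvec.  */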
/* Build a decl for a personality function given a language prefix.  */

tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
				   long_long_unsigned_type_node,
				   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
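/* For example, with LANG "gxx" and DWARF2 unwind info this builds a decl
   for "__gxx_personality_v0", and with SJLJ unwind info for
   "__gxx_personality_sj0"; the name is simply the prefix and suffix pasted
   around "_personality" as in the ACONCAT call above.  */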
/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}
/* Returns a tree for the size of EXP in bytes.  */

static tree
tree_expr_size (const_tree exp)
{
  if (DECL_P (exp)
      && DECL_SIZE_UNIT (exp) != 0)
    return DECL_SIZE_UNIT (exp);
  else
    return size_in_bytes (TREE_TYPE (exp));
}

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}
/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

static HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
    }

  if (size == 0 || !tree_fits_shwi_p (size))
    return -1;

  return tree_to_shwi (size);
}
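/* Note: for a variably sized object (for instance a C99 variable-length
   array wrapped in a WITH_SIZE_EXPR) the size tree is not a compile-time
   constant that fits a HOST_WIDE_INT, so int_expr_size returns -1 and
   callers fall back to the rtx computed by expr_size.  */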
#include "gt-expr.h"