/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "common/common-target.h"
#include "diagnostic.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "target-globals.h"
#include "tree-ssa-address.h"
#include "cfgexpand.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static void move_by_pieces_1 (insn_gen_fn, machine_mode,
                              struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned,
                                        HOST_WIDE_INT, unsigned HOST_WIDE_INT,
                                        unsigned HOST_WIDE_INT,
                                        unsigned HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (insn_gen_fn, machine_mode,
                               struct store_by_pieces_d *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
                        unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
                        enum machine_mode, tree, alias_set_type, bool);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree,
                                                              const_tree);
static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, enum machine_mode);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx, int);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat, mem, mem1, reg;
  enum machine_mode mode;
  int num_clobbers;
  unsigned int regno;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
               && (direct_load[(int) mode] == 0
                   || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            SET_REGNO (reg, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if (insn_operand_matches (ic, 1, mem))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_PRECISION (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
                   != GET_MODE_PRECISION (to_mode))
                  || (DECIMAL_FLOAT_MODE_P (from_mode)
                      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
        /* Conversion between decimal float and binary float, same size.  */
        tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */                    /* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
                      to, from, UNKNOWN);
      return;
    }

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
      convert_optab ctab = unsignedp ? zext_optab : sext_optab;
      enum insn_code icode;

      icode = convert_optab_handler (ctab, full_mode, from_mode);
      gcc_assert (icode != CODE_FOR_nothing);

      if (to_mode == full_mode)
        {
          emit_unop_insn (icode, to, from, UNKNOWN);
          return;
        }

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (icode, new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
              ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
         we won't saturate the result.
         Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
          && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
        expand_fixed_convert (to, from, 0, 0);
      else
        expand_fixed_convert (to, from, 0, 1);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
      && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          rtx word_to = gen_reg_rtx (word_mode);

          if (reg_overlap_mentioned_p (to, from))
            from = force_reg (from_mode, from);

          convert_move (word_to, from, unsignedp);
          emit_unop_insn (code, to, word_to, equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         the source does not overlap the target so force it into an isolated
         register when it might.  Likewise for any MEM input, since the
         conversion sequence might require several references to it and we
         must ensure we're getting the same value every time.  */

      if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
                                            LT, lowfrom, const0_rtx,
                                            lowpart_mode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
      && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0),
                                            MEM_ADDR_SPACE (from)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0),
                                            MEM_ADDR_SPACE (from)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          int shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = (GET_MODE_PRECISION (to_mode)
                          - GET_MODE_PRECISION (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
                             from_mode) != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
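/* A minimal usage sketch of convert_move (hypothetical caller, not part
   of this file): widen a QImode pseudo into an SImode pseudo.  A nonzero
   UNSIGNEDP requests zero-extension rather than sign-extension.

     rtx narrow = gen_reg_rtx (QImode);
     rtx wide = gen_reg_rtx (SImode);
     convert_move (wide, narrow, 1);
*/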
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode,
               rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, SUBREG_REG (x));

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == HOST_BITS_PER_DOUBLE_INT
      && CONST_INT_P (x) && INTVAL (x) < 0)
    {
      double_int val = double_int::from_uhwi (INTVAL (x));

      /* We need to zero extend VAL.  */
      if (oldmode != VOIDmode)
        val = val.zext (GET_MODE_BITSIZE (oldmode));

      return immed_double_int_const (val, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((CONST_INT_P (x)
       && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (CONST_DOUBLE_AS_INT_P (x)
              || (GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION_MODES_P (mode,
                                                            GET_MODE (x))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (CONST_INT_P (x) && oldmode != VOIDmode
          && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= GET_MODE_MASK (oldmode);
          if (! unsignedp
              && val_signbit_known_set_p (oldmode, val))
            val |= ~GET_MODE_MASK (oldmode);

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
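/* A minimal usage sketch of convert_modes (hypothetical caller, not part
   of this file): sign-extend a constant known to be a QImode value into
   SImode.  The result may be X itself, a lowpart of it, or a fresh pseudo.

     rtx byte = GEN_INT (-1);
     rtx word = convert_modes (SImode, QImode, byte, 0);
*/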
/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */

static unsigned int
alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
  enum machine_mode tmode;

  tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > max_pieces
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  return align;
}
/* Return the widest integer mode no wider than SIZE.  If no such mode
   can be found, return VOIDmode.  */

static enum machine_mode
widest_int_mode_for_size (unsigned int size)
{
  enum machine_mode tmode, mode = VOIDmode;

  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
    if (GET_MODE_SIZE (tmode) < size)
      mode = tmode;

  return mode;
}
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align)
{
  return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
                                                 optimize_insn_for_speed_p ());
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces_d data;
  enum machine_mode to_addr_mode;
  enum machine_mode from_addr_mode = get_address_mode (from);
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr_mode = get_address_mode (to);
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr_mode = VOIDmode;
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...
         MODE might not be used depending on the definitions of the
         USE_* macros below.  */
      enum machine_mode mode ATTRIBUTE_UNUSED
        = widest_int_mode_for_size (max_size);

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_to_mode_reg (from_addr_mode,
                                             plus_constant (from_addr_mode,
                                                            from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_to_mode_reg (to_addr_mode,
                                           plus_constant (to_addr_mode,
                                                          to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1 && data.len > 0)
    {
      enum machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_to_mode_reg (to_addr_mode,
                                                 plus_constant (to_addr_mode,
                                                                data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }

  return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1 && l > 0)
    {
      enum machine_mode mode;
      enum insn_code icode;

      mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
                  struct move_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_int_mode (-(HOST_WIDE_INT) size,
                                                GET_MODE (data->to_addr))));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  gen_int_mode (-(HOST_WIDE_INT) size,
                                                GET_MODE (data->from_addr))));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          gcc_unreachable ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_int_mode (size,
                                                GET_MODE (data->to_addr))));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  gen_int_mode (size,
                                                GET_MODE (data->from_addr))));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.
   MIN_SIZE is the minimal size of block to move.
   MAX_SIZE is the maximal size of block to move; if it can not be represented
   in unsigned HOST_WIDE_INT, then it is a mask of all ones.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
                       unsigned int expected_align, HOST_WIDE_INT expected_size,
                       unsigned HOST_WIDE_INT min_size,
                       unsigned HOST_WIDE_INT max_size,
                       unsigned HOST_WIDE_INT probable_max_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  gcc_assert (size);
  if (CONST_INT_P (size)
      && INTVAL (size) == 0)
    return 0;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, INTVAL (size));
      set_mem_size (y, INTVAL (size));
    }

  if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
                                       expected_align, expected_size,
                                       min_size, max_size, probable_max_size))
    ;
  else if (may_use_call
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    {
      /* Since x and y are passed to a libcall, mark the corresponding
         tree EXPR as addressable.  */
      tree y_expr = MEM_EXPR (y);
      tree x_expr = MEM_EXPR (x);
      if (y_expr)
        mark_addressable (y_expr);
      if (x_expr)
        mark_addressable (x_expr);
      retval = emit_block_move_via_libcall (x, y, size,
                                            method == BLOCK_OP_TAILCALL);
    }
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
, rtx y
, rtx size
, enum block_op_methods method
)
1179 unsigned HOST_WIDE_INT max
, min
= 0;
1180 if (GET_CODE (size
) == CONST_INT
)
1181 min
= max
= UINTVAL (size
);
1183 max
= GET_MODE_MASK (GET_MODE (size
));
1184 return emit_block_move_hints (x
, y
, size
, method
, 0, -1,
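/* A minimal usage sketch of emit_block_move (hypothetical caller, not
   part of this file): copy 32 bytes between two BLKmode MEMs.  The
   normal method may use piecewise moves, a movmem pattern, or a memcpy
   libcall, whichever the target prefers.

     emit_block_move (dst_mem, src_mem, GEN_INT (32), BLOCK_OP_NORMAL);
*/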
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far_v;
    cumulative_args_t args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
    args_so_far = pack_cumulative_args (&args_so_far_v);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = targetm.calls.function_arg (args_so_far, mode,
                                              NULL_TREE, true);
        if (!tmp || !REG_P (tmp))
          return false;
        if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
          return false;
        targetm.calls.function_arg_advance (args_so_far, mode,
                                            NULL_TREE, true);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
                            unsigned int expected_align,
                            HOST_WIDE_INT expected_size,
                            unsigned HOST_WIDE_INT min_size,
                            unsigned HOST_WIDE_INT max_size,
                            unsigned HOST_WIDE_INT probable_max_size)
{
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
    {
      if ((unsigned HOST_WIDE_INT) expected_size > probable_max_size)
        expected_size = probable_max_size;
      if ((unsigned HOST_WIDE_INT) expected_size < min_size)
        expected_size = min_size;
    }

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (movmem_optab, mode);

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  Since SIZE is within the Pmode address
             space, we limit MODE to Pmode.  */
          && ((CONST_INT_P (size)
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || max_size <= (GET_MODE_MASK (mode) >> 1)
              || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
        {
          struct expand_operand ops[9];
          unsigned int nops;

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */
          nops = insn_data[(int) code].n_generator_args;
          gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

          create_fixed_operand (&ops[0], x);
          create_fixed_operand (&ops[1], y);
          /* The check above guarantees that this size conversion is valid.  */
          create_convert_operand_to (&ops[2], size, mode, true);
          create_integer_operand (&ops[3], align / BITS_PER_UNIT);
          if (nops >= 6)
            {
              create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
              create_integer_operand (&ops[5], expected_size);
            }
          if (nops >= 8)
            {
              create_integer_operand (&ops[6], min_size);
              /* If we can not represent the maximal size,
                 make parameter NULL.  */
              if ((HOST_WIDE_INT) max_size != -1)
                create_integer_operand (&ops[7], max_size);
              else
                create_fixed_operand (&ops[7], NULL);
            }
          if (nops == 9)
            {
              /* If we can not represent the maximal size,
                 make parameter NULL.  */
              if ((HOST_WIDE_INT) probable_max_size != -1)
                create_integer_operand (&ops[8], probable_max_size);
              else
                create_fixed_operand (&ops[8], NULL);
            }
          if (maybe_expand_insn (code, nops, ops))
            {
              volatile_ok = save_volatile_ok;
              return true;
            }
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_addr_to_reg (XEXP (dst, 0));
  src_addr = copy_addr_to_reg (XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn, attrs, attr_args;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
      attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);

      decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}
static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode x_addr_mode = get_address_mode (x);
  enum machine_mode y_addr_mode = get_address_mode (y);
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label, REG_BR_PROB_BASE * 90 / 100);
}
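/* For reference, the RTL emitted above corresponds to this byte-copy
   loop (illustrative C, not literal output):

     iter = 0;
     goto cmp;
   top:
     x[iter] = y[iter];
     iter++;
   cmp:
     if (iter < size)
       goto top;
*/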
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i == 1)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
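/* For reference, a group built this way has the following shape
   (illustrative sketch; registers, modes and offsets are
   target-dependent):

     (parallel [(expr_list (reg:DI 90) (const_int 0))
                (expr_list (reg:DI 91) (const_int 8))])

   Each EXPR_LIST pairs a register with its byte offset in the block.  */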
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize);
      else
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (COMPLEX_MODE_P (mode)
               && GET_MODE (src) == mode
               && bytelen == GET_MODE_SIZE (mode))
        /* Let emit_move_complex do the bulk of the work.  */
        tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode);
            }
          else
            {
              rtx mem;

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
        {
          HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

          if (len == ssize)
            tmps[i] = src;
          else
            {
              rtx first, second;

              gcc_assert (2 * len == ssize);
              split_double (src, &first, &second);
              if (i)
                tmps[i] = second;
              else
                tmps[i] = first;
            }
        }
      else if (REG_P (src) && GET_MODE (src) == mode)
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode);

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                shift, tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
        continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        {
          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
        }
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}
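/* A minimal usage sketch (hypothetical caller, not part of this file):
   load a value into fresh pseudos shaped like an outgoing PARALLEL, then
   move the whole group into the real registers later.

     rtx temps = emit_group_load_into_temps (parallel, src, type, ssize);
     emit_group_move (parallel, temps);
*/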
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
      else
        dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
        {
          tmps[i] = gen_reg_rtx (GET_MODE (reg));
          emit_move_insn (tmps[i], reg);
        }
      else
        tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */
      temp = assign_stack_temp (GET_MODE (dst), ssize);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
        dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
         of the destination mode, use a paradoxical subreg to
         initialize the destination.  */
      if (start < finish)
        {
          inner = GET_MODE (tmps[start]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[start], inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  start++;
                }
            }
        }

      /* If the first element wasn't the low part, try the last.  */
      if (!done
          && start < finish - 1)
        {
          inner = GET_MODE (tmps[finish - 1]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[finish - 1], inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  finish--;
                }
            }
        }

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
        emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        adj_bytelen = ssize - bytepos;
      else
        adj_bytelen = bytelen;

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + adj_bytelen
              <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              enum machine_mode dest_mode = GET_MODE (dest);
              enum machine_mode tmp_mode = GET_MODE (tmps[i]);

              gcc_assert (bytepos == 0 && XVECLEN (src, 0));

              if (GET_MODE_ALIGNMENT (dest_mode)
                  >= GET_MODE_ALIGNMENT (tmp_mode))
                {
                  dest = assign_stack_temp (dest_mode,
                                            GET_MODE_SIZE (dest_mode));
                  emit_move_insn (adjust_address (dest, tmp_mode, bytepos),
                                  tmps[i]);
                  dst = dest;
                }
              else
                {
                  dest = assign_stack_temp (tmp_mode,
                                            GET_MODE_SIZE (tmp_mode));
                  emit_move_insn (dest, tmps[i]);
                  dst = adjust_address (dest, dest_mode, bytepos);
                }
              break;
            }
        }

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      shift, tmps[i], 0);
            }

          /* Make sure not to write past the end of the struct.  */
          store_bit_field (dest,
                           adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                           bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
                           VOIDmode, tmps[i]);
        }

      /* Optimize the access just a bit.  */
      else if (MEM_P (dest)
               && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
                   || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
               && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
               && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);

      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         0, 0, mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Return a form of X that does not use a PARALLEL.  TYPE is the type
   of the value stored in X.  */

rtx
maybe_emit_group_store (rtx x, tree type)
{
  enum machine_mode mode = TYPE_MODE (type);
  gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
  if (GET_CODE (x) == PARALLEL)
    {
      rtx result = gen_reg_rtx (mode);
      emit_group_store (result, x, type, int_size_in_bytes (type));
      return result;
    }
  return x;
}
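
/* For instance, a target that returns a small aggregate in two registers
   may represent the value as a PARALLEL along the lines of

       (parallel [(expr_list (reg:DI 0) (const_int 0))
                  (expr_list (reg:DI 1) (const_int 8))])

   which most consumers cannot use directly; the helper above spills such
   a value into a fresh pseudo of the aggregate's mode.  (Illustrative
   RTL only; the register numbers are hypothetical.)  */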
/* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.

   This is used on targets that return BLKmode values in registers.  */

void
copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
  enum machine_mode mode = GET_MODE (srcreg);
  enum machine_mode tmode = GET_MODE (target);
  enum machine_mode copy_mode;

  /* BLKmode registers created in the back-end shouldn't have survived.  */
  gcc_assert (mode != BLKmode);

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least end of
     the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
	  ? !BYTES_BIG_ENDIAN
	  : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
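
  /* Worked example (hypothetical target parameters): with 4-byte words
     and 8-bit units, a 6-byte structure that is left-padded in its
     register gets padding_correction
       = BITS_PER_WORD - (bytes % UNITS_PER_WORD) * BITS_PER_UNIT
       = 32 - 2 * 8 = 16,
     i.e. the first 16 bits of the source are skipped.  */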
  /* We can use a single move if we have an exact mode for the size.  */
  else if (MEM_P (target)
	   && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
	       || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
	   && bytes == GET_MODE_SIZE (mode))
    {
      emit_move_insn (adjust_address (target, mode, 0), srcreg);
      return;
    }

  /* And if we additionally have the same mode for a register.  */
  else if (REG_P (target)
	   && GET_MODE (target) == mode
	   && bytes == GET_MODE_SIZE (mode))
    {
      emit_move_insn (target, srcreg);
      return;
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */
  if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
    {
      srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
      mode = word_mode;
    }

  /* Copy the structure BITSIZE bits at a time.  If the target lives in
     memory, take care of not reading/writing past its end by selecting
     a copy mode suited to BITSIZE.  This should always be possible given
     how it is computed.

     If the target lives in register, make sure not to select a copy mode
     larger than the mode of the register.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */

  copy_mode = word_mode;
  if (MEM_P (target))
    {
      enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
      if (mem_mode != BLKmode)
	copy_mode = mem_mode;
    }
  else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
    copy_mode = tmode;

  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == padding_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
	dst = target;
      else if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, copy_mode, copy_mode));
    }
}
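
/* Note the asymmetry in the copy loop above: XBITPOS (biased by
   PADDING_CORRECTION) walks the source register while BITPOS walks the
   target, so with, e.g., padding_correction == 16 and bitsize == 8, the
   first byte is extracted from bit 16 of source word 0 but stored at
   bit 0 of the target.  */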
/* Copy BLKmode value SRC into a register of mode MODE.  Return the
   register if it contains any data, otherwise return null.

   This is used on targets that return BLKmode values in registers.  */

rtx
copy_blkmode_to_reg (enum machine_mode mode, tree src)
{
  int i, n_regs;
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
  unsigned int bitsize;
  rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
  enum machine_mode dst_mode;

  gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);

  x = expand_normal (src);

  bytes = int_size_in_bytes (TREE_TYPE (src));
  if (bytes == 0)
    return NULL_RTX;

  /* If the structure doesn't take up a whole number of words, see
     whether the register value should be padded on the left or on
     the right.  Set PADDING_CORRECTION to the number of padding
     bits needed on the left side.

     In most ABIs, the structure will be returned at the least end of
     the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (TREE_TYPE (src))
	  ? !BYTES_BIG_ENDIAN
	  : BYTES_BIG_ENDIAN))
    padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
					   * BITS_PER_UNIT));

  n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  dst_words = XALLOCAVEC (rtx, n_regs);
  bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);

  /* Copy the structure BITSIZE bits at a time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new destination pseudo each time xbitpos is
	 on a word boundary and when xbitpos == padding_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == padding_correction)
	{
	  /* Generate an appropriate register.  */
	  dst_word = gen_reg_rtx (word_mode);
	  dst_words[xbitpos / BITS_PER_WORD] = dst_word;

	  /* Clear the destination before we move anything into it.  */
	  emit_move_insn (dst_word, CONST0_RTX (word_mode));
	}

      /* We need a new source operand each time bitpos is on a word
	 boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);

      /* Use bitpos for the source extraction (left justified) and
	 xbitpos for the destination store (right justified).  */
      store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
		       0, 0, word_mode,
		       extract_bit_field (src_word, bitsize,
					  bitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode));
    }

  if (mode == BLKmode)
    {
      /* Find the smallest integer mode large enough to hold the
	 entire structure.  */
      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	/* Have we found a large enough mode?  */
	if (GET_MODE_SIZE (mode) >= bytes)
	  break;

      /* A suitable mode should have been found.  */
      gcc_assert (mode != VOIDmode);
    }

  if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
    dst_mode = word_mode;
  else
    dst_mode = mode;
  dst = gen_reg_rtx (dst_mode);

  for (i = 0; i < n_regs; i++)
    emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);

  if (mode != dst_mode)
    dst = gen_lowpart (mode, dst);

  return dst;
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add a CLOBBER expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
clobber_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
	use_reg (call_fusage, reg);
    }
}
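
/* For example, an argument passed half in registers and half on the
   stack can appear in the PARALLEL with a NULL (or MEM) operand for its
   stack portion; the loop above deliberately skips such entries, since
   only hard-register uses belong in CALL_FUSAGE.  */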
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the code of the expression on the RHS is CODE.  Return
   NULL otherwise.  */

static gimple
get_def_for_expr (tree name, enum tree_code code)
{
  gimple def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || gimple_assign_rhs_code (def_stmt) != code)
    return NULL;

  return def_stmt;
}

#ifdef HAVE_conditional_move
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the class of the expression on the RHS is CLASS.  Return
   NULL otherwise.  */

static gimple
get_def_for_expr_class (tree name, enum tree_code_class tclass)
{
  gimple def_stmt;

  if (TREE_CODE (name) != SSA_NAME)
    return NULL;

  def_stmt = get_gimple_for_ssa_name (name);
  if (!def_stmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
    return NULL;

  return def_stmt;
}
#endif
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   Return nonzero if a call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
		     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		     void *constfundata, unsigned int align, bool memsetp)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode;
  enum insn_code icode;
  int reverse;
  /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
  rtx cst ATTRIBUTE_UNUSED;

  if (len == 0)
    return 1;

  if (!targetm.use_by_pieces_infrastructure_p (len, align,
					       memsetp
						 ? SET_BY_PIECES
						 : STORE_BY_PIECES,
					       optimize_insn_for_speed_p ()))
    return 0;

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1 && l > 0)
	{
	  mode = widest_int_mode_for_size (max_size);

	  if (mode == VOIDmode)
	    break;

	  icode = optab_handler (mov_optab, mode);
	  if (icode != CODE_FOR_nothing
	      && align >= GET_MODE_ALIGNMENT (mode))
	    {
	      unsigned int size = GET_MODE_SIZE (mode);

	      while (l >= size)
		{
		  if (reverse)
		    offset -= size;

		  cst = (*constfun) (constfundata, offset, mode);
		  if (!targetm.legitimate_constant_p (mode, cst))
		    return 0;

		  if (!reverse)
		    offset += size;

		  l -= size;
		}
	    }

	  max_size = GET_MODE_SIZE (mode);
	}

      /* The code above should have handled everything.  */
      gcc_assert (!l);
    }

  return 1;
}
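
/* As a concrete trace (hypothetical 32-bit target with
   STORE_MAX_PIECES == 4): for LEN == 7 the loop above asks CONSTFUN for
   an SImode constant at offset 0, an HImode constant at offset 4 and a
   QImode constant at offset 6, and succeeds only if each one is a
   legitimate constant for its mode.  */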
/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
   a memset operation and false if it's a copy of a constant string.
   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
		 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		 void *constfundata, unsigned int align, bool memsetp, int endp)
{
  enum machine_mode to_addr_mode = get_address_mode (to);
  struct store_by_pieces_d data;

  if (len == 0)
    {
      gcc_assert (endp != 2);
      return to;
    }

  gcc_assert (targetm.use_by_pieces_infrastructure_p
		(len, align,
		 memsetp
		   ? SET_BY_PIECES
		   : STORE_BY_PIECES,
		 optimize_insn_for_speed_p ()));

  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_to_mode_reg (to_addr_mode,
						 plus_constant (to_addr_mode,
								data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
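
/* The ENDP convention mirrors the library routines built on top of this:
   memset-style callers pass 0 and get TO itself back, mempcpy-style
   callers pass 1 and get the first byte past the stored block, and
   stpcpy-style callers pass 2 and get the address of the final stored
   byte.  */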
/* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
{
  struct store_by_pieces_d data;

  if (len == 0)
    return;

  data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
  store_by_pieces_1 (&data, align);
}

/* Callback routine for clear_by_pieces.
   Return const0_rtx unconditionally.  */

static rtx
clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
/* Subroutine of clear_by_pieces and store_by_pieces.
   Generate several move instructions to store LEN bytes of block TO.  (A MEM
   rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */

static void
store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
		   unsigned int align ATTRIBUTE_UNUSED)
{
  enum machine_mode to_addr_mode = get_address_mode (data->to);
  rtx to_addr = XEXP (data->to, 0);
  unsigned int max_size = STORE_MAX_PIECES + 1;
  enum insn_code icode;

  data->offset = 0;
  data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;

  /* If storing requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data->autinc_to
      && move_by_pieces_ninsns (data->len, align, max_size) > 2)
    {
      /* Determine the main mode we'll be using.
	 MODE might not be used depending on the definitions of the
	 USE_* macros below.  */
      enum machine_mode mode ATTRIBUTE_UNUSED
	= widest_int_mode_for_size (max_size);

      if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
	{
	  data->to_addr = copy_to_mode_reg (to_addr_mode,
					    plus_constant (to_addr_mode,
							   to_addr,
							   data->len));
	  data->autinc_to = 1;
	  data->explicit_inc_to = -1;
	}

      if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
	  && ! data->autinc_to)
	{
	  data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
	  data->autinc_to = 1;
	  data->explicit_inc_to = 1;
	}

      if ( !data->autinc_to && CONSTANT_P (to_addr))
	data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);

  /* First store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1 && data->len > 0)
    {
      enum machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	store_by_pieces_2 (GEN_FCN (icode), mode, data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data->len);
}
/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
		   struct store_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, cst;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->autinc_to)
	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					 data->offset);
      else
	to1 = adjust_address (data->to, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  gen_int_mode (-(HOST_WIDE_INT) size,
						GET_MODE (data->to_addr))));

      cst = (*data->constfun) (data->constfundata, data->offset, mode);
      emit_insn ((*genfun) (to1, cst));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  gen_int_mode (size,
						GET_MODE (data->to_addr))));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
   its length in bytes.  */

rtx
clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
		     unsigned int expected_align, HOST_WIDE_INT expected_size,
		     unsigned HOST_WIDE_INT min_size,
		     unsigned HOST_WIDE_INT max_size,
		     unsigned HOST_WIDE_INT probable_max_size)
{
  enum machine_mode mode = GET_MODE (object);
  unsigned int align;

  gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);

  /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
     just move a zero.  Otherwise, do this a piece at a time.  */
  if (mode != BLKmode
      && CONST_INT_P (size)
      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
    {
      rtx zero = CONST0_RTX (mode);
      if (zero != NULL)
	{
	  emit_move_insn (object, zero);
	  return NULL;
	}

      if (COMPLEX_MODE_P (mode))
	{
	  zero = CONST0_RTX (GET_MODE_INNER (mode));
	  if (zero != NULL)
	    {
	      write_complex_part (object, zero, 0);
	      write_complex_part (object, zero, 1);
	      return NULL;
	    }
	}
    }

  if (size == const0_rtx)
    return NULL;

  align = MEM_ALIGN (object);

  if (CONST_INT_P (size)
      && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
						 CLEAR_BY_PIECES,
						 optimize_insn_for_speed_p ()))
    clear_by_pieces (object, INTVAL (size), align);
  else if (set_storage_via_setmem (object, size, const0_rtx, align,
				   expected_align, expected_size,
				   min_size, max_size, probable_max_size))
    ;
  else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
    return set_storage_via_libcall (object, size, const0_rtx,
				    method == BLOCK_OP_TAILCALL);
  else
    gcc_unreachable ();

  return NULL;
}

rtx
clear_storage (rtx object, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return clear_storage_hints (object, size, method, 0, -1, min, max, max);
}
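
/* E.g. clear_storage (obj, GEN_INT (16), BLOCK_OP_NORMAL) pins both
   MIN_SIZE and MAX_SIZE to 16 above, giving the setmem expanders an
   exact range to work with; a variable SIZE only bounds MAX_SIZE by the
   mask of its machine mode.  */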
/* A subroutine of clear_storage.  Expand a call to memset.
   Return the return value of memset, 0 otherwise.  */

rtx
set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
{
  tree call_expr, fn, object_tree, size_tree, val_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
     reference those pseudos through a VAR_DECL and use them later.  */

  object = copy_addr_to_reg (XEXP (object, 0));

  size_mode = TYPE_MODE (sizetype);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memset in this context.  This could be a user call to memset and
     the user may wish to examine the return value from memset.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  object_tree = make_tree (ptr_type_node, object);
  if (!CONST_INT_P (val))
    val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
  size_tree = make_tree (sizetype, size);
  val_tree = make_tree (integer_type_node, val);

  fn = clear_storage_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of set_storage_via_libcall.  Create the tree node
   for the function we use for block clears.  */

tree block_clear_fn;

void
init_block_clear_fn (const char *asmspec)
{
  if (!block_clear_fn)
    {
      tree fn, args;

      fn = get_identifier ("memset");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       integer_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_clear_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_clear_fn, asmspec);
}

static tree
clear_storage_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_clear_fn)
    init_block_clear_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_clear_fn);
    }

  return block_clear_fn;
}
/* Expand a setmem pattern; return true if successful.  */

bool
set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
			unsigned int expected_align, HOST_WIDE_INT expected_size,
			unsigned HOST_WIDE_INT min_size,
			unsigned HOST_WIDE_INT max_size,
			unsigned HOST_WIDE_INT probable_max_size)
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
    {
      if ((unsigned HOST_WIDE_INT) expected_size > max_size)
	expected_size = max_size;
      if ((unsigned HOST_WIDE_INT) expected_size < min_size)
	expected_size = min_size;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (setmem_optab, mode);

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  Since SIZE is within the Pmode address
	     space, we limit MODE to Pmode.  */
	  && ((CONST_INT_P (size)
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || max_size <= (GET_MODE_MASK (mode) >> 1)
	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
	{
	  struct expand_operand ops[9];
	  unsigned int nops;

	  nops = insn_data[(int) code].n_generator_args;
	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

	  create_fixed_operand (&ops[0], object);
	  /* The check above guarantees that this size conversion is valid.  */
	  create_convert_operand_to (&ops[1], size, mode, true);
	  create_convert_operand_from (&ops[2], val, byte_mode, true);
	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
	  if (nops >= 6)
	    {
	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
	      create_integer_operand (&ops[5], expected_size);
	    }
	  if (nops >= 8)
	    {
	      create_integer_operand (&ops[6], min_size);
	      /* If we cannot represent the maximal size,
		 make the parameter NULL.  */
	      if ((HOST_WIDE_INT) max_size != -1)
		create_integer_operand (&ops[7], max_size);
	      else
		create_fixed_operand (&ops[7], NULL);
	    }
	  if (nops == 9)
	    {
	      /* If we cannot represent the maximal size,
		 make the parameter NULL.  */
	      if ((HOST_WIDE_INT) probable_max_size != -1)
		create_integer_operand (&ops[8], probable_max_size);
	      else
		create_fixed_operand (&ops[8], NULL);
	    }
	  if (maybe_expand_insn (code, nops, ops))
	    return true;
	}
    }

  return false;
}
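
/* The operand counts accepted above track the historical growth of the
   setmem interface: 4 operands (block, size, value, alignment), 6
   (adding the expected alignment and size hints), 8 (adding the minimal
   and maximal size), or 9 (adding the probable maximal size).  */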
/* Write to one of the components of the complex value CPLX.  Write VAL to
   the real part if IMAG_P is false, and the imaginary part if it's true.  */

void
write_complex_part (rtx cplx, rtx val, bool imag_p)
{
  enum machine_mode cmode;
  enum machine_mode imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    {
      emit_move_insn (XEXP (cplx, imag_p), val);
      return;
    }

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    {
      emit_move_insn (adjust_address_nv (cplx, imode,
					 imag_p ? GET_MODE_SIZE (imode) : 0),
		      val);
      return;
    }

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since store_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
	 the original object if it spans an even number of hard regs.
	 This special case is important for SCmode on 64-bit platforms
	 where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx part = simplify_gen_subreg (imode, cplx, cmode,
				      imag_p ? GET_MODE_SIZE (imode) : 0);
      if (part)
	{
	  emit_move_insn (part, val);
	  return;
	}
      else
	/* simplify_gen_subreg may fail for sub-word MEMs.  */
	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
}
/* Extract one of the components of the complex value CPLX.  Extract the
   real part if IMAG_P is false, and the imaginary part if it's true.  */

rtx
read_complex_part (rtx cplx, bool imag_p)
{
  enum machine_mode cmode, imode;
  unsigned ibitsize;

  if (GET_CODE (cplx) == CONCAT)
    return XEXP (cplx, imag_p);

  cmode = GET_MODE (cplx);
  imode = GET_MODE_INNER (cmode);
  ibitsize = GET_MODE_BITSIZE (imode);

  /* Special case reads from complex constants that got spilled to memory.  */
  if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
    {
      tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
      if (decl && TREE_CODE (decl) == COMPLEX_CST)
	{
	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
	  if (CONSTANT_CLASS_P (part))
	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
	}
    }

  /* For MEMs simplify_gen_subreg may generate an invalid new address
     because, e.g., the original address is considered mode-dependent
     by the target, which restricts simplify_subreg from invoking
     adjust_address_nv.  Instead of preparing fallback support for an
     invalid address, we call adjust_address_nv directly.  */
  if (MEM_P (cplx))
    return adjust_address_nv (cplx, imode,
			      imag_p ? GET_MODE_SIZE (imode) : 0);

  /* If the sub-object is at least word sized, then we know that subregging
     will work.  This special case is important, since extract_bit_field
     wants to operate on integer modes, and there's rarely an OImode to
     correspond to TCmode.  */
  if (ibitsize >= BITS_PER_WORD
      /* For hard regs we have exact predicates.  Assume we can split
	 the original object if it spans an even number of hard regs.
	 This special case is important for SCmode on 64-bit platforms
	 where the natural size of floating-point regs is 32-bit.  */
      || (REG_P (cplx)
	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
    {
      rtx ret = simplify_gen_subreg (imode, cplx, cmode,
				     imag_p ? GET_MODE_SIZE (imode) : 0);
      if (ret)
	return ret;
      else
	/* simplify_gen_subreg may fail for sub-word MEMs.  */
	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
    }

  return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
			    true, NULL_RTX, imode, imode);
}
/* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
   NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
   represented in NEW_MODE.  If FORCE is true, this will never happen, as
   we'll force-create a SUBREG if needed.  */

static rtx
emit_move_change_mode (enum machine_mode new_mode,
		       enum machine_mode old_mode, rtx x, bool force)
{
  rtx ret;

  if (push_operand (x, GET_MODE (x)))
    {
      ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
      MEM_COPY_ATTRIBUTES (ret, x);
    }
  else if (MEM_P (x))
    {
      /* We don't have to worry about changing the address since the
	 size in bytes is supposed to be the same.  */
      if (reload_in_progress)
	{
	  /* Copy the MEM to change the mode and move any
	     substitutions from the old MEM to the new one.  */
	  ret = adjust_address_nv (x, new_mode, 0);
	  copy_replacements (x, ret);
	}
      else
	ret = adjust_address (x, new_mode, 0);
    }
  else
    {
      /* Note that we do want simplify_subreg's behavior of validating
	 that the new mode is ok for a hard register.  If we were to use
	 simplify_gen_subreg, we would create the subreg, but would
	 probably run into the target not being able to implement it.  */
      /* Except, of course, when FORCE is true, when this is exactly what
	 we want.  Which is needed for CCmodes on some targets.  */
      if (force)
	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
      else
	ret = simplify_subreg (new_mode, x, old_mode, 0);
    }

  return ret;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
   an integer mode of the same size as MODE.  Returns the instruction
   emitted, or NULL if such a move could not be generated.  */

static rtx
emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
{
  enum machine_mode imode;
  enum insn_code code;

  /* There must exist a mode of the exact size we require.  */
  imode = int_mode_for_mode (mode);
  if (imode == BLKmode)
    return NULL_RTX;

  /* The target must support moves in this mode.  */
  code = optab_handler (mov_optab, imode);
  if (code == CODE_FOR_nothing)
    return NULL_RTX;

  x = emit_move_change_mode (imode, mode, x, force);
  if (x == NULL_RTX)
    return NULL_RTX;
  y = emit_move_change_mode (imode, mode, y, force);
  if (y == NULL_RTX)
    return NULL_RTX;
  return emit_insn (GEN_FCN (code) (x, y));
}
/* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
   Return an equivalent MEM that does not use an auto-increment.  */

rtx
emit_move_resolve_push (enum machine_mode mode, rtx x)
{
  enum rtx_code code = GET_CODE (XEXP (x, 0));
  HOST_WIDE_INT adjust;
  rtx temp;

  adjust = GET_MODE_SIZE (mode);
#ifdef PUSH_ROUNDING
  adjust = PUSH_ROUNDING (adjust);
#endif
  if (code == PRE_DEC || code == POST_DEC)
    adjust = -adjust;
  else if (code == PRE_MODIFY || code == POST_MODIFY)
    {
      rtx expr = XEXP (XEXP (x, 0), 1);
      HOST_WIDE_INT val;

      gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
      gcc_assert (CONST_INT_P (XEXP (expr, 1)));
      val = INTVAL (XEXP (expr, 1));
      if (GET_CODE (expr) == MINUS)
	val = -val;
      gcc_assert (adjust == val || adjust == -val);
      adjust = val;
    }

  /* Do not use anti_adjust_stack, since we don't want to update
     stack_pointer_delta.  */
  temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
			      gen_int_mode (adjust, Pmode), stack_pointer_rtx,
			      0, OPTAB_LIB_WIDEN);
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);

  switch (code)
    {
    case PRE_INC:
    case PRE_DEC:
    case PRE_MODIFY:
      temp = stack_pointer_rtx;
      break;
    case POST_INC:
    case POST_DEC:
    case POST_MODIFY:
      temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
      break;
    default:
      gcc_unreachable ();
    }

  return replace_equiv_address (x, temp);
}
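
/* For instance, a push expressed as (mem:DI (pre_dec (reg sp))) is
   rewritten here into an explicit 8-byte stack adjustment followed by a
   plain (mem:DI (reg sp)), so later code need not understand the
   auto-increment form.  (The mode size is illustrative.)  */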
/* A subroutine of emit_move_complex.  Generate a move from Y into X.
   X is known to satisfy push_operand, and MODE is known to be complex.
   Returns the last instruction emitted.  */

rtx
emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
{
  enum machine_mode submode = GET_MODE_INNER (mode);
  bool imag_first;

#ifdef PUSH_ROUNDING
  unsigned int submodesize = GET_MODE_SIZE (submode);

  /* In case we output to the stack, but the size is smaller than the
     machine can push exactly, we need to use move instructions.  */
  if (PUSH_ROUNDING (submodesize) != submodesize)
    {
      x = emit_move_resolve_push (mode, x);
      return emit_move_insn (x, y);
    }
#endif

  /* Note that the real part always precedes the imag part in memory
     regardless of machine's endianness.  */
  switch (GET_CODE (XEXP (x, 0)))
    {
    case PRE_DEC:
    case POST_DEC:
      imag_first = true;
      break;
    case PRE_INC:
    case POST_INC:
      imag_first = false;
      break;
    default:
      gcc_unreachable ();
    }

  emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
		  read_complex_part (y, imag_first));
  return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
			 read_complex_part (y, !imag_first));
}
/* A subroutine of emit_move_complex.  Perform the move from Y to X
   via two moves of the parts.  Returns the last instruction emitted.  */

rtx
emit_move_complex_parts (rtx x, rtx y)
{
  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.  */
  if (!reload_completed && !reload_in_progress
      && REG_P (x) && !reg_overlap_mentioned_p (x, y))
    emit_clobber (x);

  write_complex_part (x, read_complex_part (y, false), false);
  write_complex_part (x, read_complex_part (y, true), true);

  return get_last_insn ();
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be complex.  Returns the last instruction emitted.  */

static rtx
emit_move_complex (enum machine_mode mode, rtx x, rtx y)
{
  bool try_int;

  /* Need to take special care for pushes, to maintain proper ordering
     of the data, and possibly extra padding.  */
  if (push_operand (x, mode))
    return emit_move_complex_push (mode, x, y);

  /* See if we can coerce the target into moving both values at once, except
     for floating point where we favor moving as parts if this is easy.  */
  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
      && !(REG_P (x)
	   && HARD_REGISTER_P (x)
	   && hard_regno_nregs[REGNO (x)][mode] == 1)
      && !(REG_P (y)
	   && HARD_REGISTER_P (y)
	   && hard_regno_nregs[REGNO (y)][mode] == 1))
    try_int = false;
  /* Not possible if the values are inherently not adjacent.  */
  else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
    try_int = false;
  /* Is possible if both are registers (or subregs of registers).  */
  else if (register_operand (x, mode) && register_operand (y, mode))
    try_int = true;
  /* If one of the operands is a memory, and alignment constraints
     are friendly enough, we may be able to do combined memory operations.
     We do not attempt this if Y is a constant because that combination is
     usually better with the by-parts thing below.  */
  else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
	   && (!STRICT_ALIGNMENT
	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
    try_int = true;
  else
    try_int = false;

  if (try_int)
    {
      rtx ret;

      /* For memory to memory moves, optimal behavior can be had with the
	 existing block move logic.  */
      if (MEM_P (x) && MEM_P (y))
	{
	  emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
			   BLOCK_OP_NO_LIBCALL);
	  return get_last_insn ();
	}

      ret = emit_move_via_integer (mode, x, y, true);
      if (ret)
	return ret;
    }

  return emit_move_complex_parts (x, y);
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is known to be MODE_CC.  Returns the last instruction emitted.  */

static rtx
emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
{
  rtx ret;

  /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
  if (mode != CCmode)
    {
      enum insn_code code = optab_handler (mov_optab, CCmode);
      if (code != CODE_FOR_nothing)
	{
	  x = emit_move_change_mode (CCmode, mode, x, true);
	  y = emit_move_change_mode (CCmode, mode, y, true);
	  return emit_insn (GEN_FCN (code) (x, y));
	}
    }

  /* Otherwise, find the MODE_INT mode of the same width.  */
  ret = emit_move_via_integer (mode, x, y, false);
  gcc_assert (ret != NULL);
  return ret;
}
/* Return true if word I of OP lies entirely in the
   undefined bits of a paradoxical subreg.  */

static bool
undefined_operand_subword_p (const_rtx op, int i)
{
  enum machine_mode innermode, innermostmode;
  int offset;
  if (GET_CODE (op) != SUBREG)
    return false;
  innermode = GET_MODE (op);
  innermostmode = GET_MODE (SUBREG_REG (op));
  offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
  /* The SUBREG_BYTE represents offset, as if the value were stored in
     memory, except for a paradoxical subreg where we define
     SUBREG_BYTE to be 0; undo this exception as in
     simplify_subreg.  */
  if (SUBREG_BYTE (op) == 0
      && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
    {
      int difference = (GET_MODE_SIZE (innermostmode)
			- GET_MODE_SIZE (innermode));
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  if (offset >= GET_MODE_SIZE (innermostmode)
      || offset <= -GET_MODE_SIZE (word_mode))
    return true;

  return false;
}
/* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
   MODE is any multi-word or full-word mode that lacks a move_insn
   pattern.  Note that you will get better code if you define such
   patterns, even if they must turn into multiple assembler instructions.  */

static rtx
emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
{
  rtx last_insn = 0;
  rtx seq, inner;
  bool need_clobber;
  int i;

  gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);

  /* If X is a push on the stack, do the push now and replace
     X with a reference to the stack pointer.  */
  if (push_operand (x, mode))
    x = emit_move_resolve_push (mode, x);

  /* If we are in reload, see if either operand is a MEM whose address
     is scheduled for replacement.  */
  if (reload_in_progress && MEM_P (x)
      && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
    x = replace_equiv_address_nv (x, inner);
  if (reload_in_progress && MEM_P (y)
      && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
    y = replace_equiv_address_nv (y, inner);

  start_sequence ();

  need_clobber = false;
  for (i = 0;
       i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
       i++)
    {
      rtx xpart = operand_subword (x, i, 1, mode);
      rtx ypart;

      /* Do not generate code for a move if it would come entirely
	 from the undefined bits of a paradoxical subreg.  */
      if (undefined_operand_subword_p (y, i))
	continue;

      ypart = operand_subword (y, i, 1, mode);

      /* If we can't get a part of Y, put Y into memory if it is a
	 constant.  Otherwise, force it into a register.  Then we must
	 be able to get a part of Y.  */
      if (ypart == 0 && CONSTANT_P (y))
	{
	  y = use_anchored_address (force_const_mem (mode, y));
	  ypart = operand_subword (y, i, 1, mode);
	}
      else if (ypart == 0)
	ypart = operand_subword_force (y, i, mode);

      gcc_assert (xpart && ypart);

      need_clobber |= (GET_CODE (xpart) == SUBREG);

      last_insn = emit_move_insn (xpart, ypart);
    }

  seq = get_insns ();
  end_sequence ();

  /* Show the output dies here.  This is necessary for SUBREGs
     of pseudos since we cannot track their lifetimes correctly;
     hard regs shouldn't appear here except as return values.
     We never want to emit such a clobber after reload.  */
  if (x != y
      && ! (reload_in_progress || reload_completed)
      && need_clobber != 0)
    emit_clobber (x);

  emit_insn (seq);

  return last_insn;
}
/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  enum insn_code code;

  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);

  code = optab_handler (mov_optab, mode);
  if (code != CODE_FOR_nothing)
    return emit_insn (GEN_FCN (code) (x, y));

  /* Expand complex moves by moving real part and imag part.  */
  if (COMPLEX_MODE_P (mode))
    return emit_move_complex (mode, x, y);

  if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
      || ALL_FIXED_POINT_MODE_P (mode))
    {
      rtx result = emit_move_via_integer (mode, x, y, true);

      /* If we can't find an integer mode, use multi words.  */
      if (result)
	return result;
      else
	return emit_move_multi_word (mode, x, y);
    }

  if (GET_MODE_CLASS (mode) == MODE_CC)
    return emit_move_ccmode (mode, x, y);

  /* Try using a move pattern for the corresponding integer mode.  This is
     only safe when simplify_subreg can convert MODE constants into integer
     constants.  At present, it can only do this reliably if the value
     fits within a HOST_WIDE_INT.  */
  if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
    {
      rtx ret = emit_move_via_integer (mode, x, y, lra_in_progress);

      if (ret)
	{
	  if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
	    return ret;
	}
    }

  return emit_move_multi_word (mode, x, y);
}
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (rtx x, rtx y)
{
  enum machine_mode mode = GET_MODE (x);
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;

  gcc_assert (mode != BLKmode
	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));

  if (CONSTANT_P (y))
    {
      if (optimize
	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
	  && (last_insn = compress_float_constant (x, y)))
	return last_insn;

      y_cst = y;

      if (!targetm.legitimate_constant_p (mode, y))
	{
	  y = force_const_mem (mode, y);

	  /* If the target's cannot_force_const_mem prevented the spill,
	     assume that the target's move expanders will also take care
	     of the non-legitimate constant.  */
	  if (!y)
	    y = y_cst;
	  else
	    y = use_anchored_address (y);
	}
    }

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (MEM_P (x)
      && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
					 MEM_ADDR_SPACE (x))
	  && ! push_operand (x, GET_MODE (x))))
    x = validize_mem (x);

  if (MEM_P (y)
      && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
					MEM_ADDR_SPACE (y)))
    y = validize_mem (y);

  gcc_assert (mode != BLKmode);

  last_insn = emit_move_insn_1 (x, y);

  if (y_cst && REG_P (x)
      && (set = single_set (last_insn)) != NULL_RTX
      && SET_DEST (set) == x
      && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));

  return last_insn;
}
/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (rtx x, rtx y)
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;
  int oldcost, newcost;
  bool speed = optimize_insn_for_speed_p ();

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  if (targetm.legitimate_constant_p (dstmode, y))
    oldcost = set_src_cost (y, speed);
  else
    oldcost = set_src_cost (force_const_mem (dstmode, y), speed);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
	continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
	continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (targetm.legitimate_constant_p (srcmode, trunc_y))
	{
	  /* Skip if the target needs extra instructions to perform
	     the extension.  */
	  if (!insn_operand_matches (ic, 1, trunc_y))
	    continue;
	  /* This is valid, but may not be cheaper than the original.  */
	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
				  speed);
	  if (oldcost < newcost)
	    continue;
	}
      else if (float_extend_from_mem[dstmode][srcmode])
	{
	  trunc_y = force_const_mem (srcmode, trunc_y);
	  /* This is valid, but may not be cheaper than the original.  */
	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
				  speed);
	  if (oldcost < newcost)
	    continue;
	  trunc_y = validize_mem (trunc_y);
	}
      else
	continue;

      /* For CSE's benefit, force the compressed constant pool entry
	 into a new pseudo.  This constant may be used in different modes,
	 and if not, combine will put things back together for us.  */
      trunc_y = force_reg (srcmode, trunc_y);

      /* If x is a hard register, perform the extension into a pseudo,
	 so that e.g. stack realignment code is aware of it.  */
      rtx target = x;
      if (REG_P (x) && HARD_REGISTER_P (x))
	target = gen_reg_rtx (dstmode);

      emit_unop_insn (ic, target, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (REG_P (target))
	set_unique_reg_note (last_insn, REG_EQUAL, y);

      if (target == x)
	return last_insn;
      return emit_move_insn (x, target);
    }

  return NULL_RTX;
}
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (rtx size, int extra, int below)
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (Pmode, size, extra));
  else if (REG_P (size) && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp,
			     gen_int_mode (extra, Pmode),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
	temp = plus_constant (Pmode, temp, extra);
    }
  else
    {
      if (CONST_INT_P (size))
	temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
			      -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, plus_constant (Pmode, size,
							       extra)));
      else
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
/* A utility routine that returns the base of an auto-inc memory, or NULL.  */

static rtx
mem_autoinc_base (rtx mem)
{
  if (MEM_P (mem))
    {
      rtx addr = XEXP (mem, 0);
      if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
	return XEXP (addr, 0);
    }
  return NULL;
}
/* A utility routine used here, in reload, and in try_split.  The insns
   after PREV up to and including LAST are known to adjust the stack,
   with a final value of END_ARGS_SIZE.  Iterate backward from LAST
   placing notes as appropriate.  PREV may be NULL, indicating the
   entire insn sequence prior to LAST should be scanned.

   The set of allowed stack pointer modifications is small:
     (1) One or more auto-inc style memory references (aka pushes),
     (2) One or more addition/subtraction with the SP as destination,
     (3) A single move insn with the SP as destination,
     (4) A call_pop insn,
     (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.

   Insns in the sequence that do not modify the SP are ignored,
   except for noreturn calls.

   The return value is the amount of adjustment that can be trivially
   verified, via immediate operand or auto-inc.  If the adjustment
   cannot be trivially extracted, the return value is INT_MIN.  */
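
/* Case (1) above covers the classic push, e.g.
   (set (mem:SI (pre_dec (reg sp))) (reg r0)), which this routine reports
   as an adjustment of -4 on a 32-bit target; case (2) covers
   (set (reg sp) (plus (reg sp) (const_int -16))), reported as -16.  */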
HOST_WIDE_INT
find_args_size_adjust (rtx insn)
{
  rtx dest, set, pat;
  int i;

  pat = PATTERN (insn);
  set = NULL;

  /* Look for a call_pop pattern.  */
  if (CALL_P (insn))
    {
      /* We have to allow non-call_pop patterns for the case
	 of emit_single_push_insn of a TLS address.  */
      if (GET_CODE (pat) != PARALLEL)
	return 0;

      /* All call_pop have a stack pointer adjust in the parallel.
	 The call itself is always first, and the stack adjust is
	 usually last, so search from the end.  */
      for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
	{
	  set = XVECEXP (pat, 0, i);
	  if (GET_CODE (set) != SET)
	    continue;
	  dest = SET_DEST (set);
	  if (dest == stack_pointer_rtx)
	    break;
	}
      /* We'd better have found the stack pointer adjust.  */
      if (i == 0)
	return 0;
      /* Fall through to process the extracted SET and DEST
	 as if it was a standalone insn.  */
    }
  else if (GET_CODE (pat) == SET)
    set = pat;
  else if ((set = single_set (insn)) != NULL)
    ;
  else if (GET_CODE (pat) == PARALLEL)
    {
      /* ??? Some older ports use a parallel with a stack adjust
	 and a store for a PUSH_ROUNDING pattern, rather than a
	 PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
      /* ??? See h8300 and m68k, pushqi1.  */
      for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
	{
	  set = XVECEXP (pat, 0, i);
	  if (GET_CODE (set) != SET)
	    continue;
	  dest = SET_DEST (set);
	  if (dest == stack_pointer_rtx)
	    break;

	  /* We do not expect an auto-inc of the sp in the parallel.  */
	  gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
			       != stack_pointer_rtx);
	}
      if (i < 0)
	return 0;
    }
  else
    return 0;

  dest = SET_DEST (set);

  /* Look for direct modifications of the stack pointer.  */
  if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
    {
      /* Look for a trivial adjustment, otherwise assume nothing.  */
      /* Note that the SPU restore_stack_block pattern refers to
	 the stack pointer in V4SImode.  Consider that non-trivial.  */
      if (SCALAR_INT_MODE_P (GET_MODE (dest))
	  && GET_CODE (SET_SRC (set)) == PLUS
	  && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
	  && CONST_INT_P (XEXP (SET_SRC (set), 1)))
	return INTVAL (XEXP (SET_SRC (set), 1));
      /* ??? Reload can generate no-op moves, which will be cleaned
	 up later.  Recognize it and continue searching.  */
      else if (rtx_equal_p (dest, SET_SRC (set)))
	return 0;
      else
	return HOST_WIDE_INT_MIN;
    }
  else
    {
      rtx mem, addr;

      /* Otherwise only think about autoinc patterns.  */
      if (mem_autoinc_base (dest) == stack_pointer_rtx)
	{
	  mem = dest;
	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
			       != stack_pointer_rtx);
	}
      else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
	mem = SET_SRC (set);
      else
	return 0;

      addr = XEXP (mem, 0);
      switch (GET_CODE (addr))
	{
	case PRE_INC:
	case POST_INC:
	  return GET_MODE_SIZE (GET_MODE (mem));
	case PRE_DEC:
	case POST_DEC:
	  return -GET_MODE_SIZE (GET_MODE (mem));
	case PRE_MODIFY:
	case POST_MODIFY:
	  addr = XEXP (addr, 1);
	  gcc_assert (GET_CODE (addr) == PLUS);
	  gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
	  gcc_assert (CONST_INT_P (XEXP (addr, 1)));
	  return INTVAL (XEXP (addr, 1));
	default:
	  gcc_unreachable ();
	}
    }
}
int
fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
{
  int args_size = end_args_size;
  bool saw_unknown = false;
  rtx insn;

  for (insn = last; insn != prev; insn = PREV_INSN (insn))
    {
      HOST_WIDE_INT this_delta;

      if (!NONDEBUG_INSN_P (insn))
	continue;

      this_delta = find_args_size_adjust (insn);
      if (this_delta == 0)
	{
	  if (!CALL_P (insn)
	      || ACCUMULATE_OUTGOING_ARGS
	      || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
	    continue;
	}

      gcc_assert (!saw_unknown);
      if (this_delta == HOST_WIDE_INT_MIN)
	saw_unknown = true;

      add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
#ifdef STACK_GROWS_DOWNWARD
      this_delta = -(unsigned HOST_WIDE_INT) this_delta;
#endif
      args_size -= this_delta;
    }

  return saw_unknown ? INT_MIN : args_size;
}
#ifdef PUSH_ROUNDING
/* Emit single push insn.  */

static void
emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is push pattern, use it.  Otherwise try old way of throwing
     MEM representing push operation to move expander.  */
  icode = optab_handler (push_optab, mode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];

      create_input_operand (&ops[0], x, mode);
      if (maybe_expand_insn (icode, 1, ops))
	return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  /* If we are to pad downward, adjust the stack pointer first and
     then store X into the stack location using an offset.  This is
     because emit_move_insn does not know how to pad; it does not have
     such a capability.  */
  else if (FUNCTION_ARG_PADDING (mode, type) == downward)
    {
      unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
      HOST_WIDE_INT offset;

      emit_move_insn (stack_pointer_rtx,
		      expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
				    sub_optab,
#else
				    add_optab,
#endif
				    stack_pointer_rtx,
				    gen_int_mode (rounded_size, Pmode),
				    NULL_RTX, 0, OPTAB_LIB_WIDEN));

      offset = (HOST_WIDE_INT) padding_size;
#ifdef STACK_GROWS_DOWNWARD
      if (STACK_PUSH_CODE == POST_DEC)
	/* We have already decremented the stack pointer, so get the
	   previous value.  */
	offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
	/* We have already incremented the stack pointer, so get the
	   previous value.  */
	offset -= (HOST_WIDE_INT) rounded_size;
#endif
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				gen_int_mode (offset, Pmode));
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				gen_int_mode (-(HOST_WIDE_INT) rounded_size,
					      Pmode));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				gen_int_mode (rounded_size, Pmode));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (cfun->tail_call_marked)
	/* Function incoming arguments may overlap with sibling call
	   outgoing arguments and we cannot allow reordering of reads
	   from function arguments with stores to outgoing arguments
	   of sibling calls.  */
	set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
4061 emit_single_push_insn (enum machine_mode mode
, rtx x
, tree type
)
4063 int delta
, old_delta
= stack_pointer_delta
;
4064 rtx prev
= get_last_insn ();
4067 emit_single_push_insn_1 (mode
, x
, type
);
4069 last
= get_last_insn ();
4071 /* Notice the common case where we emitted exactly one insn. */
4072 if (PREV_INSN (last
) == prev
)
4074 add_reg_note (last
, REG_ARGS_SIZE
, GEN_INT (stack_pointer_delta
));
4078 delta
= fixup_args_size_notes (prev
, last
, stack_pointer_delta
);
4079 gcc_assert (delta
== INT_MIN
|| delta
== old_delta
);
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   bytes of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL bytes.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */
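/* Worked example, added for exposition (hypothetical 32-bit target):
   pushing a 1-byte argument whose slot is rounded up to 4 bytes leaves
   rounded_size - size = 3 bytes of padding.  Where the byte ends up
   depends on FUNCTION_ARG_PADDING:

       sp -> [pad ][pad ][pad ][data]    downward padding
       sp -> [data][pad ][pad ][pad ]    upward padding  */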
void
emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
		unsigned int align, int partial, rtx reg, int extra,
		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
		rtx alignment_pad)
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x;

  if (mode == BLKmode
      || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used;
      int offset;
      int skip;

      offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      used = partial - offset;

      if (mode != BLKmode)
	{
	  /* A value is to be stored in an insufficiently aligned
	     stack slot; copy via a suitably aligned slot if
	     necessary.  */
	  size = GEN_INT (GET_MODE_SIZE (mode));
	  if (!MEM_P (xinner))
	    {
	      temp = assign_temp (type, 1, 1);
	      emit_move_insn (temp, xinner);
	      xinner = temp;
	    }
	}

      gcc_assert (size);

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && PUSH_ARGS
	  && CONST_INT_P (size)
	  && skip == 0
	  && MEM_ALIGN (xinner) >= align
	  && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
	      || align >= BIGGEST_ALIGNMENT
	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
		  == (align / BITS_PER_UNIT)))
	  && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
	}
      else
#endif /* PUSH_ROUNDING  */
	{
	  rtx target;

	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (CONST_INT_P (size))
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     gen_int_mode (used, GET_MODE (size)),
				     NULL_RTX, 0, OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (CONST_INT_P (args_so_far))
	    temp = memory_address (BLKmode,
				   plus_constant (Pmode, args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (Pmode,
						  gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));

	  if (!ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If the source is referenced relative to the stack pointer,
		 copy it to another register to stabilize it.  We do not need
		 to do this if we know that we won't be changing sp.  */

	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
		temp = copy_to_reg (temp);
	    }

	  target = gen_rtx_MEM (BLKmode, temp);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (target, align);

	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # bytes of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  Convert OFFSET to words too.  */
      not_stack = (partial - offset) / UNITS_PER_WORD;
      offset /= UNITS_PER_WORD;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
      for (i = size - 1; i >= not_stack; i--)
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)),
			  reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
	emit_single_push_insn (mode, x, type);
      else
#endif
	{
	  if (CONST_INT_P (args_so_far))
	    addr
	      = memory_address (mode,
				plus_constant (Pmode, args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
						       args_so_far));
	  dest = gen_rtx_MEM (mode, addr);

	  /* We do *not* set_mem_attributes here, because incoming arguments
	     may overlap with sibling call outgoing arguments and we cannot
	     allow reordering of reads from function arguments with stores
	     to outgoing arguments of sibling calls.  We do, however, want
	     to record the alignment of the stack slot.  */
	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (dest, align);

	  emit_move_insn (dest, x);
	}
    }

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x, type, -1);
      else
	{
	  gcc_assert (partial % UNITS_PER_WORD == 0);
	  move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
	}
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (rtx x)
{
  return (optimize
	  || x == 0
	  /* Only registers can be subtargets.  */
	  || !REG_P (x)
	  /* Don't use hard regs to avoid extending their life.  */
	  || REGNO (x) < FIRST_PSEUDO_REGISTER
	  ? 0 : x);
}

/* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
   FIELD is a bitfield.  Returns true if the optimization was successful,
   and there's nothing else to do.  */
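/* Example, added for exposition:

       struct S { unsigned flag : 1; unsigned rest : 31; } s;

       s.flag ^= 1;       1-bit field: a single xor suffices
       s.rest |= 0x10;    ior/xor with a constant: the value is masked
			  and shifted into place

   Either form can be expanded as one read-modify-write operation on the
   word containing the field instead of an extract/modify/insert
   sequence.  */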
static bool
optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
				 unsigned HOST_WIDE_INT bitpos,
				 unsigned HOST_WIDE_INT bitregion_start,
				 unsigned HOST_WIDE_INT bitregion_end,
				 enum machine_mode mode1, rtx str_rtx,
				 tree to, tree src)
{
  enum machine_mode str_mode = GET_MODE (str_rtx);
  unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
  tree op0, op1;
  rtx value, result;
  optab binop;
  gimple srcstmt;
  enum tree_code code;

  if (mode1 != VOIDmode
      || bitsize >= BITS_PER_WORD
      || str_bitsize > BITS_PER_WORD
      || TREE_SIDE_EFFECTS (to)
      || TREE_THIS_VOLATILE (to))
    return false;

  STRIP_NOPS (src);
  if (TREE_CODE (src) != SSA_NAME)
    return false;
  if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
    return false;

  srcstmt = get_gimple_for_ssa_name (src);
  if (!srcstmt
      || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
    return false;

  code = gimple_assign_rhs_code (srcstmt);

  op0 = gimple_assign_rhs1 (srcstmt);

  /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
     to find its initialization.  Hopefully the initialization will
     be from a bitfield load.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      gimple op0stmt = get_gimple_for_ssa_name (op0);

      /* We want to eventually have OP0 be the same as TO, which
	 should be a bitfield.  */
      if (!op0stmt
	  || !is_gimple_assign (op0stmt)
	  || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
	return false;
      op0 = gimple_assign_rhs1 (op0stmt);
    }

  op1 = gimple_assign_rhs2 (srcstmt);

  if (!operand_equal_p (to, op0, 0))
    return false;

  if (MEM_P (str_rtx))
    {
      unsigned HOST_WIDE_INT offset1;

      if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
	str_mode = word_mode;
      str_mode = get_best_mode (bitsize, bitpos,
				bitregion_start, bitregion_end,
				MEM_ALIGN (str_rtx), str_mode, 0);
      if (str_mode == VOIDmode)
	return false;
      str_bitsize = GET_MODE_BITSIZE (str_mode);

      offset1 = bitpos;
      bitpos %= str_bitsize;
      offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
      str_rtx = adjust_address (str_rtx, str_mode, offset1);
    }
  else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
    return false;

  /* If the bit field covers the whole REG/MEM, store_field
     will likely generate better code.  */
  if (bitsize >= str_bitsize)
    return false;

  /* We can't handle fields split across multiple entities.  */
  if (bitpos + bitsize > str_bitsize)
    return false;

  if (BYTES_BIG_ENDIAN)
    bitpos = str_bitsize - bitpos - bitsize;

  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* For now, just optimize the case of the topmost bitfield
	 where we don't need to do any masking and also
	 1 bit bitfields where xor can be used.
	 We might win by one instruction for the other bitfields
	 too if insv/extv instructions aren't used, so that
	 can be added later.  */
      if (bitpos + bitsize != str_bitsize
	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
	break;

      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      binop = code == PLUS_EXPR ? add_optab : sub_optab;
      if (bitsize == 1 && bitpos + bitsize != str_bitsize)
	{
	  value = expand_and (str_mode, value, const1_rtx, NULL);
	  binop = xor_optab;
	}
      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
      result = expand_binop (str_mode, binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      if (TREE_CODE (op1) != INTEGER_CST)
	break;
      value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
      value = convert_modes (str_mode,
			     TYPE_MODE (TREE_TYPE (op1)), value,
			     TYPE_UNSIGNED (TREE_TYPE (op1)));

      /* We may be accessing data outside the field, which means
	 we can alias adjacent data.  */
      if (MEM_P (str_rtx))
	{
	  str_rtx = shallow_copy_rtx (str_rtx);
	  set_mem_alias_set (str_rtx, 0);
	  set_mem_expr (str_rtx, 0);
	}

      binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
      if (bitpos + bitsize != str_bitsize)
	{
	  rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
				   str_mode);
	  value = expand_and (str_mode, value, mask, NULL_RTX);
	}
      value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
      result = expand_binop (str_mode, binop, str_rtx,
			     value, str_rtx, 1, OPTAB_WIDEN);
      if (result != str_rtx)
	emit_move_insn (str_rtx, result);
      return true;

    default:
      break;
    }

  return false;
}
/* In the C++ memory model, consecutive bit fields in a structure are
   considered one memory location.

   Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
   returns the bit range of consecutive bits in which this COMPONENT_REF
   belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
   and *OFFSET may be adjusted in the process.

   If the access does not need to be restricted, 0 is returned in both
   *BITSTART and *BITEND.  */
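/* Example, added for exposition:

       struct S { char a; int b : 7; int c : 9; char d; } s;

   Under the C++11 memory model `b' and `c' form one memory location, so
   a store to s.b may read and rewrite the bits of `c' but must not touch
   `a' or `d'.  *BITSTART and *BITEND delimit that representative
   region.  */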
static void
get_bit_range (unsigned HOST_WIDE_INT *bitstart,
	       unsigned HOST_WIDE_INT *bitend,
	       tree exp,
	       HOST_WIDE_INT *bitpos,
	       tree *offset)
{
  HOST_WIDE_INT bitoffset;
  tree field, repr;

  gcc_assert (TREE_CODE (exp) == COMPONENT_REF);

  field = TREE_OPERAND (exp, 1);
  repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
  /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
     need to limit the range we can access.  */
  if (!repr)
    {
      *bitstart = *bitend = 0;
      return;
    }

  /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
     part of a larger bit field, then the representative does not serve any
     useful purpose.  This can occur in Ada.  */
  if (handled_component_p (TREE_OPERAND (exp, 0)))
    {
      enum machine_mode rmode;
      HOST_WIDE_INT rbitsize, rbitpos;
      tree roffset;
      int unsignedp;
      int volatilep = 0;
      get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
			   &roffset, &rmode, &unsignedp, &volatilep, false);
      if ((rbitpos % BITS_PER_UNIT) != 0)
	{
	  *bitstart = *bitend = 0;
	  return;
	}
    }

  /* Compute the adjustment to bitpos from the offset of the field
     relative to the representative.  DECL_FIELD_OFFSET of field and
     repr are the same by construction if they are not constants,
     see finish_bitfield_layout.  */
  if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
      && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
    bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
		 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
  else
    bitoffset = 0;
  bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
		- tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));

  /* If the adjustment is larger than bitpos, we would have a negative bit
     position for the lower bound and this may wreak havoc later.  Adjust
     offset and bitpos to make the lower bound non-negative in that case.  */
  if (bitoffset > *bitpos)
    {
      HOST_WIDE_INT adjust = bitoffset - *bitpos;
      gcc_assert ((adjust % BITS_PER_UNIT) == 0);

      if (*offset == NULL_TREE)
	*offset = size_int (-adjust / BITS_PER_UNIT);
      else
	*offset
	  = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
      *bitpos += adjust;
    }

  *bitstart = *bitpos - bitoffset;

  *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
}
/* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
   in memory and has non-BLKmode.  DECL_RTL must not be a MEM; if
   DECL_RTL was not set yet, return NORTL.  */

static inline bool
addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
{
  if (TREE_CODE (addr) != ADDR_EXPR)
    return false;

  tree base = TREE_OPERAND (addr, 0);

  if (!DECL_P (base)
      || TREE_ADDRESSABLE (base)
      || DECL_MODE (base) == BLKmode)
    return false;

  if (!DECL_RTL_SET_P (base))
    return nortl;

  return (!MEM_P (DECL_RTL (base)));
}

/* Returns true if the MEM_REF REF refers to an object that does not
   reside in memory and has non-BLKmode.  */

static inline bool
mem_ref_refers_to_non_mem_p (tree ref)
{
  tree base = TREE_OPERAND (ref, 0);
  return addr_expr_of_non_mem_decl_p_1 (base, false);
}
/* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
   is true, try generating a nontemporal store.  */
void
expand_assignment (tree to, tree from, bool nontemporal)
{
  rtx to_rtx = 0;
  rtx result;
  enum machine_mode mode;
  unsigned int align;
  enum insn_code icode;

  /* Don't crash if the lhs of the assignment was erroneous.  */
  if (TREE_CODE (to) == ERROR_MARK)
    {
      expand_normal (from);
      return;
    }

  /* Optimize away no-op moves without side-effects.  */
  if (operand_equal_p (to, from, 0))
    return;

  /* Handle misaligned stores.  */
  mode = TYPE_MODE (TREE_TYPE (to));
  if ((TREE_CODE (to) == MEM_REF
       || TREE_CODE (to) == TARGET_MEM_REF)
      && mode != BLKmode
      && !mem_ref_refers_to_non_mem_p (to)
      && ((align = get_object_alignment (to))
	  < GET_MODE_ALIGNMENT (mode))
      && (((icode = optab_handler (movmisalign_optab, mode))
	   != CODE_FOR_nothing)
	  || SLOW_UNALIGNED_ACCESS (mode, align)))
    {
      rtx reg, mem;

      reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
      reg = force_not_mem (reg);
      mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      if (icode != CODE_FOR_nothing)
	{
	  struct expand_operand ops[2];

	  create_fixed_operand (&ops[0], mem);
	  create_input_operand (&ops[1], reg, mode);
	  /* The movmisalign<mode> pattern cannot fail, else the assignment
	     would silently be omitted.  */
	  expand_insn (icode, 2, ops);
	}
      else
	store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
      return;
    }
  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, has the same
     problem.  Same for (partially) storing into a non-memory object.  */
  if (handled_component_p (to)
      || (TREE_CODE (to) == MEM_REF
	  && mem_ref_refers_to_non_mem_p (to))
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      unsigned HOST_WIDE_INT bitregion_start = 0;
      unsigned HOST_WIDE_INT bitregion_end = 0;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &volatilep, true);

      /* Make sure bitpos is not negative, it can wreak havoc later.  */
      if (bitpos < 0)
	{
	  gcc_assert (offset == NULL_TREE);
	  offset = size_int (bitpos >> (BITS_PER_UNIT == 8
					? 3 : exact_log2 (BITS_PER_UNIT)));
	  bitpos &= BITS_PER_UNIT - 1;
	}

      if (TREE_CODE (to) == COMPONENT_REF
	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
	get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
      /* The C++ memory model naturally applies to byte-aligned fields.
	 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
	 BITSIZE are not byte-aligned, there is no need to limit the range
	 we can access.  This can occur with packed structures in Ada.  */
      else if (bitsize > 0
	       && bitsize % BITS_PER_UNIT == 0
	       && bitpos % BITS_PER_UNIT == 0)
	{
	  bitregion_start = bitpos;
	  bitregion_end = bitpos + bitsize - 1;
	}

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* If the field has a mode, we want to access it in the
	 field's mode, not the computed mode.
	 If a MEM has VOIDmode (external with incomplete type),
	 use BLKmode for it instead.  */
      if (MEM_P (to_rtx))
	{
	  if (mode1 != VOIDmode)
	    to_rtx = adjust_address (to_rtx, mode1, 0);
	  else if (GET_MODE (to_rtx) == VOIDmode)
	    to_rtx = adjust_address (to_rtx, BLKmode, 0);
	}

      if (offset != 0)
	{
	  enum machine_mode address_mode;
	  rtx offset_rtx;

	  if (!MEM_P (to_rtx))
	    {
	      /* We can get constant negative offsets into arrays with broken
		 user code.  Translate this to a trap instead of ICEing.  */
	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
	      expand_builtin_trap ();
	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
	    }

	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
	  address_mode = get_address_mode (to_rtx);
	  if (GET_MODE (offset_rtx) != address_mode)
	    offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);

	  /* The check for a constant address in TO_RTX not having VOIDmode
	     is probably no longer necessary.  */
	  if (MEM_P (to_rtx)
	      && GET_MODE (to_rtx) == BLKmode
	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
	      && bitsize > 0
	      && (bitpos % bitsize) == 0
	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
	    {
	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
	      bitregion_start = 0;
	      if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
		bitregion_end -= bitpos;
	      bitpos = 0;
	    }

	  to_rtx = offset_address (to_rtx, offset_rtx,
				   highest_pow2_factor_for_target (to,
								   offset));
	}

      /* No action is needed if the target is not a memory and the field
	 lies completely outside that target.  This can occur if the source
	 code contains an out-of-bounds access to a small array.  */
      if (!MEM_P (to_rtx)
	  && GET_MODE (to_rtx) != BLKmode
	  && (unsigned HOST_WIDE_INT) bitpos
	     >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
	{
	  expand_normal (from);
	  result = NULL;
	}
      /* Handle expand_expr of a complex value returning a CONCAT.  */
      else if (GET_CODE (to_rtx) == CONCAT)
	{
	  unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
	  if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
	      && bitpos == 0
	      && bitsize == mode_bitsize)
	    result = store_expr (from, to_rtx, false, nontemporal);
	  else if (bitsize == mode_bitsize / 2
		   && (bitpos == 0 || bitpos == mode_bitsize / 2))
	    result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
				 nontemporal);
	  else if (bitpos + bitsize <= mode_bitsize / 2)
	    result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
				  bitregion_start, bitregion_end,
				  mode1, from,
				  get_alias_set (to), nontemporal);
	  else if (bitpos >= mode_bitsize / 2)
	    result = store_field (XEXP (to_rtx, 1), bitsize,
				  bitpos - mode_bitsize / 2,
				  bitregion_start, bitregion_end,
				  mode1, from,
				  get_alias_set (to), nontemporal);
	  else if (bitpos == 0 && bitsize == mode_bitsize)
	    {
	      rtx from_rtx;
	      result = expand_normal (from);
	      from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
					      TYPE_MODE (TREE_TYPE (from)), 0);
	      emit_move_insn (XEXP (to_rtx, 0),
			      read_complex_part (from_rtx, false));
	      emit_move_insn (XEXP (to_rtx, 1),
			      read_complex_part (from_rtx, true));
	    }
	  else
	    {
	      rtx temp = assign_stack_temp (GET_MODE (to_rtx),
					    GET_MODE_SIZE (GET_MODE (to_rtx)));
	      write_complex_part (temp, XEXP (to_rtx, 0), false);
	      write_complex_part (temp, XEXP (to_rtx, 1), true);
	      result = store_field (temp, bitsize, bitpos,
				    bitregion_start, bitregion_end,
				    mode1, from,
				    get_alias_set (to), nontemporal);
	      emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
	      emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
	    }
	}
      else
	{
	  if (MEM_P (to_rtx))
	    {
	      /* If the field is at offset zero, we could have been given the
		 DECL_RTX of the parent struct.  Don't munge it.  */
	      to_rtx = shallow_copy_rtx (to_rtx);
	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
	      if (volatilep)
		MEM_VOLATILE_P (to_rtx) = 1;
	    }

	  if (optimize_bitfield_assignment_op (bitsize, bitpos,
					       bitregion_start, bitregion_end,
					       mode1, to_rtx, to, from))
	    result = NULL;
	  else
	    result = store_field (to_rtx, bitsize, bitpos,
				  bitregion_start, bitregion_end,
				  mode1, from,
				  get_alias_set (to), nontemporal);
	}

      if (result)
	preserve_temp_slots (result);
      pop_temp_slots ();
      return;
    }
  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  The same is true for SSA names.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
      && COMPLETE_TYPE_P (TREE_TYPE (from))
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! (((TREE_CODE (to) == VAR_DECL
	      || TREE_CODE (to) == PARM_DECL
	      || TREE_CODE (to) == RESULT_DECL)
	     && REG_P (DECL_RTL (to)))
	    || TREE_CODE (to) == SSA_NAME))
    {
      rtx value;

      push_temp_slots ();
      value = expand_normal (from);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	{
	  if (GET_CODE (value) == PARALLEL)
	    emit_group_move (to_rtx, value);
	  else
	    emit_group_load (to_rtx, value, TREE_TYPE (from),
			     int_size_in_bytes (TREE_TYPE (from)));
	}
      else if (GET_CODE (value) == PARALLEL)
	emit_group_store (to_rtx, value, TREE_TYPE (from),
			  int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
	{
	  /* Handle calls that return BLKmode values in registers.  */
	  if (REG_P (value))
	    copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
	  else
	    emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
	}
      else
	{
	  if (POINTER_TYPE_P (TREE_TYPE (to)))
	    value = convert_memory_address_addr_space
		      (GET_MODE (to_rtx), value,
		       TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));

	  emit_move_insn (to_rtx, value);
	}
      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.  */
  to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();

      /* If the source is itself a return value, it still is in a pseudo at
	 this point so we can move it back to the return register directly.  */
      if (REG_P (to_rtx)
	  && TYPE_MODE (TREE_TYPE (from)) == BLKmode
	  && TREE_CODE (from) != CALL_EXPR)
	temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
      else
	temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	{
	  if (GET_CODE (temp) == PARALLEL)
	    emit_group_move (to_rtx, temp);
	  else
	    emit_group_load (to_rtx, temp, TREE_TYPE (from),
			     int_size_in_bytes (TREE_TYPE (from)));
	}
      else if (temp)
	emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL
      && TREE_CODE (from) == INDIRECT_REF
      && ADDR_SPACE_GENERIC_P
	   (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
      && refs_may_alias_p (to, from)
      && cfun->returns_struct
      && !cfun->returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_normal (from);

      emit_library_call (memmove_libfunc, LCT_NORMAL,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TYPE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));

      preserve_temp_slots (to_rtx);
      pop_temp_slots ();
      return;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, 0, nontemporal);
  preserve_temp_slots (result);
  pop_temp_slots ();
  return;
}
/* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
   succeeded, false otherwise.  */

static bool
emit_storent_insn (rtx to, rtx from)
{
  struct expand_operand ops[2];
  enum machine_mode mode = GET_MODE (to);
  enum insn_code code = optab_handler (storent_optab, mode);

  if (code == CODE_FOR_nothing)
    return false;

  create_fixed_operand (&ops[0], to);
  create_input_operand (&ops[1], from, mode);
  return maybe_expand_insn (code, 2, ops);
}
/* Generate code for computing expression EXP,
   and storing the value into TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If CALL_PARAM_P is nonzero, this is a store into a call param on the
   stack, and block moves may need to be treated specially.

   If NONTEMPORAL is true, try using a nontemporal store instruction.  */
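/* Illustration, added for exposition: for

       char buf[16] = "abc";

   the STRING_CST path below stores the leading bytes by pieces directly
   from the literal and clears the remaining bytes of the array, instead
   of emitting a block copy from an anonymous constant plus a separate
   clear.  */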
rtx
store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
{
  rtx temp;
  rtx alt_rtl = NULL_RTX;
  location_t loc = curr_insn_location ();

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
	 branch and an rvalue in the other.  Here, we resolve attempts to
	 store the throw expression's nonexistent result.  */
      gcc_assert (!call_param_p);
      expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
			 nontemporal);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
      store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
		  nontemporal);
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
		  nontemporal);
      emit_label (lab2);
      OK_DEFER_POP;

      return NULL_RTX;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* We can do the conversion inside EXP, which will often result
	 in some optimizations.  Do the conversion in two steps: first
	 change the signedness, if needed, then the extend.  But don't
	 do this if the type of EXP is a subtype of something else
	 since then the conversion might involve more than just
	 converting modes.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0
	  && GET_MODE_PRECISION (GET_MODE (target))
	     == TYPE_PRECISION (TREE_TYPE (exp)))
	{
	  if (TYPE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    {
	      /* Some types, e.g. Fortran's logical*4, won't have a signed
		 version, so use the mode instead.  */
	      tree ntype
		= (signed_or_unsigned_type_for
		   (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
	      if (ntype == NULL)
		ntype = lang_hooks.types.type_for_mode
		  (TYPE_MODE (TREE_TYPE (exp)),
		   SUBREG_PROMOTED_UNSIGNED_P (target));

	      exp = fold_convert_loc (loc, ntype, exp);
	    }

	  exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
				  (GET_MODE (SUBREG_REG (target)),
				   SUBREG_PROMOTED_UNSIGNED_P (target)),
				  exp);

	  inner_target = SUBREG_REG (target);
	}

      temp = expand_expr (exp, inner_target, VOIDmode,
			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	{
	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
				GET_MODE (target), temp,
				SUBREG_PROMOTED_UNSIGNED_P (target));
	}

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));

      return NULL_RTX;
    }
  else if ((TREE_CODE (exp) == STRING_CST
	    || (TREE_CODE (exp) == MEM_REF
		&& TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		   == STRING_CST
		&& integer_zerop (TREE_OPERAND (exp, 1))))
	   && !nontemporal && !call_param_p
	   && MEM_P (target))
    {
      /* Optimize initialization of an array with a STRING_CST.  */
      HOST_WIDE_INT exp_len, str_copy_len;
      rtx dest_mem;
      tree str = TREE_CODE (exp) == STRING_CST
		 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);

      exp_len = int_expr_size (exp);
      if (exp_len <= 0)
	goto normal_expr;

      if (TREE_STRING_LENGTH (str) <= 0)
	goto normal_expr;

      str_copy_len = strlen (TREE_STRING_POINTER (str));
      if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
	goto normal_expr;

      str_copy_len = TREE_STRING_LENGTH (str);
      if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
	  && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
	{
	  str_copy_len += STORE_MAX_PIECES - 1;
	  str_copy_len &= ~(STORE_MAX_PIECES - 1);
	}
      str_copy_len = MIN (str_copy_len, exp_len);
      if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
				CONST_CAST (char *, TREE_STRING_POINTER (str)),
				MEM_ALIGN (target), false))
	goto normal_expr;

      dest_mem = target;

      dest_mem = store_by_pieces (dest_mem,
				  str_copy_len, builtin_strncpy_read_str,
				  CONST_CAST (char *,
					      TREE_STRING_POINTER (str)),
				  MEM_ALIGN (target), false,
				  exp_len > str_copy_len ? 1 : 0);
      if (exp_len > str_copy_len)
	clear_storage (adjust_address (dest_mem, BLKmode, 0),
		       GEN_INT (exp_len - str_copy_len),
		       BLOCK_OP_NORMAL);
      return NULL_RTX;
    }
  else
    {
      rtx tmp_target;

  normal_expr:
      /* If we want to use a nontemporal store, force the value to
	 register first.  */
      tmp_target = nontemporal ? NULL_RTX : target;
      temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL),
			       &alt_rtl);
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary and emit the
     pending incrementations that have been queued when expanding EXP.
     Note that we cannot emit the whole queue blindly because this will
     effectively disable the POST_INC optimization later.

     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
	 but TARGET is not valid memory reference, TEMP will differ
	 from TARGET although it is really the same location.  */
      && !(alt_rtl
	   && rtx_equal_p (alt_rtl, target)
	   && !side_effects_p (alt_rtl)
	   && !side_effects_p (target))
      /* If there's nothing to copy, don't bother.  Don't call
	 expr_size unless necessary, because some front-ends (C++)
	 expr_size-hook must not be given objects that are not
	 supposed to be bit-copied or bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
	{
	  if (GET_MODE (target) == BLKmode)
	    {
	      /* Handle calls that return BLKmode values in registers.  */
	      if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
		copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
	      else
		store_bit_field (target,
				 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
				 0, 0, 0, GET_MODE (temp), temp);
	    }
	  else
	    convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
	}
      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.  The string
	     constant may be shorter than the array.  So copy just the string's
	     actual length, and clear the rest.  First get the size of the data
	     type of the string, which is actually the size of the target.  */
	  rtx size = expr_size (exp);

	  if (CONST_INT_P (size)
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     (call_param_p
			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	  else
	    {
	      enum machine_mode pointer_mode
		= targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
	      enum machine_mode address_mode = get_address_mode (target);

	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop_loc (loc, MIN_EXPR,
				  make_tree (sizetype, size),
				  size_int (TREE_STRING_LENGTH (exp)));
	      rtx copy_size_rtx
		= expand_expr (copy_size, NULL_RTX, VOIDmode,
			       (call_param_p
				? EXPAND_STACK_PARM : EXPAND_NORMAL));
	      rtx label = 0;

	      /* Copy that much.  */
	      copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
					       TYPE_UNSIGNED (sizetype));
	      emit_block_move (target, temp, copy_size_rtx,
			       (call_param_p
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in pointer_mode.  */
	      if (CONST_INT_P (copy_size_rtx))
		{
		  size = plus_constant (address_mode, size,
					-INTVAL (copy_size_rtx));
		  target = adjust_address (target, BLKmode,
					   INTVAL (copy_size_rtx));
		}
	      else
		{
		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  if (GET_MODE (copy_size_rtx) != address_mode)
		    copy_size_rtx = convert_to_mode (address_mode,
						     copy_size_rtx,
						     TYPE_UNSIGNED (sizetype));

		  target = offset_address (target, copy_size_rtx,
					   highest_pow2_factor (copy_size));
		  label = gen_label_rtx ();
		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, label);
		}

	      if (size != const0_rtx)
		clear_storage (target, size, BLOCK_OP_NORMAL);

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	{
	  if (GET_CODE (temp) == PARALLEL)
	    emit_group_move (target, temp);
	  else
	    emit_group_load (target, temp, TREE_TYPE (exp),
			     int_size_in_bytes (TREE_TYPE (exp)));
	}
      else if (GET_CODE (temp) == PARALLEL)
	emit_group_store (target, temp, TREE_TYPE (exp),
			  int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 (call_param_p
			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      /* If we emit a nontemporal store, there is nothing else to do.  */
      else if (nontemporal && emit_storent_insn (target, temp))
	;
      else
	{
	  temp = force_operand (temp, target);
	  if (temp != target)
	    emit_move_insn (target, temp);
	}
    }

  return NULL_RTX;
}
/* Return true if field F of structure TYPE is a flexible array.  */
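/* Example, added for exposition:

       struct msg { int len; char data[]; };

   `data' qualifies: it is the last field, an array with a zero lower
   bound and no upper bound, and it contributes no bytes to
   sizeof (struct msg).  */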
static bool
flexible_array_member_p (const_tree f, const_tree type)
{
  const_tree tf;

  tf = TREE_TYPE (f);
  return (DECL_CHAIN (f) == NULL
	  && TREE_CODE (tf) == ARRAY_TYPE
	  && TYPE_DOMAIN (tf)
	  && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
	  && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
	  && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
	  && int_size_in_bytes (type) >= 0);
}
/* If FOR_CTOR_P, return the number of top-level elements that a constructor
   must have in order for it to completely initialize a value of type TYPE.
   Return -1 if the number isn't known.

   If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */
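/* Example, added for exposition:

       struct P { int x; int y; };
       struct Q { struct P p[2]; float f; };

   count_type_elements (Q, false) estimates 2 * 2 + 1 = 5 scalars; with
   FOR_CTOR_P, a constructor must supply 2 top-level elements (one for
   `p', one for `f') to initialize Q completely.  */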
static HOST_WIDE_INT
count_type_elements (const_tree type, bool for_ctor_p)
{
  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      {
	tree nelts;

	nelts = array_type_nelts (type);
	if (nelts && tree_fits_uhwi_p (nelts))
	  {
	    unsigned HOST_WIDE_INT n;

	    n = tree_to_uhwi (nelts) + 1;
	    if (n == 0 || for_ctor_p)
	      return n;
	    else
	      return n * count_type_elements (TREE_TYPE (type), false);
	  }
	return for_ctor_p ? -1 : 1;
      }

    case RECORD_TYPE:
      {
	unsigned HOST_WIDE_INT n;
	tree f;

	n = 0;
	for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      if (!for_ctor_p)
		n += count_type_elements (TREE_TYPE (f), false);
	      else if (!flexible_array_member_p (f, type))
		/* Don't count flexible arrays, which are not supposed
		   to be initialized.  */
		n += 1;
	      else
		return -1;
	    }

	return n;
      }

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree f;
	HOST_WIDE_INT n, m;

	gcc_assert (!for_ctor_p);
	/* Estimate the number of scalars in each field and pick the
	   maximum.  Other estimates would do instead; the idea is simply
	   to make sure that the estimate is not sensitive to the ordering
	   of the fields.  */
	n = 1;
	for (f = TYPE_FIELDS (type); f; f = DECL_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      m = count_type_elements (TREE_TYPE (f), false);
	      /* If the field doesn't span the whole union, add an extra
		 scalar for the rest.  */
	      if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
				    TYPE_SIZE (type)) != 1)
		m++;
	      if (n < m)
		n = m;
	    }
	return n;
      }

    case COMPLEX_TYPE:
      return 2;

    case VECTOR_TYPE:
      return TYPE_VECTOR_SUBPARTS (type);

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
      return 1;

    default:
      gcc_unreachable ();
    }
}
/* Helper for categorize_ctor_elements.  Identical interface.  */

static bool
categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			    HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  unsigned HOST_WIDE_INT idx;
  HOST_WIDE_INT nz_elts, init_elts, num_fields;
  tree value, purpose, elt_type;

  /* Whether CTOR is a valid constant initializer, in accordance with what
     initializer_constant_valid_p does.  If inferred from the constructor
     elements, true until proven otherwise.  */
  bool const_from_elts_p = constructor_static_from_elts_p (ctor);
  bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);

  nz_elts = 0;
  init_elts = 0;
  num_fields = 0;
  elt_type = NULL_TREE;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
    {
      HOST_WIDE_INT mult = 1;

      if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lo_index = TREE_OPERAND (purpose, 0);
	  tree hi_index = TREE_OPERAND (purpose, 1);

	  if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
	    mult = (tree_to_uhwi (hi_index)
		    - tree_to_uhwi (lo_index) + 1);
	}
      num_fields += mult;
      elt_type = TREE_TYPE (value);

      switch (TREE_CODE (value))
	{
	case CONSTRUCTOR:
	  {
	    HOST_WIDE_INT nz = 0, ic = 0;

	    bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
							   p_complete);

	    nz_elts += mult * nz;
	    init_elts += mult * ic;

	    if (const_from_elts_p && const_p)
	      const_p = const_elt_p;
	  }
	  break;

	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	  if (!initializer_zerop (value))
	    nz_elts += mult;
	  init_elts += mult;
	  break;

	case STRING_CST:
	  nz_elts += mult * TREE_STRING_LENGTH (value);
	  init_elts += mult * TREE_STRING_LENGTH (value);
	  break;

	case COMPLEX_CST:
	  if (!initializer_zerop (TREE_REALPART (value)))
	    nz_elts += mult;
	  if (!initializer_zerop (TREE_IMAGPART (value)))
	    nz_elts += mult;
	  init_elts += 2 * mult;
	  break;

	case VECTOR_CST:
	  {
	    unsigned i;
	    for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
	      {
		tree v = VECTOR_CST_ELT (value, i);
		if (!initializer_zerop (v))
		  nz_elts += mult;
		init_elts += mult;
	      }
	  }
	  break;

	default:
	  {
	    HOST_WIDE_INT tc = count_type_elements (elt_type, false);
	    nz_elts += mult * tc;
	    init_elts += mult * tc;

	    if (const_from_elts_p && const_p)
	      const_p = initializer_constant_valid_p (value, elt_type)
			!= NULL_TREE;
	  }
	  break;
	}
    }

  if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
						num_fields, elt_type))
    *p_complete = false;

  *p_nz_elts += nz_elts;
  *p_init_elts += init_elts;

  return const_p;
}
/* Examine CTOR to discover:
   * how many scalar fields are set to nonzero values,
     and place it in *P_NZ_ELTS;
   * how many scalar fields in total are in CTOR,
     and place it in *P_ELT_COUNT.
   * whether the constructor is complete -- in the sense that every
     meaningful byte is explicitly given a value --
     and place it in *P_COMPLETE.

   Return whether or not CTOR is a valid static constant initializer, the same
   as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */
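/* Example, added for exposition: for

       struct P { int x; int y; } p = { 3, 0 };

   the constructor yields *P_INIT_ELTS == 2 initialized scalars, of which
   *P_NZ_ELTS == 1 is nonzero, and *P_COMPLETE is true since every field
   is explicitly given a value.  */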
bool
categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
{
  *p_nz_elts = 0;
  *p_init_elts = 0;
  *p_complete = true;

  return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
}
/* TYPE is initialized by a constructor with NUM_ELTS elements, the last
   of which had type LAST_TYPE.  Each element was itself a complete
   initializer, in the sense that every meaningful byte was explicitly
   given a value.  Return true if the same is true for the constructor
   as a whole.  */

bool
complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
			  const_tree last_type)
{
  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      if (num_elts == 0)
	return false;

      gcc_assert (num_elts == 1 && last_type);

      /* ??? We could look at each element of the union, and find the
	 largest element.  Which would avoid comparing the size of the
	 initialized element against any tail padding in the union.
	 Doesn't seem worth the effort...  */
      return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
    }

  return count_type_elements (type, true) == num_elts;
}
/* Return 1 if EXP contains mostly (3/4) zeros.  */
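/* Example, added for exposition: `int v[8] = { 1 };' is mostly zero:
   only one element is given explicitly, so the constructor is incomplete
   and the seven implicit trailing elements are zero.  By contrast
   `int v[8] = { 0, 0, 0, 0, 0, 0, 1, 1 };' is not, since 2 nonzero out
   of 8 initialized scalars does not fall below the 1/4 threshold.  */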
static int
mostly_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return !complete_p || nz_elts < init_elts / 4;
    }

  return initializer_zerop (exp);
}
/* Return 1 if EXP contains all zeros.  */

static int
all_zeros_p (const_tree exp)
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      HOST_WIDE_INT nz_elts, init_elts;
      bool complete_p;

      categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
      return nz_elts == 0;
    }

  return initializer_zerop (exp);
}
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   CLEARED is as for store_constructor.
   ALIAS_SET is the alias set to use for any stores.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */
static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
			 HOST_WIDE_INT bitpos, enum machine_mode mode,
			 tree exp, int cleared, alias_set_type alias_set)
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      /* We can only call store_constructor recursively if the size and
	 bit position are on a byte boundary.  */
      && bitpos % BITS_PER_UNIT == 0
      && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
      /* If we have a nonzero bitpos for a register target, then we just
	 let store_field do the bitfield handling.  This is unlikely to
	 generate unnecessary clear instructions anyways.  */
      && (bitpos == 0 || MEM_P (target)))
    {
      if (MEM_P (target))
	target
	  = adjust_address (target,
			    GET_MODE (target) == BLKmode
			    || 0 != (bitpos
				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);


      /* Update the alias set, if required.  */
      if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
	  && MEM_ALIAS_SET (target) != 0)
	{
	  target = copy_rtx (target);
	  set_mem_alias_set (target, alias_set);
	}

      store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
    }
  else
    store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
}
/* Returns the number of FIELD_DECLs in TYPE.  */

static int
fields_length (const_tree type)
{
  tree t = TYPE_FIELDS (type);
  int count = 0;

  for (; t; t = DECL_CHAIN (t))
    if (TREE_CODE (t) == FIELD_DECL)
      ++count;

  return count;
}
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */
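/* Illustration, added for exposition: for

       struct S { int a, b, c; } s = { 1 };

   the constructor mentions fewer fields than the structure contains, so
   the code below clears the whole of TARGET first and then stores only
   the explicitly initialized field `a'.  */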
static void
store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	unsigned HOST_WIDE_INT idx;
	tree field, value;

	/* If size is zero or the target is already cleared, do nothing.  */
	if (size == 0 || cleared)
	  cleared = 1;
	/* We either clear the aggregate or indicate the value is dead.  */
	else if ((TREE_CODE (type) == UNION_TYPE
		  || TREE_CODE (type) == QUAL_UNION_TYPE)
		 && ! CONSTRUCTOR_ELTS (exp))
	  /* If the constructor is empty, clear the union.  */
	  {
	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* If we are building a static constructor into a register,
	   set the initial value as zero so we can fold the value into
	   a constant.  But if more than one register is involved,
	   this probably loses.  */
	else if (REG_P (target) && TREE_STATIC (exp)
		 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	  {
	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    cleared = 1;
	  }

	/* If the constructor has fewer fields than the structure or
	   if we are initializing the structure to mostly zeros, clear
	   the whole structure first.  Don't do this if TARGET is a
	   register whose mode size isn't equal to SIZE since
	   clear_storage can't handle this case.  */
	else if (size > 0
		 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
		      != fields_length (type))
		     || mostly_zeros_p (exp))
		 && (!REG_P (target)
		     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
			 == size)))
	  {
	    clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	if (REG_P (target) && !cleared)
	  emit_clobber (target);

	/* Store each element of the constructor into the
	   corresponding field of TARGET.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
	  {
	    enum machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos = 0;
	    tree offset;
	    rtx to_rtx = target;

	    /* Just ignore missing fields.  We cleared the whole
	       structure, above, if any fields are missing.  */
	    if (field == 0)
	      continue;

	    if (cleared && initializer_zerop (value))
	      continue;

	    if (tree_fits_uhwi_p (DECL_SIZE (field)))
	      bitsize = tree_to_uhwi (DECL_SIZE (field));
	    else
	      bitsize = -1;

	    mode = DECL_MODE (field);
	    if (DECL_BIT_FIELD (field))
	      mode = VOIDmode;

	    offset = DECL_FIELD_OFFSET (field);
	    if (tree_fits_shwi_p (offset)
		&& tree_fits_shwi_p (bit_position (field)))
	      {
		bitpos = int_bit_position (field);
		offset = 0;
	      }
	    else
	      bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));

	    if (offset)
	      {
		enum machine_mode address_mode;
		rtx offset_rtx;

		offset
		  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
						    make_tree (TREE_TYPE (exp),
							       target));

		offset_rtx = expand_normal (offset);
		gcc_assert (MEM_P (to_rtx));

		address_mode = get_address_mode (to_rtx);
		if (GET_MODE (offset_rtx) != address_mode)
		  offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);

		to_rtx = offset_address (to_rtx, offset_rtx,
					 highest_pow2_factor (offset));
	      }

#ifdef WORD_REGISTER_OPERATIONS
	    /* If this initializes a field that is smaller than a
	       word, at the start of a word, try to widen it to a full
	       word.  This special case allows us to output C++ member
	       function initializations in a form that the optimizers
	       can understand.  */
	    if (REG_P (target)
		&& bitsize < BITS_PER_WORD
		&& bitpos % BITS_PER_WORD == 0
		&& GET_MODE_CLASS (mode) == MODE_INT
		&& TREE_CODE (value) == INTEGER_CST
		&& exp_size >= 0
		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	      {
		tree type = TREE_TYPE (value);

		if (TYPE_PRECISION (type) < BITS_PER_WORD)
		  {
		    type = lang_hooks.types.type_for_mode
		      (word_mode, TYPE_UNSIGNED (type));
		    value = fold_convert (type, value);
		  }

		if (BYTES_BIG_ENDIAN)
		  value
		    = fold_build2 (LSHIFT_EXPR, type, value,
				   build_int_cst (type,
						  BITS_PER_WORD - bitsize));
		bitsize = BITS_PER_WORD;
		mode = word_mode;
	      }
#endif

	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
		&& DECL_NONADDRESSABLE_P (field))
	      {
		to_rtx = copy_rtx (to_rtx);
		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	      }

	    store_constructor_field (to_rtx, bitsize, bitpos, mode,
				     value, cleared,
				     get_alias_set (TREE_TYPE (field)));
	  }
	break;
      }
;
6028 tree elttype
= TREE_TYPE (type
);
6030 HOST_WIDE_INT minelt
= 0;
6031 HOST_WIDE_INT maxelt
= 0;
6033 domain
= TYPE_DOMAIN (type
);
6034 const_bounds_p
= (TYPE_MIN_VALUE (domain
)
6035 && TYPE_MAX_VALUE (domain
)
6036 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain
))
6037 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain
)));
6039 /* If we have constant bounds for the range of the type, get them. */
6042 minelt
= tree_to_shwi (TYPE_MIN_VALUE (domain
));
6043 maxelt
= tree_to_shwi (TYPE_MAX_VALUE (domain
));
6046 /* If the constructor has fewer elements than the array, clear
6047 the whole array first. Similarly if this is static
6048 constructor of a non-BLKmode object. */
6051 else if (REG_P (target
) && TREE_STATIC (exp
))
6055 unsigned HOST_WIDE_INT idx
;
6057 HOST_WIDE_INT count
= 0, zero_count
= 0;
6058 need_to_clear
= ! const_bounds_p
;
6060 /* This loop is a more accurate version of the loop in
6061 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6062 is also needed to check for missing elements. */
6063 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp
), idx
, index
, value
)
6065 HOST_WIDE_INT this_node_count
;
6070 if (index
!= NULL_TREE
&& TREE_CODE (index
) == RANGE_EXPR
)
6072 tree lo_index
= TREE_OPERAND (index
, 0);
6073 tree hi_index
= TREE_OPERAND (index
, 1);
6075 if (! tree_fits_uhwi_p (lo_index
)
6076 || ! tree_fits_uhwi_p (hi_index
))
6082 this_node_count
= (tree_to_uhwi (hi_index
)
6083 - tree_to_uhwi (lo_index
) + 1);
6086 this_node_count
= 1;
6088 count
+= this_node_count
;
6089 if (mostly_zeros_p (value
))
6090 zero_count
+= this_node_count
;
6093 /* Clear the entire array first if there are any missing
6094 elements, or if the incidence of zero elements is >=
6097 && (count
< maxelt
- minelt
+ 1
6098 || 4 * zero_count
>= 3 * count
))
6102 if (need_to_clear
&& size
> 0)
6105 emit_move_insn (target
, CONST0_RTX (GET_MODE (target
)));
6107 clear_storage (target
, GEN_INT (size
), BLOCK_OP_NORMAL
);
6111 if (!cleared
&& REG_P (target
))
6112 /* Inform later passes that the old value is dead. */
6113 emit_clobber (target
);
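	/* A standalone sketch of the clearing heuristic used above.  The
	   parameter names are illustrative (COUNT and ZERO_COUNT are the
	   statistics gathered by the loop, SPAN the number of elements the
	   domain implies); it is kept under "#if 0" as an example only.  */
#if 0
static int
example_need_to_clear (long count, long zero_count, long span)
{
  /* Clear the whole object first when elements are missing or when at
     least 75% of the elements are zero (4 * zeros >= 3 * total).  */
  return count < span || 4 * zero_count >= 3 * count;
}
#endif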
	/* Store each element of the constructor into the
	   corresponding element of TARGET, determined by counting the
	   elements.  */
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
	  {
	    enum machine_mode mode;
	    HOST_WIDE_INT bitsize;
	    HOST_WIDE_INT bitpos;
	    rtx xtarget = target;

	    if (cleared && initializer_zerop (value))
	      continue;

	    mode = TYPE_MODE (elttype);
	    if (mode == BLKmode)
	      bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
			 ? tree_to_uhwi (TYPE_SIZE (elttype))
			 : -1);
	    else
	      bitsize = GET_MODE_BITSIZE (mode);

	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	      {
		tree lo_index = TREE_OPERAND (index, 0);
		tree hi_index = TREE_OPERAND (index, 1);
		rtx index_r, pos_rtx;
		HOST_WIDE_INT lo, hi, count;
		tree position;

		/* If the range is constant and "small", unroll the loop.  */
		if (const_bounds_p
		    && tree_fits_shwi_p (lo_index)
		    && tree_fits_shwi_p (hi_index)
		    && (lo = tree_to_shwi (lo_index),
			hi = tree_to_shwi (hi_index),
			count = hi - lo + 1,
			(!MEM_P (target)
			 || count <= 2
			 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
			     && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
				 <= 40 * 8)))))
		  {
		    lo -= minelt;  hi -= minelt;
		    for (; lo <= hi; lo++)
		      {
			bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));

			if (MEM_P (target)
			    && !MEM_KEEP_ALIAS_SET_P (target)
			    && TREE_CODE (type) == ARRAY_TYPE
			    && TYPE_NONALIASED_COMPONENT (type))
			  {
			    target = copy_rtx (target);
			    MEM_KEEP_ALIAS_SET_P (target) = 1;
			  }

			store_constructor_field
			  (target, bitsize, bitpos, mode, value, cleared,
			   get_alias_set (elttype));
		      }
		  }
		else
		  {
		    rtx loop_start = gen_label_rtx ();
		    rtx loop_end = gen_label_rtx ();
		    tree exit_cond;

		    expand_normal (hi_index);

		    index = build_decl (EXPR_LOCATION (exp),
					VAR_DECL, NULL_TREE, domain);
		    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
		    SET_DECL_RTL (index, index_r);
		    store_expr (lo_index, index_r, 0, false);

		    /* Build the head of the loop.  */
		    do_pending_stack_adjust ();
		    emit_label (loop_start);

		    /* Assign value to element index.  */
		    position =
		      fold_convert (ssizetype,
				    fold_build2 (MINUS_EXPR,
						 TREE_TYPE (index),
						 index,
						 TYPE_MIN_VALUE (domain)));

		    position =
		      size_binop (MULT_EXPR, position,
				  fold_convert (ssizetype,
						TYPE_SIZE_UNIT (elttype)));

		    pos_rtx = expand_normal (position);
		    xtarget = offset_address (target, pos_rtx,
					      highest_pow2_factor (position));
		    xtarget = adjust_address (xtarget, mode, 0);
		    if (TREE_CODE (value) == CONSTRUCTOR)
		      store_constructor (value, xtarget, cleared,
					 bitsize / BITS_PER_UNIT);
		    else
		      store_expr (value, xtarget, 0, false);

		    /* Generate a conditional jump to exit the loop.  */
		    exit_cond = build2 (LT_EXPR, integer_type_node,
					index, hi_index);
		    jumpif (exit_cond, loop_end, -1);

		    /* Update the loop counter, and jump to the head of
		       the loop.  */
		    expand_assignment (index,
				       build2 (PLUS_EXPR, TREE_TYPE (index),
					       index, integer_one_node),
				       false);

		    emit_jump (loop_start);

		    /* Build the end of the loop.  */
		    emit_label (loop_end);
		  }
	      }
	    else if ((index != 0 && ! tree_fits_shwi_p (index))
		     || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
	      {
		tree position;

		if (index == 0)
		  index = ssize_int (1);

		if (minelt)
		  index = fold_convert (ssizetype,
					fold_build2 (MINUS_EXPR,
						     TREE_TYPE (index),
						     index,
						     TYPE_MIN_VALUE (domain)));

		position =
		  size_binop (MULT_EXPR, index,
			      fold_convert (ssizetype,
					    TYPE_SIZE_UNIT (elttype)));
		xtarget = offset_address (target,
					  expand_normal (position),
					  highest_pow2_factor (position));
		xtarget = adjust_address (xtarget, mode, 0);
		store_expr (value, xtarget, 0, false);
	      }
	    else
	      {
		if (index != 0)
		  bitpos = ((tree_to_shwi (index) - minelt)
			    * tree_to_uhwi (TYPE_SIZE (elttype)));
		else
		  bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));

		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
		    && TREE_CODE (type) == ARRAY_TYPE
		    && TYPE_NONALIASED_COMPONENT (type))
		  {
		    target = copy_rtx (target);
		    MEM_KEEP_ALIAS_SET_P (target) = 1;
		  }
		store_constructor_field (target, bitsize, bitpos, mode, value,
					 cleared, get_alias_set (elttype));
	      }
	  }
	break;
      }

    case VECTOR_TYPE:
      {
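	/* A standalone sketch of how a constant array index becomes a bit
	   position above: the index is rebased to the array's lower bound
	   and then scaled by the element size in bits.  Names are
	   illustrative; kept under "#if 0" as an example only.  */
#if 0
static long
example_array_bitpos (long index, long minelt, long elt_bits)
{
  return (index - minelt) * elt_bits;
}
#endif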
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;
	int i;
	int need_to_clear;
	int icode = CODE_FOR_nothing;
	tree elttype = TREE_TYPE (type);
	int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
	enum machine_mode eltmode = TYPE_MODE (elttype);
	HOST_WIDE_INT bitsize;
	HOST_WIDE_INT bitpos;
	rtvec vector = NULL;
	unsigned n_elts;
	alias_set_type alias;

	gcc_assert (eltmode != BLKmode);

	n_elts = TYPE_VECTOR_SUBPARTS (type);
	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
	  {
	    enum machine_mode mode = GET_MODE (target);

	    icode = (int) optab_handler (vec_init_optab, mode);
	    /* Don't use vec_init<mode> if some elements have VECTOR_TYPE.  */
	    if (icode != CODE_FOR_nothing)
	      {
		tree value;

		FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
		  if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
		    {
		      icode = CODE_FOR_nothing;
		      break;
		    }
	      }
	    if (icode != CODE_FOR_nothing)
	      {
		unsigned int i;

		vector = rtvec_alloc (n_elts);
		for (i = 0; i < n_elts; i++)
		  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
	      }
	  }

	/* If the constructor has fewer elements than the vector,
	   clear the whole array first.  Similarly if this is static
	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
	    tree value;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
	      {
		int n_elts_here = tree_to_uhwi
		  (int_const_binop (TRUNC_DIV_EXPR,
				    TYPE_SIZE (TREE_TYPE (value)),
				    TYPE_SIZE (elttype)));

		count += n_elts_here;
		if (mostly_zeros_p (value))
		  zero_count += n_elts_here;
	      }

	    /* Clear the entire vector first if there are any missing elements,
	       or if the incidence of zero elements is >= 75%.  */
	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
	  }

	if (need_to_clear && size > 0 && !vector)
	  {
	    if (REG_P (target))
	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
	    cleared = 1;
	  }

	/* Inform later passes that the old value is dead.  */
	if (!cleared && !vector && REG_P (target))
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));

	if (MEM_P (target))
	  alias = MEM_ALIAS_SET (target);
	else
	  alias = get_alias_set (elttype);

	/* Store each element of the constructor into the corresponding
	   element of TARGET, determined by counting the elements.  */
	for (idx = 0, i = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
	     idx++, i += bitsize / elt_size)
	  {
	    HOST_WIDE_INT eltpos;
	    tree value = ce->value;

	    bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
	    if (cleared && initializer_zerop (value))
	      continue;

	    if (ce->index)
	      eltpos = tree_to_uhwi (ce->index);
	    else
	      eltpos = i;

	    if (vector)
	      {
		/* vec_init<mode> should not be used if there are VECTOR_TYPE
		   elements.  */
		gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
		RTVEC_ELT (vector, eltpos)
		  = expand_normal (value);
	      }
	    else
	      {
		enum machine_mode value_mode =
		  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
		  ? TYPE_MODE (TREE_TYPE (value))
		  : eltmode;
		bitpos = eltpos * elt_size;
		store_constructor_field (target, bitsize, bitpos, value_mode,
					 value, cleared, alias);
	      }
	  }

	if (vector)
	  emit_insn (GEN_FCN (icode)
		     (target,
		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
	break;
      }

    default:
      gcc_unreachable ();
    }
}
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   BITREGION_START is bitpos of the first bitfield in this region.
   BITREGION_END is the bitpos of the ending bitfield in this region.
   These two fields are 0, if the C++ memory model does not apply,
   or we are not interested in keeping track of bitfield regions.

   Always return const0_rtx unless we have something particular to
   return.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.

   If NONTEMPORAL is true, try generating a nontemporal store.  */

static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
	     unsigned HOST_WIDE_INT bitregion_start,
	     unsigned HOST_WIDE_INT bitregion_end,
	     enum machine_mode mode, tree exp,
	     alias_set_type alias_set, bool nontemporal)
{
  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      gcc_assert (!bitpos);
      return store_expr (exp, target, 0, nontemporal);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode
	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
		|| bitpos % GET_MODE_ALIGNMENT (mode))
	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
	      || (bitpos % BITS_PER_UNIT != 0)))
      || (bitsize >= 0 && mode != BLKmode
	  && GET_MODE_BITSIZE (mode) > bitsize)
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
      /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
	 decl we must use bitfield operations.  */
      || (bitsize >= 0
	  && TREE_CODE (exp) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
    {
      rtx temp;
      gimple nop_def;

      /* If EXP is a NOP_EXPR of precision less than its mode, then that
	 implies a mask operation.  If the precision is the same size as
	 the field we're storing into, that mask is redundant.  This is
	 particularly common with bit field assignments generated by the
	 C front end.  */
      nop_def = get_def_for_expr (exp, NOP_EXPR);
      if (nop_def)
	{
	  tree type = TREE_TYPE (exp);
	  if (INTEGRAL_TYPE_P (type)
	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
	      && bitsize == TYPE_PRECISION (type))
	    {
	      tree op = gimple_assign_rhs1 (nop_def);
	      type = TREE_TYPE (op);
	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
		exp = op;
	    }
	}

      temp = expand_normal (exp);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
			     NULL_RTX, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If TEMP is not a PARALLEL (see below) and its mode and that of TARGET
	 are both BLKmode, both must be in memory and BITPOS must be aligned
	 on a byte boundary.  If so, we simply do a block copy.  Likewise for
	 a BLKmode-like TARGET.  */
      if (GET_CODE (temp) != PARALLEL
	  && GET_MODE (temp) == BLKmode
	  && (GET_MODE (target) == BLKmode
	      || (MEM_P (target)
		  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
		  && (bitpos % BITS_PER_UNIT) == 0
		  && (bitsize % BITS_PER_UNIT) == 0)))
	{
	  gcc_assert (MEM_P (target) && MEM_P (temp)
		      && (bitpos % BITS_PER_UNIT) == 0);

	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   BLOCK_OP_NORMAL);

	  return const0_rtx;
	}

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (temp) == PARALLEL)
	{
	  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
	  rtx temp_target;
	  if (mode == BLKmode || mode == VOIDmode)
	    mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
	  temp_target = gen_reg_rtx (mode);
	  emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
	  temp = temp_target;
	}
      else if (mode == BLKmode)
	{
	  /* Handle calls that return BLKmode values in registers.  */
	  if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
	    {
	      rtx temp_target = gen_reg_rtx (GET_MODE (temp));
	      copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
	      temp = temp_target;
	    }
	  else
	    {
	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
	      rtx temp_target;
	      mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
	      temp_target = gen_reg_rtx (mode);
	      temp_target
		= extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
				     temp_target, mode, mode);
	      temp = temp_target;
	    }
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos,
		       bitregion_start, bitregion_end,
		       mode, temp);

      return const0_rtx;
    }
  else
    {
      /* Now build a reference to just the desired component.  */
      rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, 0, nontemporal);
    }
}
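/* A standalone sketch of the byte-rounding used by the block-copy path
   above: a bit count is rounded up to whole bytes with the classic
   ceiling division.  Kept under "#if 0" as an example only.  */
#if 0
static long
example_bits_to_bytes (long bitsize, long bits_per_unit)
{
  return (bitsize + bits_per_unit - 1) / bits_per_unit;
}
#endif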
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
   Otherwise, it is a mode that can be used to access the field.

   If the field describes a variable-sized object, *PMODE is set to
   BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.

   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
   look through nodes that serve as markers of a greater alignment than
   the one that can be deduced from the expression.  These nodes make it
   possible for front-ends to prevent temporaries from being created by
   the middle-end on alignment considerations.  For that purpose, the
   normal operating mode at high-level is to always pass FALSE so that
   the ultimate containing object is really returned; moreover, the
   associated predicate handled_component_p will always return TRUE
   on these nodes, thus indicating that they are essentially handled
   by get_inner_reference.  TRUE should only be passed when the caller
   is scanning the expression in order to build another representation
   and specifically knows how to handle these nodes; as such, this is
   the normal operating mode in the RTL expanders.  */
tree
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
		     HOST_WIDE_INT *pbitpos, tree *poffset,
		     enum machine_mode *pmode, int *punsignedp,
		     int *pvolatilep, bool keep_aligning)
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  bool blkmode_bitfield = false;
  tree offset = size_zero_node;
  double_int bit_offset = double_int_zero;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  *pbitsize = -1;
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (exp, 1);
      size_tree = DECL_SIZE (field);
      if (flag_strict_volatile_bitfields > 0
	  && TREE_THIS_VOLATILE (exp)
	  && DECL_BIT_FIELD_TYPE (field)
	  && DECL_MODE (field) != BLKmode)
	/* Volatile bitfields should be accessed in the mode of the
	   field's type, not the mode computed based on the bit
	   size.  */
	mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
      else if (!DECL_BIT_FIELD (field))
	mode = DECL_MODE (field);
      else if (DECL_MODE (field) == BLKmode)
	blkmode_bitfield = true;

      *punsignedp = DECL_UNSIGNED (field);
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
		     || TYPE_UNSIGNED (TREE_TYPE (exp)));

      /* For vector types, with the correct size of access, use the mode of
	 inner type.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
	  && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
	mode = TYPE_MODE (TREE_TYPE (exp));
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! tree_fits_uhwi_p (size_tree))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_to_uhwi (size_tree);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
	  bit_offset += tree_to_double_int (TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
	    tree field = TREE_OPERAND (exp, 1);
	    tree this_offset = component_ref_field_offset (exp);

	    /* If this field hasn't been filled in yet, don't go past it.
	       This should only happen when folding expressions made during
	       type construction.  */
	    if (this_offset == 0)
	      break;

	    offset = size_binop (PLUS_EXPR, offset, this_offset);
	    bit_offset += tree_to_double_int (DECL_FIELD_BIT_OFFSET (field));

	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	  }
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  {
	    tree index = TREE_OPERAND (exp, 1);
	    tree low_bound = array_ref_low_bound (exp);
	    tree unit_size = array_ref_element_size (exp);

	    /* We assume all arrays have sizes that are a multiple of a byte.
	       First subtract the lower bound, if any, in the type of the
	       index, then convert to sizetype and multiply by the size of
	       the array element.  */
	    if (! integer_zerop (low_bound))
	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				   index, low_bound);

	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (MULT_EXPR,
					     fold_convert (sizetype, index),
					     unit_size));
	  }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  bit_offset += double_int::from_uhwi (*pbitsize);
	  break;

	case VIEW_CONVERT_EXPR:
	  if (keep_aligning && STRICT_ALIGNMENT
	      && (TYPE_ALIGN (TREE_TYPE (exp))
		  > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		  < BIGGEST_ALIGNMENT)
	      && (TYPE_ALIGN_OK (TREE_TYPE (exp))
		  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	    goto done;
	  break;

	case MEM_REF:
	  /* Hand back the decl for MEM[&decl, off].  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
	    {
	      tree off = TREE_OPERAND (exp, 1);
	      if (!integer_zerop (off))
		{
		  double_int boff, coff = mem_ref_offset (exp);
		  boff = coff.lshift (BITS_PER_UNIT == 8
				      ? 3 : exact_log2 (BITS_PER_UNIT));
		  bit_offset += boff;
		}
	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	    }
	  goto done;

	default:
	  goto done;
	}

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }
 done:

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Make sure to handle overflow during
     this conversion.  */
  if (TREE_CODE (offset) == INTEGER_CST)
    {
      double_int tem = tree_to_double_int (offset);
      tem = tem.sext (TYPE_PRECISION (sizetype));
      tem = tem.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
      tem += bit_offset;
      if (tem.fits_shwi ())
	{
	  *pbitpos = tem.to_shwi ();
	  *poffset = offset = NULL_TREE;
	}
    }

  /* Otherwise, split it up.  */
  if (offset)
    {
      /* Avoid returning a negative bitpos as this may wreak havoc later.  */
      if (bit_offset.is_negative () || !bit_offset.fits_shwi ())
	{
	  double_int mask
	    = double_int::mask (BITS_PER_UNIT == 8
				? 3 : exact_log2 (BITS_PER_UNIT));
	  double_int tem = bit_offset.and_not (mask);
	  /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
	     Subtract it to BIT_OFFSET and add it (scaled) to OFFSET.  */
	  bit_offset -= tem;
	  tem = tem.arshift (BITS_PER_UNIT == 8
			     ? 3 : exact_log2 (BITS_PER_UNIT),
			     HOST_BITS_PER_DOUBLE_INT);
	  offset = size_binop (PLUS_EXPR, offset,
			       double_int_to_tree (sizetype, tem));
	}

      *pbitpos = bit_offset.to_shwi ();
      *poffset = offset;
    }

  /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
  if (mode == VOIDmode
      && blkmode_bitfield
      && (*pbitpos % BITS_PER_UNIT) == 0
      && (*pbitsize % BITS_PER_UNIT) == 0)
    *pmode = BLKmode;
  else
    *pmode = mode;

  return exp;
}
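/* A standalone sketch of the offset-splitting done above: a (possibly
   negative) bit offset is split into a byte offset plus a nonnegative
   bit remainder by rounding toward -Inf.  Assumes 8-bit units and a
   two's-complement host; kept under "#if 0" as an example only.  */
#if 0
static void
example_split_bit_offset (long bits, long *bytes, long *rem)
{
  long down = bits & ~(long) 7;	/* round toward -Inf to a multiple of 8 */
  *bytes = down / 8;		/* exact: DOWN is a multiple of 8 */
  *rem = bits - down;		/* always in [0, 8) */
}
#endif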
/* Return a tree of sizetype representing the size, in bytes, of the element
   of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_element_size (tree exp)
{
  tree aligned_size = TREE_OPERAND (exp, 3);
  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  location_t loc = EXPR_LOCATION (exp);

  /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_size) != sizetype)
	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
      return size_binop_loc (loc, MULT_EXPR, aligned_size,
			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }

  /* Otherwise, take the size from that of the element type.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
}
/* Return a tree representing the lower bound of the array mentioned in
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_low_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If a lower bound is specified in EXP, use it.  */
  if (TREE_OPERAND (exp, 2))
    return TREE_OPERAND (exp, 2);

  /* Otherwise, if there is a domain type and it has a lower bound, use it,
     substituting for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);

  /* Otherwise, return a zero of the appropriate type.  */
  return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
}
/* Returns true if REF is an array reference to an array at the end of
   a structure.  If this is the case, the array may be allocated larger
   than its upper bound implies.  */

bool
array_at_struct_end_p (tree ref)
{
  if (TREE_CODE (ref) != ARRAY_REF
      && TREE_CODE (ref) != ARRAY_RANGE_REF)
    return false;

  while (handled_component_p (ref))
    {
      /* If the reference chain contains a component reference to a
	 non-union type and there follows another field the reference
	 is not at the end of a structure.  */
      if (TREE_CODE (ref) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
	{
	  tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
	  while (nextf && TREE_CODE (nextf) != FIELD_DECL)
	    nextf = DECL_CHAIN (nextf);
	  if (nextf)
	    return false;
	}

      ref = TREE_OPERAND (ref, 0);
    }

  /* If the reference is based on a declared entity, the size of the array
     is constrained by its given domain.  */
  if (DECL_P (ref))
    return false;

  return true;
}
/* Return a tree representing the upper bound of the array mentioned in
   EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */

tree
array_ref_up_bound (tree exp)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));

  /* If there is a domain type and it has an upper bound, use it, substituting
     for a PLACEHOLDER_EXPR as needed.  */
  if (domain_type && TYPE_MAX_VALUE (domain_type))
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);

  /* Otherwise fail.  */
  return NULL_TREE;
}
/* Return a tree representing the offset, in bytes, of the field referenced
   by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */

tree
component_ref_field_offset (tree exp)
{
  tree aligned_offset = TREE_OPERAND (exp, 2);
  tree field = TREE_OPERAND (exp, 1);
  location_t loc = EXPR_LOCATION (exp);

  /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
      /* ??? tree_ssa_useless_type_conversion will eliminate casts to
	 sizetype from another type of the same width and signedness.  */
      if (TREE_TYPE (aligned_offset) != sizetype)
	aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
      return size_binop_loc (loc, MULT_EXPR, aligned_offset,
			     size_int (DECL_OFFSET_ALIGN (field)
				       / BITS_PER_UNIT));
    }

  /* Otherwise, take the offset from that of the field.  Substitute
     any PLACEHOLDER_EXPR that we have.  */
  else
    return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
}
/* Alignment in bits the TARGET of an assignment may be assumed to have.  */

static unsigned HOST_WIDE_INT
target_align (const_tree target)
{
  /* We might have a chain of nested references with intermediate misaligning
     bitfields components, so need to recurse to find out.  */

  unsigned HOST_WIDE_INT this_align, outer_align;

  switch (TREE_CODE (target))
    {
    case BIT_FIELD_REF:
      return 1;

    case COMPONENT_REF:
      this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MIN (this_align, outer_align);

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      this_align = TYPE_ALIGN (TREE_TYPE (target));
      outer_align = target_align (TREE_OPERAND (target, 0));
      return MAX (this_align, outer_align);

    default:
      return TYPE_ALIGN (TREE_TYPE (target));
    }
}
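/* A standalone sketch of the propagation rule above: components that can
   misalign (fields, array elements) take the minimum of the two
   alignments, while no-op wrappers take the maximum.  Names are
   illustrative; kept under "#if 0" as an example only.  */
#if 0
static unsigned int
example_chain_align (unsigned int this_align, unsigned int outer_align,
		     int can_misalign)
{
  if (can_misalign)
    return this_align < outer_align ? this_align : outer_align;
  return this_align > outer_align ? this_align : outer_align;
}
#endif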
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (rtx value, rtx target)
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for subreg applied to an expression produced by loop optimizer.  */
  if (code == SUBREG
      && !REG_P (SUBREG_REG (value))
      && !MEM_P (SUBREG_REG (value)))
    {
      value
	= simplify_gen_subreg (GET_MODE (value),
			       force_reg (GET_MODE (SUBREG_REG (value)),
					  force_operand (SUBREG_REG (value),
							 NULL_RTX)),
			       GET_MODE (SUBREG_REG (value)),
			       SUBREG_BYTE (value));
      code = GET_CODE (value);
    }

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (ARITHMETIC_P (value))
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && CONST_INT_P (op2))
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (code == PLUS && CONST_INT_P (op2)
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && REG_P (XEXP (XEXP (value, 0), 0))
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1), 0),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);
	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }
  if (UNARY_P (value))
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      switch (code)
	{
	case ZERO_EXTEND:
	case SIGN_EXTEND:
	case TRUNCATE:
	case FLOAT_EXTEND:
	case FLOAT_TRUNCATE:
	  convert_move (target, op1, code == ZERO_EXTEND);
	  return target;

	case FIX:
	case UNSIGNED_FIX:
	  expand_fix (target, op1, code == UNSIGNED_FIX);
	  return target;

	case FLOAT:
	case UNSIGNED_FLOAT:
	  expand_float (target, op1, code == UNSIGNED_FLOAT);
	  return target;

	default:
	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
	}
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory reference to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

static int
safe_from_p (const_rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
		 != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (MEM_P (x)
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_declaration:
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case tcc_constant:
      return 1;

    case tcc_exceptional:
      if (TREE_CODE (exp) == TREE_LIST)
	{
	  while (1)
	    {
	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
		return 0;
	      exp = TREE_CHAIN (exp);
	      if (!exp)
		return 1;
	      if (TREE_CODE (exp) != TREE_LIST)
		return safe_from_p (x, exp, 0);
	    }
	}
      else if (TREE_CODE (exp) == CONSTRUCTOR)
	{
	  constructor_elt *ce;
	  unsigned HOST_WIDE_INT idx;

	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
		|| !safe_from_p (x, ce->value, 0))
	      return 0;
	  return 1;
	}
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case tcc_statement:
      /* The only case we look at here is the DECL_INITIAL inside a
	 DECL_EXPR.  */
      return (TREE_CODE (exp) != DECL_EXPR
	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));

    case tcc_binary:
    case tcc_comparison:
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
	return 0;

      /* Fall through.  */

    case tcc_unary:
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL a that address if part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || !MEM_P (DECL_RTL (exp)))
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case MEM_REF:
	  if (MEM_P (x)
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || MEM_P (x))
	    return 0;
	  break;

	case WITH_CLEANUP_EXPR:
	case CLEANUP_POINT_EXPR:
	  /* Lowered by gimplify.c.  */
	  gcc_unreachable ();

	case SAVE_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = TREE_OPERAND_LENGTH (exp);
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      break;

    case tcc_type:
      /* Should never get a type here.  */
      gcc_unreachable ();
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (REG_P (exp_rtl)
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (MEM_P (x) && MEM_P (exp_rtl)
		    && true_dependence (exp_rtl, VOIDmode, x)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
/* Return the highest power of two that EXP is known to be a multiple of.
   This is used in updating alignment of MEMs in array references.  */

unsigned HOST_WIDE_INT
highest_pow2_factor (const_tree exp)
{
  unsigned HOST_WIDE_INT ret;
  int trailing_zeros = tree_ctz (exp);
  if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
    return BIGGEST_ALIGNMENT;
  ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
  if (ret > BIGGEST_ALIGNMENT)
    return BIGGEST_ALIGNMENT;
  return ret;
}
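/* A standalone sketch of the same computation: the power-of-two factor of
   a nonzero constant is its lowest set bit (1 << ctz), capped at the
   biggest supported alignment.  Kept under "#if 0" as an example only.  */
#if 0
static unsigned long
example_pow2_factor (unsigned long x, unsigned long biggest_alignment)
{
  unsigned long lowbit = x & -x;	/* 1 << ctz (x); 0 when x == 0 */
  if (lowbit == 0 || lowbit > biggest_alignment)
    return biggest_alignment;
  return lowbit;
}
#endif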
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (const_tree target, const_tree exp)
{
  unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
  unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);

  return MAX (factor, talign);
}
#ifdef HAVE_conditional_move
/* Convert the tree comparison code TCODE to the rtl one where the
   signedness is UNSIGNEDP.  */

static enum rtx_code
convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
{
  enum rtx_code code;
  switch (tcode)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;
    default:
      gcc_unreachable ();
    }
  return code;
}
#endif
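/* A standalone sketch of the signedness rule used by the mapping above:
   ordered "less than" style comparisons pick the unsigned RTL code when
   the operands are unsigned (LT becomes LTU, LE becomes LEU, and so on).
   Strings stand in for the rtx codes to keep the example self-contained;
   kept under "#if 0" as an example only.  */
#if 0
static const char *
example_map_less_than (int unsignedp, int or_equal)
{
  if (or_equal)
    return unsignedp ? "LEU" : "LE";
  return unsignedp ? "LTU" : "LT";
}
#endif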
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

static void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
	 temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
	exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
/* Return a MEM that contains constant EXP.  DEFER is as for
   output_constant_def and MODIFIER is as for expand_expr.  */

static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
  rtx mem;

  mem = output_constant_def (exp, defer);
  if (modifier != EXPAND_INITIALIZER)
    mem = use_anchored_address (mem);
  return mem;
}
/* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
			 enum expand_modifier modifier, addr_space_t as)
{
  rtx result, subtarget;
  tree inner, offset;
  HOST_WIDE_INT bitsize, bitpos;
  int volatilep, unsignedp;
  enum machine_mode mode1;

  /* If we are taking the address of a constant and are at the top level,
     we have to use output_constant_def since we can't call force_const_mem
     yet.  */
  /* ??? This should be considered a front-end bug.  We should not be
     generating ADDR_EXPR of something that isn't an LVALUE.  The only
     exception here is STRING_CST.  */
  if (CONSTANT_CLASS_P (exp))
    {
      result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
      return result;
    }

  /* Everything must be something allowed by is_gimple_addressable.  */
  switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
      /* This case will happen via recursion for &a->b.  */
      return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);

    case MEM_REF:
      {
	tree tem = TREE_OPERAND (exp, 0);
	if (!integer_zerop (TREE_OPERAND (exp, 1)))
	  tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
	return expand_expr (tem, target, tmode, modifier);
      }

    case CONST_DECL:
      /* Expand the initializer like constants above.  */
      result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
					   0, modifier), 0);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
      return result;

    case REALPART_EXPR:
      /* The real part of the complex number is always first, therefore
	 the address is the same as the address of the parent object.  */
      offset = 0;
      bitpos = 0;
      inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
      /* The imaginary part of the complex number is always second.
	 The expression is therefore always offset by the size of the
	 scalar type.  */
      offset = 0;
      bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
      inner = TREE_OPERAND (exp, 0);
      break;

    case COMPOUND_LITERAL_EXPR:
      /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
	 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
	 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
	 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
	 the initializers aren't gimplified.  */
      if (COMPOUND_LITERAL_EXPR_DECL (exp)
	  && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
	return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
					target, tmode, modifier, as);
      /* FALLTHRU */
    default:
      /* If the object is a DECL, then expand it for its rtl.  Don't bypass
	 expand_expr, as that can have various side effects; LABEL_DECLs for
	 example, may not have their DECL_RTL set yet.  Expand the rtl of
	 CONSTRUCTORs too, which should yield a memory reference for the
	 constructor's contents.  Assume language specific tree nodes can
	 be expanded in some interesting way.  */
      gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
      if (DECL_P (exp)
	  || TREE_CODE (exp) == CONSTRUCTOR
	  || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
	{
	  result = expand_expr (exp, target, tmode,
				modifier == EXPAND_INITIALIZER
				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);

	  /* If the DECL isn't in memory, then the DECL wasn't properly
	     marked TREE_ADDRESSABLE, which will be either a front-end
	     or a tree optimizer bug.  */

	  if (TREE_ADDRESSABLE (exp)
	      && ! MEM_P (result)
	      && ! targetm.calls.allocate_stack_slots_for_args ())
	    {
	      error ("local frame unavailable (naked function?)");
	      return result;
	    }
	  else
	    gcc_assert (MEM_P (result));
	  result = XEXP (result, 0);

	  /* ??? Is this needed anymore?  */
	  if (DECL_P (exp))
	    TREE_USED (exp) = 1;

	  if (modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_SUM)
	    result = force_operand (result, target);
	  return result;
	}

      /* Pass FALSE as the last argument to get_inner_reference although
	 we are expanding to RTL.  The rationale is that we know how to
	 handle "aligning nodes" here: we can just bypass them because
	 they won't change the final object whose address will be returned
	 (they actually exist only for that purpose).  */
      inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				   &mode1, &unsignedp, &volatilep, false);
      break;
    }

  /* We must have made progress.  */
  gcc_assert (inner != exp);

  subtarget = offset || bitpos ? NULL_RTX : target;
  /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
     inner alignment, force the inner to be sufficiently aligned.  */
  if (CONSTANT_CLASS_P (inner)
      && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
    {
      inner = copy_node (inner);
      TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
      TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
      TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
    }
  result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);

  if (offset)
    {
      rtx tmp;

      if (modifier != EXPAND_NORMAL)
	result = force_operand (result, NULL);
      tmp = expand_expr (offset, NULL_RTX, tmode,
			 modifier == EXPAND_INITIALIZER
			 ? EXPAND_INITIALIZER : EXPAND_NORMAL);

      /* expand_expr is allowed to return an object in a mode other
	 than TMODE.  If it did, we need to convert.  */
      if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
	tmp = convert_modes (tmode, GET_MODE (tmp),
			     tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
      result = convert_memory_address_addr_space (tmode, result, as);
      tmp = convert_memory_address_addr_space (tmode, tmp, as);

      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	result = simplify_gen_binary (PLUS, tmode, result, tmp);
      else
	{
	  subtarget = bitpos ? NULL_RTX : target;
	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
					1, OPTAB_LIB_WIDEN);
	}
    }

  if (bitpos)
    {
      /* Someone beforehand should have rejected taking the address
	 of such an object.  */
      gcc_assert ((bitpos % BITS_PER_UNIT) == 0);

      result = convert_memory_address_addr_space (tmode, result, as);
      result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
      if (modifier < EXPAND_SUM)
	result = force_operand (result, target);
    }

  return result;
}
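/* A standalone sketch of the REALPART/IMAGPART addressing rule above:
   the real part is stored first, so the address of the imaginary part
   is the base address plus the size of one scalar part.  Names are
   illustrative; kept under "#if 0" as an example only.  */
#if 0
#include <stddef.h>

static char *
example_imagpart_address (char *complex_base, size_t part_bytes)
{
  return complex_base + part_bytes;
}
#endif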
/* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
   The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */

static rtx
expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
		       enum expand_modifier modifier)
{
  addr_space_t as = ADDR_SPACE_GENERIC;
  enum machine_mode address_mode = Pmode;
  enum machine_mode pointer_mode = ptr_mode;
  enum machine_mode rmode;
  rtx result;

  /* Target mode of VOIDmode says "whatever's natural".  */
  if (tmode == VOIDmode)
    tmode = TYPE_MODE (TREE_TYPE (exp));

  if (POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      address_mode = targetm.addr_space.address_mode (as);
      pointer_mode = targetm.addr_space.pointer_mode (as);
    }

  /* We can get called with some Weird Things if the user does silliness
     like "(short) &a".  In that case, convert_memory_address won't do
     the right thing, so ignore the given target mode.  */
  if (tmode != address_mode && tmode != pointer_mode)
    tmode = address_mode;

  result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
				    tmode, modifier, as);

  /* Despite expand_expr claims concerning ignoring TMODE when not
     strictly convenient, stuff breaks if we don't honor it.  Note
     that combined with the above, we only do this for pointer modes.  */
  rmode = GET_MODE (result);
  if (rmode == VOIDmode)
    rmode = tmode;
  if (rmode != tmode)
    result = convert_memory_address_addr_space (tmode, result, as);

  return result;
}
/* Generate code for computing CONSTRUCTOR EXP.
   An rtx for the computed value is returned.  If AVOID_TEMP_MEM
   is TRUE, instead of creating a temporary variable in memory
   NULL is returned and the caller needs to handle it differently.  */

static rtx
expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
		    bool avoid_temp_mem)
{
  tree type = TREE_TYPE (exp);
  enum machine_mode mode = TYPE_MODE (type);

  /* Try to avoid creating a temporary at all.  This is possible
     if all of the initializer is zero.
     FIXME: try to handle all [0..255] initializers we can handle
     with memset.  */
  if (TREE_STATIC (exp)
      && !TREE_ADDRESSABLE (exp)
      && target != 0 && mode == BLKmode
      && all_zeros_p (exp))
    {
      clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
      return target;
    }

  /* All elts simple constants => refer to a constant in memory.  But
     if this is a non-BLKmode mode, let it store a field at a time
     since that should make a CONST_INT or CONST_DOUBLE when we
     fold.  Likewise, if we have a target we can use, it is best to
     store directly into the target unless the type is large enough
     that memcpy will be used.  If we are making an initializer and
     all operands are constant, put it in memory as well.

     FIXME: Avoid trying to fill vector constructors piece-meal.
     Output them with output_constant_def below unless we're sure
     they're zeros.  This should go away when vector initializers
     are treated like VECTOR_CST instead of arrays.  */
  if ((TREE_STATIC (exp)
       && ((mode == BLKmode
	    && ! (target != 0 && safe_from_p (target, exp, 1)))
	   || TREE_ADDRESSABLE (exp)
	   || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
	       && (! can_move_by_pieces
		   (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
		    TYPE_ALIGN (type)))
	       && ! mostly_zeros_p (exp))))
      || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
	  && TREE_CONSTANT (exp)))
    {
      rtx constructor;

      if (avoid_temp_mem)
	return NULL_RTX;

      constructor = expand_expr_constant (exp, 1, modifier);

      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM)
	constructor = validize_mem (constructor);

      return constructor;
    }

  /* Handle calls that pass values in multiple non-contiguous
     locations.  The Irix 6 ABI has examples of this.  */
  if (target == 0 || ! safe_from_p (target, exp, 1)
      || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
    {
      if (avoid_temp_mem)
	return NULL_RTX;

      target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
    }

  store_constructor (exp, target, 0, int_expr_size (exp));
  return target;
}
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   Intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.

   If INNER_REFERENCE_P is true, we are expanding an inner reference.
   In this case, we don't adjust a returned MEM rtx that wouldn't be
   sufficiently aligned for its mode; instead, it's up to the caller
   to deal with it afterwards.  This is used to make sure that unaligned
   base objects for which out-of-bounds accesses are supported, for
   example record types with trailing arrays, aren't realigned behind
   the back of the caller.
   The normal operating mode is to pass FALSE for this parameter.  */
rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl,
		  bool inner_reference_p)
{
  rtx ret;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
			    inner_reference_p);
  return ret;
}
/* Try to expand the conditional expression which is represented by
   TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds,
   return the rtl reg which represents the result.  Otherwise return
   NULL_RTX.  */

static rtx
expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
			      tree treeop1 ATTRIBUTE_UNUSED,
			      tree treeop2 ATTRIBUTE_UNUSED)
{
#ifdef HAVE_conditional_move
  rtx insn;
  rtx op00, op01, op1, op2;
  enum rtx_code comparison_code;
  enum machine_mode comparison_mode;
  gimple srcstmt;
  rtx temp;
  tree type = TREE_TYPE (treeop1);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode mode = TYPE_MODE (type);
  enum machine_mode orig_mode = mode;

  /* If we cannot do a conditional move on the mode, try doing it
     with the promoted mode.  */
  if (!can_conditionally_move_p (mode))
    {
      mode = promote_mode (type, mode, &unsignedp);
      if (!can_conditionally_move_p (mode))
	return NULL_RTX;
      temp = assign_temp (type, 0, 0); /* Use promoted mode for temp.  */
    }
  else
    temp = assign_temp (type, 0, 1);

  start_sequence ();
  expand_operands (treeop1, treeop2,
		   temp, &op1, &op2, EXPAND_NORMAL);

  if (TREE_CODE (treeop0) == SSA_NAME
      && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
    {
      tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
      enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
      op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
      op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
      comparison_mode = TYPE_MODE (type);
      unsignedp = TYPE_UNSIGNED (type);
      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
    }
  else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
    {
      tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
      enum tree_code cmpcode = TREE_CODE (treeop0);
      op00 = expand_normal (TREE_OPERAND (treeop0, 0));
      op01 = expand_normal (TREE_OPERAND (treeop0, 1));
      unsignedp = TYPE_UNSIGNED (type);
      comparison_mode = TYPE_MODE (type);
      comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
    }
  else
    {
      op00 = expand_normal (treeop0);
      op01 = const0_rtx;
      comparison_code = NE;
      comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
    }

  if (GET_MODE (op1) != mode)
    op1 = gen_lowpart (mode, op1);

  if (GET_MODE (op2) != mode)
    op2 = gen_lowpart (mode, op2);

  /* Try to emit the conditional move.  */
  insn = emit_conditional_move (temp, comparison_code,
				op00, op01, comparison_mode,
				op1, op2, mode,
				unsignedp);

  /* If we could do the conditional move, emit the sequence,
     and return.  */
  if (insn)
    {
      rtx seq = get_insns ();
      end_sequence ();
      emit_insn (seq);
      return convert_modes (orig_mode, mode, temp, 0);
    }

  /* Otherwise discard the sequence and fall back to code with
     branches.  */
  end_sequence ();
#endif
  return NULL_RTX;
}
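/* A standalone sketch of what a target conditional-move instruction buys
   us above: both arms are evaluated, then one is selected without a
   branch.  The mask trick below is one classic source-level analogue;
   kept under "#if 0" as an example only.  */
#if 0
static int
example_branchless_select (int cond, int if_true, int if_false)
{
  int mask = -(cond != 0);		/* all ones when COND is nonzero */
  return (if_true & mask) | (if_false & ~mask);
}
#endif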
rtx
expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
                    enum expand_modifier modifier)
{
  rtx op0, op1, op2, temp;
  tree type;
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = ops->code;
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  bool reduce_bit_field;
  location_t loc = ops->location;
  tree treeop0, treeop1, treeop2;
#define REDUCE_BIT_FIELD(expr) (reduce_bit_field			   \
                                ? reduce_to_bit_field_precision ((expr), \
                                                                 target, \
                                                                 type)	   \
                                : (expr))

  type = ops->type;
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = ops->op0;
  treeop1 = ops->op1;
  treeop2 = ops->op2;

  /* We should be called only on simple (binary or unary) expressions,
     exactly those that are valid in gimple expressions that aren't
     GIMPLE_SINGLE_RHS (or invalid).  */
  gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
              || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
              || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);

  ignore = (target == const0_rtx
            || ((CONVERT_EXPR_CODE_P (code)
                 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
                && TREE_CODE (type) == VOID_TYPE));

  /* We should be called only if we need the result.  */
  gcc_assert (!ignore);

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (INTEGRAL_TYPE_P (type)
                      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;

  switch (code)
    {
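/* For example (illustration, not from the original source), given

       struct s { unsigned int f : 3; } x;
       x.f = x.f + 6;

   the addition is carried out in the field type's mode (e.g. SImode),
   so with x.f == 7 the intermediate result is 13; REDUCE_BIT_FIELD
   masks it back down to TYPE_PRECISION (type) == 3 bits, yielding 5,
   the correct value modulo 2**3.  For signed bit-field types the
   reduction sign-extends rather than masks.  */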
    case NON_LVALUE_EXPR:
    case PAREN_EXPR:
    CASE_CONVERT:
      if (treeop0 == error_mark_node)
        return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
        {
          tree valtype = TREE_TYPE (treeop0);

          /* If both input and output are BLKmode, this conversion isn't doing
             anything except possibly changing memory attribute.  */
          if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
            {
              rtx result = expand_expr (treeop0, target, tmode,
                                        modifier);

              result = copy_rtx (result);
              set_mem_attributes (result, type, 0);
              return result;
            }

          if (target == 0)
            {
              if (TYPE_MODE (type) != BLKmode)
                target = gen_reg_rtx (TYPE_MODE (type));
              else
                target = assign_temp (type, 1, 1);
            }

          if (MEM_P (target))
            /* Store data into beginning of memory target.  */
            store_expr (treeop0,
                        adjust_address (target, TYPE_MODE (valtype), 0),
                        modifier == EXPAND_STACK_PARM,
                        false);
          else
            {
              gcc_assert (REG_P (target));

              /* Store this field into a union of the proper type.  */
              store_field (target,
                           MIN ((int_size_in_bytes (TREE_TYPE
                                                    (treeop0))
                                 * BITS_PER_UNIT),
                                (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
                           0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
            }

          /* Return the entire union.  */
          return target;
        }

      if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
        {
          op0 = expand_expr (treeop0, target, VOIDmode,
                             modifier);

          /* If the signedness of the conversion differs and OP0 is
             a promoted SUBREG, clear that indication since we now
             have to do the proper extension.  */
          if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
              && GET_CODE (op0) == SUBREG)
            SUBREG_PROMOTED_VAR_P (op0) = 0;

          return REDUCE_BIT_FIELD (op0);
        }

      op0 = expand_expr (treeop0, NULL_RTX, mode,
                         modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
      if (GET_MODE (op0) == mode)
        ;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      else if (CONSTANT_P (op0))
        {
          tree inner_type = TREE_TYPE (treeop0);
          enum machine_mode inner_mode = GET_MODE (op0);

          if (inner_mode == VOIDmode)
            inner_mode = TYPE_MODE (inner_type);

          if (modifier == EXPAND_INITIALIZER)
            op0 = simplify_gen_subreg (mode, op0, inner_mode,
                                       subreg_lowpart_offset (mode,
                                                              inner_mode));
          else
            op0 = convert_modes (mode, inner_mode, op0,
                                 TYPE_UNSIGNED (inner_type));
        }

      else if (modifier == EXPAND_INITIALIZER)
        op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      else if (target == 0)
        op0 = convert_to_mode (mode, op0,
                               TYPE_UNSIGNED (TREE_TYPE
                                              (treeop0)));
      else
        {
          convert_move (target, op0,
                        TYPE_UNSIGNED (TREE_TYPE (treeop0)));
          op0 = target;
        }

      return REDUCE_BIT_FIELD (op0);
    case ADDR_SPACE_CONVERT_EXPR:
      {
        tree treeop0_type = TREE_TYPE (treeop0);
        addr_space_t as_to;
        addr_space_t as_from;

        gcc_assert (POINTER_TYPE_P (type));
        gcc_assert (POINTER_TYPE_P (treeop0_type));

        as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
        as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));

        /* Conversions between pointers to the same address space should
           have been implemented via CONVERT_EXPR / NOP_EXPR.  */
        gcc_assert (as_to != as_from);

        /* Ask target code to handle conversion between pointers
           to overlapping address spaces.  */
        if (targetm.addr_space.subset_p (as_to, as_from)
            || targetm.addr_space.subset_p (as_from, as_to))
          {
            op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
            op0 = targetm.addr_space.convert (op0, treeop0_type, type);
            gcc_assert (op0);
            return op0;
          }

        /* For disjoint address spaces, converting anything but
           a null pointer invokes undefined behaviour.  We simply
           always return a null pointer here.  */
        return CONST0_RTX (mode);
      }
    case POINTER_PLUS_EXPR:
      /* Even though the sizetype mode and the pointer's mode can be different
         expand is able to handle this correctly and get the correct result out
         of the PLUS_EXPR code.  */
      /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
         if sizetype precision is smaller than pointer precision.  */
      if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
        treeop1 = fold_convert_loc (loc, type,
                                    fold_convert_loc (loc, ssizetype,
                                                      treeop1));
      /* If sizetype precision is larger than pointer precision, truncate the
         offset to have matching modes.  */
      else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
        treeop1 = fold_convert_loc (loc, type, treeop1);

    case PLUS_EXPR:
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */

      if (TREE_CODE (treeop0) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
          && TREE_CODE (treeop1) == VAR_DECL
          && (DECL_RTL (treeop1) == frame_pointer_rtx
              || DECL_RTL (treeop1) == stack_pointer_rtx
              || DECL_RTL (treeop1) == arg_pointer_rtx))
        {
          gcc_unreachable ();
        }

      /* If the result is to be ptr_mode and we are adding an integer to
         something, we might be forming a constant.  So try to use
         plus_constant.  If it produces a sum and we can't accept it,
         use force_operand.  This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid
         address.

         If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
          || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
        {
          if (modifier == EXPAND_STACK_PARM)
            target = 0;
          if (TREE_CODE (treeop0) == INTEGER_CST
              && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
              && TREE_CONSTANT (treeop1))
            {
              rtx constant_part;

              op1 = expand_expr (treeop1, subtarget, VOIDmode,
                                 EXPAND_SUM);
              /* Use immed_double_const to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (treeop0),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (treeop1)));
              op1 = plus_constant (mode, op1, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op1 = force_operand (op1, target);
              return REDUCE_BIT_FIELD (op1);
            }

          else if (TREE_CODE (treeop1) == INTEGER_CST
                   && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
                   && TREE_CONSTANT (treeop0))
            {
              rtx constant_part;

              op0 = expand_expr (treeop0, subtarget, VOIDmode,
                                 (modifier == EXPAND_INITIALIZER
                                  ? EXPAND_INITIALIZER : EXPAND_SUM));
              if (! CONSTANT_P (op0))
                {
                  op1 = expand_expr (treeop1, NULL_RTX,
                                     VOIDmode, modifier);
                  /* Return a PLUS if modifier says it's OK.  */
                  if (modifier == EXPAND_SUM
                      || modifier == EXPAND_INITIALIZER)
                    return simplify_gen_binary (PLUS, mode, op0, op1);
                  goto binop2;
                }
              /* Use immed_double_const to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (treeop1),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (treeop0)));
              op0 = plus_constant (mode, op0, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op0 = force_operand (op0, target);
              return REDUCE_BIT_FIELD (op0);
            }
        }

      /* Use TER to expand pointer addition of a negated value
         as pointer subtraction.  */
      if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
           || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
               && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
          && TREE_CODE (treeop1) == SSA_NAME
          && TYPE_MODE (TREE_TYPE (treeop0))
             == TYPE_MODE (TREE_TYPE (treeop1)))
        {
          gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
          if (def)
            {
              treeop1 = gimple_assign_rhs1 (def);
              code = MINUS_EXPR;
              goto do_minus;
            }
        }

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != ptr_mode)
        {
          expand_operands (treeop0, treeop1,
                           subtarget, &op0, &op1, EXPAND_NORMAL);
          if (op0 == const0_rtx)
            return op1;
          if (op1 == const0_rtx)
            return op0;
          goto binop2;
        }

      expand_operands (treeop0, treeop1,
                       subtarget, &op0, &op1, modifier);
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
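/* For example (illustration, not from the original source), for a
   global "int arr[16];" the address expression

       p = &arr[10];

   arrives here as (symbol_ref "arr") plus the constant 40.  Under
   EXPAND_SUM or EXPAND_INITIALIZER the two are folded by
   plus_constant into a single (const (plus (symbol_ref "arr")
   (const_int 40))), so the caller can emit one address rather than an
   explicit add instruction.  */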
    case MINUS_EXPR:
    do_minus:
      /* For initializers, we are allowed to return a MINUS of two
         symbolic constants.  Here we handle all cases when both operands
         are constant.  */
      /* Handle difference of two symbolic constants,
         for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          && really_constant_p (treeop0)
          && really_constant_p (treeop1))
        {
          expand_operands (treeop0, treeop1,
                           NULL_RTX, &op0, &op1, modifier);

          /* If the last operand is a CONST_INT, use plus_constant of
             the negated constant.  Else make the MINUS.  */
          if (CONST_INT_P (op1))
            return REDUCE_BIT_FIELD (plus_constant (mode, op0,
                                                    -INTVAL (op1)));
          else
            return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
        }

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != ptr_mode)
        goto binop;

      expand_operands (treeop0, treeop1,
                       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (CONST_INT_P (op1))
        {
          op1 = negate_rtx (mode, op1);
          return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
        }

      goto binop2;
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
      op2 = expand_normal (treeop2);
      target = expand_widen_pattern_expr (ops, op0, op1, op2,
                                          target, unsignedp);
      return target;

    case WIDEN_MULT_EXPR:
      /* If first operand is constant, swap them.
         Thus the following special case checks need only
         check the second operand.  */
      if (TREE_CODE (treeop0) == INTEGER_CST)
        {
          tree t1 = treeop0;
          treeop0 = treeop1;
          treeop1 = t1;
        }

      /* First, check if we have a multiplication of one signed and one
         unsigned operand.  */
      if (TREE_CODE (treeop1) != INTEGER_CST
          && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
              != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
        {
          enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
          this_optab = usmul_widen_optab;
          if (find_widening_optab_handler (this_optab, mode, innermode, 0)
              != CODE_FOR_nothing)
            {
              if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
                expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
                                 EXPAND_NORMAL);
              else
                expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
                                 EXPAND_NORMAL);
              /* op0 and op1 might still be constant, despite the above
                 != INTEGER_CST check.  Handle it.  */
              if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
                {
                  op0 = convert_modes (innermode, mode, op0, true);
                  op1 = convert_modes (innermode, mode, op1, false);
                  return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
                                                        target, unsignedp));
                }
              temp = expand_widening_mult (mode, op0, op1, target,
                                           unsignedp, this_optab);
              return REDUCE_BIT_FIELD (temp);
            }
        }
      /* Check for a multiplication with matching signedness.  */
      else if ((TREE_CODE (treeop1) == INTEGER_CST
                && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
               || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
                   == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
        {
          tree op0type = TREE_TYPE (treeop0);
          enum machine_mode innermode = TYPE_MODE (op0type);
          bool zextend_p = TYPE_UNSIGNED (op0type);
          optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
          this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

          if (TREE_CODE (treeop0) != INTEGER_CST)
            {
              if (find_widening_optab_handler (this_optab, mode, innermode, 0)
                  != CODE_FOR_nothing)
                {
                  expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
                                   EXPAND_NORMAL);
                  /* op0 and op1 might still be constant, despite the above
                     != INTEGER_CST check.  Handle it.  */
                  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
                    {
                     widen_mult_const:
                      op0 = convert_modes (innermode, mode, op0, zextend_p);
                      op1
                        = convert_modes (innermode, mode, op1,
                                         TYPE_UNSIGNED (TREE_TYPE (treeop1)));
                      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
                                                            target,
                                                            unsignedp));
                    }
                  temp = expand_widening_mult (mode, op0, op1, target,
                                               unsignedp, this_optab);
                  return REDUCE_BIT_FIELD (temp);
                }
              if (find_widening_optab_handler (other_optab, mode, innermode, 0)
                  != CODE_FOR_nothing
                  && innermode == word_mode)
                {
                  rtx htem, hipart;
                  op0 = expand_normal (treeop0);
                  if (TREE_CODE (treeop1) == INTEGER_CST)
                    op1 = convert_modes (innermode, mode,
                                         expand_normal (treeop1),
                                         TYPE_UNSIGNED (TREE_TYPE (treeop1)));
                  else
                    op1 = expand_normal (treeop1);
                  /* op0 and op1 might still be constant, despite the above
                     != INTEGER_CST check.  Handle it.  */
                  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
                    goto widen_mult_const;
                  temp = expand_binop (mode, other_optab, op0, op1, target,
                                       unsignedp, OPTAB_LIB_WIDEN);
                  hipart = gen_highpart (innermode, temp);
                  htem = expand_mult_highpart_adjust (innermode, hipart,
                                                      op0, op1, hipart,
                                                      zextend_p);
                  if (htem != hipart)
                    emit_move_insn (hipart, htem);
                  return REDUCE_BIT_FIELD (temp);
                }
            }
        }
      treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
      treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
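/* For example (illustration, not from the original source), on a
   32-bit target

       long long p = (long long) a * b;

   with int operands is a WIDEN_MULT_EXPR: when the widening multiply
   pattern behind smul_widen_optab (or umul_widen_optab for unsigned
   operands) exists, a single SImode x SImode -> DImode multiply is
   emitted instead of extending both operands and doing a full DImode
   multiply.  The word_mode path above instead uses the pattern of the
   other signedness and then corrects the high part with
   expand_mult_highpart_adjust.  */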
    case FMA_EXPR:
      {
        optab opt = fma_optab;
        gimple def0, def2;

        /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
           call.  */
        if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
          {
            tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
            tree call_expr;

            gcc_assert (fn != NULL_TREE);
            call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
            return expand_builtin (call_expr, target, subtarget, mode, false);
          }

        def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
        def2 = get_def_for_expr (treeop2, NEGATE_EXPR);

        op0 = op2 = NULL;

        if (def0 && def2
            && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
          {
            opt = fnms_optab;
            op0 = expand_normal (gimple_assign_rhs1 (def0));
            op2 = expand_normal (gimple_assign_rhs1 (def2));
          }
        else if (def0
                 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
          {
            opt = fnma_optab;
            op0 = expand_normal (gimple_assign_rhs1 (def0));
          }
        else if (def2
                 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
          {
            opt = fms_optab;
            op2 = expand_normal (gimple_assign_rhs1 (def2));
          }

        if (op0 == NULL)
          op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
        if (op2 == NULL)
          op2 = expand_normal (treeop2);
        op1 = expand_normal (treeop1);

        return expand_ternary_op (TYPE_MODE (type), opt,
                                  op0, op1, op2, target, 0);
      }
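/* For example (illustration, not from the original source), if TER
   hands us FMA_EXPR <-a, b, c>, i.e. treeop0 is defined by a
   NEGATE_EXPR, the negation is folded into the operation by selecting
   fnma_optab (-a*b + c); likewise a negated addend selects fms_optab
   (a*b - c), and both together select fnms_optab (-a*b - c), so no
   separate negate instruction is emitted.  */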
    case MULT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
         below because "expand_mult" doesn't support sat/no-sat fixed-point
         multiplications.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
        goto binop;

      /* If first operand is constant, swap them.
         Thus the following special case checks need only
         check the second operand.  */
      if (TREE_CODE (treeop0) == INTEGER_CST)
        {
          tree t1 = treeop0;
          treeop0 = treeop1;
          treeop1 = t1;
        }

      /* Attempt to return something suitable for generating an
         indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
          && tree_fits_shwi_p (treeop1))
        {
          tree exp1 = treeop1;

          op0 = expand_expr (treeop0, subtarget, VOIDmode,
                             EXPAND_SUM);

          if (!REG_P (op0))
            op0 = force_operand (op0, NULL_RTX);
          if (!REG_P (op0))
            op0 = copy_to_mode_reg (mode, op0);

          return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
                               gen_int_mode (tree_to_shwi (exp1),
                                             TYPE_MODE (TREE_TYPE (exp1)))));
        }

      if (modifier == EXPAND_STACK_PARM)
        target = 0;

      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
         below because "expand_divmod" doesn't support sat/no-sat fixed-point
         divisions.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
        goto binop;

      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
         then if the divisor is constant can optimize the case
         where some terms of the dividend have coeffs divisible by it.  */
      expand_operands (treeop0, treeop1,
                       subtarget, &op0, &op1, EXPAND_NORMAL);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      goto binop;

    case MULT_HIGHPART_EXPR:
      expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
      temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
      gcc_assert (temp);
      return temp;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      expand_operands (treeop0, treeop1,
                       subtarget, &op0, &op1, EXPAND_NORMAL);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIXED_CONVERT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
        target = gen_reg_rtx (mode);

      if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
           && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
          || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
        expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
      else
        expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
      return target;

    case FIX_TRUNC_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
        target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_normal (treeop0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
        target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
         So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
                                op0);
      expand_float (target, op0,
                    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (treeop0, subtarget,
                         VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      temp = expand_unop (mode,
                          optab_for_tree_code (NEGATE_EXPR, type,
                                               optab_default),
                          op0, target, 0);
      gcc_assert (temp);
      return REDUCE_BIT_FIELD (temp);

    case ABS_EXPR:
      op0 = expand_expr (treeop0, subtarget,
                         VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
                  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);

      /* Unsigned abs is simply the operand.  Testing here means we don't
         risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (type))
        return op0;

      return expand_abs (mode, op0, target, unsignedp,
                         safe_from_p (target, treeop0, 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
          || modifier == EXPAND_STACK_PARM
          || (MEM_P (target) && MEM_VOLATILE_P (target))
          || GET_MODE (target) != mode
          || (REG_P (target)
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
        target = gen_reg_rtx (mode);
      expand_operands (treeop0, treeop1,
                       target, &op0, &op1, EXPAND_NORMAL);

      /* First try to do it with a special MIN or MAX instruction.
         If that does not win, use a conditional jump to select the proper
         value.  */
      this_optab = optab_for_tree_code (code, type, optab_default);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
                           OPTAB_WIDEN);
      if (temp != 0)
        return temp;

      /* At this point, a MEM target is no longer useful; we will get better
         code without it.  */

      if (! REG_P (target))
        target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
        {
          temp = op0;
          op0 = op1;
          op1 = temp;
        }

      /* We generate better code and avoid problems with op1 mentioning
         target by forcing op1 into a pseudo if it isn't a constant.  */
      if (! CONSTANT_P (op1))
        op1 = force_reg (mode, op1);

      {
        enum rtx_code comparison_code;
        rtx cmpop1 = op1;

        if (code == MAX_EXPR)
          comparison_code = unsignedp ? GEU : GE;
        else
          comparison_code = unsignedp ? LEU : LE;

        /* Canonicalize to comparisons against 0.  */
        if (op1 == const1_rtx)
          {
            /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
               or (a != 0 ? a : 1) for unsigned.
               For MIN we are safe converting (a <= 1 ? a : 1)
               into (a <= 0 ? a : 1)  */
            cmpop1 = const0_rtx;
            if (code == MAX_EXPR)
              comparison_code = unsignedp ? NE : GT;
          }
        if (op1 == constm1_rtx && !unsignedp)
          {
            /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
               and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
            cmpop1 = const0_rtx;
            if (code == MIN_EXPR)
              comparison_code = LT;
          }
#ifdef HAVE_conditional_move
        /* Use a conditional move if possible.  */
        if (can_conditionally_move_p (mode))
          {
            rtx insn;

            start_sequence ();

            /* Try to emit the conditional move.  */
            insn = emit_conditional_move (target, comparison_code,
                                          op0, cmpop1, mode,
                                          op0, op1, mode,
                                          unsignedp);

            /* If we could do the conditional move, emit the sequence,
               and return.  */
            if (insn)
              {
                rtx seq = get_insns ();
                end_sequence ();
                emit_insn (seq);
                return target;
              }

            /* Otherwise discard the sequence and fall back to code with
               branches.  */
            end_sequence ();
          }
#endif
        if (target != op0)
          emit_move_insn (target, op0);

        temp = gen_label_rtx ();
        do_compare_rtx_and_jump (target, cmpop1, comparison_code,
                                 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
                                 -1);
      }
      emit_move_insn (target, op1);
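/* For example (illustration, not from the original source),
   MAX_EXPR <a, 1> for signed A, i.e. "a >= 1 ? a : 1", is compiled as
   if it were "a > 0 ? a : 1": the comparison against const1_rtx is
   canonicalized above to one against zero, which integer targets
   typically test more cheaply, and GE 1 and GT 0 are equivalent for
   integers.  */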
      emit_label (temp);
      return target;

    case BIT_NOT_EXPR:
      op0 = expand_expr (treeop0, subtarget,
                         VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      /* In case we have to reduce the result to bitfield precision
         for unsigned bitfield expand this as XOR with a proper constant
         instead.  */
      if (reduce_bit_field && TYPE_UNSIGNED (type))
        temp = expand_binop (mode, xor_optab, op0,
                             immed_double_int_const
                               (double_int::mask (TYPE_PRECISION (type)), mode),
                             target, 1, OPTAB_LIB_WIDEN);
      else
        temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;
      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worth while.  */

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      goto binop;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
                  || (GET_MODE_PRECISION (TYPE_MODE (type))
                      == TYPE_PRECISION (type)));
      /* fall through */

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
         below because "expand_shift" doesn't support sat/no-sat fixed-point
         shifts.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
        goto binop;

      if (! safe_from_p (subtarget, treeop1, 1))
        subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      op0 = expand_expr (treeop0, subtarget,
                         VOIDmode, EXPAND_NORMAL);
      temp = expand_variable_shift (code, mode, op0, treeop1, target,
                                    unsignedp);
      if (code == LSHIFT_EXPR)
        temp = REDUCE_BIT_FIELD (temp);
      return temp;
      /* Could determine the answer when only additive constants differ.  Also,
         the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      temp = do_store_flag (ops,
                            modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
                            tmode != VOIDmode ? tmode : mode);
      if (temp)
        return temp;

      /* Use a compare and a jump for BLKmode comparisons, or for function
         type comparisons if HAVE_canonicalize_funcptr_for_compare.  */

      if ((target == 0
           || modifier == EXPAND_STACK_PARM
           || ! safe_from_p (target, treeop0, 1)
           || ! safe_from_p (target, treeop1, 1)
           /* Make sure we don't have a hard reg (such as function's return
              value) live across basic blocks, if not optimizing.  */
           || (!optimize && REG_P (target)
               && REGNO (target) < FIRST_PSEUDO_REGISTER)))
        target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      emit_move_insn (target, const0_rtx);

      op1 = gen_label_rtx ();
      jumpifnot_1 (code, treeop0, treeop1, op1, -1);

      if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
        emit_move_insn (target, constm1_rtx);
      else
        emit_move_insn (target, const1_rtx);

      emit_label (op1);
      return target;
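/* For example (illustration, not from the original source),
   "int x = (a < b);" first tries do_store_flag, which on most targets
   becomes a set-on-condition instruction; only when no store-flag
   sequence exists do we emit the jump sequence above, which stores 0,
   conditionally skips, then stores 1 (or -1 for a signed 1-bit type,
   whose only values are 0 and -1).  */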
    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
      op0 = expand_normal (treeop0);
      op1 = expand_normal (treeop1);

      if (!target)
        target = gen_reg_rtx (TYPE_MODE (type));
      else
        /* If target overlaps with op1, then either we need to force
           op1 into a pseudo (if target also overlaps with op0),
           or write the complex parts in reverse order.  */
        switch (GET_CODE (target))
          {
          case CONCAT:
            if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
              {
                if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
                  {
                  complex_expr_force_op1:
                    temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
                    emit_move_insn (temp, op1);
                    op1 = temp;
                    break;
                  }
              complex_expr_swap_order:
                /* Move the imaginary (op1) and real (op0) parts to their
                   location.  */
                write_complex_part (target, op1, true);
                write_complex_part (target, op0, false);

                return target;
              }
            break;
          case MEM:
            temp = adjust_address_nv (target,
                                      GET_MODE_INNER (GET_MODE (target)), 0);
            if (reg_overlap_mentioned_p (temp, op1))
              {
                enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));
                temp = adjust_address_nv (target, imode,
                                          GET_MODE_SIZE (imode));
                if (reg_overlap_mentioned_p (temp, op0))
                  goto complex_expr_force_op1;
                goto complex_expr_swap_order;
              }
            break;
          default:
            if (reg_overlap_mentioned_p (target, op1))
              {
                if (reg_overlap_mentioned_p (target, op0))
                  goto complex_expr_force_op1;
                goto complex_expr_swap_order;
              }
            break;
          }

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);
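/* For example (illustration, not from the original source), when the
   target of a COMPLEX_EXPR is a register pair whose real half still
   carries the expanded OP1, writing the real part first would clobber
   OP1 before it is stored; the overlap checks above detect this and
   either write the imaginary part first or copy OP1 into a fresh
   pseudo.  */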
      return target;

    case WIDEN_SUM_EXPR:
      {
        tree oprnd0 = treeop0;
        tree oprnd1 = treeop1;

        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
        target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
                                            target, unsignedp);
        return target;
      }
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
      {
        op0 = expand_normal (treeop0);
        this_optab = optab_for_tree_code (code, type, optab_default);
        enum machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));

        if (optab_handler (this_optab, vec_mode) != CODE_FOR_nothing)
          {
            struct expand_operand ops[2];
            enum insn_code icode = optab_handler (this_optab, vec_mode);

            create_output_operand (&ops[0], target, mode);
            create_input_operand (&ops[1], op0, vec_mode);
            if (maybe_expand_insn (icode, 2, ops))
              {
                target = ops[0].value;
                if (GET_MODE (target) != mode)
                  return gen_lowpart (tmode, target);
                return target;
              }
          }
        /* Fall back to optab with vector result, and then extract scalar.  */
        this_optab = scalar_reduc_to_vector (this_optab, type);
        temp = expand_unop (vec_mode, this_optab, op0, NULL_RTX, unsignedp);
        gcc_assert (temp);
        /* The tree code produces a scalar result, but (somewhat by convention)
           the optab produces a vector with the result in element 0 if
           little-endian, or element N-1 if big-endian.  So pull the scalar
           result out of that element.  */
        int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
        int bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (vec_mode));
        temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
                                  target, mode, mode);
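/* For example (illustration, not from the original source), reducing
   a V4SImode vector on a big-endian target leaves the scalar result
   in element 3, so the extract above reads the 32-bit field at bit
   position 96; on a little-endian target it reads element 0 at bit
   position 0.  */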
        gcc_assert (temp);
        return temp;
      }

    case VEC_RSHIFT_EXPR:
      {
        target = expand_vec_shift_expr (ops, target);
        return target;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
      {
        op0 = expand_normal (treeop0);
        temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
                                          target, unsignedp);
        gcc_assert (temp);
        return temp;
      }

    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      {
        op0 = expand_normal (treeop0);
        /* The signedness is determined from input operand.  */
        temp = expand_widen_pattern_expr
          (ops, op0, NULL_RTX, NULL_RTX,
           target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));

        gcc_assert (temp);
        return temp;
      }

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
      target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
                                          target, unsignedp);
      gcc_assert (target);
      return target;
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      mode = TYPE_MODE (TREE_TYPE (treeop0));
      goto binop;

    case VEC_PERM_EXPR:
      expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
      op2 = expand_normal (treeop2);

      /* Careful here: if the target doesn't support integral vector modes,
         a constant selection vector could wind up smooshed into a normal
         integral constant.  */
      if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
        {
          tree sel_type = TREE_TYPE (treeop2);
          enum machine_mode vmode
            = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
                               TYPE_VECTOR_SUBPARTS (sel_type));
          gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
          op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
          gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
        }
      else
        gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);

      temp = expand_vec_perm (mode, op0, op1, op2, target);
      gcc_assert (temp);
      return temp;

    case DOT_PROD_EXPR:
      {
        tree oprnd0 = treeop0;
        tree oprnd1 = treeop1;
        tree oprnd2 = treeop2;
        rtx op2;

        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
        op2 = expand_normal (oprnd2);
        target = expand_widen_pattern_expr (ops, op0, op1, op2,
                                            target, unsignedp);
        return target;
      }
:
9130 tree oprnd0
= treeop0
;
9131 tree oprnd1
= treeop1
;
9132 tree oprnd2
= treeop2
;
9135 this_optab
= optab_for_tree_code (code
, type
, optab_default
);
9136 expand_operands (oprnd0
, oprnd1
, NULL_RTX
, &op0
, &op1
, EXPAND_NORMAL
);
9137 op2
= expand_normal (oprnd2
);
9138 temp
= expand_ternary_op (mode
, this_optab
, op0
, op1
, op2
,
    case COND_EXPR:
      /* A COND_EXPR with its type being VOID_TYPE represents a
         conditional jump and is handled in
         expand_gimple_cond_expr.  */
      gcc_assert (!VOID_TYPE_P (type));

      /* Note that COND_EXPRs whose type is a structure or union
         are required to be constructed to contain assignments of
         a temporary variable, so that we can evaluate them here
         for side effect only.  If type is void, we must do likewise.  */

      gcc_assert (!TREE_ADDRESSABLE (type)
                  && !ignore
                  && TREE_TYPE (treeop1) != void_type_node
                  && TREE_TYPE (treeop2) != void_type_node);

      temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
      if (temp)
        return temp;

      /* If we are not to produce a result, we have no target.  Otherwise,
         if a target was specified use it; it will not be used as an
         intermediate target unless it is safe.  If no target, use a
         temporary.  */

      if (modifier != EXPAND_STACK_PARM
          && original_target
          && safe_from_p (original_target, treeop0, 1)
          && GET_MODE (original_target) == mode
          && !MEM_P (original_target))
        temp = original_target;
      else
        temp = assign_temp (type, 0, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      op0 = gen_label_rtx ();
      op1 = gen_label_rtx ();
      jumpifnot (treeop0, op0, -1);
      store_expr (treeop1, temp,
                  modifier == EXPAND_STACK_PARM,
                  false);

      emit_jump_insn (gen_jump (op1));
      emit_barrier ();
      emit_label (op0);
      store_expr (treeop2, temp,
                  modifier == EXPAND_STACK_PARM,
                  false);

      emit_label (op1);
      OK_DEFER_POP;
      return temp;

    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
      return target;

    default:
      gcc_unreachable ();
    }

  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (treeop0, treeop1,
                   subtarget, &op0, &op1, EXPAND_NORMAL);
 binop2:
  this_optab = optab_for_tree_code (code, type, optab_default);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
                       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  /* Bitwise operations do not need bitfield reduction as we expect their
     operands being properly truncated.  */
  if (code == BIT_XOR_EXPR
      || code == BIT_AND_EXPR
      || code == BIT_IOR_EXPR)
    return temp;
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
/* Return TRUE if expression STMT is suitable for replacement.
   Never consider memory loads as replaceable, because those don't ever lead
   into constant expressions.  */

static bool
stmt_is_replaceable_p (gimple stmt)
{
  if (ssa_is_replaceable_p (stmt))
    {
      /* Don't move around loads.  */
      if (!gimple_assign_single_p (stmt)
          || is_gimple_val (gimple_assign_rhs1 (stmt)))
        return true;
    }

  return false;
}
rtx
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
                    enum expand_modifier modifier, rtx *alt_rtl,
                    bool inner_reference_p)
{
  rtx op0, op1, temp, decl_rtl;
  tree type;
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  rtx subtarget, original_target;
  int ignore;
  tree context;
  bool reduce_bit_field;
  location_t loc = EXPR_LOCATION (exp);
  struct separate_ops ops;
  tree treeop0, treeop1, treeop2;
  tree ssa_name = NULL_TREE;
  gimple g;

  type = TREE_TYPE (exp);
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  treeop0 = treeop1 = treeop2 = NULL_TREE;
  if (!VL_EXP_CLASS_P (exp))
    switch (TREE_CODE_LENGTH (code))
      {
        default:
        case 3: treeop2 = TREE_OPERAND (exp, 2);
        case 2: treeop1 = TREE_OPERAND (exp, 1);
        case 1: treeop0 = TREE_OPERAND (exp, 0);
        case 0: break;
      }
  ops.code = code;
  ops.type = type;
  ops.op0 = treeop0;
  ops.op1 = treeop1;
  ops.op2 = treeop2;
  ops.location = loc;

  ignore = (target == const0_rtx
            || ((CONVERT_EXPR_CODE_P (code)
                 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
                && TREE_CODE (type) == VOID_TYPE));

  /* An operation in what may be a bit-field type needs the
     result to be reduced to the precision of the bit-field type,
     which is narrower than that of the type's mode.  */
  reduce_bit_field = (!ignore
                      && INTEGRAL_TYPE_P (type)
                      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
        return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
         don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
          && TREE_CODE (exp) != FUNCTION_DECL
          && mode != VOIDmode && mode != BLKmode
          && modifier != EXPAND_CONST_ADDRESS)
        {
          temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
          if (MEM_P (temp))
            copy_to_reg (temp);
          return const0_rtx;
        }

      if (TREE_CODE_CLASS (code) == tcc_unary
          || code == BIT_FIELD_REF
          || code == COMPONENT_REF
          || code == INDIRECT_REF)
        return expand_expr (treeop0, const0_rtx, VOIDmode,
                            modifier);

      else if (TREE_CODE_CLASS (code) == tcc_binary
               || TREE_CODE_CLASS (code) == tcc_comparison
               || code == ARRAY_REF || code == ARRAY_RANGE_REF)
        {
          expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
          expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
          return const0_rtx;
        }

      target = 0;
    }

  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
    target = 0;

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;

  switch (code)
    {
    case LABEL_DECL:
      {
        tree function = decl_function_context (exp);

        temp = label_rtx (exp);
        temp = gen_rtx_LABEL_REF (Pmode, temp);

        if (function != current_function_decl
            && function != 0)
          LABEL_REF_NONLOCAL_P (temp) = 1;

        temp = gen_rtx_MEM (FUNCTION_MODE, temp);
        return temp;
      }

    case SSA_NAME:
      /* ??? ivopts calls expander, without any preparation from
         out-of-ssa.  So fake instructions as if this was an access to the
         base variable.  This unnecessarily allocates a pseudo, see how we can
         reuse it, if partition base vars have it set already.  */
      if (!currently_expanding_to_rtl)
        {
          tree var = SSA_NAME_VAR (exp);
          if (var && DECL_RTL_SET_P (var))
            return DECL_RTL (var);
          return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
                              LAST_VIRTUAL_REGISTER + 1);
        }

      g = get_gimple_for_ssa_name (exp);
      /* For EXPAND_INITIALIZER try harder to get something simpler.  */
      if (g == NULL
          && modifier == EXPAND_INITIALIZER
          && !SSA_NAME_IS_DEFAULT_DEF (exp)
          && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
          && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
        g = SSA_NAME_DEF_STMT (exp);
      if (g)
        {
          rtx r;
          location_t saved_loc = curr_insn_location ();

          set_curr_insn_location (gimple_location (g));
          r = expand_expr_real (gimple_assign_rhs_to_tree (g), target,
                                tmode, modifier, NULL, inner_reference_p);
          set_curr_insn_location (saved_loc);
          if (REG_P (r) && !REG_EXPR (r))
            set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
          return r;
        }

      ssa_name = exp;
      decl_rtl = get_rtx_for_ssa_name (ssa_name);
      exp = SSA_NAME_VAR (ssa_name);
      goto expand_decl_rtl;
    case PARM_DECL:
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
         but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
          && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
        layout_decl (exp, 0);

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      decl_rtl = DECL_RTL (exp);
    expand_decl_rtl:
      gcc_assert (decl_rtl);
      decl_rtl = copy_rtx (decl_rtl);
      /* Record writes to register variables.  */
      if (modifier == EXPAND_WRITE
          && REG_P (decl_rtl)
          && HARD_REGISTER_P (decl_rtl))
        add_to_hard_reg_set (&crtl->asm_clobbers,
                             GET_MODE (decl_rtl), REGNO (decl_rtl));

      /* Ensure variable marked as used even if it doesn't go through
         a parser.  If it hasn't be used yet, write out an external
         definition.  */
      TREE_USED (exp) = 1;

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Variables inherited from containing functions should have
         been lowered by this point.  */
      context = decl_function_context (exp);
      gcc_assert (SCOPE_FILE_SCOPE_P (context)
                  || context == current_function_decl
                  || TREE_STATIC (exp)
                  || DECL_EXTERNAL (exp)
                  /* ??? C++ creates functions that are not TREE_STATIC.  */
                  || TREE_CODE (exp) == FUNCTION_DECL);

      /* This is the case of an array whose size is to be determined
         from its initializer, while the initializer is still being parsed.
         ??? We aren't parsing while expanding anymore.  */

      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
        temp = validize_mem (decl_rtl);

      /* If DECL_RTL is memory, we are in the normal case and the
         address is not valid, get the address into a register.  */

      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
        {
          if (alt_rtl)
            *alt_rtl = decl_rtl;
          decl_rtl = use_anchored_address (decl_rtl);
          if (modifier != EXPAND_CONST_ADDRESS
              && modifier != EXPAND_SUM
              && !memory_address_addr_space_p (DECL_MODE (exp),
                                               XEXP (decl_rtl, 0),
                                               MEM_ADDR_SPACE (decl_rtl)))
            temp = replace_equiv_address (decl_rtl,
                                          copy_rtx (XEXP (decl_rtl, 0)));
        }

      /* If we got something, return it.  But first, set the alignment
         if the address is a register.  */
      if (temp != 0)
        {
          if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
            mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

          return temp;
        }

      /* If the mode of DECL_RTL does not match that of the decl,
         there are two cases: we are dealing with a BLKmode value
         that is returned in a register, or we are dealing with
         a promoted value.  In the latter case, return a SUBREG
         of the wanted mode, but mark it so that we know that it
         was already extended.  */
      if (REG_P (decl_rtl)
          && DECL_MODE (exp) != BLKmode
          && GET_MODE (decl_rtl) != DECL_MODE (exp))
        {
          enum machine_mode pmode;

          /* Get the signedness to be used for this variable.  Ensure we get
             the same mode we got when the variable was declared.  */
          if (code == SSA_NAME
              && (g = SSA_NAME_DEF_STMT (ssa_name))
              && gimple_code (g) == GIMPLE_CALL
              && !gimple_call_internal_p (g))
            pmode = promote_function_mode (type, mode, &unsignedp,
                                           gimple_call_fntype (g),
                                           2);
          else
            pmode = promote_decl_mode (exp, &unsignedp);
          gcc_assert (GET_MODE (decl_rtl) == pmode);

          temp = gen_lowpart_SUBREG (mode, decl_rtl);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
          return temp;
        }

      return decl_rtl;
    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
                                 TREE_INT_CST_HIGH (exp), mode);
      return temp;

    case VECTOR_CST:
      {
        tree tmp = NULL_TREE;
        if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
            || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
            || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
            || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
            || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
            || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
          return const_vector_from_tree (exp);
        if (GET_MODE_CLASS (mode) == MODE_INT)
          {
            tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
            if (type_for_mode)
              tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
          }
        if (!tmp)
          {
            vec<constructor_elt, va_gc> *v;
            unsigned i;
            vec_alloc (v, VECTOR_CST_NELTS (exp));
            for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
              CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
            tmp = build_constructor (type, v);
          }
        return expand_expr (tmp, ignore ? const0_rtx : target,
                            tmode, modifier);
      }

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
         which will be turned into memory by reload if necessary.

         We used to force a register so that loop.c could see it.  But
         this does not allow gen_* patterns to perform optimizations with
         the constants.  It also produces two insns in cases like "x = 1.0;".
         On most machines, floating-point constants are not permitted in
         many insns, so we'd end up copying it to a register in any case.

         Now, we do the copying in expand_binop, if appropriate.  */
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
                                           TYPE_MODE (TREE_TYPE (exp)));

    case FIXED_CST:
      return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
                                           TYPE_MODE (TREE_TYPE (exp)));

    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
        {
          enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
          rtx rtarg, itarg;

          rtarg = XEXP (original_target, 0);
          itarg = XEXP (original_target, 1);

          /* Move the real and imaginary parts separately.  */
          op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
          op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);

          if (op0 != rtarg)
            emit_move_insn (rtarg, op0);
          if (op1 != itarg)
            emit_move_insn (itarg, op1);

          return original_target;
        }

      /* ... fall through ...  */

    case STRING_CST:
      temp = expand_expr_constant (exp, 1, modifier);

      /* temp contains a constant address.
         On RISC machines where a constant address isn't valid,
         make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM
          && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
                                            MEM_ADDR_SPACE (temp)))
        return replace_equiv_address (temp,
                                      copy_rtx (XEXP (temp, 0)));
      return temp;
    case SAVE_EXPR:
      {
        tree val = treeop0;
        rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
                                      inner_reference_p);

        if (!SAVE_EXPR_RESOLVED_P (exp))
          {
            /* We can indeed still hit this case, typically via builtin
               expanders calling save_expr immediately before expanding
               something.  Assume this means that we only have to deal
               with non-BLKmode values.  */
            gcc_assert (GET_MODE (ret) != BLKmode);

            val = build_decl (curr_insn_location (),
                              VAR_DECL, NULL, TREE_TYPE (exp));
            DECL_ARTIFICIAL (val) = 1;
            DECL_IGNORED_P (val) = 1;
            treeop0 = val;
            TREE_OPERAND (exp, 0) = treeop0;
            SAVE_EXPR_RESOLVED_P (exp) = 1;

            if (!CONSTANT_P (ret))
              ret = copy_to_reg (ret);
            SET_DECL_RTL (val, ret);
          }

        return ret;
      }
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
         subexpressions.  */
      if (ignore)
        {
          unsigned HOST_WIDE_INT idx;
          tree value;

          FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
            expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);

          return const0_rtx;
        }

      return expand_constructor (exp, target, modifier, false);
    case TARGET_MEM_REF:
      {
        addr_space_t as
          = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
        enum insn_code icode;
        unsigned int align;

        op0 = addr_for_mem_ref (exp, as, true);
        op0 = memory_address_addr_space (mode, op0, as);
        temp = gen_rtx_MEM (mode, op0);
        set_mem_attributes (temp, exp, 0);
        set_mem_addr_space (temp, as);
        align = get_object_alignment (exp);
        if (modifier != EXPAND_WRITE
            && modifier != EXPAND_MEMORY
            && mode != BLKmode
            && align < GET_MODE_ALIGNMENT (mode)
            /* If the target does not have special handling for unaligned
               loads of mode then it can use regular moves for them.  */
            && ((icode = optab_handler (movmisalign_optab, mode))
                != CODE_FOR_nothing))
          {
            struct expand_operand ops[2];

            /* We've already validated the memory, and we're creating a
               new pseudo destination.  The predicates really can't fail,
               nor can the generator.  */
            create_output_operand (&ops[0], NULL_RTX, mode);
            create_fixed_operand (&ops[1], temp);
            expand_insn (icode, 2, ops);
            temp = ops[0].value;
          }
        return temp;
      }
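/* For example (illustration, not from the original source), loading a
   4-byte int through a pointer known to be only 1-byte aligned on a
   strict-alignment target goes through the target's movmisalign
   pattern when one is provided; the pattern receives the validated
   MEM and produces a fresh pseudo, which is why the predicates and
   generator cannot fail here.  */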
    case MEM_REF:
      {
        addr_space_t as
          = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
        enum machine_mode address_mode;
        tree base = TREE_OPERAND (exp, 0);
        gimple def_stmt;
        enum insn_code icode;
        unsigned align;
        /* Handle expansion of non-aliased memory with non-BLKmode.  That
           might end up in a register.  */
        if (mem_ref_refers_to_non_mem_p (exp))
          {
            HOST_WIDE_INT offset = mem_ref_offset (exp).low;
            base = TREE_OPERAND (base, 0);
            if (offset == 0
                && tree_fits_uhwi_p (TYPE_SIZE (type))
                && (GET_MODE_BITSIZE (DECL_MODE (base))
                    == tree_to_uhwi (TYPE_SIZE (type))))
              return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
                                  target, tmode, modifier);
            if (TYPE_MODE (type) == BLKmode)
              {
                temp = assign_stack_temp (DECL_MODE (base),
                                          GET_MODE_SIZE (DECL_MODE (base)));
                store_expr (base, temp, 0, false);
                temp = adjust_address (temp, BLKmode, offset);
                set_mem_size (temp, int_size_in_bytes (type));
                return temp;
              }
            exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
                          bitsize_int (offset * BITS_PER_UNIT));
            return expand_expr (exp, target, tmode, modifier);
          }
        address_mode = targetm.addr_space.address_mode (as);
        base = TREE_OPERAND (exp, 0);
        if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
          {
            tree mask = gimple_assign_rhs2 (def_stmt);
            base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
                           gimple_assign_rhs1 (def_stmt), mask);
            TREE_OPERAND (exp, 0) = base;
          }
        align = get_object_alignment (exp);
        op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
        op0 = memory_address_addr_space (mode, op0, as);
        if (!integer_zerop (TREE_OPERAND (exp, 1)))
          {
            rtx off
              = immed_double_int_const (mem_ref_offset (exp), address_mode);
            op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
            op0 = memory_address_addr_space (mode, op0, as);
          }
        temp = gen_rtx_MEM (mode, op0);
        set_mem_attributes (temp, exp, 0);
        set_mem_addr_space (temp, as);
        if (TREE_THIS_VOLATILE (exp))
          MEM_VOLATILE_P (temp) = 1;
        if (modifier != EXPAND_WRITE
            && modifier != EXPAND_MEMORY
            && !inner_reference_p
            && mode != BLKmode
            && align < GET_MODE_ALIGNMENT (mode))
          {
            if ((icode = optab_handler (movmisalign_optab, mode))
                != CODE_FOR_nothing)
              {
                struct expand_operand ops[2];

                /* We've already validated the memory, and we're creating a
                   new pseudo destination.  The predicates really can't fail,
                   nor can the generator.  */
                create_output_operand (&ops[0], NULL_RTX, mode);
                create_fixed_operand (&ops[1], temp);
                expand_insn (icode, 2, ops);
                temp = ops[0].value;
              }
            else if (SLOW_UNALIGNED_ACCESS (mode, align))
              temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
                                        0, TYPE_UNSIGNED (TREE_TYPE (exp)),
                                        (modifier == EXPAND_STACK_PARM
                                         ? NULL_RTX : target),
                                        mode, mode);
          }
        return temp;
      }
    case ARRAY_REF:

      {
        tree array = treeop0;
        tree index = treeop1;
        tree init;

        /* Fold an expression like: "foo"[2].
           This is not done in fold so it won't happen inside &.
           Don't fold if this is for wide characters since it's too
           difficult to do correctly and this is a very rare case.  */

        if (modifier != EXPAND_CONST_ADDRESS
            && modifier != EXPAND_INITIALIZER
            && modifier != EXPAND_MEMORY)
          {
            tree t = fold_read_from_constant_string (exp);

            if (t)
              return expand_expr (t, target, tmode, modifier);
          }

        /* If this is a constant index into a constant array,
           just get the value from the array.  Handle both the cases when
           we have an explicit constructor and when our operand is a variable
           that was declared const.  */

        if (modifier != EXPAND_CONST_ADDRESS
            && modifier != EXPAND_INITIALIZER
            && modifier != EXPAND_MEMORY
            && TREE_CODE (array) == CONSTRUCTOR
            && ! TREE_SIDE_EFFECTS (array)
            && TREE_CODE (index) == INTEGER_CST)
          {
            unsigned HOST_WIDE_INT ix;
            tree field, value;

            FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
                                      field, value)
              if (tree_int_cst_equal (field, index))
                {
                  if (!TREE_SIDE_EFFECTS (value))
                    return expand_expr (fold (value), target, tmode, modifier);
                  break;
                }
          }

        else if (optimize >= 1
                 && modifier != EXPAND_CONST_ADDRESS
                 && modifier != EXPAND_INITIALIZER
                 && modifier != EXPAND_MEMORY
                 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
                 && TREE_CODE (index) == INTEGER_CST
                 && (TREE_CODE (array) == VAR_DECL
                     || TREE_CODE (array) == CONST_DECL)
                 && (init = ctor_for_folding (array)) != error_mark_node)
          {
            if (init == NULL_TREE)
              {
                tree value = build_zero_cst (type);
                if (TREE_CODE (value) == CONSTRUCTOR)
                  {
                    /* If VALUE is a CONSTRUCTOR, this optimization is only
                       useful if this doesn't store the CONSTRUCTOR into
                       memory.  If it does, it is more efficient to just
                       load the data from the array directly.  */
                    rtx ret = expand_constructor (value, target,
                                                  modifier, true);
                    if (ret == NULL_RTX)
                      value = NULL_TREE;
                  }

                if (value)
                  return expand_expr (value, target, tmode, modifier);
              }
            else if (TREE_CODE (init) == CONSTRUCTOR)
              {
                unsigned HOST_WIDE_INT ix;
                tree field, value;

                FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
                                          field, value)
                  if (tree_int_cst_equal (field, index))
                    {
                      if (TREE_SIDE_EFFECTS (value))
                        break;

                      if (TREE_CODE (value) == CONSTRUCTOR)
                        {
                          /* If VALUE is a CONSTRUCTOR, this
                             optimization is only useful if
                             this doesn't store the CONSTRUCTOR
                             into memory.  If it does, it is more
                             efficient to just load the data from
                             the array directly.  */
                          rtx ret = expand_constructor (value, target,
                                                        modifier, true);
                          if (ret == NULL_RTX)
                            break;
                        }

                      return
                        expand_expr (fold (value), target, tmode, modifier);
                    }
              }
            else if (TREE_CODE (init) == STRING_CST)
              {
                tree low_bound = array_ref_low_bound (exp);
                tree index1 = fold_convert_loc (loc, sizetype, treeop1);

                /* Optimize the special case of a zero lower bound.

                   We convert the lower bound to sizetype to avoid problems
                   with constant folding.  E.g. suppose the lower bound is
                   1 and its mode is QI.  Without the conversion
                      (ARRAY + (INDEX - (unsigned char)1))
                   becomes
                      (ARRAY + (-(unsigned char)1) + INDEX)
                   which becomes
                      (ARRAY + 255 + INDEX).  Oops!  */
                if (!integer_zerop (low_bound))
                  index1 = size_diffop_loc (loc, index1,
                                            fold_convert_loc (loc, sizetype,
                                                              low_bound));

                if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
                  {
                    tree type = TREE_TYPE (TREE_TYPE (init));
                    enum machine_mode mode = TYPE_MODE (type);

                    if (GET_MODE_CLASS (mode) == MODE_INT
                        && GET_MODE_SIZE (mode) == 1)
                      return gen_int_mode (TREE_STRING_POINTER (init)
                                           [TREE_INT_CST_LOW (index1)],
                                           mode);
                  }
              }
          }
      }
      goto normal_inner_ref;
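/* For example (illustration, not from the original source), "foo"[2]
   is folded here to the character constant 'o' by
   fold_read_from_constant_string, and a constant index into a const
   char array with a known STRING_CST initializer, say

       static const char digits[] = "0123456789";
       ... digits[4] ...

   is read directly out of the string above, yielding the constant
   '4' without ever touching memory.  */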
    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
         appropriate field if it is present.  */
      if (TREE_CODE (treeop0) == CONSTRUCTOR)
        {
          unsigned HOST_WIDE_INT idx;
          tree field, value;

          FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
                                    idx, field, value)
            if (field == treeop1
                /* We can normally use the value of the field in the
                   CONSTRUCTOR.  However, if this is a bitfield in
                   an integral mode that we can fit in a HOST_WIDE_INT,
                   we must mask only the number of bits in the bitfield,
                   since this is done implicitly by the constructor.  If
                   the bitfield does not meet either of those conditions,
                   we can't do this optimization.  */
                && (! DECL_BIT_FIELD (field)
                    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
                        && (GET_MODE_PRECISION (DECL_MODE (field))
                            <= HOST_BITS_PER_WIDE_INT))))
              {
                if (DECL_BIT_FIELD (field)
                    && modifier == EXPAND_STACK_PARM)
                  target = 0;
                op0 = expand_expr (value, target, tmode, modifier);
                if (DECL_BIT_FIELD (field))
                  {
                    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
                    enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));

                    if (TYPE_UNSIGNED (TREE_TYPE (field)))
                      {
                        op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
                                            imode);
                        op0 = expand_and (imode, op0, op1, target);
                      }
                    else
                      {
                        int count = GET_MODE_PRECISION (imode) - bitsize;

                        op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                        op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
                                            target, 0);
                      }
                  }

                return op0;
              }
        }
      goto normal_inner_ref;
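/* For example (illustration, not from the original source),
   extracting a 3-bit field F from a constructor for

       struct s { int f : 3; };

   cannot simply return the stored value, because the constructor
   holds it unmasked: for an unsigned field the code above ANDs with
   (1 << 3) - 1, and for a signed field it shifts left and then
   arithmetic-right by GET_MODE_PRECISION (imode) - 3 so the 3-bit
   value is properly sign-extended.  */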
9976 case ARRAY_RANGE_REF
:
9979 enum machine_mode mode1
, mode2
;
9980 HOST_WIDE_INT bitsize
, bitpos
;
9982 int volatilep
= 0, must_force_mem
;
9983 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
9984 &mode1
, &unsignedp
, &volatilep
, true);
9985 rtx orig_op0
, memloc
;
9986 bool clear_mem_expr
= false;
9988 /* If we got back the original object, something is wrong. Perhaps
9989 we are evaluating an expression too early. In any event, don't
9990 infinitely recurse. */
9991 gcc_assert (tem
!= exp
);
9993 /* If TEM's type is a union of variable size, pass TARGET to the inner
9994 computation, since it will need a temporary and TARGET is known
9995 to have to do. This occurs in unchecked conversion in Ada. */
9997 = expand_expr_real (tem
,
9998 (TREE_CODE (TREE_TYPE (tem
)) == UNION_TYPE
9999 && COMPLETE_TYPE_P (TREE_TYPE (tem
))
10000 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem
)))
10002 && modifier
!= EXPAND_STACK_PARM
10003 ? target
: NULL_RTX
),
10005 modifier
== EXPAND_SUM
? EXPAND_NORMAL
: modifier
,
10008 /* If the field has a mode, we want to access it in the
10009 field's mode, not the computed mode.
10010 If a MEM has VOIDmode (external with incomplete type),
10011 use BLKmode for it instead. */
10014 if (mode1
!= VOIDmode
)
10015 op0
= adjust_address (op0
, mode1
, 0);
10016 else if (GET_MODE (op0
) == VOIDmode
)
10017 op0
= adjust_address (op0
, BLKmode
, 0);
10021 = CONSTANT_P (op0
) ? TYPE_MODE (TREE_TYPE (tem
)) : GET_MODE (op0
);
10023 /* If we have either an offset, a BLKmode result, or a reference
10024 outside the underlying object, we must force it to memory.
10025 Such a case can occur in Ada if we have unchecked conversion
10026 of an expression from a scalar type to an aggregate type or
10027 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10028 passed a partially uninitialized object or a view-conversion
10029 to a larger size. */
10030 must_force_mem
= (offset
10031 || mode1
== BLKmode
10032 || bitpos
+ bitsize
> GET_MODE_BITSIZE (mode2
));
10034 /* Handle CONCAT first. */
10035 if (GET_CODE (op0
) == CONCAT
&& !must_force_mem
)
10038 && bitsize
== GET_MODE_BITSIZE (GET_MODE (op0
)))
10041 && bitsize
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0)))
10044 op0
= XEXP (op0
, 0);
10045 mode2
= GET_MODE (op0
);
10047 else if (bitpos
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0)))
10048 && bitsize
== GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 1)))
10052 op0
= XEXP (op0
, 1);
10054 mode2
= GET_MODE (op0
);
10057 /* Otherwise force into memory. */
10058 must_force_mem
= 1;
	/* If this is a constant, put it in a register if it is a legitimate
	   constant and we don't need a memory reference.  */
	if (CONSTANT_P (op0)
	    && mode2 != BLKmode
	    && targetm.legitimate_constant_p (mode2, op0)
	    && !must_force_mem)
	  op0 = force_reg (mode2, op0);

	/* Otherwise, if this is a constant, try to force it to the constant
	   pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
	   is a legitimate constant.  */
	else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
	  op0 = validize_mem (memloc);

	/* Otherwise, if this is a constant or the object is not in memory
	   and need be, put it there.  */
	else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
	  {
	    memloc = assign_temp (TREE_TYPE (tem), 1, 1);
	    emit_move_insn (memloc, op0);
	    op0 = memloc;
	    clear_mem_expr = true;
	  }
	if (offset)
	  {
	    enum machine_mode address_mode;
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
					  EXPAND_SUM);

	    gcc_assert (MEM_P (op0));

	    address_mode = get_address_mode (op0);
	    if (GET_MODE (offset_rtx) != address_mode)
	      offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);

	    if (GET_MODE (op0) == BLKmode
		/* The check for a constant address in OP0 not having VOIDmode
		   is probably no longer necessary.  */
		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos = 0;
	      }

	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }
	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
	   record its alignment as BIGGEST_ALIGNMENT.  */
	if (MEM_P (op0) && bitpos == 0 && offset != 0
	    && is_aligning_offset (offset, tem))
	  set_mem_align (op0, BIGGEST_ALIGNMENT);

	/* Don't forget about volatility even if this is a bitfield.  */
	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }
	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || REG_P (op0) || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER
		&& modifier != EXPAND_MEMORY)
	    /* If the bitfield is volatile and the bitsize
	       is narrower than the access size of the bitfield,
	       we need to extract bitfields from the access.  */
	    || (volatilep && TREE_CODE (exp) == COMPONENT_REF
		&& DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
		&& mode1 != BLKmode
		&& bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
		      || (MEM_P (op0)
			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
		     && modifier != EXPAND_MEMORY
		     && ((modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_INITIALIZER)
			 ? STRICT_ALIGNMENT
			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
		    || (bitpos % BITS_PER_UNIT != 0)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
	    || (bitsize >= 0
		&& TYPE_SIZE (TREE_TYPE (exp))
		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
	  {
	    enum machine_mode ext_mode = mode;
	    if (ext_mode == BLKmode
		&& ! (target != 0 && MEM_P (op0)
		      && MEM_P (target)
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		if (target == 0)
		  target = assign_temp (type, 1, 1);

		/* ??? Unlike the similar test a few lines below, this one is
		   very likely obsolete.  */
		if (bitsize == 0)
		  return target;

		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		gcc_assert (MEM_P (op0)
			    && (!target || MEM_P (target))
			    && !(bitpos % BITS_PER_UNIT));

		emit_block_move (target,
				 adjust_address (op0, VOIDmode,
						 bitpos / BITS_PER_UNIT),
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

		return target;
	      }
	    /* If we have nothing to extract, the result will be 0 for targets
	       with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise.  Always
	       return 0 for the sake of consistency, as reading a zero-sized
	       bitfield is valid in Ada and the value is fully specified.  */
	    if (bitsize == 0)
	      return const0_rtx;

	    op0 = validize_mem (op0);

	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
				     (modifier == EXPAND_STACK_PARM
				      ? NULL_RTX : target),
				     ext_mode, ext_mode);
	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  GET_MODE_BITSIZE (GET_MODE (op0))
				  - bitsize, op0, 1);
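	    /* Illustration (editor's note): on a big-endian machine the
	       first field of a record sits in the most significant bits of
	       a word, so an extracted BITSIZE-bit value is moved up:

		 word = field << (word_bits - bitsize);

	       which is exactly the LSHIFT_EXPR just above.  */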
	    /* If the result type is BLKmode, store the data into a temporary
	       of the appropriate type, but with the mode corresponding to the
	       mode for the data we have (op0's mode).  */
	    if (mode == BLKmode)
	      {
		rtx new_rtx
		  = assign_stack_temp_for_type (ext_mode,
						GET_MODE_BITSIZE (ext_mode),
						type);
		emit_move_insn (new_rtx, op0);
		op0 = copy_rtx (new_rtx);
		PUT_MODE (op0, BLKmode);
	      }

	    return op0;
	  }
	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	if (op0 == orig_op0)
	  op0 = copy_rtx (op0);

	set_mem_attributes (op0, exp, 0);

	if (REG_P (XEXP (op0, 0)))
	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
	/* If op0 is a temporary because the original expression was forced
	   to memory, clear MEM_EXPR so that the original expression cannot
	   be marked as addressable through MEM_EXPR of the temporary.  */
	if (clear_mem_expr)
	  set_mem_expr (op0, NULL_TREE);

	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }

    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
    case CALL_EXPR:
      /* All valid uses of __builtin_va_arg_pack () are removed during
	 inlining.  */
      if (CALL_EXPR_VA_ARG_PACK (exp))
	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
      {
	tree fndecl = get_callee_fndecl (exp), attr;

	if (fndecl
	    && (attr = lookup_attribute ("error",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  error ("%Kcall to %qs declared with attribute error: %s",
		 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
		 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
	if (fndecl
	    && (attr = lookup_attribute ("warning",
					 DECL_ATTRIBUTES (fndecl))) != NULL)
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %qs declared with attribute warning: %s",
		      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
		      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));

	/* Check for a built-in function.  */
	if (fndecl && DECL_BUILT_IN (fndecl))
	  {
	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	  }
      }
      return expand_call (exp, target, ignore);
    case VIEW_CONVERT_EXPR:
      op0 = NULL_RTX;

      /* If we are converting to BLKmode, try to avoid an intermediate
	 temporary by fetching an inner memory reference.  */
      if (mode == BLKmode
	  && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
	  && handled_component_p (treeop0))
	{
	  enum machine_mode mode1;
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  int unsignedp;
	  int volatilep = 0;
	  tree tem
	    = get_inner_reference (treeop0, &bitsize, &bitpos,
				   &offset, &mode1, &unsignedp, &volatilep,
				   true);
	  rtx orig_op0;
	  /* ??? We should work harder and deal with non-zero offsets.  */
	  if (!offset
	      && (bitpos % BITS_PER_UNIT) == 0
	      && bitsize >= 0
	      && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
	    {
	      /* See the normal_inner_ref case for the rationale.  */
	      orig_op0
		= expand_expr_real (tem,
				    (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
				     && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
					 != INTEGER_CST)
				     && modifier != EXPAND_STACK_PARM
				     ? target : NULL_RTX),
				    VOIDmode,
				    modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
				    NULL, true);
	      if (MEM_P (orig_op0))
		{
		  op0 = orig_op0;

		  /* Get a reference to just this component.  */
		  if (modifier == EXPAND_CONST_ADDRESS
		      || modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
		  else
		    op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);

		  if (op0 == orig_op0)
		    op0 = copy_rtx (op0);

		  set_mem_attributes (op0, treeop0, 0);
		  if (REG_P (XEXP (op0, 0)))
		    mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

		  MEM_VOLATILE_P (op0) |= volatilep;
		}
	    }
	}
      if (!op0)
	op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
				NULL, inner_reference_p);
      /* If the input and output modes are both the same, we are done.  */
      if (mode == GET_MODE (op0))
	;
      /* If neither mode is BLKmode, and both modes are the same size
	 then we can use gen_lowpart.  */
      else if (mode != BLKmode && GET_MODE (op0) != BLKmode
	       && (GET_MODE_PRECISION (mode)
		   == GET_MODE_PRECISION (GET_MODE (op0)))
	       && !COMPLEX_MODE_P (GET_MODE (op0)))
	{
	  if (GET_CODE (op0) == SUBREG)
	    op0 = force_reg (GET_MODE (op0), op0);
	  temp = gen_lowpart_common (mode, op0);
	  if (temp)
	    op0 = temp;
	  else
	    {
	      if (!REG_P (op0) && !MEM_P (op0))
		op0 = force_reg (GET_MODE (op0), op0);
	      op0 = gen_lowpart (mode, op0);
	    }
	}
      /* If both types are integral, convert from one mode to the other.  */
      else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
	op0 = convert_modes (mode, GET_MODE (op0), op0,
			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
      /* If the output type is a bit-field type, do an extraction.  */
      else if (reduce_bit_field)
	return extract_bit_field (op0, TYPE_PRECISION (type), 0,
				  TYPE_UNSIGNED (type), NULL_RTX,
				  mode, mode);
      /* As a last resort, spill op0 to memory, and reload it in a
	 different mode.  */
      else if (!MEM_P (op0))
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (treeop0);

	  gcc_assert (!TREE_ADDRESSABLE (exp));

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}
      /* If OP0 is (now) a MEM, we need to deal with alignment issues.  If the
	 output type is such that the operand is known to be aligned, indicate
	 that it is.  Otherwise, we need only be concerned about alignment for
	 non-BLKmode results.  */
      if (MEM_P (op0))
	{
	  enum insn_code icode;

	  if (TYPE_ALIGN_OK (type))
	    {
	      /* ??? Copying the MEM without substantially changing it might
		 run afoul of the code handling volatile memory references in
		 store_expr, which assumes that TARGET is returned unmodified
		 if it has been used.  */
	      op0 = copy_rtx (op0);
	      set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	    }
	  else if (modifier != EXPAND_WRITE
		   && modifier != EXPAND_MEMORY
		   && !inner_reference_p
		   && mode != BLKmode
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
	    {
	      /* If the target does have special handling for unaligned
		 loads of mode then use them.  */
	      if ((icode = optab_handler (movmisalign_optab, mode))
		  != CODE_FOR_nothing)
		{
		  rtx reg, insn;

		  op0 = adjust_address (op0, mode, 0);
		  /* We've already validated the memory, and we're creating a
		     new pseudo destination.  The predicates really can't
		     fail.  */
		  reg = gen_reg_rtx (mode);

		  /* Nor can the insn generator.  */
		  insn = GEN_FCN (icode) (reg, op0);
		  emit_insn (insn);
		  return reg;
		}
	      else if (STRICT_ALIGNMENT)
		{
		  tree inner_type = TREE_TYPE (treeop0);
		  HOST_WIDE_INT temp_size
		    = MAX (int_size_in_bytes (inner_type),
			   (HOST_WIDE_INT) GET_MODE_SIZE (mode));
		  rtx new_rtx
		    = assign_stack_temp_for_type (mode, temp_size, type);
		  rtx new_with_op0_mode
		    = adjust_address (new_rtx, GET_MODE (op0), 0);

		  gcc_assert (!TREE_ADDRESSABLE (exp));

		  if (GET_MODE (op0) == BLKmode)
		    emit_block_move (new_with_op0_mode, op0,
				     GEN_INT (GET_MODE_SIZE (mode)),
				     (modifier == EXPAND_STACK_PARM
				      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
		  else
		    emit_move_insn (new_with_op0_mode, op0);

		  op0 = new_rtx;
		}
	    }

	  op0 = adjust_address (op0, mode, 0);
	}

      return op0;
    case MODIFY_EXPR:
      {
	tree lhs = treeop0;
	tree rhs = treeop1;
	gcc_assert (ignore);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();
	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
	    do_jump (TREE_OPERAND (rhs, 1),
		     value ? label : 0,
		     value ? 0 : label, -1);
	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
			       false);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	expand_assignment (lhs, rhs, false);
	return const0_rtx;
      }
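      /* Illustration (editor's note): for one-bit fields

	   struct s { unsigned a : 1, b : 1; };

	 the |= case above turns "x.a |= x.b;" into roughly

	   if (x.b) x.a = 1;

	 and the &= case turns "x.a &= x.b;" into "if (!x.b) x.a = 0;",
	 avoiding a read-modify-write of the destination bitfield.  */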
    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);

    case REALPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (treeop0);
      return read_complex_part (op0, true);
    case RETURN_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case SWITCH_EXPR:
    case ASM_EXPR:
      /* Expanded in cfgexpand.c.  */
      gcc_unreachable ();

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case COMPOUND_LITERAL_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();
    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (treeop0, original_target, tmode,
			       modifier, alt_rtl, inner_reference_p);

    default:
      return expand_expr_real_2 (&ops, target, tmode, modifier);
    }
}
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type.  */
  if (CONST_INT_P (exp))
    {
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      rtx mask = immed_double_int_const (double_int::mask (prec),
					 GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
			  exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
			   exp, count, target, 0);
    }
}
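/* Illustration (editor's sketch, not part of this file): the unsigned
   case above masks with (1 << prec) - 1; the signed case is the classic
   shift-pair sign extension, assuming the arithmetic right shift GCC
   itself guarantees for signed types:  */
#if 0
#include <stdint.h>

static int64_t
reduce_signed_precision (int64_t v, int width, int prec)
{
  int count = width - prec;
  /* Left-justify the PREC low-order bits, then shift back so the sign
     bit of the field fills the upper bits.  */
  return (v << count) >> count;
}
#endif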
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (const_tree offset, const_tree exp)
{
  /* Strip off any conversions.  */
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (CONVERT_EXPR_P (offset))
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
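/* Illustration (editor's sketch, not part of this file): the shape
   recognized above is the usual align-upward computation applied to an
   object's own address:  */
#if 0
#include <stdint.h>

static unsigned char *
align_up (unsigned char *p, uintptr_t align)	/* align: a power of two */
{
  /* (-p) & (align - 1) is the NEGATE_EXPR/BIT_AND_EXPR tree matched
     above; adding it rounds P up to the next ALIGN boundary.  */
  return p + ((0 - (uintptr_t) p) & (align - 1));
}
#endif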
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset, lower_bound;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;

	  /* Check if the array has a nonzero lower bound.  */
	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
	  if (!integer_zerop (lower_bound))
	    {
	      /* If the offset and base aren't both constants, return 0.  */
	      if (TREE_CODE (lower_bound) != INTEGER_CST)
		return 0;
	      if (TREE_CODE (offset) != INTEGER_CST)
		return 0;
	      /* Adjust offset by the lower bound.  */
	      offset = size_diffop (fold_convert (sizetype, offset),
				    fold_convert (sizetype, lower_bound));
	    }
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != ADDR_EXPR)
	    return 0;
	  array = TREE_OPERAND (array, 0);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL
	   || TREE_CODE (array) == CONST_DECL)
    {
      int length;
      tree init = ctor_for_folding (array);

      /* Variables initialized to string literals can be handled too.  */
      if (init == error_mark_node
	  || !init
	  || TREE_CODE (init) != STRING_CST)
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (init)) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If variable is bigger than the string literal, OFFSET must be constant
	 and inside of the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! tree_fits_uhwi_p (offset)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return array;
    }

  return 0;
}
/* Generate code to calculate the exploded comparison OPS using a
   store-flag instruction and return an rtx for the result.

   If TARGET is nonzero, store the result there if convenient.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
static rtx
do_store_flag (sepops ops, rtx target, enum machine_mode mode)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int unsignedp;
  rtx op0, op1;
  rtx subtarget = target;
  location_t loc = ops->location;

  arg0 = ops->op0;
  arg1 = ops->op1;

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  /* For vector typed comparisons emit code to generate the desired
     all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
     expander for this.  */
  if (TREE_CODE (ops->type) == VECTOR_TYPE)
    {
      tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
      tree if_true = constant_boolean_node (true, ops->type);
      tree if_false = constant_boolean_node (false, ops->type);
      return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
    }

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (ops->code)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;

    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && integer_zerop (arg1)
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
    {
      gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);

      if (srcstmt
	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
	{
	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
				       gimple_assign_rhs1 (srcstmt),
				       gimple_assign_rhs2 (srcstmt));
	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
	  if (temp)
	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
	}
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
				operand_mode, unsignedp,
				(TYPE_PRECISION (ops->type) == 1
				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
}
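/* Illustration (editor's sketch, not part of this file): the single-bit
   special case above turns (x & (1 << n)) != 0 into shift-and-mask
   instead of a set-condition-code instruction:  */
#if 0
#include <stdint.h>

static int
single_bit_ne (uint32_t x, int n)
{
  return (x >> n) & 1;
}

static int
single_bit_eq (uint32_t x, int n)
{
  return ((x >> n) & 1) ^ 1;	/* the XOR-by-1 for the EQ sense */
}
#endif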
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   DEFAULT_PROBABILITY is the probability of jumping to the default
   label.  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label, rtx default_label, rtx fallback_label,
	    int default_probability)
{
  struct expand_operand ops[5];
  enum machine_mode index_mode = SImode;
  rtx op1, op2, index;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
	emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
				 omode, 1, default_label,
				 default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_type = lang_hooks.types.type_for_mode (index_mode, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
				  ? default_label
				  : fallback_label));
  expand_jump_insn (CODE_FOR_casesi, 5, ops);
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */
static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label, int default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			     default_label, default_probability);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = simplify_gen_binary (MULT, Pmode, index,
			       gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
					     Pmode));
  index = simplify_gen_binary (PLUS, Pmode, index,
			       gen_rtx_LABEL_REF (Pmode, table_label));

#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
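/* Illustration (editor's sketch, not part of this file): in C terms the
   sequence above dispatches through a table of code addresses after one
   unsigned bound check on the rebased index:  */
#if 0
static int
dispatch (unsigned idx, unsigned range,
	  int (*table[]) (void), int (*fallback) (void))
{
  /* A single unsigned compare rejects both idx below the minimum
     (which wrapped around when the minimum was subtracted) and idx
     above the maximum.  */
  if (idx > range)
    return fallback ();
  return table[idx] ();
}
#endif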
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label, int default_probability)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label, default_probability);
  return 1;
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  unsigned i;
  int units;
  tree elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else if (TREE_CODE (elt) == FIXED_CST)
	RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
						   inner);
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}
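/* For instance (editor's note), a V4SImode VECTOR_CST {0, 1, 2, 3}
   becomes (const_vector:V4SI [0 1 2 3]), one CONST_INT per element,
   while an all-zeros constant short-circuits to CONST0_RTX above.  */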
/* Build a decl for a personality function given a language prefix.  */

tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
				   long_long_unsigned_type_node,
				   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
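/* For example (editor's note), LANG "gxx" yields "__gxx_personality_v0"
   with DWARF unwinding and "__gxx_personality_sj0" with setjmp/longjmp
   unwinding.  */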
/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}
#include "gt-expr.h"